From 7e0ad4d1e22a3f1ab6962e882ae04458fa897ede Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 15 Aug 2024 13:39:17 -0400 Subject: [PATCH 01/41] Start work with OAuth --- backend/main.py | 19 +++++++++++++++++++ requirements.txt | 4 +++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/backend/main.py b/backend/main.py index 71f0030..6cb1918 100644 --- a/backend/main.py +++ b/backend/main.py @@ -11,9 +11,13 @@ from apscheduler.triggers.interval import IntervalTrigger from dotenv import load_dotenv from fastapi import FastAPI, Request, Response from fastapi.middleware.cors import CORSMiddleware +from fastapi_oauth2.claims import Claims +from fastapi_oauth2.client import OAuth2Client +from fastapi_oauth2.config import OAuth2Config from prisma import Prisma from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler from slack_bolt.async_app import AsyncAck, AsyncApp +from social_core.backends.slack import SlackOAuth2 load_dotenv() @@ -22,6 +26,20 @@ active_streams: List[Dict[str, str | bool]] = [] scheduler = AsyncIOScheduler() +oauth2_config = OAuth2Config( + allow_http=False, + jwt_secret=os.getenv("JWT_SECRET"), + jwt_expires=os.getenv("JWT_EXPIRES"), + jwt_algorithm=os.getenv("JWT_ALGORITHM"), + clients=[ + OAuth2Client( + backend=SlackOAuth2, + client_id=os.environ["SLACK_TOKEN"], + client_secret=os.environ["SLACK_SIGNING_SECRET"], + ) + ], +) + async def update_active(): global active_stream @@ -146,6 +164,7 @@ async def get_stream_by_key(stream_key: str): stream if stream else Response(status_code=404, content="404: Stream not found") ) + @api.get("/api/v1/active_stream") async def get_active_stream(): return active_stream["name"] if "name" in active_stream else "" diff --git a/requirements.txt b/requirements.txt index b5a1063..598a42f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,4 +5,6 @@ requests python-dotenv prisma fastapi-utils -httpx \ No newline at end of file +httpx +apscheduler +fastapi-oauth2 From 36e70429deae91916dc4cf8fad3563b895f7dd79 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 15 Aug 2024 17:57:02 +0000 Subject: [PATCH 02/41] fix what would have been a really bad time with the db --- .../20240815175119_pls_work/migration.sql | 41 +++++++++++++++++++ backend/schema.prisma | 4 +- 2 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 backend/migrations/20240815175119_pls_work/migration.sql diff --git a/backend/migrations/20240815175119_pls_work/migration.sql b/backend/migrations/20240815175119_pls_work/migration.sql new file mode 100644 index 0000000..0d0ad12 --- /dev/null +++ b/backend/migrations/20240815175119_pls_work/migration.sql @@ -0,0 +1,41 @@ +/* + Warnings: + + - You are about to drop the column `active` on the `Stream` table. All the data in the column will be lost. + - You are about to drop the column `focused` on the `Stream` table. All the data in the column will be lost. + - The primary key for the `User` table will be changed. If it partially fails, the table could be left without primary key constraint. + - You are about to drop the column `slackId` on the `User` table. All the data in the column will be lost. + - Added the required column `user_id` to the `Stream` table without a default value. This is not possible if the table is not empty. + - The required column `id` was added to the `User` table with a prisma-level default value. This is not possible if the table is not empty. Please add this column as optional, then populate it before making it required. 
+ - Added the required column `slack_id` to the `User` table without a default value. This is not possible if the table is not empty. + +*/ +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_Stream" ( + "id" TEXT NOT NULL PRIMARY KEY, + "created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "is_live" BOOLEAN NOT NULL DEFAULT false, + "is_focused" BOOLEAN NOT NULL DEFAULT false, + "key" TEXT NOT NULL, + "user_id" TEXT NOT NULL, + CONSTRAINT "Stream_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); +INSERT INTO "new_Stream" ("id", "key") SELECT "id", "key" FROM "Stream"; +DROP TABLE "Stream"; +ALTER TABLE "new_Stream" RENAME TO "Stream"; +CREATE UNIQUE INDEX "Stream_key_key" ON "Stream"("key"); +CREATE UNIQUE INDEX "Stream_user_id_key" ON "Stream"("user_id"); +CREATE TABLE "new_User" ( + "id" TEXT NOT NULL PRIMARY KEY, + "created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "slack_id" TEXT NOT NULL, + "name" TEXT NOT NULL +); +INSERT INTO "new_User" ("name") SELECT "name" FROM "User"; +DROP TABLE "User"; +ALTER TABLE "new_User" RENAME TO "User"; +CREATE UNIQUE INDEX "User_slack_id_key" ON "User"("slack_id"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/backend/schema.prisma b/backend/schema.prisma index a4ff3c8..178b024 100644 --- a/backend/schema.prisma +++ b/backend/schema.prisma @@ -1,7 +1,7 @@ generator client { provider = "prisma-client-py" interface = "asyncio" - recursive_type_depth = 5 + recursive_type_depth = "5" } datasource db { @@ -23,6 +23,6 @@ model Stream { is_live Boolean @default(false) is_focused Boolean @default(false) key String @unique @default(uuid()) - user User @relation(fields: [user_id], references: [id]) user_id String @unique + user User @relation(fields: [user_id], references: [id]) } From 2d8079f6e87b1e455366c2b4f0df548638708dba Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 15 Aug 2024 18:04:37 +0000 Subject: [PATCH 03/41] my rapper name is lil TypeError --- backend/main.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/main.py b/backend/main.py index 6cb1918..96e1c33 100644 --- a/backend/main.py +++ b/backend/main.py @@ -28,9 +28,9 @@ scheduler = AsyncIOScheduler() oauth2_config = OAuth2Config( allow_http=False, - jwt_secret=os.getenv("JWT_SECRET"), - jwt_expires=os.getenv("JWT_EXPIRES"), - jwt_algorithm=os.getenv("JWT_ALGORITHM"), + jwt_secret=os.environ["JWT_SECRET"], + jwt_expires=os.environ["JWT_EXPIRES"], + jwt_algorithm=os.environ["JWT_ALGORITHM"], clients=[ OAuth2Client( backend=SlackOAuth2, From e669a755ece8c00d416fa30c4ec44981bc8ba936 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 15 Aug 2024 19:00:38 +0000 Subject: [PATCH 04/41] promptly package python projects properly (breaking changes, sorry) --- {backend => onboard-live-backend}/main.py | 10 ++ .../20240719191810_init/migration.sql | 0 .../20240815175119_pls_work/migration.sql | 0 .../migrations/migration_lock.toml | 0 .../schema.prisma | 0 pyproject.toml | 96 +++++++++++++++++++ requirements.txt | 10 -- 7 files changed, 106 insertions(+), 10 deletions(-) rename {backend => onboard-live-backend}/main.py (99%) rename {backend => onboard-live-backend}/migrations/20240719191810_init/migration.sql (100%) rename {backend => onboard-live-backend}/migrations/20240815175119_pls_work/migration.sql (100%) rename {backend => onboard-live-backend}/migrations/migration_lock.toml (100%) rename {backend => 
onboard-live-backend}/schema.prisma (100%) create mode 100644 pyproject.toml delete mode 100644 requirements.txt diff --git a/backend/main.py b/onboard-live-backend/main.py similarity index 99% rename from backend/main.py rename to onboard-live-backend/main.py index 96e1c33..f020828 100644 --- a/backend/main.py +++ b/onboard-live-backend/main.py @@ -5,6 +5,7 @@ from random import choice from secrets import token_hex from typing import Dict, List +import fastapi import httpx from apscheduler.schedulers.asyncio import AsyncIOScheduler from apscheduler.triggers.interval import IntervalTrigger @@ -18,6 +19,7 @@ from prisma import Prisma from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler from slack_bolt.async_app import AsyncAck, AsyncApp from social_core.backends.slack import SlackOAuth2 +import uvicorn load_dotenv() @@ -562,3 +564,11 @@ async def handle_some_action(ack): @api.post("/slack/events") async def slack_event_endpoint(req: Request): return await bolt_handler.handle(req) + + +def main(): + uvicorn.run(api) + + +if __name__ == "__main__": + main() diff --git a/backend/migrations/20240719191810_init/migration.sql b/onboard-live-backend/migrations/20240719191810_init/migration.sql similarity index 100% rename from backend/migrations/20240719191810_init/migration.sql rename to onboard-live-backend/migrations/20240719191810_init/migration.sql diff --git a/backend/migrations/20240815175119_pls_work/migration.sql b/onboard-live-backend/migrations/20240815175119_pls_work/migration.sql similarity index 100% rename from backend/migrations/20240815175119_pls_work/migration.sql rename to onboard-live-backend/migrations/20240815175119_pls_work/migration.sql diff --git a/backend/migrations/migration_lock.toml b/onboard-live-backend/migrations/migration_lock.toml similarity index 100% rename from backend/migrations/migration_lock.toml rename to onboard-live-backend/migrations/migration_lock.toml diff --git a/backend/schema.prisma b/onboard-live-backend/schema.prisma similarity index 100% rename from backend/schema.prisma rename to onboard-live-backend/schema.prisma diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..5cb361c --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,96 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "backend" +version = "0.0.1" +authors = [{ name = "Micha Albert", email = "micha@2231puppy.tech" }] +description = "Backend for OnBoard Live, A Hack Club YSWS grant program" +readme = "README.md" +requires-python = ">=3.9" +dependencies = [ + "aiohttp==3.9.5", + "aiosignal==1.3.1", + "annotated-types==0.7.0", + "anyio==4.4.0", + "APScheduler==3.10.4", + "attrs==23.2.0", + "black==24.4.2", + "certifi==2024.7.4", + "cffi==1.17.0", + "charset-normalizer==3.3.2", + "click==8.1.7", + "cryptography==43.0.0", + "defusedxml==0.8.0rc2", + "dnspython==2.6.1", + "ecdsa==0.19.0", + "email_validator==2.2.0", + "fastapi==0.112.0", + "fastapi-cli==0.0.4", + "fastapi-oauth2==1.0.0", + "fastapi-utils==0.7.0", + "frozenlist==1.4.1", + "h11==0.14.0", + "httpcore==1.0.5", + "httptools==0.6.1", + "httpx==0.27.0", + "idna==3.7", + "Jinja2==3.1.4", + "markdown-it-py==3.0.0", + "MarkupSafe==2.1.5", + "mdurl==0.1.2", + "multidict==6.0.5", + "mypy==1.11.0", + "mypy-extensions==1.0.0", + "nodeenv==1.9.1", + "oauthlib==3.2.2", + "packaging==24.1", + "pathspec==0.12.1", + "platformdirs==4.2.2", + "prisma==0.14.0", + "psutil==5.9.8", + "pyasn1==0.6.0", + "pycparser==2.22", + "pydantic==2.8.2", + 
"pydantic_core==2.20.1", + "Pygments==2.18.0", + "PyJWT==2.9.0", + "python-dotenv==1.0.1", + "python-jose==3.3.0", + "python-multipart==0.0.9", + "python3-openid==3.2.0", + "pytz==2024.1", + "PyYAML==6.0.1", + "requests==2.32.3", + "requests-oauthlib==2.0.0", + "rich==13.7.1", + "rsa==4.9", + "shellingham==1.5.4", + "six==1.16.0", + "slack_bolt==1.20.0", + "slack_sdk==3.31.0", + "sniffio==1.3.1", + "social-auth-core==4.5.4", + "starlette==0.37.2", + "tomlkit==0.13.0", + "typer==0.12.3", + "typing-inspect==0.9.0", + "typing_extensions==4.12.2", + "tzlocal==5.2", + "urllib3==2.2.2", + "uvicorn[standard]==0.30.6", + "uvloop==0.19.0", + "watchfiles==0.22.0", + "websockets==12.0", + "yarl==1.9.4", +] + +[project.urls] +Homepage = "https://github.com/MichaByte/OnBoard-Live" + +[tool.hatch.build.targets.wheel] +packages = ["backend"] + +[project.scripts] +start = "main:main" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 598a42f..0000000 --- a/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -fastapi -uvicorn[standard] -slack-bolt -requests -python-dotenv -prisma -fastapi-utils -httpx -apscheduler -fastapi-oauth2 From f57c38250a51076b99c56d76a31d560a4b384007 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 15 Aug 2024 19:24:05 +0000 Subject: [PATCH 05/41] finally working packaging --- {onboard-live-backend => onboard_live_backend}/main.py | 2 +- .../migrations/20240719191810_init/migration.sql | 0 .../migrations/20240815175119_pls_work/migration.sql | 0 .../migrations/migration_lock.toml | 0 .../schema.prisma | 0 pyproject.toml | 6 +++--- 6 files changed, 4 insertions(+), 4 deletions(-) rename {onboard-live-backend => onboard_live_backend}/main.py (99%) rename {onboard-live-backend => onboard_live_backend}/migrations/20240719191810_init/migration.sql (100%) rename {onboard-live-backend => onboard_live_backend}/migrations/20240815175119_pls_work/migration.sql (100%) rename {onboard-live-backend => onboard_live_backend}/migrations/migration_lock.toml (100%) rename {onboard-live-backend => onboard_live_backend}/schema.prisma (100%) diff --git a/onboard-live-backend/main.py b/onboard_live_backend/main.py similarity index 99% rename from onboard-live-backend/main.py rename to onboard_live_backend/main.py index f020828..6d4247f 100644 --- a/onboard-live-backend/main.py +++ b/onboard_live_backend/main.py @@ -21,7 +21,7 @@ from slack_bolt.async_app import AsyncAck, AsyncApp from social_core.backends.slack import SlackOAuth2 import uvicorn -load_dotenv() +load_dotenv(dotenv_path="./.env") active_stream: Dict[str, str | bool] = {} active_streams: List[Dict[str, str | bool]] = [] diff --git a/onboard-live-backend/migrations/20240719191810_init/migration.sql b/onboard_live_backend/migrations/20240719191810_init/migration.sql similarity index 100% rename from onboard-live-backend/migrations/20240719191810_init/migration.sql rename to onboard_live_backend/migrations/20240719191810_init/migration.sql diff --git a/onboard-live-backend/migrations/20240815175119_pls_work/migration.sql b/onboard_live_backend/migrations/20240815175119_pls_work/migration.sql similarity index 100% rename from onboard-live-backend/migrations/20240815175119_pls_work/migration.sql rename to onboard_live_backend/migrations/20240815175119_pls_work/migration.sql diff --git a/onboard-live-backend/migrations/migration_lock.toml b/onboard_live_backend/migrations/migration_lock.toml similarity index 100% rename from onboard-live-backend/migrations/migration_lock.toml rename to 
onboard_live_backend/migrations/migration_lock.toml diff --git a/onboard-live-backend/schema.prisma b/onboard_live_backend/schema.prisma similarity index 100% rename from onboard-live-backend/schema.prisma rename to onboard_live_backend/schema.prisma diff --git a/pyproject.toml b/pyproject.toml index 5cb361c..b8267cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -name = "backend" +name = "onboard_live_backend" version = "0.0.1" authors = [{ name = "Micha Albert", email = "micha@2231puppy.tech" }] description = "Backend for OnBoard Live, A Hack Club YSWS grant program" @@ -90,7 +90,7 @@ dependencies = [ Homepage = "https://github.com/MichaByte/OnBoard-Live" [tool.hatch.build.targets.wheel] -packages = ["backend"] +packages = ["onboard_live_backend"] [project.scripts] -start = "main:main" +onboard-live-backend-start = "onboard_live_backend.main:main" From 25dbca5c56115c3ae103752b054a76af124dae68 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Sat, 17 Aug 2024 22:25:32 +0000 Subject: [PATCH 06/41] 50% done with submission flow i think --- onboard_live_backend/main.py | 128 ++++++++++++------ .../20240816163601_add_pr_model/migration.sql | 10 ++ .../migration.sql | 15 ++ .../migration.sql | 23 ++++ .../migration.sql | 25 ++++ .../migration.sql | 39 ++++++ onboard_live_backend/schema.prisma | 22 ++- 7 files changed, 218 insertions(+), 44 deletions(-) create mode 100644 onboard_live_backend/migrations/20240816163601_add_pr_model/migration.sql create mode 100644 onboard_live_backend/migrations/20240816164220_make_pr_model_optional/migration.sql create mode 100644 onboard_live_backend/migrations/20240816164920_add_pr_number_field/migration.sql create mode 100644 onboard_live_backend/migrations/20240817173150_add_sec_token/migration.sql create mode 100644 onboard_live_backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index 6d4247f..1aa2f52 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -1,3 +1,5 @@ +import hashlib +import hmac import json import os from contextlib import asynccontextmanager @@ -5,21 +7,16 @@ from random import choice from secrets import token_hex from typing import Dict, List -import fastapi import httpx +import uvicorn from apscheduler.schedulers.asyncio import AsyncIOScheduler from apscheduler.triggers.interval import IntervalTrigger from dotenv import load_dotenv -from fastapi import FastAPI, Request, Response +from fastapi import FastAPI, HTTPException, Request, Response from fastapi.middleware.cors import CORSMiddleware -from fastapi_oauth2.claims import Claims -from fastapi_oauth2.client import OAuth2Client -from fastapi_oauth2.config import OAuth2Config from prisma import Prisma from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler from slack_bolt.async_app import AsyncAck, AsyncApp -from social_core.backends.slack import SlackOAuth2 -import uvicorn load_dotenv(dotenv_path="./.env") @@ -28,19 +25,27 @@ active_streams: List[Dict[str, str | bool]] = [] scheduler = AsyncIOScheduler() -oauth2_config = OAuth2Config( - allow_http=False, - jwt_secret=os.environ["JWT_SECRET"], - jwt_expires=os.environ["JWT_EXPIRES"], - jwt_algorithm=os.environ["JWT_ALGORITHM"], - clients=[ - OAuth2Client( - backend=SlackOAuth2, - client_id=os.environ["SLACK_TOKEN"], - client_secret=os.environ["SLACK_SIGNING_SECRET"], + +def verify_gh_signature(payload_body, 
secret_token, signature_header): + """Verify that the payload was sent from GitHub by validating SHA256. + + Raise and return 403 if not authorized. + + Args: + payload_body: original request body to verify (request.body()) + secret_token: GitHub app webhook token (WEBHOOK_SECRET) + signature_header: header received from GitHub (x-hub-signature-256) + """ + if not signature_header: + raise HTTPException( + status_code=403, detail="x-hub-signature-256 header is missing!" ) - ], -) + hash_object = hmac.new( + secret_token.encode("utf-8"), msg=payload_body, digestmod=hashlib.sha256 + ) + expected_signature = "sha256=" + hash_object.hexdigest() + if not hmac.compare_digest(expected_signature, signature_header): + raise HTTPException(status_code=403, detail="Request signatures didn't match!") async def update_active(): @@ -75,17 +80,13 @@ async def update_active(): ) new_stream = choice(active_streams) print(f"found new stream to make active: {new_stream}") - try: - await db.connect() - except Exception as e: - print(e) print(f"trying to find user associated with stream {active_stream['name']}") old_active_stream_user = await db.user.find_first(where={"id": (await db.stream.find_first(where={"key": str(active_stream["name"])})).user_id}) # type: ignore await bolt.client.chat_postMessage(channel="C07ERCGG989", text=f"Hey <@{old_active_stream_user.slack_id}>, you're no longer in focus!") # type: ignore active_stream = new_stream active_stream_user = await db.user.find_first(where={"id": (await db.stream.find_first(where={"key": str(active_stream["name"])})).user_id}) # type: ignore await bolt.client.chat_postMessage(channel="C07ERCGG989", text=f"Hey <@{active_stream_user.slack_id}>, you're in focus! Make sure to tell us what you're working on!") # type: ignore - await db.disconnect() + return True async def check_for_new(): @@ -124,19 +125,16 @@ async def lifespan(app: FastAPI): scheduler.start() scheduler.add_job(update_active, IntervalTrigger(seconds=5 * 60)) scheduler.add_job(check_for_new, IntervalTrigger(seconds=3)) - try: - await db.connect() - except Exception: - pass + await db.connect() async with httpx.AsyncClient() as client: for stream in await db.stream.find_many(): await client.post( "http://127.0.0.1:9997/v3/config/paths/add/" + stream.key, json={"name": stream.key}, ) - await db.disconnect() yield scheduler.shutdown() + await db.disconnect() api = FastAPI(lifespan=lifespan) # type: ignore @@ -157,11 +155,48 @@ bolt = AsyncApp( bolt_handler = AsyncSlackRequestHandler(bolt) +@api.post("/api/v1/github/pr_event") +async def pr_event(request: Request): + verify_gh_signature( + await request.body(), + os.environ["GH_HOOK_SECRET"], + request.headers.get("x-hub-signature-256"), + ) + body = json.loads(await request.body()) + if body["action"] == "labeled": + if body["label"]["id"] == 7336079497: + print("Added label has same id as OBL label!") + async with httpx.AsyncClient() as client: + db_pr = await db.pullrequest.create({"github_id": body["number"]}) + db_pr_token = db_pr.token + await client.post( + f"https://api.github.com/repos/hackclub/OnBoard/issues/{body["issue"]["number"]}/comments", + headers={ + "Authorization": f"token {os.environ['GH_TOKEN']}", + "Accept": "application/vnd.github.v3+json", + }, + json={ + "body": f"Hey, I'm Micha, a.k.a `@mra` on Slack! It looks like this is an OnBoard Live submission. If that sounds right, then go to the #onboard-live channel on Slack and send the message `/onboard-live-submit {db_pr_token}`. 
Doing that helps us link this pull request to your Slack account lets you select your sessions for review.\n###### If you have no clue what OnBoard Live is, please disregard this automated message!" + }, + ) + elif "created" in body and "comment" in body: + if body["comment"]["user"]["id"] == body["issue"]["user"]["id"]: + db_pr = await db.pullrequest.find_first(where={"github_id": body["issue"]["number"]}) + if db_pr: + if db_pr.possible_users: + for user in db_pr.possible_users: + if hashlib.sha256(bytes(f"{db_pr.secondary_token}+{user.slack_id}", encoding="utf-8")).hexdigest() in body["comment"]["body"]: + # Yay, the user who ran the Slack submit command is the same user who submitted the PR! + db_pr.user = user + break + else: + print("possible users was none") + return + + @api.get("/api/v1/stream_key/{stream_key}") async def get_stream_by_key(stream_key: str): - await db.connect() stream = await db.stream.find_first(where={"key": stream_key}) - await db.disconnect() return ( stream if stream else Response(status_code=404, content="404: Stream not found") ) @@ -220,10 +255,6 @@ async def deny(ack, body): @bolt.action("approve") async def approve(ack, body): await ack() - try: - await db.connect() - except Exception: - pass message = body["message"] applicant_slack_id = message["blocks"][len(message) - 3]["text"]["text"].split( ": " @@ -262,7 +293,6 @@ async def approve(ack, body): channel=sumbitter_convo["channel"]["id"], text=f"Welcome to OnBoard Live! Your stream key is {new_stream.key}. To use your stream key the easy way, go to . You can also use it in OBS with the server URL of rtmp://live.onboard.hackclub.com:1935", ) - await db.disconnect() @bolt.view("apply") @@ -287,9 +317,6 @@ async def handle_application_submission(ack, body): channel=sumbitter_convo["channel"]["id"], text=f"Your application has been submitted! We will review it shortly. Please do not send another application - If you haven't heard back in over 48 hours, or you forgot something in your application, please message <@{os.environ['ADMIN_SLACK_ID']}>! Here's a copy of your responses for your reference:\nSome info on your project(s): {body['view']['state']['values']['project-info']['project-info-body']['value']}\n{f'Please fill out ! We can only approve your application once this is done.' if not user_verified else ''}", ) - admin_convo = await bolt.client.conversations_open( - users=os.environ["ADMIN_SLACK_ID"], return_im=True - ) will_behave = True # boxes = body["view"]["state"]["values"]["kAgeY"]["checkboxes"]["selected_options"] # if len(boxes) == 1 and boxes[0]["value"] == "value-1": @@ -376,6 +403,29 @@ async def handle_application_submission(ack, body): ) +@bolt.command("/onboard-live-submit") +async def submit(ack: AsyncAck, command): + await ack() + user_id = command["user_id"] + channel_id = command["channel_id"] + text = command["text"] + db_pr = await db.pullrequest.find_first(where={"token": text}) + db_user = await db.user.find_first_or_raise(where={"slack_id": user_id}) + if db_pr is None: + await bolt.client.chat_postEphemeral( + channel=channel_id, + user=user_id, + text="There doesn't seem to be a PR open with that token! 
If this seems like a mistake, please message <@U05C64XMMHV> about it!", + ) + return + await db.pullrequest.update(where={"id": db_pr.id}, data={"possible_users": {"set": [{"id": db_user.id}]}}) + await bolt.client.chat_postEphemeral( + channel=channel_id, + user=user_id, + text=f"Please go to and add a comment containing the secret code `{hashlib.sha256(bytes(f"{db_pr.secondary_token}+{user_id}", encoding="utf-8")).hexdigest()}`. This helps us make sure this is your PR!", + ) + + @bolt.command("/onboard-live-apply") async def apply(ack: AsyncAck, command): await ack() diff --git a/onboard_live_backend/migrations/20240816163601_add_pr_model/migration.sql b/onboard_live_backend/migrations/20240816163601_add_pr_model/migration.sql new file mode 100644 index 0000000..28b85a1 --- /dev/null +++ b/onboard_live_backend/migrations/20240816163601_add_pr_model/migration.sql @@ -0,0 +1,10 @@ +-- CreateTable +CREATE TABLE "PullRequest" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "userId" TEXT NOT NULL, + "token" TEXT NOT NULL, + CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); + +-- CreateIndex +CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token"); diff --git a/onboard_live_backend/migrations/20240816164220_make_pr_model_optional/migration.sql b/onboard_live_backend/migrations/20240816164220_make_pr_model_optional/migration.sql new file mode 100644 index 0000000..e1751ed --- /dev/null +++ b/onboard_live_backend/migrations/20240816164220_make_pr_model_optional/migration.sql @@ -0,0 +1,15 @@ +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_PullRequest" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "userId" TEXT, + "token" TEXT NOT NULL, + CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); +INSERT INTO "new_PullRequest" ("id", "token", "userId") SELECT "id", "token", "userId" FROM "PullRequest"; +DROP TABLE "PullRequest"; +ALTER TABLE "new_PullRequest" RENAME TO "PullRequest"; +CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/onboard_live_backend/migrations/20240816164920_add_pr_number_field/migration.sql b/onboard_live_backend/migrations/20240816164920_add_pr_number_field/migration.sql new file mode 100644 index 0000000..de12636 --- /dev/null +++ b/onboard_live_backend/migrations/20240816164920_add_pr_number_field/migration.sql @@ -0,0 +1,23 @@ +/* + Warnings: + + - Added the required column `github_id` to the `PullRequest` table without a default value. This is not possible if the table is not empty. 
+ +*/ +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_PullRequest" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "github_id" INTEGER NOT NULL, + "userId" TEXT, + "token" TEXT NOT NULL, + CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); +INSERT INTO "new_PullRequest" ("id", "token", "userId") SELECT "id", "token", "userId" FROM "PullRequest"; +DROP TABLE "PullRequest"; +ALTER TABLE "new_PullRequest" RENAME TO "PullRequest"; +CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id"); +CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/onboard_live_backend/migrations/20240817173150_add_sec_token/migration.sql b/onboard_live_backend/migrations/20240817173150_add_sec_token/migration.sql new file mode 100644 index 0000000..f9cdd01 --- /dev/null +++ b/onboard_live_backend/migrations/20240817173150_add_sec_token/migration.sql @@ -0,0 +1,25 @@ +/* + Warnings: + + - The required column `secondary_token` was added to the `PullRequest` table with a prisma-level default value. This is not possible if the table is not empty. Please add this column as optional, then populate it before making it required. + +*/ +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_PullRequest" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "github_id" INTEGER NOT NULL, + "userId" TEXT, + "token" TEXT NOT NULL, + "secondary_token" TEXT NOT NULL, + CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); +INSERT INTO "new_PullRequest" ("github_id", "id", "token", "userId") SELECT "github_id", "id", "token", "userId" FROM "PullRequest"; +DROP TABLE "PullRequest"; +ALTER TABLE "new_PullRequest" RENAME TO "PullRequest"; +CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id"); +CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token"); +CREATE UNIQUE INDEX "PullRequest_secondary_token_key" ON "PullRequest"("secondary_token"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/onboard_live_backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql b/onboard_live_backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql new file mode 100644 index 0000000..b58ceeb --- /dev/null +++ b/onboard_live_backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql @@ -0,0 +1,39 @@ +/* + Warnings: + + - You are about to drop the column `userId` on the `PullRequest` table. All the data in the column will be lost. 
+ +*/ +-- CreateTable +CREATE TABLE "_PullRequestToPossibleUser" ( + "A" INTEGER NOT NULL, + "B" TEXT NOT NULL, + CONSTRAINT "_PullRequestToPossibleUser_A_fkey" FOREIGN KEY ("A") REFERENCES "PullRequest" ("id") ON DELETE CASCADE ON UPDATE CASCADE, + CONSTRAINT "_PullRequestToPossibleUser_B_fkey" FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE +); + +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_PullRequest" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "github_id" INTEGER NOT NULL, + "known_user_id" TEXT, + "token" TEXT NOT NULL, + "secondary_token" TEXT NOT NULL, + CONSTRAINT "PullRequest_known_user_id_fkey" FOREIGN KEY ("known_user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); +INSERT INTO "new_PullRequest" ("github_id", "id", "secondary_token", "token") SELECT "github_id", "id", "secondary_token", "token" FROM "PullRequest"; +DROP TABLE "PullRequest"; +ALTER TABLE "new_PullRequest" RENAME TO "PullRequest"; +CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id"); +CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token"); +CREATE UNIQUE INDEX "PullRequest_secondary_token_key" ON "PullRequest"("secondary_token"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; + +-- CreateIndex +CREATE UNIQUE INDEX "_PullRequestToPossibleUser_AB_unique" ON "_PullRequestToPossibleUser"("A", "B"); + +-- CreateIndex +CREATE INDEX "_PullRequestToPossibleUser_B_index" ON "_PullRequestToPossibleUser"("B"); diff --git a/onboard_live_backend/schema.prisma b/onboard_live_backend/schema.prisma index 178b024..f10598b 100644 --- a/onboard_live_backend/schema.prisma +++ b/onboard_live_backend/schema.prisma @@ -10,11 +10,13 @@ datasource db { } model User { - id String @id @default(cuid()) - created_at DateTime @default(now()) - slack_id String @unique - name String - stream Stream? + id String @id @default(cuid()) + created_at DateTime @default(now()) + slack_id String @unique + name String + stream Stream? + possible_pulls PullRequest[] @relation(name: "PullRequestToPossibleUser") // pull requests that this user has tried to claim via the /onboard-live-submit command on Slack + known_pulls PullRequest[] @relation(name: "PullRequestToKnownUser") // pull requests that have been verified to belong to this user } model Stream { @@ -26,3 +28,13 @@ model Stream { user_id String @unique user User @relation(fields: [user_id], references: [id]) } + +model PullRequest { + id Int @id @default(autoincrement()) + github_id Int @unique + user User? @relation(name: "PullRequestToKnownUser", fields: [known_user_id], references: [id]) + known_user_id String? + token String @unique @default(uuid()) + secondary_token String @unique @default(uuid()) + possible_users User[] @relation(name: "PullRequestToPossibleUser") +} From 6afebea7e1c6c33fa4755891b99abf8f5056cd21 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Fri, 23 Aug 2024 17:40:25 +0000 Subject: [PATCH 07/41] working submission (oauth) flow! 
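
The submission flow this patch introduces hinges on a Fernet-encrypted `state` parameter: the `/onboard-live-submit` Slack command encrypts `<slack_user_id>+<pr_number>`, hex-encodes it into the GitHub OAuth login URL, and `/auth/github/callback` decrypts it to pair the pull request with the Slack user. A minimal sketch of that round-trip (illustrative only — a freshly generated key stands in for the deployed `FERNET_KEY`, and the helper names are not from the diff):

    from cryptography.fernet import Fernet

    # Illustrative only: the backend reads FERNET_KEY from the environment;
    # a generated key is used here so the sketch runs standalone.
    FERNET = Fernet(Fernet.generate_key())

    def make_state(slack_user_id: str, pr_number: int) -> str:
        # What the /onboard-live-submit command embeds in the GitHub login link.
        return FERNET.encrypt(f"{slack_user_id}+{pr_number}".encode("utf-8")).hex()

    def parse_state(state: str) -> tuple[str, int]:
        # What /auth/github/callback does to recover the Slack user and PR number.
        user_id, pr_id = FERNET.decrypt(bytes.fromhex(state)).decode().split("+")
        return user_id, int(pr_id)

    assert parse_state(make_state("U0123456789", 42)) == ("U0123456789", 42)
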
--- onboard_live_backend/main.py | 73 +++++++++++-------- .../migration.sql | 10 +++ .../migration.sql | 26 +++++++ .../migration.sql | 2 + .../migration.sql | 25 +++++++ .../migration.sql | 21 ++++++ .../migration.sql | 22 ++++++ .../migration.sql | 22 ++++++ onboard_live_backend/schema.prisma | 27 +++---- 9 files changed, 183 insertions(+), 45 deletions(-) create mode 100644 onboard_live_backend/migrations/20240821135629_prep_for_oauth/migration.sql create mode 100644 onboard_live_backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql create mode 100644 onboard_live_backend/migrations/20240823141852_add_gh_token_field/migration.sql create mode 100644 onboard_live_backend/migrations/20240823145449_more_pr_schema_work/migration.sql create mode 100644 onboard_live_backend/migrations/20240823151408_remove_duplicated_field/migration.sql create mode 100644 onboard_live_backend/migrations/20240823151722_make_gh_user_id_required/migration.sql create mode 100644 onboard_live_backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index 1aa2f52..3469d79 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -7,6 +7,7 @@ from random import choice from secrets import token_hex from typing import Dict, List +from fastapi.responses import HTMLResponse, RedirectResponse import httpx import uvicorn from apscheduler.schedulers.asyncio import AsyncIOScheduler @@ -17,6 +18,7 @@ from fastapi.middleware.cors import CORSMiddleware from prisma import Prisma from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler from slack_bolt.async_app import AsyncAck, AsyncApp +from cryptography.fernet import Fernet load_dotenv(dotenv_path="./.env") @@ -25,6 +27,8 @@ active_streams: List[Dict[str, str | bool]] = [] scheduler = AsyncIOScheduler() +FERNET = Fernet(os.environ["FERNET_KEY"]) + def verify_gh_signature(payload_body, secret_token, signature_header): """Verify that the payload was sent from GitHub by validating SHA256. 
@@ -155,6 +159,41 @@ bolt = AsyncApp( bolt_handler = AsyncSlackRequestHandler(bolt) +@api.get("/auth/github/login") +async def github_redirect(request: Request): + return RedirectResponse( + f"https://github.com/login/oauth/authorize?client_id={os.environ['GH_CLIENT_ID']}&redirect_uri=https://live.onboard.hackclub.com/auth/github/callback&scopes=read:user&state={request.query_params["state"]}" + ) + + +@api.get("/auth/github/callback") +async def github_callback(request: Request): + code: str = request.query_params["code"] + state: str = request.query_params["state"] + user_id, pr_id = FERNET.decrypt(bytes.fromhex(state)).decode().split("+") + db_user = await db.user.find_first_or_raise(where={"slack_id": user_id}) + db_pr = await db.pullrequest.find_first_or_raise(where={"github_id": int(pr_id)}) + async with httpx.AsyncClient() as client: + token = ( + await client.post( + "https://github.com/login/oauth/access_token", + json={ + "client_id": os.environ["GH_CLIENT_ID"], + "client_secret": os.environ["GH_CLIENT_SECRET"], + "code": code, + "redirect_uri": "https://live.onboard.hackclub.com/auth/github/callback", + }, + headers={"Accept": "application/json"}, + ) + ).json()["access_token"] + + gh_user = (await client.get("https://api.github.com/user", headers={"Accept": "application/vnd.github.v3+json", "Authorization": f"Bearer {token}"})).json()["id"] + if gh_user == db_pr.gh_user_id: + await db.pullrequest.update({"user": {"connect": {"id": db_user.id}}, "gh_user_id": gh_user}, {"id": db_pr.id}) + return HTMLResponse("
Success! Your PR has been linked to your account. Check your Slack DMs for the next steps!") + return HTMLResponse("Looks like something went wrong! DM @mra on slack.
", status_code=403) + + @api.post("/api/v1/github/pr_event") async def pr_event(request: Request): verify_gh_signature( @@ -166,31 +205,7 @@ async def pr_event(request: Request): if body["action"] == "labeled": if body["label"]["id"] == 7336079497: print("Added label has same id as OBL label!") - async with httpx.AsyncClient() as client: - db_pr = await db.pullrequest.create({"github_id": body["number"]}) - db_pr_token = db_pr.token - await client.post( - f"https://api.github.com/repos/hackclub/OnBoard/issues/{body["issue"]["number"]}/comments", - headers={ - "Authorization": f"token {os.environ['GH_TOKEN']}", - "Accept": "application/vnd.github.v3+json", - }, - json={ - "body": f"Hey, I'm Micha, a.k.a `@mra` on Slack! It looks like this is an OnBoard Live submission. If that sounds right, then go to the #onboard-live channel on Slack and send the message `/onboard-live-submit {db_pr_token}`. Doing that helps us link this pull request to your Slack account lets you select your sessions for review.\n###### If you have no clue what OnBoard Live is, please disregard this automated message!" - }, - ) - elif "created" in body and "comment" in body: - if body["comment"]["user"]["id"] == body["issue"]["user"]["id"]: - db_pr = await db.pullrequest.find_first(where={"github_id": body["issue"]["number"]}) - if db_pr: - if db_pr.possible_users: - for user in db_pr.possible_users: - if hashlib.sha256(bytes(f"{db_pr.secondary_token}+{user.slack_id}", encoding="utf-8")).hexdigest() in body["comment"]["body"]: - # Yay, the user who ran the Slack submit command is the same user who submitted the PR! - db_pr.user = user - break - else: - print("possible users was none") + await db.pullrequest.create({"github_id": body["pull_request"]["number"], "gh_user_id": body["pull_request"]["user"]["id"]}) return @@ -409,20 +424,18 @@ async def submit(ack: AsyncAck, command): user_id = command["user_id"] channel_id = command["channel_id"] text = command["text"] - db_pr = await db.pullrequest.find_first(where={"token": text}) - db_user = await db.user.find_first_or_raise(where={"slack_id": user_id}) + db_pr = await db.pullrequest.find_first(where={"github_id": int(text)}) if db_pr is None: await bolt.client.chat_postEphemeral( channel=channel_id, user=user_id, - text="There doesn't seem to be a PR open with that token! If this seems like a mistake, please message <@U05C64XMMHV> about it!", + text="There doesn't seem to be a PR open with that ID! If this seems like a mistake, please message <@U05C64XMMHV> about it!", ) return - await db.pullrequest.update(where={"id": db_pr.id}, data={"possible_users": {"set": [{"id": db_user.id}]}}) await bolt.client.chat_postEphemeral( channel=channel_id, user=user_id, - text=f"Please go to and add a comment containing the secret code `{hashlib.sha256(bytes(f"{db_pr.secondary_token}+{user_id}", encoding="utf-8")).hexdigest()}`. This helps us make sure this is your PR!", + text=f"Please to authenticate with GitHub. This helps us verify that this is your PR!", ) diff --git a/onboard_live_backend/migrations/20240821135629_prep_for_oauth/migration.sql b/onboard_live_backend/migrations/20240821135629_prep_for_oauth/migration.sql new file mode 100644 index 0000000..4b1ef4e --- /dev/null +++ b/onboard_live_backend/migrations/20240821135629_prep_for_oauth/migration.sql @@ -0,0 +1,10 @@ +/* + Warnings: + + - You are about to drop the `_PullRequestToPossibleUser` table. If the table is not empty, all the data it contains will be lost. 
+ +*/ +-- DropTable +PRAGMA foreign_keys=off; +DROP TABLE "_PullRequestToPossibleUser"; +PRAGMA foreign_keys=on; diff --git a/onboard_live_backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql b/onboard_live_backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql new file mode 100644 index 0000000..09deab9 --- /dev/null +++ b/onboard_live_backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql @@ -0,0 +1,26 @@ +/* + Warnings: + + - You are about to drop the column `known_user_id` on the `PullRequest` table. All the data in the column will be lost. + - You are about to drop the column `secondary_token` on the `PullRequest` table. All the data in the column will be lost. + - You are about to drop the column `token` on the `PullRequest` table. All the data in the column will be lost. + +*/ +-- AlterTable +ALTER TABLE "User" ADD COLUMN "github_user_id" TEXT; + +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_PullRequest" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "github_id" INTEGER NOT NULL, + "user_id" TEXT, + CONSTRAINT "PullRequest_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); +INSERT INTO "new_PullRequest" ("github_id", "id") SELECT "github_id", "id" FROM "PullRequest"; +DROP TABLE "PullRequest"; +ALTER TABLE "new_PullRequest" RENAME TO "PullRequest"; +CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/onboard_live_backend/migrations/20240823141852_add_gh_token_field/migration.sql b/onboard_live_backend/migrations/20240823141852_add_gh_token_field/migration.sql new file mode 100644 index 0000000..994913c --- /dev/null +++ b/onboard_live_backend/migrations/20240823141852_add_gh_token_field/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "User" ADD COLUMN "github_token" TEXT; diff --git a/onboard_live_backend/migrations/20240823145449_more_pr_schema_work/migration.sql b/onboard_live_backend/migrations/20240823145449_more_pr_schema_work/migration.sql new file mode 100644 index 0000000..cce7526 --- /dev/null +++ b/onboard_live_backend/migrations/20240823145449_more_pr_schema_work/migration.sql @@ -0,0 +1,25 @@ +/* + Warnings: + + - You are about to drop the column `github_token` on the `User` table. All the data in the column will be lost. 
+ +*/ +-- AlterTable +ALTER TABLE "PullRequest" ADD COLUMN "gh_user_id" TEXT; + +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_User" ( + "id" TEXT NOT NULL PRIMARY KEY, + "created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "slack_id" TEXT NOT NULL, + "name" TEXT NOT NULL, + "github_user_id" TEXT +); +INSERT INTO "new_User" ("created_at", "github_user_id", "id", "name", "slack_id") SELECT "created_at", "github_user_id", "id", "name", "slack_id" FROM "User"; +DROP TABLE "User"; +ALTER TABLE "new_User" RENAME TO "User"; +CREATE UNIQUE INDEX "User_slack_id_key" ON "User"("slack_id"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/onboard_live_backend/migrations/20240823151408_remove_duplicated_field/migration.sql b/onboard_live_backend/migrations/20240823151408_remove_duplicated_field/migration.sql new file mode 100644 index 0000000..815c31e --- /dev/null +++ b/onboard_live_backend/migrations/20240823151408_remove_duplicated_field/migration.sql @@ -0,0 +1,21 @@ +/* + Warnings: + + - You are about to drop the column `github_user_id` on the `User` table. All the data in the column will be lost. + +*/ +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_User" ( + "id" TEXT NOT NULL PRIMARY KEY, + "created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "slack_id" TEXT NOT NULL, + "name" TEXT NOT NULL +); +INSERT INTO "new_User" ("created_at", "id", "name", "slack_id") SELECT "created_at", "id", "name", "slack_id" FROM "User"; +DROP TABLE "User"; +ALTER TABLE "new_User" RENAME TO "User"; +CREATE UNIQUE INDEX "User_slack_id_key" ON "User"("slack_id"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/onboard_live_backend/migrations/20240823151722_make_gh_user_id_required/migration.sql b/onboard_live_backend/migrations/20240823151722_make_gh_user_id_required/migration.sql new file mode 100644 index 0000000..57b2450 --- /dev/null +++ b/onboard_live_backend/migrations/20240823151722_make_gh_user_id_required/migration.sql @@ -0,0 +1,22 @@ +/* + Warnings: + + - Made the column `gh_user_id` on table `PullRequest` required. This step will fail if there are existing NULL values in that column. + +*/ +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_PullRequest" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "github_id" INTEGER NOT NULL, + "user_id" TEXT, + "gh_user_id" TEXT NOT NULL, + CONSTRAINT "PullRequest_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); +INSERT INTO "new_PullRequest" ("gh_user_id", "github_id", "id", "user_id") SELECT "gh_user_id", "github_id", "id", "user_id" FROM "PullRequest"; +DROP TABLE "PullRequest"; +ALTER TABLE "new_PullRequest" RENAME TO "PullRequest"; +CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/onboard_live_backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql b/onboard_live_backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql new file mode 100644 index 0000000..ccfb293 --- /dev/null +++ b/onboard_live_backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql @@ -0,0 +1,22 @@ +/* + Warnings: + + - You are about to alter the column `gh_user_id` on the `PullRequest` table. The data in that column could be lost. The data in that column will be cast from `String` to `Int`. 
+ +*/ +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_PullRequest" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "github_id" INTEGER NOT NULL, + "user_id" TEXT, + "gh_user_id" INTEGER NOT NULL, + CONSTRAINT "PullRequest_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); +INSERT INTO "new_PullRequest" ("gh_user_id", "github_id", "id", "user_id") SELECT "gh_user_id", "github_id", "id", "user_id" FROM "PullRequest"; +DROP TABLE "PullRequest"; +ALTER TABLE "new_PullRequest" RENAME TO "PullRequest"; +CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/onboard_live_backend/schema.prisma b/onboard_live_backend/schema.prisma index f10598b..59ff94a 100644 --- a/onboard_live_backend/schema.prisma +++ b/onboard_live_backend/schema.prisma @@ -1,7 +1,7 @@ generator client { provider = "prisma-client-py" - interface = "asyncio" recursive_type_depth = "5" + interface = "asyncio" } datasource db { @@ -10,13 +10,12 @@ datasource db { } model User { - id String @id @default(cuid()) - created_at DateTime @default(now()) - slack_id String @unique - name String - stream Stream? - possible_pulls PullRequest[] @relation(name: "PullRequestToPossibleUser") // pull requests that this user has tried to claim via the /onboard-live-submit command on Slack - known_pulls PullRequest[] @relation(name: "PullRequestToKnownUser") // pull requests that have been verified to belong to this user + id String @id @default(cuid()) + created_at DateTime @default(now()) + slack_id String @unique + name String + pull_requests PullRequest[] @relation("PullRequestToUser") + stream Stream? } model Stream { @@ -30,11 +29,9 @@ model Stream { } model PullRequest { - id Int @id @default(autoincrement()) - github_id Int @unique - user User? @relation(name: "PullRequestToKnownUser", fields: [known_user_id], references: [id]) - known_user_id String? - token String @unique @default(uuid()) - secondary_token String @unique @default(uuid()) - possible_users User[] @relation(name: "PullRequestToPossibleUser") + id Int @id @default(autoincrement()) + github_id Int @unique + user_id String? + gh_user_id Int + user User? 
@relation("PullRequestToUser", fields: [user_id], references: [id]) } From aa385fd6486087acc5d147380e464bddee5935b2 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 29 Aug 2024 16:32:07 +0000 Subject: [PATCH 08/41] fix sum bugz and add basic slack modal --- onboard_live_backend/main.py | 85 ++++++++++++++++++++++++++++++++---- 1 file changed, 76 insertions(+), 9 deletions(-) diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index 3469d79..d969b89 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -7,18 +7,19 @@ from random import choice from secrets import token_hex from typing import Dict, List -from fastapi.responses import HTMLResponse, RedirectResponse +from aiofiles.os import listdir as async_listdir import httpx import uvicorn from apscheduler.schedulers.asyncio import AsyncIOScheduler from apscheduler.triggers.interval import IntervalTrigger +from cryptography.fernet import Fernet from dotenv import load_dotenv from fastapi import FastAPI, HTTPException, Request, Response from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import HTMLResponse, RedirectResponse from prisma import Prisma from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler from slack_bolt.async_app import AsyncAck, AsyncApp -from cryptography.fernet import Fernet load_dotenv(dotenv_path="./.env") @@ -52,6 +53,18 @@ def verify_gh_signature(payload_body, secret_token, signature_header): raise HTTPException(status_code=403, detail="Request signatures didn't match!") +async def get_recording_list(stream_key: str) -> List[str]: + try: + files = await async_listdir(f"/home/onboard/recordings/{stream_key}") + except FileNotFoundError: + return [] + for file in files: + split_file = file.split("/") + filename_without_ext = split_file[len(split_file) - 1].split(".")[0] + files[files.index(file)] = filename_without_ext + return files + + async def update_active(): global active_stream global active_streams @@ -162,7 +175,7 @@ bolt_handler = AsyncSlackRequestHandler(bolt) @api.get("/auth/github/login") async def github_redirect(request: Request): return RedirectResponse( - f"https://github.com/login/oauth/authorize?client_id={os.environ['GH_CLIENT_ID']}&redirect_uri=https://live.onboard.hackclub.com/auth/github/callback&scopes=read:user&state={request.query_params["state"]}" + f"https://github.com/login/oauth/authorize?client_id={os.environ['GH_CLIENT_ID']}&redirect_uri=https://live.onboard.hackclub.com/auth/github/callback&scopes=read:user&state={request.query_params['state']}" ) @@ -172,6 +185,7 @@ async def github_callback(request: Request): state: str = request.query_params["state"] user_id, pr_id = FERNET.decrypt(bytes.fromhex(state)).decode().split("+") db_user = await db.user.find_first_or_raise(where={"slack_id": user_id}) + user_stream_key = (await db.stream.find_first_or_raise(where={"user_id": db_user.id})).key db_pr = await db.pullrequest.find_first_or_raise(where={"github_id": int(pr_id)}) async with httpx.AsyncClient() as client: token = ( @@ -187,11 +201,59 @@ async def github_callback(request: Request): ) ).json()["access_token"] - gh_user = (await client.get("https://api.github.com/user", headers={"Accept": "application/vnd.github.v3+json", "Authorization": f"Bearer {token}"})).json()["id"] + gh_user: int = ( + await client.get( + "https://api.github.com/user", + headers={ + "Accept": "application/vnd.github.v3+json", + "Authorization": f"Bearer {token}", + }, + ) + ).json()["id"] if gh_user == 
db_pr.gh_user_id: - await db.pullrequest.update({"user": {"connect": {"id": db_user.id}}, "gh_user_id": gh_user}, {"id": db_pr.id}) - return HTMLResponse("

Success! Your PR has been linked to your account. Check your Slack DMs for the next steps!

") - return HTMLResponse("

Looks like something went wrong! DM @mra on slack.

", status_code=403) + await db.pullrequest.update( + {"user": {"connect": {"id": db_user.id}}, "gh_user_id": gh_user}, + {"id": db_pr.id}, + ) + stream_recs = await get_recording_list(user_stream_key) + if stream_recs == []: + return HTMLResponse( + "
You don't have any sessions to submit! Please DM @mra on Slack if you think this is a mistake.
" + ) + await bolt.client.chat_postMessage( + channel=user_id, + text="Select your OnBoard Live sessions!", + blocks=[ + { + "type": "header", + "text": { + "type": "plain_text", + "text": "Select your sessions for review!", + "emoji": True, + }, + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "This is a section block with checkboxes.", + }, + "accessory": { + "type": "checkboxes", + "options": [ + json.loads("""{{"text": {{ "type": "mrkdwn", "text": "Your session on {pretty_time}"}}, "description": {{"type": "mrkdwn", "text": "You streamed for {length}"}}, "value": "checkbox-{filename}"}}""".format(pretty_time=recording, length=1, filename=recording)) for recording in stream_recs + ], + "action_id": "checkboxes", + }, + }, + ], + ) + return HTMLResponse( + "
Success! Your PR has been linked to your account. Check your Slack DMs for the next steps!
" + ) + return HTMLResponse( + f"
Looks like something went wrong! DM @mra on slack. This info might be of use to them: {FERNET.encrypt(bytes(str(db_pr.gh_user_id) + " " + str(gh_user) + " " + user_id + " " + pr_id + " " + state, encoding='utf-8'))}
", status_code=403 + ) @api.post("/api/v1/github/pr_event") @@ -205,7 +267,12 @@ async def pr_event(request: Request): if body["action"] == "labeled": if body["label"]["id"] == 7336079497: print("Added label has same id as OBL label!") - await db.pullrequest.create({"github_id": body["pull_request"]["number"], "gh_user_id": body["pull_request"]["user"]["id"]}) + await db.pullrequest.create( + { + "github_id": body["pull_request"]["number"], + "gh_user_id": body["pull_request"]["user"]["id"], + } + ) return @@ -435,7 +502,7 @@ async def submit(ack: AsyncAck, command): await bolt.client.chat_postEphemeral( channel=channel_id, user=user_id, - text=f"Please to authenticate with GitHub. This helps us verify that this is your PR!", + text=f"Please to authenticate with GitHub. This helps us verify that this is your PR!", ) From 3d08445e0f3e6adacc0da41ff0bf97206333c2df Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Fri, 30 Aug 2024 20:08:20 +0000 Subject: [PATCH 09/41] use mediamtx api instead of fs to get recordings --- onboard_live_backend/main.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index d969b89..da5ea6e 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -7,7 +7,6 @@ from random import choice from secrets import token_hex from typing import Dict, List -from aiofiles.os import listdir as async_listdir import httpx import uvicorn from apscheduler.schedulers.asyncio import AsyncIOScheduler @@ -54,15 +53,8 @@ def verify_gh_signature(payload_body, secret_token, signature_header): async def get_recording_list(stream_key: str) -> List[str]: - try: - files = await async_listdir(f"/home/onboard/recordings/{stream_key}") - except FileNotFoundError: - return [] - for file in files: - split_file = file.split("/") - filename_without_ext = split_file[len(split_file) - 1].split(".")[0] - files[files.index(file)] = filename_without_ext - return files + async with httpx.AsyncClient() as client: + return [recording["start"] for recording in (await client.get(f"http://localhost:9997/v3/recordings/get/{stream_key}")).json()["segments"]] async def update_active(): From 75882ddccbd3f07f19b03825b907858e7cb221b7 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Sat, 31 Aug 2024 00:05:35 +0000 Subject: [PATCH 10/41] fix security issue and run black --- onboard_live_backend/main.py | 81 ++++++++++++++++++++++++++++++++---- 1 file changed, 74 insertions(+), 7 deletions(-) diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index da5ea6e..404e0d9 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -3,10 +3,12 @@ import hmac import json import os from contextlib import asynccontextmanager +from datetime import datetime from random import choice from secrets import token_hex from typing import Dict, List +import cv2 import httpx import uvicorn from apscheduler.schedulers.asyncio import AsyncIOScheduler @@ -19,6 +21,7 @@ from fastapi.responses import HTMLResponse, RedirectResponse from prisma import Prisma from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler from slack_bolt.async_app import AsyncAck, AsyncApp +from yarl import URL load_dotenv(dotenv_path="./.env") @@ -30,6 +33,15 @@ scheduler = AsyncIOScheduler() FERNET = Fernet(os.environ["FERNET_KEY"]) +def get_recording_duration(timestamp, stream_key): + vid = cv2.VideoCapture( + f"/home/onboard/recordings/{stream_key}/{datetime.strptime(timestamp, 
'%Y-%m-%dT%H:%M:%S.%fZ').strftime('%Y-%m-%d_%H-%M-%S-%f')}.mp4" + ) + return int( + (vid.get(cv2.CAP_PROP_FRAME_COUNT) / vid.get(cv2.CAP_PROP_FPS)) / 60 + ) # seconds to minutes + + def verify_gh_signature(payload_body, secret_token, signature_header): """Verify that the payload was sent from GitHub by validating SHA256. @@ -54,7 +66,14 @@ def verify_gh_signature(payload_body, secret_token, signature_header): async def get_recording_list(stream_key: str) -> List[str]: async with httpx.AsyncClient() as client: - return [recording["start"] for recording in (await client.get(f"http://localhost:9997/v3/recordings/get/{stream_key}")).json()["segments"]] + return [ + recording["start"] + for recording in ( + await client.get( + f"http://localhost:9997/v3/recordings/get/{stream_key}" + ) + ).json()["segments"] + ] async def update_active(): @@ -167,7 +186,19 @@ bolt_handler = AsyncSlackRequestHandler(bolt) @api.get("/auth/github/login") async def github_redirect(request: Request): return RedirectResponse( - f"https://github.com/login/oauth/authorize?client_id={os.environ['GH_CLIENT_ID']}&redirect_uri=https://live.onboard.hackclub.com/auth/github/callback&scopes=read:user&state={request.query_params['state']}" + str( + URL.build( + scheme="https", + host="github.com", + path="/login/oauth/authorize", + query={ + "client_id": os.environ["GH_CLIENT_ID"], + "redirect_uri": "https://live.onboard.hackclub.com/auth/github/callback", + "scopes": "read:user", + "state": request.query_params["state"], + }, + ) + ) ) @@ -177,7 +208,9 @@ async def github_callback(request: Request): state: str = request.query_params["state"] user_id, pr_id = FERNET.decrypt(bytes.fromhex(state)).decode().split("+") db_user = await db.user.find_first_or_raise(where={"slack_id": user_id}) - user_stream_key = (await db.stream.find_first_or_raise(where={"user_id": db_user.id})).key + user_stream_key = ( + await db.stream.find_first_or_raise(where={"user_id": db_user.id}) + ).key db_pr = await db.pullrequest.find_first_or_raise(where={"github_id": int(pr_id)}) async with httpx.AsyncClient() as client: token = ( @@ -228,23 +261,57 @@ async def github_callback(request: Request): "type": "section", "text": { "type": "mrkdwn", - "text": "This is a section block with checkboxes.", + "text": f"Here are all your sessions. Select the ones associated with OnBoard pull request #{pr_id}:", }, "accessory": { "type": "checkboxes", "options": [ - json.loads("""{{"text": {{ "type": "mrkdwn", "text": "Your session on {pretty_time}"}}, "description": {{"type": "mrkdwn", "text": "You streamed for {length}"}}, "value": "checkbox-{filename}"}}""".format(pretty_time=recording, length=1, filename=recording)) for recording in stream_recs + json.loads( + """{{"text": {{ "type": "mrkdwn", "text": "Your session on {pretty_time}"}}, "description": {{"type": "mrkdwn", "text": "You streamed for {length} {minute_or_minutes}"}}, "value": "checkbox-{filename}"}}""".format( + pretty_time=recording, + length=get_recording_duration( + recording, user_stream_key + ), + minute_or_minutes=( + "minute" + if get_recording_duration( + recording, user_stream_key + ) + == 1 + else "minutes" + ), + filename=recording, + ) + ) + for recording in stream_recs ], "action_id": "checkboxes", }, }, + { + "type": "actions", + "elements": [ + { + "type": "button", + "text": { + "type": "plain_text", + "emoji": True, + "text": "Submit", + }, + "style": "primary", + "value": "submit_sessions", + "action_id": "submit_sessions", + }, + ], + }, ], ) return HTMLResponse( - "

Success! Your PR has been linked to your account. Check your Slack DMs for the next steps!

" + "

Success! Your PR has been linked to your Slack account. Check your Slack DMs for the next steps!

" ) return HTMLResponse( - f"

Looks like something went wrong! DM @mra on slack.

This info might be of use to them: {FERNET.encrypt(bytes(str(db_pr.gh_user_id) + " " + str(gh_user) + " " + user_id + " " + pr_id + " " + state, encoding='utf-8'))}

", status_code=403 + f"

Looks like something went wrong! DM @mra on slack.

This info might be of use to them: {FERNET.encrypt(bytes(str(db_pr.gh_user_id) + " " + str(gh_user) + " " + user_id + " " + pr_id + " " + state, encoding='utf-8'))}

", + status_code=403, ) From 3241ab8ccb3dc9c7829507f476143361d8b4db22 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Sat, 31 Aug 2024 00:07:07 +0000 Subject: [PATCH 11/41] remove print statements --- onboard_live_backend/main.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index 404e0d9..fc8d8e1 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -90,25 +90,15 @@ async def update_active(): if stream["ready"] and stream not in active_streams: active_streams.append(stream) if len(active_streams) == 0: - print("No active streams") return if active_stream == {}: - print("No current active stream, picking new one...") active_stream = choice(active_streams) return if len(active_streams) == 1: return - print( - f"starting to pick new active stream (switching away from {active_stream['name']})" - ) new_stream = choice(active_streams) while new_stream["name"] == active_stream["name"]: - print( - f"re-attemppting to pick active stream since we picked {new_stream} again" - ) new_stream = choice(active_streams) - print(f"found new stream to make active: {new_stream}") - print(f"trying to find user associated with stream {active_stream['name']}") old_active_stream_user = await db.user.find_first(where={"id": (await db.stream.find_first(where={"key": str(active_stream["name"])})).user_id}) # type: ignore await bolt.client.chat_postMessage(channel="C07ERCGG989", text=f"Hey <@{old_active_stream_user.slack_id}>, you're no longer in focus!") # type: ignore active_stream = new_stream @@ -143,7 +133,6 @@ async def check_for_new(): if stream not in active_streams_simple: active_streams.append({"name": stream, "ready": True}) if len(active_streams) == 0: - print("No active streams") active_stream = {} @@ -325,7 +314,6 @@ async def pr_event(request: Request): body = json.loads(await request.body()) if body["action"] == "labeled": if body["label"]["id"] == 7336079497: - print("Added label has same id as OBL label!") await db.pullrequest.create( { "github_id": body["pull_request"]["number"], From a2f34fbf0b76524adcd8f6fd22f13804d755625c Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Sat, 31 Aug 2024 00:44:04 +0000 Subject: [PATCH 12/41] keep fernet key in memory and rotate it safely --- onboard_live_backend/main.py | 33 +++++++++++++++++++++++++++------ 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index fc8d8e1..e1d195f 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -4,8 +4,7 @@ import json import os from contextlib import asynccontextmanager from datetime import datetime -from random import choice -from secrets import token_hex +from secrets import choice, token_hex from typing import Dict, List import cv2 @@ -30,7 +29,23 @@ active_streams: List[Dict[str, str | bool]] = [] scheduler = AsyncIOScheduler() -FERNET = Fernet(os.environ["FERNET_KEY"]) +FERNET_KEY = Fernet.generate_key() +FERNET_KEY_USERS = [] + +if FERNET_KEY == "": + raise TypeError("No Fernet key found, exiting...") + +FERNET = Fernet(FERNET_KEY) + + +async def rotate_fernet_key(): + global FERNET_KEY + global FERNET + if FERNET_KEY_USERS == []: + FERNET_KEY = Fernet.generate_key() + FERNET = Fernet(FERNET_KEY) + else: + print("not rotating key since we have a pending verification") def get_recording_duration(timestamp, stream_key): @@ -140,8 +155,10 @@ async def check_for_new(): async def lifespan(app: FastAPI): await 
update_active() scheduler.start() - scheduler.add_job(update_active, IntervalTrigger(seconds=5 * 60)) + scheduler.add_job(update_active, IntervalTrigger(minutes=5)) scheduler.add_job(check_for_new, IntervalTrigger(seconds=3)) + scheduler.add_job(rotate_fernet_key, IntervalTrigger(minutes=30)) + await rotate_fernet_key() await db.connect() async with httpx.AsyncClient() as client: for stream in await db.stream.find_many(): @@ -196,6 +213,8 @@ async def github_callback(request: Request): code: str = request.query_params["code"] state: str = request.query_params["state"] user_id, pr_id = FERNET.decrypt(bytes.fromhex(state)).decode().split("+") + if user_id in FERNET_KEY_USERS: + FERNET_KEY_USERS.remove(user_id) db_user = await db.user.find_first_or_raise(where={"slack_id": user_id}) user_stream_key = ( await db.stream.find_first_or_raise(where={"user_id": db_user.id}) @@ -299,8 +318,8 @@ async def github_callback(request: Request): "

Success! Your PR has been linked to your Slack account. Check your Slack DMs for the next steps!

" ) return HTMLResponse( - f"

Looks like something went wrong! DM @mra on slack.

This info might be of use to them: {FERNET.encrypt(bytes(str(db_pr.gh_user_id) + " " + str(gh_user) + " " + user_id + " " + pr_id + " " + state, encoding='utf-8'))}

", - status_code=403, + "

Looks like something went wrong! DM @mra on slack.

", + status_code=500, ) @@ -546,6 +565,8 @@ async def submit(ack: AsyncAck, command): text="There doesn't seem to be a PR open with that ID! If this seems like a mistake, please message <@U05C64XMMHV> about it!", ) return + if user_id not in FERNET_KEY_USERS: + FERNET_KEY_USERS.append(user_id) await bolt.client.chat_postEphemeral( channel=channel_id, user=user_id, From cc60aae2bb1de8f804191e2363dbdd3915c50469 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Sat, 31 Aug 2024 00:44:58 +0000 Subject: [PATCH 13/41] remove extra type check (it's not as dumb as it looks) --- onboard_live_backend/main.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index e1d195f..8525cf2 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -32,8 +32,6 @@ scheduler = AsyncIOScheduler() FERNET_KEY = Fernet.generate_key() FERNET_KEY_USERS = [] -if FERNET_KEY == "": - raise TypeError("No Fernet key found, exiting...") FERNET = Fernet(FERNET_KEY) From b062611feca67d0621b973f08addfbd5497de5d0 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Sat, 31 Aug 2024 13:57:54 +0000 Subject: [PATCH 14/41] Add empty handler for session submit and rename check func --- onboard_live_backend/main.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index 8525cf2..75cf63b 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -375,6 +375,11 @@ async def handle_app_home_opened_events(body, logger, event, client): ) +@bolt.action("submit_sessions") +async def submit_sessions(ack, body): + await ack() + + @bolt.action("deny") async def deny(ack, body): await ack() @@ -753,7 +758,11 @@ async def apply(ack: AsyncAck, command): @bolt.action("checkboxes") -async def handle_some_action(ack): +async def checkboxes(ack): + """ + AFAICT there needs to be *an* action for the checkboxes, but I process their data elsewhere (on submit) + To avoid warnings in Slack, I'm just ACKing it here and doing nothing :) + """ await ack() From 492a644f569e6b1b418d7fc3a8a8548dfef80d4c Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Tue, 3 Sep 2024 13:57:33 +0000 Subject: [PATCH 15/41] slack modal work --- onboard_live_backend/main.py | 30 ++++++++++++++++++- .../migration.sql | 14 +++++++++ .../migration.sql | 26 ++++++++++++++++ .../migration.sql | 2 ++ .../migration.sql | 8 +++++ onboard_live_backend/schema.prisma | 18 +++++++++-- 6 files changed, 94 insertions(+), 4 deletions(-) create mode 100644 onboard_live_backend/migrations/20240831153915_add_session_model/migration.sql create mode 100644 onboard_live_backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql create mode 100644 onboard_live_backend/migrations/20240831222739_remove_unique_constraint/migration.sql create mode 100644 onboard_live_backend/migrations/20240831222937_add_different_unique_constraint/migration.sql diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index 75cf63b..6ab7243 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -264,6 +264,7 @@ async def github_callback(request: Request): }, }, { + "block_id": "session-checks", "type": "section", "text": { "type": "mrkdwn", @@ -376,8 +377,35 @@ async def handle_app_home_opened_events(body, logger, event, client): @bolt.action("submit_sessions") -async def submit_sessions(ack, body): +async def submit_sessions(ack: AsyncAck, body): await ack() + 
selected_sessions_ts: List[str] = [ + i["text"]["text"].split("session on ")[1] + for i in body["state"]["values"]["session-checks"]["checkboxes"][ + "selected_options" + ] + ] + pr_id = int( + body["message"]["blocks"][1]["text"]["text"].split("#")[1].split(":")[0] + ) # don't tell my mom she raised a monster + db_pr = await db.pullrequest.find_first_or_raise(where={"github_id": pr_id}) + if db_pr.user_id: + stream_key = ( + await db.stream.find_first_or_raise(where={"user_id": db_pr.user_id}) + ).key + for session in selected_sessions_ts: + await db.session.create( + { + "pull": {"connect": {"id": db_pr.id}}, + "timestamp": session, + "filename": f"/home/onboard/recordings/{stream_key}/{datetime.strptime(session, '%Y-%m-%dT%H:%M:%S.%fZ').strftime('%Y-%m-%d_%H-%M-%S-%f')}.mp4", + "duration": get_recording_duration(session, stream_key), + } + ) + await bolt.client.chat_delete( + channel=body["container"]["channel_id"], ts=body["message"]["ts"] + ) + print(pr_id, selected_sessions_ts) @bolt.action("deny") diff --git a/onboard_live_backend/migrations/20240831153915_add_session_model/migration.sql b/onboard_live_backend/migrations/20240831153915_add_session_model/migration.sql new file mode 100644 index 0000000..8fece81 --- /dev/null +++ b/onboard_live_backend/migrations/20240831153915_add_session_model/migration.sql @@ -0,0 +1,14 @@ +-- CreateTable +CREATE TABLE "Session" ( + "id" TEXT NOT NULL PRIMARY KEY, + "user_id" TEXT NOT NULL, + "timestamp" TEXT NOT NULL, + "filename" TEXT NOT NULL, + "duration" INTEGER NOT NULL, + "reviewed" BOOLEAN NOT NULL DEFAULT false, + "approved" BOOLEAN NOT NULL DEFAULT false, + CONSTRAINT "Session_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); + +-- CreateIndex +CREATE UNIQUE INDEX "Session_user_id_key" ON "Session"("user_id"); diff --git a/onboard_live_backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql b/onboard_live_backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql new file mode 100644 index 0000000..64e8757 --- /dev/null +++ b/onboard_live_backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql @@ -0,0 +1,26 @@ +/* + Warnings: + + - You are about to drop the column `user_id` on the `Session` table. All the data in the column will be lost. + - Added the required column `pr_id` to the `Session` table without a default value. This is not possible if the table is not empty. 
+ +*/ +-- RedefineTables +PRAGMA defer_foreign_keys=ON; +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_Session" ( + "id" TEXT NOT NULL PRIMARY KEY, + "pr_id" INTEGER NOT NULL, + "timestamp" TEXT NOT NULL, + "filename" TEXT NOT NULL, + "duration" INTEGER NOT NULL, + "reviewed" BOOLEAN NOT NULL DEFAULT false, + "approved" BOOLEAN NOT NULL DEFAULT false, + CONSTRAINT "Session_pr_id_fkey" FOREIGN KEY ("pr_id") REFERENCES "PullRequest" ("id") ON DELETE RESTRICT ON UPDATE CASCADE +); +INSERT INTO "new_Session" ("approved", "duration", "filename", "id", "reviewed", "timestamp") SELECT "approved", "duration", "filename", "id", "reviewed", "timestamp" FROM "Session"; +DROP TABLE "Session"; +ALTER TABLE "new_Session" RENAME TO "Session"; +CREATE UNIQUE INDEX "Session_pr_id_key" ON "Session"("pr_id"); +PRAGMA foreign_keys=ON; +PRAGMA defer_foreign_keys=OFF; diff --git a/onboard_live_backend/migrations/20240831222739_remove_unique_constraint/migration.sql b/onboard_live_backend/migrations/20240831222739_remove_unique_constraint/migration.sql new file mode 100644 index 0000000..cf824ee --- /dev/null +++ b/onboard_live_backend/migrations/20240831222739_remove_unique_constraint/migration.sql @@ -0,0 +1,2 @@ +-- DropIndex +DROP INDEX "Session_pr_id_key"; diff --git a/onboard_live_backend/migrations/20240831222937_add_different_unique_constraint/migration.sql b/onboard_live_backend/migrations/20240831222937_add_different_unique_constraint/migration.sql new file mode 100644 index 0000000..25d4f10 --- /dev/null +++ b/onboard_live_backend/migrations/20240831222937_add_different_unique_constraint/migration.sql @@ -0,0 +1,8 @@ +/* + Warnings: + + - A unique constraint covering the columns `[filename]` on the table `Session` will be added. If there are existing duplicate values, this will fail. + +*/ +-- CreateIndex +CREATE UNIQUE INDEX "Session_filename_key" ON "Session"("filename"); diff --git a/onboard_live_backend/schema.prisma b/onboard_live_backend/schema.prisma index 59ff94a..3ac9bf4 100644 --- a/onboard_live_backend/schema.prisma +++ b/onboard_live_backend/schema.prisma @@ -29,9 +29,21 @@ model Stream { } model PullRequest { - id Int @id @default(autoincrement()) - github_id Int @unique + id Int @id @default(autoincrement()) + github_id Int @unique user_id String? gh_user_id Int - user User? @relation("PullRequestToUser", fields: [user_id], references: [id]) + user User? 
@relation("PullRequestToUser", fields: [user_id], references: [id]) + sessions Session[] +} + +model Session { + id String @id @default(cuid()) + pr_id Int + pull PullRequest @relation(fields: [pr_id], references: [id]) + timestamp String + filename String @unique + duration Int // in minutes + reviewed Boolean @default(false) + approved Boolean @default(false) } From ecb8a80f8b108a017b2d0e4a712af63e4840b42f Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Fri, 6 Sep 2024 17:39:06 +0000 Subject: [PATCH 16/41] hotfix for slack checkbox limit --- onboard_live_backend/main.py | 89 ++++++++++++++++++++++++------------ 1 file changed, 59 insertions(+), 30 deletions(-) diff --git a/onboard_live_backend/main.py b/onboard_live_backend/main.py index 6ab7243..8d3cabe 100644 --- a/onboard_live_backend/main.py +++ b/onboard_live_backend/main.py @@ -259,7 +259,7 @@ async def github_callback(request: Request): "type": "header", "text": { "type": "plain_text", - "text": "Select your sessions for review!", + "text": "Select your sessions for review!\nCopy and paste the lines of sessions that you want associated with this PR into the box!", "emoji": True, }, }, @@ -270,31 +270,59 @@ async def github_callback(request: Request): "type": "mrkdwn", "text": f"Here are all your sessions. Select the ones associated with OnBoard pull request #{pr_id}:", }, - "accessory": { - "type": "checkboxes", - "options": [ - json.loads( - """{{"text": {{ "type": "mrkdwn", "text": "Your session on {pretty_time}"}}, "description": {{"type": "mrkdwn", "text": "You streamed for {length} {minute_or_minutes}"}}, "value": "checkbox-{filename}"}}""".format( - pretty_time=recording, - length=get_recording_duration( - recording, user_stream_key - ), - minute_or_minutes=( - "minute" - if get_recording_duration( - recording, user_stream_key - ) - == 1 - else "minutes" - ), - filename=recording, - ) - ) - for recording in stream_recs - ], - "action_id": "checkboxes", + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": f"{'\n'.join([recording + ' for ' + str(get_recording_duration(recording, user_stream_key)) + 'minutes' for recording in stream_recs])}", # type: ignore }, }, + { + "type": "input", + "block_id": "session-input", + "element": { + "type": "plain_text_input", + "multiline": True, + "action_id": "plain_text_input-action", + }, + "label": { + "type": "plain_text", + "text": "Paste the lines here (DO NOT EDIT THEM, ONE ON EACH LINE)", + "emoji": False, + }, + }, + # "block_id": "session-checks", + # "type": "section", + # "text": { + # "type": "mrkdwn", + # "text": f"Here are all your sessions. 
Select the ones associated with OnBoard pull request #{pr_id}:", + # }, + # "accessory": { + # "type": "checkboxes", + # "options": [ + # json.loads( + # """{{"text": {{ "type": "mrkdwn", "text": "Your session on {pretty_time}"}}, "description": {{"type": "mrkdwn", "text": "You streamed for {length} {minute_or_minutes}"}}, "value": "checkbox-{filename}"}}""".format( + # pretty_time=recording, + # length=get_recording_duration( + # recording, user_stream_key + # ), + # minute_or_minutes=( + # "minute" + # if get_recording_duration( + # recording, user_stream_key + # ) + # == 1 + # else "minutes" + # ), + # filename=recording, + # ) + # ) + # for recording in stream_recs + # ], + # "action_id": "checkboxes", + # }, + # }, { "type": "actions", "elements": [ @@ -379,12 +407,13 @@ async def handle_app_home_opened_events(body, logger, event, client): @bolt.action("submit_sessions") async def submit_sessions(ack: AsyncAck, body): await ack() - selected_sessions_ts: List[str] = [ - i["text"]["text"].split("session on ")[1] - for i in body["state"]["values"]["session-checks"]["checkboxes"][ - "selected_options" - ] - ] + selected_sessions_ts: List[str] = [] + print(body["state"]["values"]) + for session in body["state"]["values"]["session-input"]["plain_text_input-action"][ + "value" + ].split("\n"): + selected_sessions_ts.append(session.split(" for ")[0]) + pr_id = int( body["message"]["blocks"][1]["text"]["text"].split("#")[1].split(":")[0] ) # don't tell my mom she raised a monster From e75f8c944a24364fd042c82ec5d07eed9ce77985 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Mon, 9 Sep 2024 16:37:02 -0400 Subject: [PATCH 17/41] major refactoring + docker progress --- .gitignore | 3 +- active-stream-proxy/Dockerfile | 11 ++ active-stream-proxy/main.py | 17 ++++ active-stream-proxy/requirements.txt | 1 + backend/Dockerfile | 21 ++++ {onboard_live_backend => backend}/main.py | 10 +- .../20240719191810_init/migration.sql | 0 .../20240815175119_pls_work/migration.sql | 0 .../20240816163601_add_pr_model/migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migration.sql | 0 .../migrations/migration_lock.toml | 0 backend/requirements.txt | 79 ++++++++++++++ .../schema.prisma | 4 +- docker-compose.yml | 42 ++++++++ live-stream/Dockerfile | 7 ++ live-stream/run.sh | 6 ++ mediamtx/Dockerfile | 5 + mediamtx/mediamtx.yml | 13 +++ pyproject.toml | 96 ------------------ tiling-frontend/Dockerfile | 16 +++ tiling-frontend/bun.lockb | Bin 104718 -> 132687 bytes 35 files changed, 227 insertions(+), 104 deletions(-) create mode 100644 active-stream-proxy/Dockerfile create mode 100644 active-stream-proxy/main.py create mode 100644 active-stream-proxy/requirements.txt create mode 100644 backend/Dockerfile rename {onboard_live_backend => backend}/main.py (98%) rename {onboard_live_backend => backend}/migrations/20240719191810_init/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240815175119_pls_work/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240816163601_add_pr_model/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240816164220_make_pr_model_optional/migration.sql (100%) rename {onboard_live_backend => 
backend}/migrations/20240816164920_add_pr_number_field/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240817173150_add_sec_token/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240817193412_refactor_pull_user_relation/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240821135629_prep_for_oauth/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240823141450_refactor_schema_for_oauth/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240823141852_add_gh_token_field/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240823145449_more_pr_schema_work/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240823151408_remove_duplicated_field/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240823151722_make_gh_user_id_required/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240823152458_make_gh_user_id_an_int/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240831153915_add_session_model/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240831222739_remove_unique_constraint/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/20240831222937_add_different_unique_constraint/migration.sql (100%) rename {onboard_live_backend => backend}/migrations/migration_lock.toml (100%) create mode 100644 backend/requirements.txt rename {onboard_live_backend => backend}/schema.prisma (95%) create mode 100644 docker-compose.yml create mode 100644 live-stream/Dockerfile create mode 100644 live-stream/run.sh create mode 100644 mediamtx/Dockerfile create mode 100644 mediamtx/mediamtx.yml delete mode 100644 pyproject.toml create mode 100644 tiling-frontend/Dockerfile diff --git a/.gitignore b/.gitignore index 4820029..60851d9 100644 --- a/.gitignore +++ b/.gitignore @@ -187,4 +187,5 @@ cython_debug/ #.idea/ *.dat -dev.db* \ No newline at end of file +dev.db* +.*env diff --git a/active-stream-proxy/Dockerfile b/active-stream-proxy/Dockerfile new file mode 100644 index 0000000..620fead --- /dev/null +++ b/active-stream-proxy/Dockerfile @@ -0,0 +1,11 @@ +FROM python:3.12-alpine + +WORKDIR /usr/src/app + +COPY requirements.txt ./ + +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . 
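+# requirements.txt is copied and installed in its own layer above, before the
+# rest of the source is copied in, so the pip install step stays cached and
+# only re-runs when the dependencies change.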
+ +CMD [ "python", "./main.py" ] diff --git a/active-stream-proxy/main.py b/active-stream-proxy/main.py new file mode 100644 index 0000000..7243917 --- /dev/null +++ b/active-stream-proxy/main.py @@ -0,0 +1,17 @@ +import subprocess +import time +import requests + +active_stream = requests.get("http://backend:8000/api/v1/active_stream").text +old_active_stream = active_stream + +proc = None + +while True: + proc = subprocess.Popen([f"ffmpeg -re -i rtmp://mediamtx:1935/{active_stream} -c:a copy rtmp://host.containers.internal:1936/active-input"], shell=True) + time.sleep(3) + active_stream = requests.get("http://backend:8000/api/v1/active_stream").text + if old_active_stream is not active_stream: + proc.terminate() + proc = subprocess.Popen([f"ffmpeg -re -i rtmp://mediamtx:1935/{active_stream} -c:a copy rtmp://host.containers.internal:1936/active-input"], shell=True) + old_active_stream = active_stream diff --git a/active-stream-proxy/requirements.txt b/active-stream-proxy/requirements.txt new file mode 100644 index 0000000..f229360 --- /dev/null +++ b/active-stream-proxy/requirements.txt @@ -0,0 +1 @@ +requests diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..f00ed05 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +EXPOSE 8000 + +WORKDIR /usr/src/app + +COPY requirements.txt ./ + +RUN apt-get update + +RUN apt-get install -y python3-opencv + +RUN pip install --no-cache-dir -r requirements.txt + +COPY main.py schema.prisma . + +COPY migrations . + +RUN prisma generate + +CMD [ "fastapi", "run", "main.py" ] diff --git a/onboard_live_backend/main.py b/backend/main.py similarity index 98% rename from onboard_live_backend/main.py rename to backend/main.py index 8d3cabe..f54fea6 100644 --- a/onboard_live_backend/main.py +++ b/backend/main.py @@ -83,7 +83,7 @@ async def get_recording_list(stream_key: str) -> List[str]: recording["start"] for recording in ( await client.get( - f"http://localhost:9997/v3/recordings/get/{stream_key}" + f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/recordings/get/{stream_key}" ) ).json()["segments"] ] @@ -93,7 +93,7 @@ async def update_active(): global active_stream global active_streams async with httpx.AsyncClient() as client: - streams_raw = (await client.get("http://localhost:9997/v3/paths/list")).json()[ + streams_raw = (await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list")).json()[ "items" ] streams = [] @@ -124,7 +124,7 @@ async def check_for_new(): global active_stream global active_streams async with httpx.AsyncClient() as client: - streams_raw = (await client.get("http://localhost:9997/v3/paths/list")).json()[ + streams_raw = (await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list")).json()[ "items" ] streams_simple = [] @@ -161,7 +161,7 @@ async def lifespan(app: FastAPI): async with httpx.AsyncClient() as client: for stream in await db.stream.find_many(): await client.post( - "http://127.0.0.1:9997/v3/config/paths/add/" + stream.key, + f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/" + stream.key, json={"name": stream.key}, ) yield @@ -494,7 +494,7 @@ async def approve(ack, body): ) async with httpx.AsyncClient() as client: await client.post( - "http://127.0.0.1:9997/v3/config/paths/add/" + new_stream.key, + f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/" + new_stream.key, json={"name": new_stream.key}, ) await bolt.client.chat_postMessage( diff --git 
a/onboard_live_backend/migrations/20240719191810_init/migration.sql b/backend/migrations/20240719191810_init/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240719191810_init/migration.sql rename to backend/migrations/20240719191810_init/migration.sql diff --git a/onboard_live_backend/migrations/20240815175119_pls_work/migration.sql b/backend/migrations/20240815175119_pls_work/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240815175119_pls_work/migration.sql rename to backend/migrations/20240815175119_pls_work/migration.sql diff --git a/onboard_live_backend/migrations/20240816163601_add_pr_model/migration.sql b/backend/migrations/20240816163601_add_pr_model/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240816163601_add_pr_model/migration.sql rename to backend/migrations/20240816163601_add_pr_model/migration.sql diff --git a/onboard_live_backend/migrations/20240816164220_make_pr_model_optional/migration.sql b/backend/migrations/20240816164220_make_pr_model_optional/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240816164220_make_pr_model_optional/migration.sql rename to backend/migrations/20240816164220_make_pr_model_optional/migration.sql diff --git a/onboard_live_backend/migrations/20240816164920_add_pr_number_field/migration.sql b/backend/migrations/20240816164920_add_pr_number_field/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240816164920_add_pr_number_field/migration.sql rename to backend/migrations/20240816164920_add_pr_number_field/migration.sql diff --git a/onboard_live_backend/migrations/20240817173150_add_sec_token/migration.sql b/backend/migrations/20240817173150_add_sec_token/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240817173150_add_sec_token/migration.sql rename to backend/migrations/20240817173150_add_sec_token/migration.sql diff --git a/onboard_live_backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql b/backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql rename to backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql diff --git a/onboard_live_backend/migrations/20240821135629_prep_for_oauth/migration.sql b/backend/migrations/20240821135629_prep_for_oauth/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240821135629_prep_for_oauth/migration.sql rename to backend/migrations/20240821135629_prep_for_oauth/migration.sql diff --git a/onboard_live_backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql b/backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql rename to backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql diff --git a/onboard_live_backend/migrations/20240823141852_add_gh_token_field/migration.sql b/backend/migrations/20240823141852_add_gh_token_field/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240823141852_add_gh_token_field/migration.sql rename to backend/migrations/20240823141852_add_gh_token_field/migration.sql diff --git a/onboard_live_backend/migrations/20240823145449_more_pr_schema_work/migration.sql 
b/backend/migrations/20240823145449_more_pr_schema_work/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240823145449_more_pr_schema_work/migration.sql rename to backend/migrations/20240823145449_more_pr_schema_work/migration.sql diff --git a/onboard_live_backend/migrations/20240823151408_remove_duplicated_field/migration.sql b/backend/migrations/20240823151408_remove_duplicated_field/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240823151408_remove_duplicated_field/migration.sql rename to backend/migrations/20240823151408_remove_duplicated_field/migration.sql diff --git a/onboard_live_backend/migrations/20240823151722_make_gh_user_id_required/migration.sql b/backend/migrations/20240823151722_make_gh_user_id_required/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240823151722_make_gh_user_id_required/migration.sql rename to backend/migrations/20240823151722_make_gh_user_id_required/migration.sql diff --git a/onboard_live_backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql b/backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql rename to backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql diff --git a/onboard_live_backend/migrations/20240831153915_add_session_model/migration.sql b/backend/migrations/20240831153915_add_session_model/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240831153915_add_session_model/migration.sql rename to backend/migrations/20240831153915_add_session_model/migration.sql diff --git a/onboard_live_backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql b/backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql rename to backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql diff --git a/onboard_live_backend/migrations/20240831222739_remove_unique_constraint/migration.sql b/backend/migrations/20240831222739_remove_unique_constraint/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240831222739_remove_unique_constraint/migration.sql rename to backend/migrations/20240831222739_remove_unique_constraint/migration.sql diff --git a/onboard_live_backend/migrations/20240831222937_add_different_unique_constraint/migration.sql b/backend/migrations/20240831222937_add_different_unique_constraint/migration.sql similarity index 100% rename from onboard_live_backend/migrations/20240831222937_add_different_unique_constraint/migration.sql rename to backend/migrations/20240831222937_add_different_unique_constraint/migration.sql diff --git a/onboard_live_backend/migrations/migration_lock.toml b/backend/migrations/migration_lock.toml similarity index 100% rename from onboard_live_backend/migrations/migration_lock.toml rename to backend/migrations/migration_lock.toml diff --git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 0000000..47b78db --- /dev/null +++ b/backend/requirements.txt @@ -0,0 +1,79 @@ +aiofiles==24.1.0 +aiohttp==3.9.5 +aiosignal==1.3.1 +annotated-types==0.7.0 +anyio==4.4.0 +APScheduler==3.10.4 +attrs==23.2.0 +black==24.4.2 +build==1.2.1 +certifi==2024.7.4 +cffi==1.17.0 
+charset-normalizer==3.3.2 +click==8.1.7 +cryptography==43.0.0 +defusedxml==0.8.0rc2 +dnspython==2.6.1 +ecdsa==0.19.0 +email_validator==2.2.0 +fastapi==0.112.0 +fastapi-cli==0.0.4 +fastapi-oauth2==1.0.0 +fastapi-utils==0.7.0 +frozenlist==1.4.1 +h11==0.14.0 +httpcore==1.0.5 +httptools==0.6.1 +httpx==0.27.0 +idna==3.7 +Jinja2==3.1.4 +markdown-it-py==3.0.0 +MarkupSafe==2.1.5 +mdurl==0.1.2 +multidict==6.0.5 +mypy==1.11.0 +mypy-extensions==1.0.0 +nodeenv==1.9.1 +numpy==2.1.0 +oauthlib==3.2.2 +opencv-python==4.10.0.84 +packaging==24.1 +pathspec==0.12.1 +platformdirs==4.2.2 +prisma==0.14.0 +psutil==5.9.8 +pyasn1==0.6.0 +pycparser==2.22 +pydantic==2.8.2 +pydantic_core==2.20.1 +Pygments==2.18.0 +PyJWT==2.9.0 +pyproject_hooks==1.1.0 +python-dotenv==1.0.1 +python-jose==3.3.0 +python-multipart==0.0.9 +python3-openid==3.2.0 +pytz==2024.1 +PyYAML==6.0.1 +requests==2.32.3 +requests-oauthlib==2.0.0 +rich==13.7.1 +rsa==4.9 +shellingham==1.5.4 +six==1.16.0 +slack_bolt==1.20.0 +slack_sdk==3.31.0 +sniffio==1.3.1 +social-auth-core==4.5.4 +starlette==0.37.2 +tomlkit==0.13.0 +typer==0.12.3 +typing-inspect==0.9.0 +typing_extensions==4.12.2 +tzlocal==5.2 +urllib3==2.2.2 +uvicorn==0.30.6 +uvloop==0.19.0 +watchfiles==0.22.0 +websockets==12.0 +yarl==1.9.4 diff --git a/onboard_live_backend/schema.prisma b/backend/schema.prisma similarity index 95% rename from onboard_live_backend/schema.prisma rename to backend/schema.prisma index 3ac9bf4..0edb7f5 100644 --- a/onboard_live_backend/schema.prisma +++ b/backend/schema.prisma @@ -5,8 +5,8 @@ generator client { } datasource db { - provider = "sqlite" - url = "file:./dev.db" + provider = "postgresql" + url = "file:./db/dev.db" } model User { diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..7e4064b --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,42 @@ +services: + mediamtx: + network_mode: host + build: + context: ./mediamtx + dockerfile: Dockerfile + web-frontend: + build: + context: ./tiling-frontend + dockerfile: Dockerfile + volumes: + - tiling_frontend_build:/usr/src/app/dist + live-stream: + depends_on: + web-frontend: + condition: service_completed_successfully + build: + context: ./live-stream + dockerfile: Dockerfile + volumes: + - tiling_frontend_build:/html + environment: + YT_STREAM_KEY: ${YT_STREAM_KEY} + backend: + env_file: .backend.env + build: + context: ./backend + dockerfile: Dockerfile + volumes: + - ./backend/db:/usr/src/app/db + active-stream-proxy: + build: + context: ./active-stream-proxy + dockerfile: Dockerfile + depends_on: + web-frontend: + condition: service_completed_successfully + mediamtx: + condition: service_started +volumes: + mediamtx_recordings: + tiling_frontend_build: diff --git a/live-stream/Dockerfile b/live-stream/Dockerfile new file mode 100644 index 0000000..60bc333 --- /dev/null +++ b/live-stream/Dockerfile @@ -0,0 +1,7 @@ +FROM alpine:3.20 + +RUN apk add --no-cache gstreamer gst-plugins-bad + +RUN apk add --no-cache ffmpeg + +ENTRYPOINT [""] diff --git a/live-stream/run.sh b/live-stream/run.sh new file mode 100644 index 0000000..35dc0f1 --- /dev/null +++ b/live-stream/run.sh @@ -0,0 +1,6 @@ +gst-launch-1.0 -e wpesrc location="https://en.wikipedia.org/wiki/Main_Page" \ + ! videoconvert ! videoscale ! videorate \ + ! "video/x-raw, format=BGRA, width=854, height=480, framerate=30/1" \ + ! videoconvert \ + ! x264enc speed-preset=1 \ + ! 
filesink location=/dev/stdout | ffmpeg -re -y -i - -listen 1 -i rtmp://127.0.0.1:1936/active-input -c:v copy -c:a aac -map 0:v:0 -map 1:a:0 -t 10 -f flv rtmp://x.rtmp.youtube.com/live2/no-way-am-i-doing-that-again diff --git a/mediamtx/Dockerfile b/mediamtx/Dockerfile new file mode 100644 index 0000000..922516d --- /dev/null +++ b/mediamtx/Dockerfile @@ -0,0 +1,5 @@ +FROM bluenviron/mediamtx + +COPY . / + +ENTRYPOINT ["/mediamtx"] diff --git a/mediamtx/mediamtx.yml b/mediamtx/mediamtx.yml new file mode 100644 index 0000000..ccb9689 --- /dev/null +++ b/mediamtx/mediamtx.yml @@ -0,0 +1,13 @@ +playback: yes +playbackAddress: :9996 +playbackTrustedProxies: [ '127.0.0.1' ] +api: yes +pathDefaults: + record: yes + # Path of recording segments. + # Extension is added automatically. + # Available variables are %path (path name), %Y %m %d %H %M %S %f %s (time in strftime format) + recordPath: /recordings/%path/%Y-%m-%d_%H-%M-%S-%f + recordDeleteAfter: 0s +webrtcICEServers2: + - url: stun:stun.l.google.com:19302 diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index b8267cf..0000000 --- a/pyproject.toml +++ /dev/null @@ -1,96 +0,0 @@ -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[project] -name = "onboard_live_backend" -version = "0.0.1" -authors = [{ name = "Micha Albert", email = "micha@2231puppy.tech" }] -description = "Backend for OnBoard Live, A Hack Club YSWS grant program" -readme = "README.md" -requires-python = ">=3.9" -dependencies = [ - "aiohttp==3.9.5", - "aiosignal==1.3.1", - "annotated-types==0.7.0", - "anyio==4.4.0", - "APScheduler==3.10.4", - "attrs==23.2.0", - "black==24.4.2", - "certifi==2024.7.4", - "cffi==1.17.0", - "charset-normalizer==3.3.2", - "click==8.1.7", - "cryptography==43.0.0", - "defusedxml==0.8.0rc2", - "dnspython==2.6.1", - "ecdsa==0.19.0", - "email_validator==2.2.0", - "fastapi==0.112.0", - "fastapi-cli==0.0.4", - "fastapi-oauth2==1.0.0", - "fastapi-utils==0.7.0", - "frozenlist==1.4.1", - "h11==0.14.0", - "httpcore==1.0.5", - "httptools==0.6.1", - "httpx==0.27.0", - "idna==3.7", - "Jinja2==3.1.4", - "markdown-it-py==3.0.0", - "MarkupSafe==2.1.5", - "mdurl==0.1.2", - "multidict==6.0.5", - "mypy==1.11.0", - "mypy-extensions==1.0.0", - "nodeenv==1.9.1", - "oauthlib==3.2.2", - "packaging==24.1", - "pathspec==0.12.1", - "platformdirs==4.2.2", - "prisma==0.14.0", - "psutil==5.9.8", - "pyasn1==0.6.0", - "pycparser==2.22", - "pydantic==2.8.2", - "pydantic_core==2.20.1", - "Pygments==2.18.0", - "PyJWT==2.9.0", - "python-dotenv==1.0.1", - "python-jose==3.3.0", - "python-multipart==0.0.9", - "python3-openid==3.2.0", - "pytz==2024.1", - "PyYAML==6.0.1", - "requests==2.32.3", - "requests-oauthlib==2.0.0", - "rich==13.7.1", - "rsa==4.9", - "shellingham==1.5.4", - "six==1.16.0", - "slack_bolt==1.20.0", - "slack_sdk==3.31.0", - "sniffio==1.3.1", - "social-auth-core==4.5.4", - "starlette==0.37.2", - "tomlkit==0.13.0", - "typer==0.12.3", - "typing-inspect==0.9.0", - "typing_extensions==4.12.2", - "tzlocal==5.2", - "urllib3==2.2.2", - "uvicorn[standard]==0.30.6", - "uvloop==0.19.0", - "watchfiles==0.22.0", - "websockets==12.0", - "yarl==1.9.4", -] - -[project.urls] -Homepage = "https://github.com/MichaByte/OnBoard-Live" - -[tool.hatch.build.targets.wheel] -packages = ["onboard_live_backend"] - -[project.scripts] -onboard-live-backend-start = "onboard_live_backend.main:main" diff --git a/tiling-frontend/Dockerfile b/tiling-frontend/Dockerfile new file mode 100644 index 0000000..969c7b6 --- /dev/null +++ b/tiling-frontend/Dockerfile 
@@ -0,0 +1,16 @@ +FROM docker.io/oven/bun:slim AS base +WORKDIR /usr/src/app + +FROM base AS install +RUN mkdir -p /temp/dev +COPY package.json bun.lockb /temp/dev/ +RUN cd /temp/dev && bun install + +RUN mkdir -p /temp/prod +COPY package.json bun.lockb /temp/prod/ +RUN cd /temp/prod && bun install --production + +FROM base AS release +COPY --from=install /temp/dev/node_modules node_modules +COPY . . +RUN bun --bun run build diff --git a/tiling-frontend/bun.lockb b/tiling-frontend/bun.lockb index ac9ea20925231ff24fecce59b7419adf4bbe9919..cb123246a238c848398c2510bd0ba66ade2ca8af 100755 GIT binary patch delta 31610 zcmeIbcUV+O(+4_d1Z7YpNR*sJz#&Ho17^Sg2FxguBqPDBgKI<=(@~FEOsi{F%sH$9 z%-J>PEM`~NbboaMjQhIZckg|k@4xpv)Kph@b#--hbvQGJI+HimKK-lCR1epZB>Qhv%$D*e`{zniu2>$$Zo%P_a>Vy`+0Y5TP0GP>fXb+v6!+K%A~;ln^CD^8zQ zVym7}OD3}hF&NkixTi=r1GWI|1Z)E=19k$w4Uu-h2Z1Sn5wIih2w(@`1YlcWm5A+u ziBB%l51|FMa~#-GCTm!{7YRLN)~ zE)a17unEeefsKI!MC>5q`Xc_ju2B9^#OFl3SH!D<8=(Gd0jrD0iW&JL?h9-LhV~*3 z6|tL$%|(13c1BN%r$d?oxLqI&pK+JT zkSy+n4=P;;8X6aGXd;+@Au#zxDe`Ij1^|g?0*PMCzSeS4 z$3qQj?jUkWy&_;5jkJ`Uf+0CcvW$Y<9Dlg#OIQp2E}jVt7b!joOx=?O%4BZ9^@C(G zG*EmOGDqC8nxGr_t(?10*4 zYM>{h5eP8DMK!a^l@Msqx~Wt)IXj2Bug)VB;a!` z1?O1aN|3V^d^Cn@fl0gEoRq@!>=d;uJ3Tu+Ntv3{7QLh42?HjtY6MIT>}(@=4<>Oz zZdRgfBWT`>5|hvjECfDMw0~BCM@E56)(3giP+>t1)wK149zCvrt!Ni*j;bL$P8l5$C5spMfc|y!L`$qykgJ-GHe9cy;ce z9GPrY2SLGlVtHPADnD8u+hI7-e(^^%SQl7YimE`9pzJuI;(d{p6e}o7%1leO(4EE}P~)Kh6} zy+M;=gR#7j$+OdQ((@7v3K}Dy=w@Aoq2hq4$F^d=S2sb9BQP~+)m`9!37R^#8#Ir* z$<(}awd)~_O*3F>@ZT1I|3MHh=eJ&hBL9>#6+I?}ld}d=e`SgJX*_O&_W6nh15>gJ z;kHa4LG!l>f}rG-0Y!=M0%-+*gnXI}H-M=J=S7^9o1CJ|Pf6|pnoLVmiF)1@nE2G!;*(m8#%!f|rUE9LOM8Q~m;7_@Xo7mw zGJszW2c{RMq!!6!yEB9?WoD%mL2+3*XxO?|acM^RPMxaS>gbOZS=uj`cK&1Cpk_u) zi(yNAFSTE^{8M13Hh;d+8tFf$!>2=Ac5C&U+bi)@<@2^WqXt{0pSd$3;f>$l_4ag~ zxiP`Y*C@#OjMIYds~w-}jUBk#EBExAX=Wq*8+$K5ZTYz9uP^%pKOIkrOCG%;<+NLH zhaF2Z1{MwZ*3-I;YLcQP`nLMayg8K(2V`%r8(DXYY|V*-zxu{J{IGSjYn|mKWsYv` zmxNEu&)CyD`pJ=n<90P)`L+IUmcOfSt$VaZ*8X65=S#c$&D*GFbHAkYccrA**28&q z=e0A*e;SuGEOt`kk?N{vLqelX7oF>PVeIQ}Ma{MUO1}T}UWwP6w`-oP?4v$Vy8Of7 zPrwgxAXltWUxmR2?F-oE9kdY8U^Z+_9w!pJDAv!niny&cmJ z$-kYSI@X|9R@NA1{;NMmzkmu0i>Y7~d%~@4R3!f+V=yD@;e+$+?&ph^jtJM8SK*y)ipPoyV+It>c zVo`a>_)`7g93zvMsC~aAm#>~1)3x4Vhs$wXoCCX3{|Y<7AR_34azkO=+&1&W*Uy~!cK=yOVq%3)dj)&UpqNp! zQHCn!F^vLLvmoSkME$ds@OBbD~XS~6KrR?jFzyPlRz)}2qKASLAON2(K_ z*Fak)>&>U~kV@xMeeQ0z z)KjEd3A$im6H)`KQ@g8EawD0n1<%*2IyJF6b*egLgsqn6W8&EoYZWt(t-$9+R$-%( zH!xosxKs5zbhc$EzWbU&HTa~uCnM{_%>e+^94?`-LPi?MF z#hdeEfzrcBwWLzHt_3^ME}ThYE9_L-J1pQg>`Cho<~18-uhMpFD3gWoLQ;@wPAO(1 zt8h>$K7l3=tHrD0jpL>tC^@f63R~eo?KrjqxwBY>lZv^< zMmeeE^{m;6PT@=_TY;>_tioBPxQJViaK4{$j)95>HZoZNC@t2|CQ#7{6jo|96=xI3 zOkov`$T^}KsT9Vx0w2Q~Is`J&Y(*oL_B`Z8uqRDJn5S%%i%Q{TCzGj!+NOa_E?eQE zVh+)#;xkIfw&)P7sbWhStC$&V1wL=G3RjiF6er_WDA#7EH4fCC0IC_=&M-u~lJeP; zjYF6SR^g^n{N@OUci+nNBiU0dIunY?Ql7+pG~8yPbQ8wlR(o z$$V-NQVDFSd#Dx?ttg9W&XzP$DK0eT4{XH@HLvh?m8?&Vq_a^<6|;sd!RI%&La9=; zbQ4@dn~bDTgCYag;;Dn6Xsl|pXRQOZKY;4XH~qm~Cd=Vd$=K1F@~PuUrSd6nrA*e1 zQi>9!+M+%`*4h_A391Ks$YecuUKXhcK2_HfCj>r~h7@04dmmCl>35`B@THx+cymJf zLZq5PItCj(yTX?Es1$+TlG~rP3sejPC3>D?pmsT^Shk&Mh@zH{;54<^vu=S*d$z(? 
zrI?F6au&FRYoPWsqWJb)e5t*Yo^U!p6|;<0_^TAxkp~8}Jk2doQ6DBk=gG~`N>flm z@24YG)BBsC!bu`y#g+u96iNPq4AgQ7)Sd|{pKq@Y%pcFEdLt!R)GbgZ8_4HHpgZIE z)DfhH3VFfOXskmjfzPW4!w42mMT#Fa`H`kj4I8v( z+u4R{@d?UWjTF{)q>OQs700_}3R@DQVpg#g_`Jd@LRAU}%tEpW#@Z%OyE7<$(a@fQ z6m|>LevVWOp9;jhisI!=W=q0UinpMlf#4^M2dfBIDbgZ@(dEZOu^1FdXUN9PdA1^4 zB{zy>byeYtmXQLZjKz>+Yb#Ny6x)#366`YGjlX~*r^Z@{HKI*3Veomk(H=(>tLGS^ z_yZ|&7J=&vC}ASdBnoRT)RFO1Y8WU|Mwl}7u@tliB{+F6P|euV#&A!hpf$NCR^XeU z1XrYKW(wt~jIX1J2L;<;(oM4tRLl@5tkp2&XHY)0dMK1wQbYrz7kxm{8Y4*9gd`Le zRwTtcP~^(B`Ify}3-$pg^vD1u*n*t33{((lt9=EjF8tsq+hE<}Q$vvoq?Do(Dg4po zK-)%9l5OC+OdGbMm5Le7Dq5?wH=%^z1eh0WNo$qDBU;czn>`y6$YijJHY&sU$fI?& zmP{to0_d`gp$l8wK%o-@&*#;KYKlFeU^5zV!$3t{Oe9Pov4bj5WO6MkGb{y#2~w;h z3RLm}Bb*dED4+!bix~ud1BDd>7CYNEP~p)|CWBjG49}VdDsn+lV_4`wm4kxWi}h)= z_3Y6SiDw|*l%1w>XCn-C6jP9!O1Zp6-h(2GW68A+)bs8j)R&_M$>3{f4@9mXKa$&! zqS4}=S@8*!P#3n<_K$^@v_~jXkis82NjhyoQt%NPfwv-6n>UVkoFGB)w4tC#IzQfe zyTvln9z8b|boq$frZnz)-8%}sqtS4L;oPU&C=8GXhGJyNsGgo+Q1n_KcfaI4Sok^w z>a_&bgjxbm9jeP)W(IOeQ-(e30>2k)V5x;~8+WcQ!>sBEiZsPA+63y&00r0J?F`*x zd1-o8$n{39F4}-4?1VP@;U*acqV>0eBJV^SkopZ2wGSOZg~FrA+b~XT1NCNtqJ>73 zCX9^Y40361*5Um`r>o#K3N{XQj{_ynGt8dlpsJlx?}^CCdotP+&d^a={g>Q-Cj z7vz#J>hL4h3ODaW@#ir`F{qmPdPJmzlbgJUw2EM^DUv}!S#cWd1Vug7;eGIL z2SwuzJsbiRwGsut!)Xn+vFIn*tu|k;C#VQs3iOaK@5kzzh0E{sV=H6AF_|sTu@#R3x~tr{{U1QR?iGe ze2QQ^UX~&g6d8|SzU0eO*op1K6(5jE{gTmYuI+;*2+cW!$TL#ei5oaHn;%_y9PgD!7uw&Hejp*GbI1ViXp#|!B)nF%iS|s-HzdkA(_Jd2>)*s zs8|h3@I0(L@&}o0WyhxWdJs)O6V?Dw7hnmXi?9*9r;{0bqLa@5Ur>`|j4Xfs44a_@ zHsi1V1FZhP5?n2p#x9Dd1WY3q4Imst-vm+uJ1u_^HU;zpP(CIHe-S3z=K#oJ0{|qr z06=_&0J;cM{y-vdX<~A%p#Z8s0zlUYEJ^%TodJpjjuf(aOahAmRB<$buAgD@!wGz; zfEjjq7f&_So&q4LQ^itEtb@GSV)-1goG|gv6=}jGXFhQ}8kf0L)y8b6j3ZLY;G%?zz2G0YC;R1jJ zUIx(hGfaF}0MxUq09s$}11SFifbt&!NX}D{ehy3uzXH%jn9AQ$|Ec0T09~4x^4|jp ze*n;>iAmt!eD05!_&VB2Ad+?;@?15`;-$EvhYIGQAFH>Y))ZRWuf{DKP$I zW+FBRrutUE#AhR7J7Bs9Q_^0<4#2R0TIMVyWR1lFS6~w0F6Mgx)Ach<4f=@ue!#@) zj}J0!5O5t}6=jN;_?wI6t@wP{Ur79jNr5(EMZ!c!i@2?rPneSQoB`qXBJMy0F2V}n zZX&ITDXTkupnW0n4@^Di1qR~jO&=nrWP(Wl3{z1bv79g^`{IKvkp@h=MImL1nA-m# zL!!x4gGI)lVN;Y(Mmbq#s>r8_ss0S)Q%5+l{3lq=3z!22s<1$;P$pK;#FVudAB2~P z<%B7@RHT1~sc0EKNYUR!z8cK;Unn3GtrZzGG4)^r@~LQ}h&PFRKf|=1Y!}N3Q^Pw& znlRD3MS8bdN&r)GkBIk*`Gl#$evu|j4IBX`#VUZQsFFTJO!+58S`$;&DSQw@V_bVvUnk|i{ciZna_dqn*25%IrA#Qz=<|9eFI+0pU8 zM?~5){$Cvt@%8`z!4a{1Xi|f=5w5Apoj>h5HY0Ihl~GIA5rc|#uhk07TwuKEW1Zxx z*GYR%Hrq4v(;IiAr?o8VSnfSxx;t)OOxoYW9v}R^ykuVNgY)dk_2%m4l~ad*ob$>1 z)c1jzP0KguO^+TqOP*1;ZO4|^=C^)0s?Do}`sH8yCWJWck-drxbvie}e`NWX4H22U zkM(r$Hu4yV-I9@g)Bgs1i>xAES_ZE^t?tPk>lL-k?f85b|M^3n-mEqEaX{qa{W=p$ z*>t<&7qKN5n>DxaUMW9OVis`Ne0W&z{_DRhy$#-)PsrK$qJSN-!Cd_{;Jfp&%FDge z{9S4<-rF|dn#<<%{{0<0*LREj8tj$aEx-QGKkjXweDv$^{k`Yq1XS&r%#=kUDP~x=IX}p{(d_pu+ikQ4y~iwHEeA%^U(I%L6aYD+PCuh zw_bHtT-4C+o~Cv~Cyw0j<+*H9=gO1XJtB{}-riHO)xB-%g?n9s-S^Ggda3)+3Aqa< zpRjm;X~d>alW?_~Ch{_ru!FTr_99V#E%w36|w0o_#G2pC8@H zeaVocH!SpnCo9_aTv-*PE`JcB>&cdGGFMyIesS&Kx)~l3F5Y_w%uD(ZQ(5Q6L1UMg zy%YSqb-Z;!wQ=_G(+y*@wnld{duP z*z8_C+2YXTRlk3qlKm>S?l6W8FE>+{EngVgBxCu_u`_lZ>Ds09d7EofKmDe2)%j@9 z3H9h+r-~m%9yDFUjLiGoebMZli|+T)Pa82QreN9cN1l}UKRYpj{$WH??}?^*OKzAh zDBLr$PIB>ho3N^BR)6|;(+{6}@9L93mJYGL_-gI0)n_6vZT`dJhIRV$Ved}I=QF39 zJn8xJm5)#NZ@Vw-SDt5Il$)!Y_!NZ2X&Jm5d3=iRUwM9pH+*{UbCp?*>oMj2gK^gV zbh};qI%dN&rwwzTR&^SGWcTXMPu>sp-dUzQJj3#oz3fZ5hIY?1wL2a2qIVOI0nM^y z7ffef>A7Xsu-f_W_L-0Ub$s(CwbvdrxUx+-Fa72ki@&!o)nC@{&ZOCm9v(mOOH4`n5uGwW`;v_tRIodZ`6>wflK-Y;RtZ*PjJJaRMt z<0~`gwd-5YUfHEf-#xbm#UjGKqG?bCJVep}pWv)SIDk~W5K(<{7R zZ*9Z2+hVQ`sgwJq=P(D~vUwjjA9>q6t>d#6wcPJKkW; zTDxD}OX42=7FMyoxP8M0%e>e>Tz_w&q1{VO?RH#g!c1Aze%Q;ZW+CZodal+k&5fut 
zuu8u*74H{dVAT`z3eYfZJjq6AiP$s|1?g9wUDq}^)~@_))jRIw?+BkU%p7Fg@=ZHq zf8TmIcAeYcR#VY%3UL}nSIW!#M}CM2tU4RsQRk1JC!}5C4NCu&I4~9+Y8Q7Y zq4644uUGm@BiyG$cjc z#xBM5Y=c3ur4U7GTb>_!8GYIUGM*i?7CGY<%qtCSArFV~?M|f*JdZQ?vgh^psSs(q=J8cpw zJI{9a#?`js{zMjFq!ba8Sdi{zi*ez-hqC$l^!L{`b*Iv@HT=I;T%(bqbwgX@`uxe% zp?l0n^0As36Wq99zxGC>tIOxE{lOjc=&<@-gCA|a?ghQ;wW5RmxJ)Bw%^yZHs~q%D zfWGBDO4`29^nFQ>)3iT1Ed+|OQ_;=ZYPwaNv#4!ZK8rS|m@w(c)SLz`gk&Ly@_<_Lq zfziS9r=}i4OdD-0J|%*uBd?3XIvwTg5YG`U5T8#sT47cX0lzz02}Dow=bD4x{qyU}7uKAB1>$KbQI9_K4uGB99Q2rNoa|6!ezMrT5IGsYVi-0u zXxiloD>vymp<7ianVH%Ke~&oq)HpGjMZzMCM)yHs9p3I}*GL#D?>zB?7tSaxaXz6( zvuS{w{-73!$xId>EMn2x?ylE5=1*#^wLjWo{4|0O4l3>cbI;FEYdW5^)!Xm?WUOTT znqtJc51)VQt@Y9FhMh|h|5(hr%GXjv^CfIV)8A6GAB@l!H{NKoLDn z?cElOz~&_^pl=j(I!>FF|3k0UqhisA_#Fabd=b;6uwxk99-{4mDLj z7ct)8YqMg+pa<|f4-vpwZ;SD}j-3ZjIL?o3Z9`4z3<~#Xha-7aUr`Q2d3|4Tg)>)M z<98el^hfY&ejWlhNnl8W8HhPx# From 8747a8ffe3cde9dc826ad134f09c595d50965c3a Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Mon, 9 Sep 2024 20:03:16 -0400 Subject: [PATCH 18/41] more WIP docker stuff --- active-stream-proxy/Dockerfile | 2 ++ active-stream-proxy/main.py | 15 ++++++++++++--- backend/Dockerfile | 6 ++++-- backend/main.py | 12 ++++++------ backend/schema.prisma | 2 +- docker-compose.yml | 13 ++++++++++--- mediamtx/mediamtx.yml | 9 +++++++++ 7 files changed, 44 insertions(+), 15 deletions(-) diff --git a/active-stream-proxy/Dockerfile b/active-stream-proxy/Dockerfile index 620fead..292aec2 100644 --- a/active-stream-proxy/Dockerfile +++ b/active-stream-proxy/Dockerfile @@ -2,6 +2,8 @@ FROM python:3.12-alpine WORKDIR /usr/src/app +RUN apk add --no-cache ffmpeg + COPY requirements.txt ./ RUN pip install --no-cache-dir -r requirements.txt diff --git a/active-stream-proxy/main.py b/active-stream-proxy/main.py index 7243917..10d3453 100644 --- a/active-stream-proxy/main.py +++ b/active-stream-proxy/main.py @@ -2,16 +2,25 @@ import subprocess import time import requests +time.sleep(5) + active_stream = requests.get("http://backend:8000/api/v1/active_stream").text +print(active_stream) + old_active_stream = active_stream proc = None +if active_stream != "": + proc = subprocess.Popen(["/bin/ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a copy", "rtmp://host.containers.internal:1936/active-input"]) + + while True: - proc = subprocess.Popen([f"ffmpeg -re -i rtmp://mediamtx:1935/{active_stream} -c:a copy rtmp://host.containers.internal:1936/active-input"], shell=True) time.sleep(3) active_stream = requests.get("http://backend:8000/api/v1/active_stream").text if old_active_stream is not active_stream: - proc.terminate() - proc = subprocess.Popen([f"ffmpeg -re -i rtmp://mediamtx:1935/{active_stream} -c:a copy rtmp://host.containers.internal:1936/active-input"], shell=True) + if proc: + proc.terminate() + print("e") + proc = subprocess.Popen(["/bin/ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a copy", "rtmp://host.containers.internal:1936/active-input"]) old_active_stream = active_stream diff --git a/backend/Dockerfile b/backend/Dockerfile index f00ed05..2598748 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -12,10 +12,12 @@ RUN apt-get install -y python3-opencv RUN pip install --no-cache-dir -r requirements.txt -COPY main.py schema.prisma . +COPY schema.prisma . -COPY migrations . +ADD migrations . RUN prisma generate +COPY main.py . 
+ CMD [ "fastapi", "run", "main.py" ] diff --git a/backend/main.py b/backend/main.py index f54fea6..f854c1b 100644 --- a/backend/main.py +++ b/backend/main.py @@ -6,6 +6,7 @@ from contextlib import asynccontextmanager from datetime import datetime from secrets import choice, token_hex from typing import Dict, List +import time import cv2 import httpx @@ -151,12 +152,6 @@ async def check_for_new(): @asynccontextmanager async def lifespan(app: FastAPI): - await update_active() - scheduler.start() - scheduler.add_job(update_active, IntervalTrigger(minutes=5)) - scheduler.add_job(check_for_new, IntervalTrigger(seconds=3)) - scheduler.add_job(rotate_fernet_key, IntervalTrigger(minutes=30)) - await rotate_fernet_key() await db.connect() async with httpx.AsyncClient() as client: for stream in await db.stream.find_many(): @@ -164,6 +159,11 @@ async def lifespan(app: FastAPI): f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/" + stream.key, json={"name": stream.key}, ) + scheduler.start() + scheduler.add_job(update_active, IntervalTrigger(minutes=5)) + scheduler.add_job(check_for_new, IntervalTrigger(seconds=3)) + scheduler.add_job(rotate_fernet_key, IntervalTrigger(minutes=30)) + await rotate_fernet_key() yield scheduler.shutdown() await db.disconnect() diff --git a/backend/schema.prisma b/backend/schema.prisma index 0edb7f5..dca2c2e 100644 --- a/backend/schema.prisma +++ b/backend/schema.prisma @@ -5,7 +5,7 @@ generator client { } datasource db { - provider = "postgresql" + provider = "sqlite" url = "file:./db/dev.db" } diff --git a/docker-compose.yml b/docker-compose.yml index 7e4064b..12132ed 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,9 +1,12 @@ services: mediamtx: - network_mode: host build: context: ./mediamtx dockerfile: Dockerfile + ports: + - "8889:8889" + - "1935:1935" + - "9997:9997" web-frontend: build: context: ./tiling-frontend @@ -12,8 +15,8 @@ services: - tiling_frontend_build:/usr/src/app/dist live-stream: depends_on: - web-frontend: - condition: service_completed_successfully + active_stream_proxy: + condition: service_started build: context: ./live-stream dockerfile: Dockerfile @@ -23,6 +26,8 @@ services: YT_STREAM_KEY: ${YT_STREAM_KEY} backend: env_file: .backend.env + ports: + - "8000:8000" build: context: ./backend dockerfile: Dockerfile @@ -37,6 +42,8 @@ services: condition: service_completed_successfully mediamtx: condition: service_started + backend: + condition: service_started volumes: mediamtx_recordings: tiling_frontend_build: diff --git a/mediamtx/mediamtx.yml b/mediamtx/mediamtx.yml index ccb9689..6ba7b24 100644 --- a/mediamtx/mediamtx.yml +++ b/mediamtx/mediamtx.yml @@ -11,3 +11,12 @@ pathDefaults: recordDeleteAfter: 0s webrtcICEServers2: - url: stun:stun.l.google.com:19302 +authInternalUsers: + # Username. 'any' means any user, including anonymous ones. +- user: any + # Password. Not used in case of 'any' user. + pass: + # IPs or networks allowed to use this user. An empty list means any IP. 
+ ips: [] + permissions: + - action: api From e468e0b9ab9684579591f646d3cb083c7b2c48db Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Tue, 10 Sep 2024 07:39:22 -0400 Subject: [PATCH 19/41] start fixing stream stuff --- active-stream-proxy/main.py | 10 +++++++--- live-stream/Dockerfile | 8 ++++++-- live-stream/run.sh | 4 +++- 3 files changed, 16 insertions(+), 6 deletions(-) mode change 100644 => 100755 live-stream/run.sh diff --git a/active-stream-proxy/main.py b/active-stream-proxy/main.py index 10d3453..aa909ec 100644 --- a/active-stream-proxy/main.py +++ b/active-stream-proxy/main.py @@ -12,7 +12,9 @@ old_active_stream = active_stream proc = None if active_stream != "": - proc = subprocess.Popen(["/bin/ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a copy", "rtmp://host.containers.internal:1936/active-input"]) + proc = subprocess.Popen(["/bin/ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a copy", "rtmp://live-stream:1936/active-input"]) +else: + proc = subprocess.Popen(["/bin/ffmpeg", "-i", "anullsrc", "-c:a copy", "rtmp://live-stream:1936/active-input"]) while True: @@ -21,6 +23,8 @@ while True: if old_active_stream is not active_stream: if proc: proc.terminate() - print("e") - proc = subprocess.Popen(["/bin/ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a copy", "rtmp://host.containers.internal:1936/active-input"]) + if active_stream != "": + proc = subprocess.Popen(["/bin/ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a copy", "rtmp://live-stream:1936/active-input"]) + else: + proc = subprocess.Popen(["/bin/ffmpeg", "-i", "anullsrc", "-c:a copy", "rtmp://live-stream:1936/active-input"]) old_active_stream = active_stream diff --git a/live-stream/Dockerfile b/live-stream/Dockerfile index 60bc333..0ce2d99 100644 --- a/live-stream/Dockerfile +++ b/live-stream/Dockerfile @@ -1,7 +1,11 @@ FROM alpine:3.20 -RUN apk add --no-cache gstreamer gst-plugins-bad +RUN apk add --no-cache gstreamer gstreamer-tools gst-plugins-bad RUN apk add --no-cache ffmpeg -ENTRYPOINT [""] +RUN apk add --no-cache bash libwpe libwpe-dev + +COPY run.sh / + +ENTRYPOINT ["/run.sh"] diff --git a/live-stream/run.sh b/live-stream/run.sh old mode 100644 new mode 100755 index 35dc0f1..a0f7b8f --- a/live-stream/run.sh +++ b/live-stream/run.sh @@ -1,6 +1,8 @@ +#!/bin/bash + gst-launch-1.0 -e wpesrc location="https://en.wikipedia.org/wiki/Main_Page" \ ! videoconvert ! videoscale ! videorate \ ! "video/x-raw, format=BGRA, width=854, height=480, framerate=30/1" \ ! videoconvert \ ! x264enc speed-preset=1 \ - ! filesink location=/dev/stdout | ffmpeg -re -y -i - -listen 1 -i rtmp://127.0.0.1:1936/active-input -c:v copy -c:a aac -map 0:v:0 -map 1:a:0 -t 10 -f flv rtmp://x.rtmp.youtube.com/live2/no-way-am-i-doing-that-again + ! 
filesink location=/dev/stdout | ffmpeg -re -y -i - -listen 1 -i rtmp://0.0.0.0:1936/active-input -c:v copy -c:a aac -map 0:v:0 -map 1:a:0 -t 10 -f flv rtmp://x.rtmp.youtube.com/live2/no-way-am-i-doing-that-again From 35c2a98835dd83c85f2d8a391f3de6345f1c722a Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Tue, 10 Sep 2024 13:32:34 -0400 Subject: [PATCH 20/41] kinda working --- active-stream-proxy/main.py | 15 +++++++-------- docker-compose.yml | 7 +++++-- live-stream/Dockerfile | 14 ++++++++++---- live-stream/run.sh | 12 +++++++++--- mediamtx/mediamtx.yml | 3 +++ tiling-frontend/Dockerfile | 2 ++ 6 files changed, 36 insertions(+), 17 deletions(-) diff --git a/active-stream-proxy/main.py b/active-stream-proxy/main.py index aa909ec..c24f4b9 100644 --- a/active-stream-proxy/main.py +++ b/active-stream-proxy/main.py @@ -2,9 +2,9 @@ import subprocess import time import requests -time.sleep(5) +time.sleep(10) -active_stream = requests.get("http://backend:8000/api/v1/active_stream").text +active_stream = requests.get("http://backend:8000/api/v1/active_stream").text.replace('"', '') print(active_stream) old_active_stream = active_stream @@ -12,19 +12,18 @@ old_active_stream = active_stream proc = None if active_stream != "": - proc = subprocess.Popen(["/bin/ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a copy", "rtmp://live-stream:1936/active-input"]) + proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) else: - proc = subprocess.Popen(["/bin/ffmpeg", "-i", "anullsrc", "-c:a copy", "rtmp://live-stream:1936/active-input"]) - + proc = subprocess.Popen(["ffmpeg", "-f", "lavfi", "-i", "anullsrc", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) while True: time.sleep(3) - active_stream = requests.get("http://backend:8000/api/v1/active_stream").text + active_stream = requests.get("http://backend:8000/api/v1/active_stream").text.replace('""', '') if old_active_stream is not active_stream: if proc: proc.terminate() if active_stream != "": - proc = subprocess.Popen(["/bin/ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a copy", "rtmp://live-stream:1936/active-input"]) + proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) else: - proc = subprocess.Popen(["/bin/ffmpeg", "-i", "anullsrc", "-c:a copy", "rtmp://live-stream:1936/active-input"]) + proc = subprocess.Popen(["ffmpeg", "-f", "lavfi", "-i", "anullsrc", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) old_active_stream = active_stream diff --git a/docker-compose.yml b/docker-compose.yml index 12132ed..8b1191f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,12 +8,16 @@ services: - "1935:1935" - "9997:9997" web-frontend: + ports: + - "4173:4173" build: context: ./tiling-frontend dockerfile: Dockerfile volumes: - tiling_frontend_build:/usr/src/app/dist live-stream: + ports: + - "1936:1936" depends_on: active_stream_proxy: condition: service_started @@ -22,8 +26,7 @@ services: dockerfile: Dockerfile volumes: - tiling_frontend_build:/html - environment: - YT_STREAM_KEY: ${YT_STREAM_KEY} + env_file: .stream.env backend: env_file: .backend.env ports: diff --git a/live-stream/Dockerfile b/live-stream/Dockerfile index 0ce2d99..16b42f4 100644 --- a/live-stream/Dockerfile +++ 
b/live-stream/Dockerfile @@ -1,10 +1,16 @@ -FROM alpine:3.20 +FROM ubuntu:22.04 -RUN apk add --no-cache gstreamer gstreamer-tools gst-plugins-bad +ARG DEBIAN_FRONTEND=noninteractive -RUN apk add --no-cache ffmpeg +RUN apt update -RUN apk add --no-cache bash libwpe libwpe-dev +RUN apt install -y ffmpeg + +RUN apt install -y libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libgstreamer-plugins-bad1.0-dev gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly gstreamer1.0-libav gstreamer1.0-tools gstreamer1.0-x gstreamer1.0-alsa gstreamer1.0-wpe + +RUN rm -rf /var/lib/apt/lists/* + +RUN apt clean COPY run.sh / diff --git a/live-stream/run.sh b/live-stream/run.sh index a0f7b8f..a7f9df5 100755 --- a/live-stream/run.sh +++ b/live-stream/run.sh @@ -1,8 +1,14 @@ #!/bin/bash -gst-launch-1.0 -e wpesrc location="https://en.wikipedia.org/wiki/Main_Page" \ +sleep 1 + +LIBGL_ALWAYS_SOFTWARE=true gst-launch-1.0 -e wpesrc location="http://web-frontend:4173/" \ + ! queue \ ! videoconvert ! videoscale ! videorate \ - ! "video/x-raw, format=BGRA, width=854, height=480, framerate=30/1" \ + ! "video/x-raw, format=BGRA, width=1920, height=1080, framerate=30/1" \ ! videoconvert \ ! x264enc speed-preset=1 \ - ! filesink location=/dev/stdout | ffmpeg -re -y -i - -listen 1 -i rtmp://0.0.0.0:1936/active-input -c:v copy -c:a aac -map 0:v:0 -map 1:a:0 -t 10 -f flv rtmp://x.rtmp.youtube.com/live2/no-way-am-i-doing-that-again + ! filesink location=/dev/stdout | ffmpeg -re -y -i - -listen 1 -i rtmp://0.0.0.0:1936/active-input -c:v copy -c:a libmp3lame -map 0:v:0 -map 1:a:0 -g 90 -framerate 30 -movflags faststart -bufsize 14000k -f flv rtmp://x.rtmp.youtube.com/live2/$YT_STREAM_KEY + + + diff --git a/mediamtx/mediamtx.yml b/mediamtx/mediamtx.yml index 6ba7b24..db3e986 100644 --- a/mediamtx/mediamtx.yml +++ b/mediamtx/mediamtx.yml @@ -20,3 +20,6 @@ authInternalUsers: ips: [] permissions: - action: api + - action: publish + - action: playback + - action: read diff --git a/tiling-frontend/Dockerfile b/tiling-frontend/Dockerfile index 969c7b6..e766ca8 100644 --- a/tiling-frontend/Dockerfile +++ b/tiling-frontend/Dockerfile @@ -14,3 +14,5 @@ FROM base AS release COPY --from=install /temp/dev/node_modules node_modules COPY . . RUN bun --bun run build +ENTRYPOINT ["bun", "--bun", "run", "preview", "--host"] + From c7746071fcc53cc07c6f91bde7c6b8700a498940 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Tue, 10 Sep 2024 16:39:28 -0400 Subject: [PATCH 21/41] mostly working but goofy color stuff broken --- active-stream-proxy/main.py | 2 +- live-stream/run.sh | 10 ++-------- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/active-stream-proxy/main.py b/active-stream-proxy/main.py index c24f4b9..0ba867b 100644 --- a/active-stream-proxy/main.py +++ b/active-stream-proxy/main.py @@ -2,7 +2,7 @@ import subprocess import time import requests -time.sleep(10) +time.sleep(8) active_stream = requests.get("http://backend:8000/api/v1/active_stream").text.replace('"', '') print(active_stream) diff --git a/live-stream/run.sh b/live-stream/run.sh index a7f9df5..51acfd9 100755 --- a/live-stream/run.sh +++ b/live-stream/run.sh @@ -3,12 +3,6 @@ sleep 1 LIBGL_ALWAYS_SOFTWARE=true gst-launch-1.0 -e wpesrc location="http://web-frontend:4173/" \ - ! queue \ ! videoconvert ! videoscale ! videorate \ - ! "video/x-raw, format=BGRA, width=1920, height=1080, framerate=30/1" \ - ! videoconvert \ - ! x264enc speed-preset=1 \ - ! 
filesink location=/dev/stdout | ffmpeg -re -y -i - -listen 1 -i rtmp://0.0.0.0:1936/active-input -c:v copy -c:a libmp3lame -map 0:v:0 -map 1:a:0 -g 90 -framerate 30 -movflags faststart -bufsize 14000k -f flv rtmp://x.rtmp.youtube.com/live2/$YT_STREAM_KEY - - - + ! "video/x-raw, format=NV12, width=1920, height=1080, framerate=30/1" \ + ! filesink location=/dev/stdout | ffmpeg -re -y -f rawvideo -pixel_format nv12 -video_size 1920x1080 -framerate 30 -i - -listen 1 -i rtmp://0.0.0.0:1936/active-input -filter_complex "[0:v][0:v]overlay=-50:0[bg]; [bg][0:v]overlay=-50-W,format=nv12[out]" -map "[out]" -c:v libx264 -x264-params keyint=60 -preset ultrafast -b:v 6800k -c:a copy -map 1:a:0 -movflags faststart -f flv -pix_fmt nv12 rtmp://x.rtmp.youtube.com/live2/$YT_STREAM_KEY From 1c91c56d308dcd7fa43319e9b8b42b9c0cf93114 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Wed, 11 Sep 2024 16:10:23 -0400 Subject: [PATCH 22/41] wip --- active-stream-proxy/main.py | 6 +++--- docker-compose.yml | 33 ++++++++++++++++----------------- live-stream/Dockerfile | 16 +++++++++------- live-stream/run.sh | 22 +++++++++++++++++----- 4 files changed, 45 insertions(+), 32 deletions(-) diff --git a/active-stream-proxy/main.py b/active-stream-proxy/main.py index 0ba867b..862ee5a 100644 --- a/active-stream-proxy/main.py +++ b/active-stream-proxy/main.py @@ -12,18 +12,18 @@ old_active_stream = active_stream proc = None if active_stream != "": - proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) + proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://host.containers.internal:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) else: proc = subprocess.Popen(["ffmpeg", "-f", "lavfi", "-i", "anullsrc", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) while True: time.sleep(3) - active_stream = requests.get("http://backend:8000/api/v1/active_stream").text.replace('""', '') + active_stream = requests.get("http://backend:8000/api/v1/active_stream").text.replace('"', '') if old_active_stream is not active_stream: if proc: proc.terminate() if active_stream != "": - proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://mediamtx:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) + proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://host.containers.internal:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) else: proc = subprocess.Popen(["ffmpeg", "-f", "lavfi", "-i", "anullsrc", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"]) old_active_stream = active_stream diff --git a/docker-compose.yml b/docker-compose.yml index 8b1191f..9a0ad09 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,12 +1,13 @@ services: mediamtx: + network_mode: "host" build: context: ./mediamtx dockerfile: Dockerfile - ports: - - "8889:8889" - - "1935:1935" - - "9997:9997" + # ports: + # - "8889:8889" + # - "1935:1935" + # - "9997:9997" web-frontend: ports: - "4173:4173" @@ -16,8 +17,6 @@ services: volumes: - tiling_frontend_build:/usr/src/app/dist live-stream: - ports: - - "1936:1936" depends_on: active_stream_proxy: condition: service_started @@ -36,17 +35,17 @@ services: dockerfile: Dockerfile volumes: - ./backend/db:/usr/src/app/db - active-stream-proxy: 
- build: - context: ./active-stream-proxy - dockerfile: Dockerfile - depends_on: - web-frontend: - condition: service_completed_successfully - mediamtx: - condition: service_started - backend: - condition: service_started + # active-stream-proxy: + # build: + # context: ./active-stream-proxy + # dockerfile: Dockerfile + # depends_on: + # web-frontend: + # condition: service_completed_successfully + # mediamtx: + # condition: service_started + # backend: + # condition: service_started volumes: mediamtx_recordings: tiling_frontend_build: diff --git a/live-stream/Dockerfile b/live-stream/Dockerfile index 16b42f4..0b62ee1 100644 --- a/live-stream/Dockerfile +++ b/live-stream/Dockerfile @@ -1,17 +1,19 @@ -FROM ubuntu:22.04 - -ARG DEBIAN_FRONTEND=noninteractive +FROM ubuntu:latest RUN apt update -RUN apt install -y ffmpeg +RUN apt install -y ffmpeg xvfb software-properties-common dbus-x11 -RUN apt install -y libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libgstreamer-plugins-bad1.0-dev gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly gstreamer1.0-libav gstreamer1.0-tools gstreamer1.0-x gstreamer1.0-alsa gstreamer1.0-wpe +RUN add-apt-repository -y ppa:xtradeb/apps + +RUN apt update + +RUN apt install -y chromium RUN rm -rf /var/lib/apt/lists/* RUN apt clean -COPY run.sh / +COPY run.sh . -ENTRYPOINT ["/run.sh"] +ENTRYPOINT ["./run.sh"] diff --git a/live-stream/run.sh b/live-stream/run.sh index 51acfd9..f18c419 100755 --- a/live-stream/run.sh +++ b/live-stream/run.sh @@ -1,8 +1,20 @@ #!/bin/bash -sleep 1 +export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" +dbus-daemon --config-file=/usr/share/dbus-1/system.conf --print-address +export LIBGL_ALWAYS_INDIRECT=1 -LIBGL_ALWAYS_SOFTWARE=true gst-launch-1.0 -e wpesrc location="http://web-frontend:4173/" \ - ! videoconvert ! videoscale ! videorate \ - ! "video/x-raw, format=NV12, width=1920, height=1080, framerate=30/1" \ - ! 
filesink location=/dev/stdout | ffmpeg -re -y -f rawvideo -pixel_format nv12 -video_size 1920x1080 -framerate 30 -i - -listen 1 -i rtmp://0.0.0.0:1936/active-input -filter_complex "[0:v][0:v]overlay=-50:0[bg]; [bg][0:v]overlay=-50-W,format=nv12[out]" -map "[out]" -c:v libx264 -x264-params keyint=60 -preset ultrafast -b:v 6800k -c:a copy -map 1:a:0 -movflags faststart -f flv -pix_fmt nv12 rtmp://x.rtmp.youtube.com/live2/$YT_STREAM_KEY +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 1920x1080 -draw_mouse 0 -i :99.0 -f alsa -ac 2 -i hw:0 -vcodec libx264 -preset medium -b:v 7000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$YT_STREAM_KEY" & + +DISPLAY=:99 xvfb-run \ + --server-num 99 \ + -s "-nocursor -ac -screen 0 1920x1080x24" \ + dbus-launch chromium \ + --temp-profile \ + --window-size=1920,1080 \ + --disable-gpu \ + --window-position=0,0 \ + --no-sandbox \ + --hide-scrollbars \ + --disable-setuid-sandbox \ + --app=http://web-frontend:4173 From 5774a351e1424d92a19deacb1e37eb47b46f3afd Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 10:56:33 -0400 Subject: [PATCH 23/41] IT WORKS!!!111!!!1!!1 --- docker-compose.yml | 12 +++++++----- live-stream/Dockerfile | 10 ++++++++-- live-stream/run.sh | 23 +++++++---------------- live-stream/user_run.sh | 26 ++++++++++++++++++++++++++ 4 files changed, 48 insertions(+), 23 deletions(-) create mode 100755 live-stream/user_run.sh diff --git a/docker-compose.yml b/docker-compose.yml index 9a0ad09..8b99572 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -15,21 +15,23 @@ services: context: ./tiling-frontend dockerfile: Dockerfile volumes: - - tiling_frontend_build:/usr/src/app/dist + - tiling_frontend_build:/usr/src/app/dist live-stream: + network_mode: "host" + env_file: .stream.env depends_on: - active_stream_proxy: + backend: + condition: service_started + web-frontend: condition: service_started build: context: ./live-stream dockerfile: Dockerfile volumes: - tiling_frontend_build:/html - env_file: .stream.env backend: + network_mode: "host" env_file: .backend.env - ports: - - "8000:8000" build: context: ./backend dockerfile: Dockerfile diff --git a/live-stream/Dockerfile b/live-stream/Dockerfile index 0b62ee1..ba4c3b8 100644 --- a/live-stream/Dockerfile +++ b/live-stream/Dockerfile @@ -2,7 +2,7 @@ FROM ubuntu:latest RUN apt update -RUN apt install -y ffmpeg xvfb software-properties-common dbus-x11 +RUN apt install -y ffmpeg xvfb software-properties-common dbus-x11 pulseaudio RUN add-apt-repository -y ppa:xtradeb/apps @@ -10,10 +10,16 @@ RUN apt update RUN apt install -y chromium +RUN apt install -y sudo + RUN rm -rf /var/lib/apt/lists/* RUN apt clean -COPY run.sh . 
+RUN useradd -ms /bin/bash stream + +COPY run.sh ./ + +COPY user_run.sh /home/stream ENTRYPOINT ["./run.sh"] diff --git a/live-stream/run.sh b/live-stream/run.sh index f18c419..bac6bb7 100755 --- a/live-stream/run.sh +++ b/live-stream/run.sh @@ -1,20 +1,11 @@ #!/bin/bash -export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" -dbus-daemon --config-file=/usr/share/dbus-1/system.conf --print-address -export LIBGL_ALWAYS_INDIRECT=1 +dbus-daemon --config-file=/usr/share/dbus-1/system.conf & -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 1920x1080 -draw_mouse 0 -i :99.0 -f alsa -ac 2 -i hw:0 -vcodec libx264 -preset medium -b:v 7000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$YT_STREAM_KEY" & +echo $YT_STREAM_KEY >/home/stream/key.txt -DISPLAY=:99 xvfb-run \ - --server-num 99 \ - -s "-nocursor -ac -screen 0 1920x1080x24" \ - dbus-launch chromium \ - --temp-profile \ - --window-size=1920,1080 \ - --disable-gpu \ - --window-position=0,0 \ - --no-sandbox \ - --hide-scrollbars \ - --disable-setuid-sandbox \ - --app=http://web-frontend:4173 +chown stream /home/stream/key.txt + +chown stream /home/stream/user_run.sh + +sudo -i -u stream bash /home/stream/user_run.sh diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh new file mode 100755 index 0000000..720c2a5 --- /dev/null +++ b/live-stream/user_run.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +pulseaudio -D & +sleep 2 + +pacmd load-module module-null-sink sink_name=VirtSink +pacmd update-sink-proplist VirtSink device.description=VirtSink + +pacmd load-module module-loopback sink=VirtSink + +export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" +export LIBGL_ALWAYS_INDIRECT=1 + +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & + +DISPLAY=:99 xvfb-run \ + --server-num 99 \ + -s "-nocursor -ac -screen 0 1920x1080x24" \ + dbus-launch chromium \ + --temp-profile \ + --window-size=1920,1080 \ + --disable-gpu \ + --window-position=0,0 \ + --hide-scrollbars \ + --autoplay-policy=no-user-gesture-required \ + --app=http://localhost:4173 From 29683d43a81b2c9201ebce71685b82daaf426aa6 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 13:16:33 -0400 Subject: [PATCH 24/41] add caddy service --- caddy/Caddyfile | 12 ++++++++++++ docker-compose.yml | 15 +++++++++++++++ 2 files changed, 27 insertions(+) create mode 100644 caddy/Caddyfile diff --git a/caddy/Caddyfile b/caddy/Caddyfile new file mode 100644 index 0000000..0aa0398 --- /dev/null +++ b/caddy/Caddyfile @@ -0,0 +1,12 @@ +live.onboard.hackclub.com { + reverse_proxy localhost:8889 + handle /slack/* { + reverse_proxy localhost:8000 + } + handle /api/v1/github/* { + reverse_proxy localhost:8000 + } + handle /auth/* { + reverse_proxy localhost:8000 + } +} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 8b99572..38cc09e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -37,6 +37,19 @@ services: dockerfile: Dockerfile volumes: - ./backend/db:/usr/src/app/db + caddy: + image: docker.io/caddy:alpine + restart: unless-stopped + cap_add: + - NET_ADMIN + ports: + - "80:80" + - "443:443" + - "443:443/udp" + volumes: + - $PWD/caddy/Caddyfile:/etc/caddy/Caddyfile + - caddy_data:/data + - caddy_config:/config= # 
active-stream-proxy: # build: # context: ./active-stream-proxy @@ -51,3 +64,5 @@ services: volumes: mediamtx_recordings: tiling_frontend_build: + caddy_data: + caddy_config: From 347a42a69ef589906904ee7f589848667d80015c Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 13:23:07 -0400 Subject: [PATCH 25/41] make mediamtx name fully qualified --- mediamtx/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediamtx/Dockerfile b/mediamtx/Dockerfile index 922516d..ce4ca1c 100644 --- a/mediamtx/Dockerfile +++ b/mediamtx/Dockerfile @@ -1,4 +1,4 @@ -FROM bluenviron/mediamtx +FROM docker.io/bluenviron/mediamtx COPY . / From 3bedaef131e99636850ed97dac80a326a5cf18d2 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 13:27:01 -0400 Subject: [PATCH 26/41] fix Caddyfile --- caddy/Caddyfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/caddy/Caddyfile b/caddy/Caddyfile index 0aa0398..127ced7 100644 --- a/caddy/Caddyfile +++ b/caddy/Caddyfile @@ -1,12 +1,12 @@ live.onboard.hackclub.com { - reverse_proxy localhost:8889 + reverse_proxy host.containers.internal:8889 handle /slack/* { - reverse_proxy localhost:8000 + reverse_proxy host.containers.internal:8000 } handle /api/v1/github/* { - reverse_proxy localhost:8000 + reverse_proxy host.containers.internal:8000 } handle /auth/* { - reverse_proxy localhost:8000 + reverse_proxy host.containers.internal:8000 } } \ No newline at end of file From ed6a3b957e7e67316523fdaaa8dcc7b9a0ff8c72 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 15:28:01 -0400 Subject: [PATCH 27/41] increase stream bitrate --- live-stream/user_run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 720c2a5..82a142d 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -11,7 +11,7 @@ pacmd load-module module-loopback sink=VirtSink export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 10000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & DISPLAY=:99 xvfb-run \ --server-num 99 \ From 9ef29b47ecd2c42ed2cce2bd4c5c577547653ef2 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 15:29:19 -0400 Subject: [PATCH 28/41] remove audio loopback, it caused feedback --- live-stream/user_run.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 82a142d..3ea24a8 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -6,8 +6,6 @@ sleep 2 pacmd load-module module-null-sink sink_name=VirtSink pacmd update-sink-proplist VirtSink device.description=VirtSink -pacmd load-module module-loopback sink=VirtSink - export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 From 43628ca33bfb4df4d5a748b7230da3ef77218829 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 15:35:08 -0400 Subject: [PATCH 29/41] increase stream quality a 
lot --- live-stream/user_run.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 3ea24a8..3822da4 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -9,14 +9,14 @@ pacmd update-sink-proplist VirtSink device.description=VirtSink export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 10000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 2560x1440 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset slow -b:v 20000k -framerate 60 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & DISPLAY=:99 xvfb-run \ --server-num 99 \ - -s "-nocursor -ac -screen 0 1920x1080x24" \ + -s "-nocursor -ac -screen 0 2560x1440x24" \ dbus-launch chromium \ --temp-profile \ - --window-size=1920,1080 \ + --window-size=2560,1440 \ --disable-gpu \ --window-position=0,0 \ --hide-scrollbars \ From a52d38e982823b7925586a431e2831149047303a Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 15:38:06 -0400 Subject: [PATCH 30/41] whoops, too far --- live-stream/user_run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 3822da4..137f29e 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -9,7 +9,7 @@ pacmd update-sink-proplist VirtSink device.description=VirtSink export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 2560x1440 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset slow -b:v 20000k -framerate 60 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 2560x1440 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset slow -b:v 15000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & DISPLAY=:99 xvfb-run \ --server-num 99 \ From 512b740ccdf853825c9a9e9298a47ce4876bf7b4 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 15:39:57 -0400 Subject: [PATCH 31/41] still too far... 
--- live-stream/user_run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 137f29e..20802cd 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -9,7 +9,7 @@ pacmd update-sink-proplist VirtSink device.description=VirtSink export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 2560x1440 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset slow -b:v 15000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 2560x1440 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 15000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & DISPLAY=:99 xvfb-run \ --server-num 99 \ From ee467d25651966e798709d730ef89fcc8ffbccf5 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 15:41:34 -0400 Subject: [PATCH 32/41] this should do it --- live-stream/user_run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 20802cd..89d35a5 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -9,7 +9,7 @@ pacmd update-sink-proplist VirtSink device.description=VirtSink export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 2560x1440 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 15000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 2560x1440 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset faster -b:v 15000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & DISPLAY=:99 xvfb-run \ --server-num 99 \ From 7df3bde506788f1f5af6eb317549fb802c03897e Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 15:49:35 -0400 Subject: [PATCH 33/41] try higher quality 1080p instead of 1440p --- live-stream/user_run.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 89d35a5..107f362 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -9,14 +9,14 @@ pacmd update-sink-proplist VirtSink device.description=VirtSink export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 30 -s 2560x1440 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset faster -b:v 15000k -framerate 30 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 60 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & DISPLAY=:99 xvfb-run \ --server-num 99 \ - -s "-nocursor -ac -screen 0 2560x1440x24" \ + -s "-nocursor -ac -screen 0 1920x1080x24" \ dbus-launch chromium \ 
--temp-profile \ - --window-size=2560,1440 \ + --window-size=1920,1080 \ --disable-gpu \ --window-position=0,0 \ --hide-scrollbars \ From ba295dda7eef46aa01d233e4fb6e1522b0503d79 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 15:54:20 -0400 Subject: [PATCH 34/41] modify keyframe interval --- live-stream/user_run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 107f362..64bf5fe 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -9,7 +9,7 @@ pacmd update-sink-proplist VirtSink device.description=VirtSink export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 60 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 60 -g 120 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & DISPLAY=:99 xvfb-run \ --server-num 99 \ From 743692cac91c1745b5e8bf56200075a1fc728665 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Thu, 12 Sep 2024 15:56:59 -0400 Subject: [PATCH 35/41] it was fine before... --- live-stream/user_run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 64bf5fe..107f362 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -9,7 +9,7 @@ pacmd update-sink-proplist VirtSink device.description=VirtSink export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 60 -g 120 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & +bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 60 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & DISPLAY=:99 xvfb-run \ --server-num 99 \ From 5b17a6aef5c782edf7ae6fea06d5972e69f54a26 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Sat, 14 Sep 2024 21:48:04 -0400 Subject: [PATCH 36/41] use uvicorn directly, fix f-string error --- backend/Dockerfile | 2 +- backend/main.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index 2598748..07d7839 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -20,4 +20,4 @@ RUN prisma generate COPY main.py . 
-CMD [ "fastapi", "run", "main.py" ] +CMD [ "uvicorn", "main:api", "--log-level", "warning", "--workers", "4", "--host", "0.0.0.0", "--port", "8000" ] diff --git a/backend/main.py b/backend/main.py index f854c1b..c3f5bcd 100644 --- a/backend/main.py +++ b/backend/main.py @@ -275,7 +275,7 @@ async def github_callback(request: Request): "type": "section", "text": { "type": "mrkdwn", - "text": f"{'\n'.join([recording + ' for ' + str(get_recording_duration(recording, user_stream_key)) + 'minutes' for recording in stream_recs])}", # type: ignore + "text": '\n'.join([recording + ' for ' + str(get_recording_duration(recording, user_stream_key)) + 'minutes' for recording in stream_recs]), # type: ignore }, }, { From 8343f69bb8be47e4b6e9a7d8a45011a7ef525526 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Tue, 17 Sep 2024 18:45:46 -0400 Subject: [PATCH 37/41] make this not crash as much hopefully --- docker-compose.yml | 3 +++ live-stream/user_run.sh | 7 ++++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 38cc09e..b69f146 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,6 @@ services: mediamtx: + restart: unless-stopped network_mode: "host" build: context: ./mediamtx @@ -18,6 +19,7 @@ services: - tiling_frontend_build:/usr/src/app/dist live-stream: network_mode: "host" + restart: unless-stopped env_file: .stream.env depends_on: backend: @@ -31,6 +33,7 @@ services: - tiling_frontend_build:/html backend: network_mode: "host" + restart: unless-stopped env_file: .backend.env build: context: ./backend diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 107f362..6d82394 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -9,9 +9,8 @@ pacmd update-sink-proplist VirtSink device.description=VirtSink export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" export LIBGL_ALWAYS_INDIRECT=1 -bash -c "sleep 5 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 60 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" & -DISPLAY=:99 xvfb-run \ +bash -c "DISPLAY=:99 xvfb-run \ --server-num 99 \ -s "-nocursor -ac -screen 0 1920x1080x24" \ dbus-launch chromium \ @@ -21,4 +20,6 @@ DISPLAY=:99 xvfb-run \ --window-position=0,0 \ --hide-scrollbars \ --autoplay-policy=no-user-gesture-required \ - --app=http://localhost:4173 + --app=http://localhost:4173" & disown + +bash -c "sleep 3 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 60 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)" From 5c998c5f3af5e12451e36c5b16deffc0e4132d6c Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Tue, 17 Sep 2024 18:59:25 -0400 Subject: [PATCH 38/41] =?UTF-8?q?i=20forgor=20=F0=9F=92=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index b69f146..bf6c5e2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,6 +5,8 @@ services: build: context: ./mediamtx dockerfile: Dockerfile + volumes: + - mediamtx_recordings:/recordings # ports: # - "8889:8889" # - "1935:1935" From 31a1a3bb34b78f434ce871cd6a9d09cacd841188 Mon Sep 17 00:00:00 2001 From: 
Micha Albert Date: Sat, 21 Sep 2024 06:49:25 +0200 Subject: [PATCH 39/41] try to fix chromium running out of ram --- docker-compose.yml | 6 ++++++ live-stream/user_run.sh | 2 +- tiling-frontend/src/App.svelte | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index bf6c5e2..ec7f857 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -20,6 +20,12 @@ services: volumes: - tiling_frontend_build:/usr/src/app/dist live-stream: + deploy: + resources: + limits: + memory: 8192M + reservations: + memory: 8192M network_mode: "host" restart: unless-stopped env_file: .stream.env diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index 6d82394..f1ce8a0 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -12,7 +12,7 @@ export LIBGL_ALWAYS_INDIRECT=1 bash -c "DISPLAY=:99 xvfb-run \ --server-num 99 \ - -s "-nocursor -ac -screen 0 1920x1080x24" \ + -s \"-nocursor -ac -screen 0 1920x1080x24\" \ dbus-launch chromium \ --temp-profile \ --window-size=1920,1080 \ diff --git a/tiling-frontend/src/App.svelte b/tiling-frontend/src/App.svelte index 417eaa8..9cea327 100644 --- a/tiling-frontend/src/App.svelte +++ b/tiling-frontend/src/App.svelte @@ -51,7 +51,7 @@ pathData = newData; setTimeout(() => { for (const video in videos) { - const hlsInstance = new hls({ progressive: false }); + const hlsInstance = new hls({ backBufferLength: 2 }); hlsInstance.loadSource( `http://localhost:8888/${video}/index.m3u8`, ); From 33e2e7eeb14cdada8d622f31398b4ac1c0ea6900 Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Mon, 23 Sep 2024 07:10:30 -0400 Subject: [PATCH 40/41] take another shot at fixing OOM errors in chromium --- docker-compose.yml | 14 +++----------- live-stream/user_run.sh | 2 +- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index ec7f857..158a698 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,6 +27,7 @@ services: reservations: memory: 8192M network_mode: "host" + shm_size: '8gb' restart: unless-stopped env_file: .stream.env depends_on: @@ -48,6 +49,8 @@ services: dockerfile: Dockerfile volumes: - ./backend/db:/usr/src/app/db + mediamtx: + condition: service_started caddy: image: docker.io/caddy:alpine restart: unless-stopped @@ -61,17 +64,6 @@ services: - $PWD/caddy/Caddyfile:/etc/caddy/Caddyfile - caddy_data:/data - caddy_config:/config= - # active-stream-proxy: - # build: - # context: ./active-stream-proxy - # dockerfile: Dockerfile - # depends_on: - # web-frontend: - # condition: service_completed_successfully - # mediamtx: - # condition: service_started - # backend: - # condition: service_started volumes: mediamtx_recordings: tiling_frontend_build: diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh index f1ce8a0..c60d68c 100755 --- a/live-stream/user_run.sh +++ b/live-stream/user_run.sh @@ -6,7 +6,7 @@ sleep 2 pacmd load-module module-null-sink sink_name=VirtSink pacmd update-sink-proplist VirtSink device.description=VirtSink -export CHROMIUM_FLAGS="--disable-software-rasterizer --disable-dev-shm-usage" +export CHROMIUM_FLAGS="--disable-software-rasterizer" export LIBGL_ALWAYS_INDIRECT=1 From ebbfe4ba149641e2cd782ceba8282ac262adf72c Mon Sep 17 00:00:00 2001 From: Micha Albert Date: Fri, 4 Oct 2024 13:32:32 -0400 Subject: [PATCH 41/41] Format code --- backend/main.py | 83 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 55 insertions(+), 28 deletions(-) diff --git a/backend/main.py b/backend/main.py index 
c3f5bcd..cede227 100644 --- a/backend/main.py +++ b/backend/main.py @@ -6,7 +6,6 @@ from contextlib import asynccontextmanager from datetime import datetime from secrets import choice, token_hex from typing import Dict, List -import time import cv2 import httpx @@ -94,9 +93,9 @@ async def update_active(): global active_stream global active_streams async with httpx.AsyncClient() as client: - streams_raw = (await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list")).json()[ - "items" - ] + streams_raw = ( + await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list") + ).json()["items"] streams = [] for stream in streams_raw: streams.append({"name": stream["name"], "ready": stream["ready"]}) @@ -113,11 +112,33 @@ async def update_active(): new_stream = choice(active_streams) while new_stream["name"] == active_stream["name"]: new_stream = choice(active_streams) - old_active_stream_user = await db.user.find_first(where={"id": (await db.stream.find_first(where={"key": str(active_stream["name"])})).user_id}) # type: ignore - await bolt.client.chat_postMessage(channel="C07ERCGG989", text=f"Hey <@{old_active_stream_user.slack_id}>, you're no longer in focus!") # type: ignore + old_active_stream_user = await db.user.find_first( + where={ + "id": ( + await db.stream.find_first( + where={"key": str(active_stream["name"])} + ) + ).user_id # type: ignore + } + ) + await bolt.client.chat_postMessage( + channel="C07ERCGG989", + text=f"Hey <@{old_active_stream_user.slack_id}>, you're no longer in focus!", # type: ignore + ) active_stream = new_stream - active_stream_user = await db.user.find_first(where={"id": (await db.stream.find_first(where={"key": str(active_stream["name"])})).user_id}) # type: ignore - await bolt.client.chat_postMessage(channel="C07ERCGG989", text=f"Hey <@{active_stream_user.slack_id}>, you're in focus! Make sure to tell us what you're working on!") # type: ignore + active_stream_user = await db.user.find_first( + where={ + "id": ( + await db.stream.find_first( + where={"key": str(active_stream["name"])} + ) + ).user_id # type: ignore + } + ) + await bolt.client.chat_postMessage( + channel="C07ERCGG989", + text=f"Hey <@{active_stream_user.slack_id}>, you're in focus! 
Make sure to tell us what you're working on!", # type: ignore + ) return True @@ -125,9 +146,9 @@ async def check_for_new(): global active_stream global active_streams async with httpx.AsyncClient() as client: - streams_raw = (await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list")).json()[ - "items" - ] + streams_raw = ( + await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list") + ).json()["items"] streams_simple = [] for stream in streams_raw: if stream["ready"]: @@ -151,12 +172,13 @@ async def check_for_new(): @asynccontextmanager -async def lifespan(app: FastAPI): +async def lifespan(): await db.connect() async with httpx.AsyncClient() as client: for stream in await db.stream.find_many(): await client.post( - f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/" + stream.key, + f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/" + + stream.key, json={"name": stream.key}, ) scheduler.start() @@ -275,7 +297,19 @@ async def github_callback(request: Request): "type": "section", "text": { "type": "mrkdwn", - "text": '\n'.join([recording + ' for ' + str(get_recording_duration(recording, user_stream_key)) + 'minutes' for recording in stream_recs]), # type: ignore + "text": "\n".join( + [ + recording + + " for " + + str( + get_recording_duration( + recording, user_stream_key + ) + ) + + "minutes" + for recording in stream_recs + ] + ), # type: ignore }, }, { @@ -383,7 +417,7 @@ async def get_active_stream(): @bolt.event("app_home_opened") -async def handle_app_home_opened_events(body, logger, event, client): +async def handle_app_home_opened_events(event, client): await client.views_publish( user_id=event["user"], # the view object that appears in the app home @@ -443,14 +477,10 @@ async def deny(ack, body): message = body["message"] applicant_slack_id = message["blocks"][len(message) - 3]["text"]["text"].split( ": " - )[ - 1 - ] # I hate it. You hate it. We all hate it. Carry on. + )[1] # I hate it. You hate it. We all hate it. Carry on. applicant_name = message["blocks"][len(message) - 7]["text"]["text"].split( "Name: " - )[ - 1 - ] # oops i did it again + )[1] # oops i did it again await bolt.client.chat_delete( channel=body["container"]["channel_id"], ts=message["ts"] ) @@ -466,14 +496,10 @@ async def approve(ack, body): message = body["message"] applicant_slack_id = message["blocks"][len(message) - 3]["text"]["text"].split( ": " - )[ - 1 - ] # I hate it. You hate it. We all hate it. Carry on. + )[1] # I hate it. You hate it. We all hate it. Carry on. applicant_name = message["blocks"][len(message) - 7]["text"]["text"].split( "Name: " - )[ - 1 - ] # oops i did it again + )[1] # oops i did it again await bolt.client.chat_delete( channel=body["container"]["channel_id"], ts=message["ts"] ) @@ -494,7 +520,8 @@ async def approve(ack, body): ) async with httpx.AsyncClient() as client: await client.post( - f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/" + new_stream.key, + f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/" + + new_stream.key, json={"name": new_stream.key}, ) await bolt.client.chat_postMessage(