Merge pull request #2 from MichaByte/review-flow

Merge development branch into main
This commit is contained in:
Micha Albert 2024-10-04 13:34:47 -04:00 committed by GitHub
commit 2b1d2b32ec
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
36 changed files with 1084 additions and 84 deletions

1
.gitignore vendored
View file

@ -188,3 +188,4 @@ cython_debug/
*.dat *.dat
dev.db* dev.db*
.*env

View file

@ -0,0 +1,13 @@
# Audio relay image: polls the backend for the focused stream and uses
# ffmpeg to republish it (or silence) to the mixer's active-input feed.
FROM python:3.12-alpine
WORKDIR /usr/src/app
# ffmpeg performs the actual RTMP pull/push and audio transcode.
RUN apk add --no-cache ffmpeg
# Install Python deps before copying source so this layer caches across
# code-only changes.
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
CMD [ "python", "./main.py" ]

View file

@ -0,0 +1,29 @@
import subprocess
import time
import requests
time.sleep(8)
active_stream = requests.get("http://backend:8000/api/v1/active_stream").text.replace('"', '')
print(active_stream)
old_active_stream = active_stream
proc = None
if active_stream != "":
proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://host.containers.internal:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"])
else:
proc = subprocess.Popen(["ffmpeg", "-f", "lavfi", "-i", "anullsrc", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"])
while True:
time.sleep(3)
active_stream = requests.get("http://backend:8000/api/v1/active_stream").text.replace('"', '')
if old_active_stream is not active_stream:
if proc:
proc.terminate()
if active_stream != "":
proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://host.containers.internal:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"])
else:
proc = subprocess.Popen(["ffmpeg", "-f", "lavfi", "-i", "anullsrc", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"])
old_active_stream = active_stream

View file

@ -0,0 +1 @@
requests

23
backend/Dockerfile Normal file
View file

@ -0,0 +1,23 @@
# Backend API image: FastAPI app served by uvicorn on port 8000.
FROM python:3.12-slim
EXPOSE 8000
WORKDIR /usr/src/app
# Copy requirements first so the dependency layers cache across code edits.
COPY requirements.txt ./
RUN apt-get update
# OpenCV (cv2.VideoCapture) is used to measure recording durations.
RUN apt-get install -y python3-opencv
RUN pip install --no-cache-dir -r requirements.txt
# Generate the Prisma database client from the schema and migrations.
COPY schema.prisma .
ADD migrations .
RUN prisma generate
COPY main.py .
CMD [ "uvicorn", "main:api", "--log-level", "warning", "--workers", "4", "--host", "0.0.0.0", "--port", "8000" ]

View file

@ -1,35 +1,101 @@
import hashlib
import hmac
import json import json
import os import os
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from random import choice from datetime import datetime
from secrets import token_hex from secrets import choice, token_hex
from typing import Dict, List from typing import Dict, List
import cv2
import httpx import httpx
import uvicorn
from apscheduler.schedulers.asyncio import AsyncIOScheduler from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.interval import IntervalTrigger from apscheduler.triggers.interval import IntervalTrigger
from cryptography.fernet import Fernet
from dotenv import load_dotenv from dotenv import load_dotenv
from fastapi import FastAPI, Request, Response from fastapi import FastAPI, HTTPException, Request, Response
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import HTMLResponse, RedirectResponse
from prisma import Prisma from prisma import Prisma
from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler
from slack_bolt.async_app import AsyncAck, AsyncApp from slack_bolt.async_app import AsyncAck, AsyncApp
from yarl import URL
load_dotenv() load_dotenv(dotenv_path="./.env")
active_stream: Dict[str, str | bool] = {} active_stream: Dict[str, str | bool] = {}
active_streams: List[Dict[str, str | bool]] = [] active_streams: List[Dict[str, str | bool]] = []
scheduler = AsyncIOScheduler() scheduler = AsyncIOScheduler()
FERNET_KEY = Fernet.generate_key()
FERNET_KEY_USERS = []
FERNET = Fernet(FERNET_KEY)
async def rotate_fernet_key():
global FERNET_KEY
global FERNET
if FERNET_KEY_USERS == []:
FERNET_KEY = Fernet.generate_key()
FERNET = Fernet(FERNET_KEY)
else:
print("not rotating key since we have a pending verification")
def get_recording_duration(timestamp, stream_key):
vid = cv2.VideoCapture(
f"/home/onboard/recordings/{stream_key}/{datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ').strftime('%Y-%m-%d_%H-%M-%S-%f')}.mp4"
)
return int(
(vid.get(cv2.CAP_PROP_FRAME_COUNT) / vid.get(cv2.CAP_PROP_FPS)) / 60
) # seconds to minutes
def verify_gh_signature(payload_body, secret_token, signature_header):
"""Verify that the payload was sent from GitHub by validating SHA256.
Raise and return 403 if not authorized.
Args:
payload_body: original request body to verify (request.body())
secret_token: GitHub app webhook token (WEBHOOK_SECRET)
signature_header: header received from GitHub (x-hub-signature-256)
"""
if not signature_header:
raise HTTPException(
status_code=403, detail="x-hub-signature-256 header is missing!"
)
hash_object = hmac.new(
secret_token.encode("utf-8"), msg=payload_body, digestmod=hashlib.sha256
)
expected_signature = "sha256=" + hash_object.hexdigest()
if not hmac.compare_digest(expected_signature, signature_header):
raise HTTPException(status_code=403, detail="Request signatures didn't match!")
async def get_recording_list(stream_key: str) -> List[str]:
async with httpx.AsyncClient() as client:
return [
recording["start"]
for recording in (
await client.get(
f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/recordings/get/{stream_key}"
)
).json()["segments"]
]
async def update_active(): async def update_active():
global active_stream global active_stream
global active_streams global active_streams
async with httpx.AsyncClient() as client: async with httpx.AsyncClient() as client:
streams_raw = (await client.get("http://localhost:9997/v3/paths/list")).json()[ streams_raw = (
"items" await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list")
] ).json()["items"]
streams = [] streams = []
for stream in streams_raw: for stream in streams_raw:
streams.append({"name": stream["name"], "ready": stream["ready"]}) streams.append({"name": stream["name"], "ready": stream["ready"]})
@ -37,44 +103,52 @@ async def update_active():
if stream["ready"] and stream not in active_streams: if stream["ready"] and stream not in active_streams:
active_streams.append(stream) active_streams.append(stream)
if len(active_streams) == 0: if len(active_streams) == 0:
print("No active streams")
return return
if active_stream == {}: if active_stream == {}:
print("No current active stream, picking new one...")
active_stream = choice(active_streams) active_stream = choice(active_streams)
return return
if len(active_streams) == 1: if len(active_streams) == 1:
return return
print(
f"starting to pick new active stream (switching away from {active_stream['name']})"
)
new_stream = choice(active_streams) new_stream = choice(active_streams)
while new_stream["name"] == active_stream["name"]: while new_stream["name"] == active_stream["name"]:
print(
f"re-attemppting to pick active stream since we picked {new_stream} again"
)
new_stream = choice(active_streams) new_stream = choice(active_streams)
print(f"found new stream to make active: {new_stream}") old_active_stream_user = await db.user.find_first(
try: where={
await db.connect() "id": (
except Exception as e: await db.stream.find_first(
print(e) where={"key": str(active_stream["name"])}
print(f"trying to find user associated with stream {active_stream['name']}") )
old_active_stream_user = await db.user.find_first(where={"id": (await db.stream.find_first(where={"key": str(active_stream["name"])})).user_id}) # type: ignore ).user_id # type: ignore
await bolt.client.chat_postMessage(channel="C07ERCGG989", text=f"Hey <@{old_active_stream_user.slack_id}>, you're no longer in focus!") # type: ignore }
)
await bolt.client.chat_postMessage(
channel="C07ERCGG989",
text=f"Hey <@{old_active_stream_user.slack_id}>, you're no longer in focus!", # type: ignore
)
active_stream = new_stream active_stream = new_stream
active_stream_user = await db.user.find_first(where={"id": (await db.stream.find_first(where={"key": str(active_stream["name"])})).user_id}) # type: ignore active_stream_user = await db.user.find_first(
await bolt.client.chat_postMessage(channel="C07ERCGG989", text=f"Hey <@{active_stream_user.slack_id}>, you're in focus! Make sure to tell us what you're working on!") # type: ignore where={
await db.disconnect() "id": (
await db.stream.find_first(
where={"key": str(active_stream["name"])}
)
).user_id # type: ignore
}
)
await bolt.client.chat_postMessage(
channel="C07ERCGG989",
text=f"Hey <@{active_stream_user.slack_id}>, you're in focus! Make sure to tell us what you're working on!", # type: ignore
)
return True
async def check_for_new(): async def check_for_new():
global active_stream global active_stream
global active_streams global active_streams
async with httpx.AsyncClient() as client: async with httpx.AsyncClient() as client:
streams_raw = (await client.get("http://localhost:9997/v3/paths/list")).json()[ streams_raw = (
"items" await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list")
] ).json()["items"]
streams_simple = [] streams_simple = []
for stream in streams_raw: for stream in streams_raw:
if stream["ready"]: if stream["ready"]:
@ -94,29 +168,27 @@ async def check_for_new():
if stream not in active_streams_simple: if stream not in active_streams_simple:
active_streams.append({"name": stream, "ready": True}) active_streams.append({"name": stream, "ready": True})
if len(active_streams) == 0: if len(active_streams) == 0:
print("No active streams")
active_stream = {} active_stream = {}
@asynccontextmanager @asynccontextmanager
async def lifespan(app: FastAPI): async def lifespan():
await update_active()
scheduler.start()
scheduler.add_job(update_active, IntervalTrigger(seconds=5 * 60))
scheduler.add_job(check_for_new, IntervalTrigger(seconds=3))
try:
await db.connect() await db.connect()
except Exception:
pass
async with httpx.AsyncClient() as client: async with httpx.AsyncClient() as client:
for stream in await db.stream.find_many(): for stream in await db.stream.find_many():
await client.post( await client.post(
"http://127.0.0.1:9997/v3/config/paths/add/" + stream.key, f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/"
+ stream.key,
json={"name": stream.key}, json={"name": stream.key},
) )
await db.disconnect() scheduler.start()
scheduler.add_job(update_active, IntervalTrigger(minutes=5))
scheduler.add_job(check_for_new, IntervalTrigger(seconds=3))
scheduler.add_job(rotate_fernet_key, IntervalTrigger(minutes=30))
await rotate_fernet_key()
yield yield
scheduler.shutdown() scheduler.shutdown()
await db.disconnect()
api = FastAPI(lifespan=lifespan) # type: ignore api = FastAPI(lifespan=lifespan) # type: ignore
@ -137,22 +209,215 @@ bolt = AsyncApp(
bolt_handler = AsyncSlackRequestHandler(bolt) bolt_handler = AsyncSlackRequestHandler(bolt)
@api.get("/auth/github/login")
async def github_redirect(request: Request):
    """Redirect the user to GitHub's OAuth authorization page.

    The opaque ``state`` query parameter (a Fernet-encrypted
    ``user_id+pr_id`` token minted by /onboard-live-submit) is passed
    through unchanged so the callback can recover it.
    """
    return RedirectResponse(
        str(
            URL.build(
                scheme="https",
                host="github.com",
                path="/login/oauth/authorize",
                query={
                    "client_id": os.environ["GH_CLIENT_ID"],
                    "redirect_uri": "https://live.onboard.hackclub.com/auth/github/callback",
                    # BUG FIX: GitHub's authorize endpoint expects this
                    # parameter to be named "scope" (a space-separated
                    # list), not "scopes" -- the misspelled name was
                    # silently ignored.
                    "scope": "read:user",
                    "state": request.query_params["state"],
                },
            )
        )
    )
@api.get("/auth/github/callback")
async def github_callback(request: Request):
    """Complete the GitHub OAuth flow and link a PR to its Slack submitter.

    ``state`` is the hex-encoded Fernet token minted by /onboard-live-submit
    carrying ``"<slack_user_id>+<pr_number>"``. We exchange the OAuth
    ``code`` for a token, confirm the authenticated GitHub account is the
    PR's author, link the PR to the Slack user, and DM them a session
    picker for their recordings.
    """
    code: str = request.query_params["code"]
    state: str = request.query_params["state"]
    # Recover who started the flow and for which PR.
    user_id, pr_id = FERNET.decrypt(bytes.fromhex(state)).decode().split("+")
    if user_id in FERNET_KEY_USERS:
        # This user's verification is done; stop blocking key rotation.
        FERNET_KEY_USERS.remove(user_id)
    db_user = await db.user.find_first_or_raise(where={"slack_id": user_id})
    user_stream_key = (
        await db.stream.find_first_or_raise(where={"user_id": db_user.id})
    ).key
    db_pr = await db.pullrequest.find_first_or_raise(where={"github_id": int(pr_id)})
    async with httpx.AsyncClient() as client:
        # Exchange the temporary OAuth code for a bearer token.
        token = (
            await client.post(
                "https://github.com/login/oauth/access_token",
                json={
                    "client_id": os.environ["GH_CLIENT_ID"],
                    "client_secret": os.environ["GH_CLIENT_SECRET"],
                    "code": code,
                    "redirect_uri": "https://live.onboard.hackclub.com/auth/github/callback",
                },
                headers={"Accept": "application/json"},
            )
        ).json()["access_token"]
        # Numeric id of the GitHub account that just authenticated.
        gh_user: int = (
            await client.get(
                "https://api.github.com/user",
                headers={
                    "Accept": "application/vnd.github.v3+json",
                    "Authorization": f"Bearer {token}",
                },
            )
        ).json()["id"]
        if gh_user == db_pr.gh_user_id:
            # Verified: the authenticated account authored the PR.
            await db.pullrequest.update(
                {"user": {"connect": {"id": db_user.id}}, "gh_user_id": gh_user},
                {"id": db_pr.id},
            )
            stream_recs = await get_recording_list(user_stream_key)
            if stream_recs == []:
                return HTMLResponse(
                    "<h1>You don't have any sessions to submit! Please DM @mra on Slack if you think this is a mistake.</h1>"
                )
            await bolt.client.chat_postMessage(
                channel=user_id,
                text="Select your OnBoard Live sessions!",
                blocks=[
                    {
                        "type": "header",
                        "text": {
                            "type": "plain_text",
                            "text": "Select your sessions for review!\nCopy and paste the lines of sessions that you want associated with this PR into the box!",
                            "emoji": True,
                        },
                    },
                    {
                        "block_id": "session-checks",
                        "type": "section",
                        "text": {
                            "type": "mrkdwn",
                            "text": f"Here are all your sessions. Select the ones associated with OnBoard pull request #{pr_id}:",
                        },
                    },
                    {
                        "type": "section",
                        "text": {
                            "type": "mrkdwn",
                            # One "<timestamp> for <N> minutes" line per
                            # recording; submit_sessions parses the
                            # timestamp back out via `split(" for ")[0]`.
                            # BUG FIX: added the missing space before
                            # "minutes" (was rendering e.g. "5minutes");
                            # the parser only reads the part before
                            # " for ", so this is display-only.
                            "text": "\n".join(
                                [
                                    recording
                                    + " for "
                                    + str(
                                        get_recording_duration(
                                            recording, user_stream_key
                                        )
                                    )
                                    + " minutes"
                                    for recording in stream_recs
                                ]
                            ),
                        },
                    },
                    {
                        "type": "input",
                        "block_id": "session-input",
                        "element": {
                            "type": "plain_text_input",
                            "multiline": True,
                            "action_id": "plain_text_input-action",
                        },
                        "label": {
                            "type": "plain_text",
                            "text": "Paste the lines here (DO NOT EDIT THEM, ONE ON EACH LINE)",
                            "emoji": False,
                        },
                    },
                    {
                        "type": "actions",
                        "elements": [
                            {
                                "type": "button",
                                "text": {
                                    "type": "plain_text",
                                    "emoji": True,
                                    "text": "Submit",
                                },
                                "style": "primary",
                                "value": "submit_sessions",
                                "action_id": "submit_sessions",
                            },
                        ],
                    },
                ],
            )
            return HTMLResponse(
                "<h1>Success! Your PR has been linked to your Slack account. Check your Slack DMs for the next steps!</h1>"
            )
    # Fallback: the GitHub account did not match the PR author.
    return HTMLResponse(
        "<h1>Looks like something went wrong! DM @mra on slack.</h1>",
        status_code=500,
    )
@api.post("/api/v1/github/pr_event")
async def pr_event(request: Request):
    """GitHub webhook receiver: track PRs that receive the review label.

    Verifies the webhook's HMAC signature, then records a PullRequest row
    whenever a pull request is labeled with the OnBoard review label.
    """
    raw_body = await request.body()
    verify_gh_signature(
        raw_body,
        os.environ["GH_HOOK_SECRET"],
        request.headers.get("x-hub-signature-256"),
    )
    payload = json.loads(raw_body)
    # Only "labeled" events carrying the specific review label matter.
    if payload["action"] != "labeled":
        return
    if payload["label"]["id"] != 7336079497:
        return
    await db.pullrequest.create(
        {
            "github_id": payload["pull_request"]["number"],
            "gh_user_id": payload["pull_request"]["user"]["id"],
        }
    )
@api.get("/api/v1/stream_key/{stream_key}") @api.get("/api/v1/stream_key/{stream_key}")
async def get_stream_by_key(stream_key: str): async def get_stream_by_key(stream_key: str):
await db.connect()
stream = await db.stream.find_first(where={"key": stream_key}) stream = await db.stream.find_first(where={"key": stream_key})
await db.disconnect()
return ( return (
stream if stream else Response(status_code=404, content="404: Stream not found") stream if stream else Response(status_code=404, content="404: Stream not found")
) )
@api.get("/api/v1/active_stream") @api.get("/api/v1/active_stream")
async def get_active_stream(): async def get_active_stream():
return active_stream["name"] if "name" in active_stream else "" return active_stream["name"] if "name" in active_stream else ""
@bolt.event("app_home_opened") @bolt.event("app_home_opened")
async def handle_app_home_opened_events(body, logger, event, client): async def handle_app_home_opened_events(event, client):
await client.views_publish( await client.views_publish(
user_id=event["user"], user_id=event["user"],
# the view object that appears in the app home # the view object that appears in the app home
@ -173,20 +438,49 @@ async def handle_app_home_opened_events(body, logger, event, client):
) )
@bolt.action("submit_sessions")
async def submit_sessions(ack: AsyncAck, body):
    """Link the sessions a user pasted into the session-picker DM to a PR.

    Parses the pasted "<timestamp> for <N> minutes" lines, recovers the PR
    number from the picker message text, and creates a Session row for each
    selected recording.
    """
    await ack()
    selected_sessions_ts: List[str] = []
    print(body["state"]["values"])  # debug: raw action state from Slack
    # Each pasted line is "<timestamp> for <N> minutes"; keep only the
    # timestamp part before " for ".
    for session in body["state"]["values"]["session-input"]["plain_text_input-action"][
        "value"
    ].split("\n"):
        selected_sessions_ts.append(session.split(" for ")[0])
    # The PR number is embedded in the picker message text ("...#<id>:").
    pr_id = int(
        body["message"]["blocks"][1]["text"]["text"].split("#")[1].split(":")[0]
    )  # don't tell my mom she raised a monster
    db_pr = await db.pullrequest.find_first_or_raise(where={"github_id": pr_id})
    if db_pr.user_id:
        stream_key = (
            await db.stream.find_first_or_raise(where={"user_id": db_pr.user_id})
        ).key
        for session in selected_sessions_ts:
            await db.session.create(
                {
                    "pull": {"connect": {"id": db_pr.id}},
                    "timestamp": session,
                    "filename": f"/home/onboard/recordings/{stream_key}/{datetime.strptime(session, '%Y-%m-%dT%H:%M:%S.%fZ').strftime('%Y-%m-%d_%H-%M-%S-%f')}.mp4",
                    "duration": get_recording_duration(session, stream_key),
                }
            )
    # Remove the picker message now that the sessions are recorded.
    # NOTE(review): indentation reconstructed from a flattened diff -- the
    # delete/print may originally have sat inside the `if db_pr.user_id`
    # branch; confirm against the repository.
    await bolt.client.chat_delete(
        channel=body["container"]["channel_id"], ts=body["message"]["ts"]
    )
    print(pr_id, selected_sessions_ts)
@bolt.action("deny") @bolt.action("deny")
async def deny(ack, body): async def deny(ack, body):
await ack() await ack()
message = body["message"] message = body["message"]
applicant_slack_id = message["blocks"][len(message) - 3]["text"]["text"].split( applicant_slack_id = message["blocks"][len(message) - 3]["text"]["text"].split(
": " ": "
)[ )[1] # I hate it. You hate it. We all hate it. Carry on.
1
] # I hate it. You hate it. We all hate it. Carry on.
applicant_name = message["blocks"][len(message) - 7]["text"]["text"].split( applicant_name = message["blocks"][len(message) - 7]["text"]["text"].split(
"Name: " "Name: "
)[ )[1] # oops i did it again
1
] # oops i did it again
await bolt.client.chat_delete( await bolt.client.chat_delete(
channel=body["container"]["channel_id"], ts=message["ts"] channel=body["container"]["channel_id"], ts=message["ts"]
) )
@ -199,21 +493,13 @@ async def deny(ack, body):
@bolt.action("approve") @bolt.action("approve")
async def approve(ack, body): async def approve(ack, body):
await ack() await ack()
try:
await db.connect()
except Exception:
pass
message = body["message"] message = body["message"]
applicant_slack_id = message["blocks"][len(message) - 3]["text"]["text"].split( applicant_slack_id = message["blocks"][len(message) - 3]["text"]["text"].split(
": " ": "
)[ )[1] # I hate it. You hate it. We all hate it. Carry on.
1
] # I hate it. You hate it. We all hate it. Carry on.
applicant_name = message["blocks"][len(message) - 7]["text"]["text"].split( applicant_name = message["blocks"][len(message) - 7]["text"]["text"].split(
"Name: " "Name: "
)[ )[1] # oops i did it again
1
] # oops i did it again
await bolt.client.chat_delete( await bolt.client.chat_delete(
channel=body["container"]["channel_id"], ts=message["ts"] channel=body["container"]["channel_id"], ts=message["ts"]
) )
@ -234,14 +520,14 @@ async def approve(ack, body):
) )
async with httpx.AsyncClient() as client: async with httpx.AsyncClient() as client:
await client.post( await client.post(
"http://127.0.0.1:9997/v3/config/paths/add/" + new_stream.key, f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/"
+ new_stream.key,
json={"name": new_stream.key}, json={"name": new_stream.key},
) )
await bolt.client.chat_postMessage( await bolt.client.chat_postMessage(
channel=sumbitter_convo["channel"]["id"], channel=sumbitter_convo["channel"]["id"],
text=f"Welcome to OnBoard Live! Your stream key is {new_stream.key}. To use your stream key the easy way, go to <https://live.onboard.hackclub.com/{new_stream.key}/publish|this link>. You can also use it in OBS with the server URL of rtmp://live.onboard.hackclub.com:1935", text=f"Welcome to OnBoard Live! Your stream key is {new_stream.key}. To use your stream key the easy way, go to <https://live.onboard.hackclub.com/{new_stream.key}/publish|this link>. You can also use it in OBS with the server URL of rtmp://live.onboard.hackclub.com:1935",
) )
await db.disconnect()
@bolt.view("apply") @bolt.view("apply")
@ -266,9 +552,6 @@ async def handle_application_submission(ack, body):
channel=sumbitter_convo["channel"]["id"], channel=sumbitter_convo["channel"]["id"],
text=f"Your application has been submitted! We will review it shortly. Please do not send another application - If you haven't heard back in over 48 hours, or you forgot something in your application, please message <@{os.environ['ADMIN_SLACK_ID']}>! Here's a copy of your responses for your reference:\nSome info on your project(s): {body['view']['state']['values']['project-info']['project-info-body']['value']}\n{f'Please fill out <https://forms.hackclub.com/eligibility?program=Onboard%20Live&slack_id={user}|the verification form>! We can only approve your application once this is done.' if not user_verified else ''}", text=f"Your application has been submitted! We will review it shortly. Please do not send another application - If you haven't heard back in over 48 hours, or you forgot something in your application, please message <@{os.environ['ADMIN_SLACK_ID']}>! Here's a copy of your responses for your reference:\nSome info on your project(s): {body['view']['state']['values']['project-info']['project-info-body']['value']}\n{f'Please fill out <https://forms.hackclub.com/eligibility?program=Onboard%20Live&slack_id={user}|the verification form>! We can only approve your application once this is done.' if not user_verified else ''}",
) )
admin_convo = await bolt.client.conversations_open(
users=os.environ["ADMIN_SLACK_ID"], return_im=True
)
will_behave = True will_behave = True
# boxes = body["view"]["state"]["values"]["kAgeY"]["checkboxes"]["selected_options"] # boxes = body["view"]["state"]["values"]["kAgeY"]["checkboxes"]["selected_options"]
# if len(boxes) == 1 and boxes[0]["value"] == "value-1": # if len(boxes) == 1 and boxes[0]["value"] == "value-1":
@ -355,6 +638,29 @@ async def handle_application_submission(ack, body):
) )
@bolt.command("/onboard-live-submit")
async def submit(ack: AsyncAck, command):
    """Slash command: start GitHub verification for a PR the user wants
    to submit.

    Looks up the PR by the number given as the command text; if found,
    marks the user as a pending verification (which pauses Fernet key
    rotation) and DMs them an ephemeral GitHub login link carrying an
    encrypted ``user+pr`` state token.
    """
    await ack()
    requester = command["user_id"]
    channel = command["channel_id"]
    pr = await db.pullrequest.find_first(where={"github_id": int(command["text"])})
    if pr is None:
        await bolt.client.chat_postEphemeral(
            channel=channel,
            user=requester,
            text="There doesn't seem to be a PR open with that ID! If this seems like a mistake, please message <@U05C64XMMHV> about it!",
        )
        return
    if requester not in FERNET_KEY_USERS:
        FERNET_KEY_USERS.append(requester)
    # Encrypted "<user>+<pr>" token the OAuth callback will decode.
    state_token = FERNET.encrypt(bytes(f"{requester}+{pr.github_id}", "utf-8")).hex()
    await bolt.client.chat_postEphemeral(
        channel=channel,
        user=requester,
        text=f"Please <https://live.onboard.hackclub.com/auth/github/login?state={state_token}|click here> to authenticate with GitHub. This helps us verify that this is your PR!",
    )
@bolt.command("/onboard-live-apply") @bolt.command("/onboard-live-apply")
async def apply(ack: AsyncAck, command): async def apply(ack: AsyncAck, command):
await ack() await ack()
@ -536,10 +842,22 @@ async def apply(ack: AsyncAck, command):
@bolt.action("checkboxes") @bolt.action("checkboxes")
async def handle_some_action(ack): async def checkboxes(ack):
"""
AFAICT there needs to be *an* action for the checkboxes, but I process their data elsewhere (on submit)
To avoid warnings in Slack, I'm just ACKing it here and doing nothing :)
"""
await ack() await ack()
@api.post("/slack/events") @api.post("/slack/events")
async def slack_event_endpoint(req: Request): async def slack_event_endpoint(req: Request):
return await bolt_handler.handle(req) return await bolt_handler.handle(req)
def main():
    """Run the FastAPI app under uvicorn (direct-execution convenience)."""
    uvicorn.run(api)


# Only start a server when executed as a script, not when imported.
if __name__ == "__main__":
    main()

View file

@ -0,0 +1,41 @@
/*
Warnings:
- You are about to drop the column `active` on the `Stream` table. All the data in the column will be lost.
- You are about to drop the column `focused` on the `Stream` table. All the data in the column will be lost.
- The primary key for the `User` table will be changed. If it partially fails, the table could be left without primary key constraint.
- You are about to drop the column `slackId` on the `User` table. All the data in the column will be lost.
- Added the required column `user_id` to the `Stream` table without a default value. This is not possible if the table is not empty.
- The required column `id` was added to the `User` table with a prisma-level default value. This is not possible if the table is not empty. Please add this column as optional, then populate it before making it required.
- Added the required column `slack_id` to the `User` table without a default value. This is not possible if the table is not empty.
*/
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Stream" (
"id" TEXT NOT NULL PRIMARY KEY,
"created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"is_live" BOOLEAN NOT NULL DEFAULT false,
"is_focused" BOOLEAN NOT NULL DEFAULT false,
"key" TEXT NOT NULL,
"user_id" TEXT NOT NULL,
CONSTRAINT "Stream_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Stream" ("id", "key") SELECT "id", "key" FROM "Stream";
DROP TABLE "Stream";
ALTER TABLE "new_Stream" RENAME TO "Stream";
CREATE UNIQUE INDEX "Stream_key_key" ON "Stream"("key");
CREATE UNIQUE INDEX "Stream_user_id_key" ON "Stream"("user_id");
CREATE TABLE "new_User" (
"id" TEXT NOT NULL PRIMARY KEY,
"created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"slack_id" TEXT NOT NULL,
"name" TEXT NOT NULL
);
INSERT INTO "new_User" ("name") SELECT "name" FROM "User";
DROP TABLE "User";
ALTER TABLE "new_User" RENAME TO "User";
CREATE UNIQUE INDEX "User_slack_id_key" ON "User"("slack_id");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,10 @@
-- CreateTable
CREATE TABLE "PullRequest" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"userId" TEXT NOT NULL,
"token" TEXT NOT NULL,
CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");

View file

@ -0,0 +1,15 @@
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_PullRequest" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"userId" TEXT,
"token" TEXT NOT NULL,
CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_PullRequest" ("id", "token", "userId") SELECT "id", "token", "userId" FROM "PullRequest";
DROP TABLE "PullRequest";
ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,23 @@
/*
Warnings:
- Added the required column `github_id` to the `PullRequest` table without a default value. This is not possible if the table is not empty.
*/
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_PullRequest" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"github_id" INTEGER NOT NULL,
"userId" TEXT,
"token" TEXT NOT NULL,
CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_PullRequest" ("id", "token", "userId") SELECT "id", "token", "userId" FROM "PullRequest";
DROP TABLE "PullRequest";
ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,25 @@
/*
Warnings:
- The required column `secondary_token` was added to the `PullRequest` table with a prisma-level default value. This is not possible if the table is not empty. Please add this column as optional, then populate it before making it required.
*/
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_PullRequest" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"github_id" INTEGER NOT NULL,
"userId" TEXT,
"token" TEXT NOT NULL,
"secondary_token" TEXT NOT NULL,
CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_PullRequest" ("github_id", "id", "token", "userId") SELECT "github_id", "id", "token", "userId" FROM "PullRequest";
DROP TABLE "PullRequest";
ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");
CREATE UNIQUE INDEX "PullRequest_secondary_token_key" ON "PullRequest"("secondary_token");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,39 @@
/*
Warnings:
- You are about to drop the column `userId` on the `PullRequest` table. All the data in the column will be lost.
*/
-- CreateTable
CREATE TABLE "_PullRequestToPossibleUser" (
"A" INTEGER NOT NULL,
"B" TEXT NOT NULL,
CONSTRAINT "_PullRequestToPossibleUser_A_fkey" FOREIGN KEY ("A") REFERENCES "PullRequest" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT "_PullRequestToPossibleUser_B_fkey" FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_PullRequest" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"github_id" INTEGER NOT NULL,
"known_user_id" TEXT,
"token" TEXT NOT NULL,
"secondary_token" TEXT NOT NULL,
CONSTRAINT "PullRequest_known_user_id_fkey" FOREIGN KEY ("known_user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_PullRequest" ("github_id", "id", "secondary_token", "token") SELECT "github_id", "id", "secondary_token", "token" FROM "PullRequest";
DROP TABLE "PullRequest";
ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");
CREATE UNIQUE INDEX "PullRequest_secondary_token_key" ON "PullRequest"("secondary_token");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;
-- CreateIndex
CREATE UNIQUE INDEX "_PullRequestToPossibleUser_AB_unique" ON "_PullRequestToPossibleUser"("A", "B");
-- CreateIndex
CREATE INDEX "_PullRequestToPossibleUser_B_index" ON "_PullRequestToPossibleUser"("B");

View file

@ -0,0 +1,10 @@
/*
Warnings:
- You are about to drop the `_PullRequestToPossibleUser` table. If the table is not empty, all the data it contains will be lost.
*/
-- DropTable
PRAGMA foreign_keys=off;
DROP TABLE "_PullRequestToPossibleUser";
PRAGMA foreign_keys=on;

View file

@ -0,0 +1,26 @@
/*
Warnings:
- You are about to drop the column `known_user_id` on the `PullRequest` table. All the data in the column will be lost.
- You are about to drop the column `secondary_token` on the `PullRequest` table. All the data in the column will be lost.
- You are about to drop the column `token` on the `PullRequest` table. All the data in the column will be lost.
*/
-- AlterTable
-- Moves GitHub identity onto the User row (nullable for existing users).
ALTER TABLE "User" ADD COLUMN "github_user_id" TEXT;
-- RedefineTables
-- Table rebuild to drop the token columns and rename the FK column to
-- user_id: create replacement, copy surviving data, swap, re-index.
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_PullRequest" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"github_id" INTEGER NOT NULL,
"user_id" TEXT,
CONSTRAINT "PullRequest_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_PullRequest" ("github_id", "id") SELECT "github_id", "id" FROM "PullRequest";
DROP TABLE "PullRequest";
ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,2 @@
-- AlterTable
-- Adds an optional GitHub token column to User (removed again by a
-- subsequent migration in this series).
ALTER TABLE "User" ADD COLUMN "github_token" TEXT;

View file

@ -0,0 +1,25 @@
/*
Warnings:
- You are about to drop the column `github_token` on the `User` table. All the data in the column will be lost.
*/
-- AlterTable
-- GitHub identity now lives on the PullRequest row instead (nullable).
ALTER TABLE "PullRequest" ADD COLUMN "gh_user_id" TEXT;
-- RedefineTables
-- Table rebuild to drop User.github_token: create replacement, copy
-- surviving columns, swap, recreate the slack_id unique index.
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_User" (
"id" TEXT NOT NULL PRIMARY KEY,
"created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"slack_id" TEXT NOT NULL,
"name" TEXT NOT NULL,
"github_user_id" TEXT
);
INSERT INTO "new_User" ("created_at", "github_user_id", "id", "name", "slack_id") SELECT "created_at", "github_user_id", "id", "name", "slack_id" FROM "User";
DROP TABLE "User";
ALTER TABLE "new_User" RENAME TO "User";
CREATE UNIQUE INDEX "User_slack_id_key" ON "User"("slack_id");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,21 @@
/*
Warnings:
- You are about to drop the column `github_user_id` on the `User` table. All the data in the column will be lost.
*/
-- RedefineTables
-- Table rebuild to drop User.github_user_id: create replacement, copy
-- surviving columns, swap, recreate the slack_id unique index.
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_User" (
"id" TEXT NOT NULL PRIMARY KEY,
"created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"slack_id" TEXT NOT NULL,
"name" TEXT NOT NULL
);
INSERT INTO "new_User" ("created_at", "id", "name", "slack_id") SELECT "created_at", "id", "name", "slack_id" FROM "User";
DROP TABLE "User";
ALTER TABLE "new_User" RENAME TO "User";
CREATE UNIQUE INDEX "User_slack_id_key" ON "User"("slack_id");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,22 @@
/*
Warnings:
- Made the column `gh_user_id` on table `PullRequest` required. This step will fail if there are existing NULL values in that column.
*/
-- RedefineTables
-- Table rebuild to promote gh_user_id to NOT NULL; the INSERT..SELECT will
-- abort the migration if any existing row has a NULL gh_user_id.
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_PullRequest" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"github_id" INTEGER NOT NULL,
"user_id" TEXT,
"gh_user_id" TEXT NOT NULL,
CONSTRAINT "PullRequest_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_PullRequest" ("gh_user_id", "github_id", "id", "user_id") SELECT "gh_user_id", "github_id", "id", "user_id" FROM "PullRequest";
DROP TABLE "PullRequest";
ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,22 @@
/*
Warnings:
- You are about to alter the column `gh_user_id` on the `PullRequest` table. The data in that column could be lost. The data in that column will be cast from `String` to `Int`.
*/
-- RedefineTables
-- Table rebuild to change gh_user_id from TEXT to INTEGER; existing values
-- are cast by SQLite during the INSERT..SELECT copy.
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_PullRequest" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"github_id" INTEGER NOT NULL,
"user_id" TEXT,
"gh_user_id" INTEGER NOT NULL,
CONSTRAINT "PullRequest_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_PullRequest" ("gh_user_id", "github_id", "id", "user_id") SELECT "gh_user_id", "github_id", "id", "user_id" FROM "PullRequest";
DROP TABLE "PullRequest";
ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,14 @@
-- CreateTable
-- Session: a recorded review session. Initially tied 1:1 to a User via the
-- unique user_id index; the reviewed/approved flags track moderation state.
CREATE TABLE "Session" (
"id" TEXT NOT NULL PRIMARY KEY,
"user_id" TEXT NOT NULL,
"timestamp" TEXT NOT NULL,
"filename" TEXT NOT NULL,
"duration" INTEGER NOT NULL,
"reviewed" BOOLEAN NOT NULL DEFAULT false,
"approved" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "Session_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
-- CreateIndex
CREATE UNIQUE INDEX "Session_user_id_key" ON "Session"("user_id");

View file

@ -0,0 +1,26 @@
/*
Warnings:
- You are about to drop the column `user_id` on the `Session` table. All the data in the column will be lost.
- Added the required column `pr_id` to the `Session` table without a default value. This is not possible if the table is not empty.
*/
-- RedefineTables
PRAGMA defer_foreign_keys=ON;
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_Session" (
"id" TEXT NOT NULL PRIMARY KEY,
"pr_id" INTEGER NOT NULL,
"timestamp" TEXT NOT NULL,
"filename" TEXT NOT NULL,
"duration" INTEGER NOT NULL,
"reviewed" BOOLEAN NOT NULL DEFAULT false,
"approved" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "Session_pr_id_fkey" FOREIGN KEY ("pr_id") REFERENCES "PullRequest" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_Session" ("approved", "duration", "filename", "id", "reviewed", "timestamp") SELECT "approved", "duration", "filename", "id", "reviewed", "timestamp" FROM "Session";
DROP TABLE "Session";
ALTER TABLE "new_Session" RENAME TO "Session";
CREATE UNIQUE INDEX "Session_pr_id_key" ON "Session"("pr_id");
PRAGMA foreign_keys=ON;
PRAGMA defer_foreign_keys=OFF;

View file

@ -0,0 +1,2 @@
-- DropIndex
-- Relaxes the 1:1 Session<->PullRequest constraint so a PR can have
-- multiple review sessions.
DROP INDEX "Session_pr_id_key";

View file

@ -0,0 +1,8 @@
/*
Warnings:
- A unique constraint covering the columns `[filename]` on the table `Session` will be added. If there are existing duplicate values, this will fail.
*/
-- CreateIndex
-- Each recording file maps to exactly one Session row.
CREATE UNIQUE INDEX "Session_filename_key" ON "Session"("filename");

79
backend/requirements.txt Normal file
View file

@ -0,0 +1,79 @@
aiofiles==24.1.0
aiohttp==3.9.5
aiosignal==1.3.1
annotated-types==0.7.0
anyio==4.4.0
APScheduler==3.10.4
attrs==23.2.0
black==24.4.2
build==1.2.1
certifi==2024.7.4
cffi==1.17.0
charset-normalizer==3.3.2
click==8.1.7
cryptography==43.0.0
defusedxml==0.8.0rc2
dnspython==2.6.1
ecdsa==0.19.0
email_validator==2.2.0
fastapi==0.112.0
fastapi-cli==0.0.4
fastapi-oauth2==1.0.0
fastapi-utils==0.7.0
frozenlist==1.4.1
h11==0.14.0
httpcore==1.0.5
httptools==0.6.1
httpx==0.27.0
idna==3.7
Jinja2==3.1.4
markdown-it-py==3.0.0
MarkupSafe==2.1.5
mdurl==0.1.2
multidict==6.0.5
mypy==1.11.0
mypy-extensions==1.0.0
nodeenv==1.9.1
numpy==2.1.0
oauthlib==3.2.2
opencv-python==4.10.0.84
packaging==24.1
pathspec==0.12.1
platformdirs==4.2.2
prisma==0.14.0
psutil==5.9.8
pyasn1==0.6.0
pycparser==2.22
pydantic==2.8.2
pydantic_core==2.20.1
Pygments==2.18.0
PyJWT==2.9.0
pyproject_hooks==1.1.0
python-dotenv==1.0.1
python-jose==3.3.0
python-multipart==0.0.9
python3-openid==3.2.0
pytz==2024.1
PyYAML==6.0.1
requests==2.32.3
requests-oauthlib==2.0.0
rich==13.7.1
rsa==4.9
shellingham==1.5.4
six==1.16.0
slack_bolt==1.20.0
slack_sdk==3.31.0
sniffio==1.3.1
social-auth-core==4.5.4
starlette==0.37.2
tomlkit==0.13.0
typer==0.12.3
typing-inspect==0.9.0
typing_extensions==4.12.2
tzlocal==5.2
urllib3==2.2.2
uvicorn==0.30.6
uvloop==0.19.0
watchfiles==0.22.0
websockets==12.0
yarl==1.9.4

View file

@ -1,12 +1,12 @@
generator client { generator client {
provider = "prisma-client-py" provider = "prisma-client-py"
recursive_type_depth = "5"
interface = "asyncio" interface = "asyncio"
recursive_type_depth = 5
} }
datasource db { datasource db {
provider = "sqlite" provider = "sqlite"
url = "file:./dev.db" url = "file:./db/dev.db"
} }
model User { model User {
@ -14,6 +14,7 @@ model User {
created_at DateTime @default(now()) created_at DateTime @default(now())
slack_id String @unique slack_id String @unique
name String name String
pull_requests PullRequest[] @relation("PullRequestToUser")
stream Stream? stream Stream?
} }
@ -23,6 +24,26 @@ model Stream {
is_live Boolean @default(false) is_live Boolean @default(false)
is_focused Boolean @default(false) is_focused Boolean @default(false)
key String @unique @default(uuid()) key String @unique @default(uuid())
user User @relation(fields: [user_id], references: [id])
user_id String @unique user_id String @unique
user User @relation(fields: [user_id], references: [id])
}
model PullRequest {
id Int @id @default(autoincrement())
github_id Int @unique
user_id String?
gh_user_id Int
user User? @relation("PullRequestToUser", fields: [user_id], references: [id])
sessions Session[]
}
model Session {
id String @id @default(cuid())
pr_id Int
pull PullRequest @relation(fields: [pr_id], references: [id])
timestamp String
filename String @unique
duration Int // in minutes
reviewed Boolean @default(false)
approved Boolean @default(false)
} }

12
caddy/Caddyfile Normal file
View file

@ -0,0 +1,12 @@
live.onboard.hackclub.com {
reverse_proxy host.containers.internal:8889
handle /slack/* {
reverse_proxy host.containers.internal:8000
}
handle /api/v1/github/* {
reverse_proxy host.containers.internal:8000
}
handle /auth/* {
reverse_proxy host.containers.internal:8000
}
}

71
docker-compose.yml Normal file
View file

@ -0,0 +1,71 @@
services:
mediamtx:
restart: unless-stopped
network_mode: "host"
build:
context: ./mediamtx
dockerfile: Dockerfile
volumes:
- mediamtx_recordings:/recordings
# ports:
# - "8889:8889"
# - "1935:1935"
# - "9997:9997"
web-frontend:
ports:
- "4173:4173"
build:
context: ./tiling-frontend
dockerfile: Dockerfile
volumes:
- tiling_frontend_build:/usr/src/app/dist
live-stream:
deploy:
resources:
limits:
memory: 8192M
reservations:
memory: 8192M
network_mode: "host"
shm_size: '8gb'
restart: unless-stopped
env_file: .stream.env
depends_on:
backend:
condition: service_started
web-frontend:
condition: service_started
build:
context: ./live-stream
dockerfile: Dockerfile
volumes:
- tiling_frontend_build:/html
backend:
network_mode: "host"
restart: unless-stopped
env_file: .backend.env
build:
context: ./backend
dockerfile: Dockerfile
volumes:
- ./backend/db:/usr/src/app/db
depends_on:
  mediamtx:
    condition: service_started
caddy:
image: docker.io/caddy:alpine
restart: unless-stopped
cap_add:
- NET_ADMIN
ports:
- "80:80"
- "443:443"
- "443:443/udp"
volumes:
- $PWD/caddy/Caddyfile:/etc/caddy/Caddyfile
- caddy_data:/data
- caddy_config:/config
volumes:
mediamtx_recordings:
tiling_frontend_build:
caddy_data:
caddy_config:

25
live-stream/Dockerfile Normal file
View file

@ -0,0 +1,25 @@
FROM ubuntu:latest
# Headless streaming image: ffmpeg + Xvfb + PulseAudio + Chromium from the
# xtradeb PPA (presumably because the default Ubuntu chromium package is not
# usable in a container — TODO confirm).
RUN apt update
RUN apt install -y ffmpeg xvfb software-properties-common dbus-x11 pulseaudio
RUN add-apt-repository -y ppa:xtradeb/apps
RUN apt update
RUN apt install -y chromium
RUN apt install -y sudo
# NOTE(review): cleanup in separate RUN layers does not shrink the image —
# the apt lists/caches already exist in the earlier layers. Merging these
# RUNs into the install steps would actually reduce image size.
RUN rm -rf /var/lib/apt/lists/*
RUN apt clean
# Unprivileged user the browser/stream processes run as (see run.sh).
RUN useradd -ms /bin/bash stream
COPY run.sh ./
COPY user_run.sh /home/stream
ENTRYPOINT ["./run.sh"]

11
live-stream/run.sh Executable file
View file

@ -0,0 +1,11 @@
#!/bin/bash
# Container entrypoint: start a system D-Bus daemon, stash the YouTube stream
# key where the unprivileged "stream" user can read it, then run the actual
# capture/stream session as that user (see user_run.sh).
dbus-daemon --config-file=/usr/share/dbus-1/system.conf &
# Quote the expansion so a key containing whitespace or glob characters is
# written verbatim (the original unquoted `echo $YT_STREAM_KEY` was subject to
# word splitting and pathname expansion); printf also avoids echo's
# escape-sequence ambiguity. Trailing newline is harmless: user_run.sh reads
# the key via $(cat ...), which strips it.
printf '%s\n' "$YT_STREAM_KEY" >/home/stream/key.txt
chown stream /home/stream/key.txt
chown stream /home/stream/user_run.sh
sudo -i -u stream bash /home/stream/user_run.sh

25
live-stream/user_run.sh Executable file
View file

@ -0,0 +1,25 @@
#!/bin/bash
# Runs as the unprivileged "stream" user: sets up a virtual PulseAudio sink
# and a headless X display, opens the tiling frontend in Chromium, and pushes
# an x11grab + pulse capture of it to YouTube over RTMP.
pulseaudio -D &
sleep 2
# Null sink gives Chromium an audio output that ffmpeg can later capture
# from as the default pulse source.
pacmd load-module module-null-sink sink_name=VirtSink
pacmd update-sink-proplist VirtSink device.description=VirtSink
export CHROMIUM_FLAGS="--disable-software-rasterizer"
export LIBGL_ALWAYS_INDIRECT=1
# Headless Chromium on Xvfb display :99 at 1920x1080, pointed at the
# tiling frontend's preview server.
bash -c "DISPLAY=:99 xvfb-run \
--server-num 99 \
-s \"-nocursor -ac -screen 0 1920x1080x24\" \
dbus-launch chromium \
--temp-profile \
--window-size=1920,1080 \
--disable-gpu \
--window-position=0,0 \
--hide-scrollbars \
--autoplay-policy=no-user-gesture-required \
--app=http://localhost:4173" & disown
# Give Chromium a moment to start painting, then stream the display plus the
# default pulse source to YouTube; the stream key was written to key.txt by
# run.sh before this script started.
bash -c "sleep 3 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 60 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)"

5
mediamtx/Dockerfile Normal file
View file

@ -0,0 +1,5 @@
FROM docker.io/bluenviron/mediamtx
COPY . /
ENTRYPOINT ["/mediamtx"]

25
mediamtx/mediamtx.yml Normal file
View file

@ -0,0 +1,25 @@
playback: yes
playbackAddress: :9996
playbackTrustedProxies: [ '127.0.0.1' ]
api: yes
pathDefaults:
record: yes
# Path of recording segments.
# Extension is added automatically.
# Available variables are %path (path name), %Y %m %d %H %M %S %f %s (time in strftime format)
recordPath: /recordings/%path/%Y-%m-%d_%H-%M-%S-%f
recordDeleteAfter: 0s
webrtcICEServers2:
- url: stun:stun.l.google.com:19302
authInternalUsers:
# Username. 'any' means any user, including anonymous ones.
- user: any
# Password. Not used in case of 'any' user.
pass:
# IPs or networks allowed to use this user. An empty list means any IP.
ips: []
permissions:
- action: api
- action: publish
- action: playback
- action: read

View file

@ -1,8 +0,0 @@
fastapi
uvicorn[standard]
slack-bolt
requests
python-dotenv
prisma
fastapi-utils
httpx

View file

@ -0,0 +1,18 @@
# Multi-stage Bun build for the tiling frontend: install deps, build, then
# serve the preview build.
FROM docker.io/oven/bun:slim AS base
WORKDIR /usr/src/app
FROM base AS install
# Dev install (includes devDependencies needed for the build/preview).
RUN mkdir -p /temp/dev
COPY package.json bun.lockb /temp/dev/
RUN cd /temp/dev && bun install
# NOTE(review): this production install is never copied into the release
# stage below — it appears to be dead build work; confirm and remove.
RUN mkdir -p /temp/prod
COPY package.json bun.lockb /temp/prod/
RUN cd /temp/prod && bun install --production
FROM base AS release
COPY --from=install /temp/dev/node_modules node_modules
COPY . .
RUN bun --bun run build
ENTRYPOINT ["bun", "--bun", "run", "preview", "--host"]

Binary file not shown.

View file

@ -51,7 +51,7 @@
pathData = newData; pathData = newData;
setTimeout(() => { setTimeout(() => {
for (const video in videos) { for (const video in videos) {
const hlsInstance = new hls({ progressive: false }); const hlsInstance = new hls({ backBufferLength: 2 });
hlsInstance.loadSource( hlsInstance.loadSource(
`http://localhost:8888/${video}/index.m3u8`, `http://localhost:8888/${video}/index.m3u8`,
); );