diff --git a/.gitignore b/.gitignore
index 4820029..60851d9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -187,4 +187,5 @@ cython_debug/
#.idea/
*.dat
-dev.db*
\ No newline at end of file
+dev.db*
+.*env
diff --git a/active-stream-proxy/Dockerfile b/active-stream-proxy/Dockerfile
new file mode 100644
index 0000000..292aec2
--- /dev/null
+++ b/active-stream-proxy/Dockerfile
@@ -0,0 +1,13 @@
+FROM python:3.12-alpine
+
+WORKDIR /usr/src/app
+
+RUN apk add --no-cache ffmpeg
+
+COPY requirements.txt ./
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY . .
+
+CMD [ "python", "./main.py" ]
diff --git a/active-stream-proxy/main.py b/active-stream-proxy/main.py
new file mode 100644
index 0000000..862ee5a
--- /dev/null
+++ b/active-stream-proxy/main.py
@@ -0,0 +1,29 @@
+import subprocess
+import time
+import requests
+
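+# Crude startup delay: give the backend container time to come up before the first request.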
+time.sleep(8)
+
+active_stream = requests.get("http://backend:8000/api/v1/active_stream").text.replace('"', '')
+print(active_stream)
+
+old_active_stream = active_stream
+
+proc = None
+
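+# Relay the focused stream into the mixer input; if nobody is live, feed silence instead.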
+if active_stream != "":
+ proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://host.containers.internal:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"])
+else:
+ proc = subprocess.Popen(["ffmpeg", "-f", "lavfi", "-i", "anullsrc", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"])
+
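+# Poll every 3 seconds and restart ffmpeg whenever the active stream changes.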
+while True:
+ time.sleep(3)
+ active_stream = requests.get("http://backend:8000/api/v1/active_stream").text.replace('"', '')
+    if old_active_stream != active_stream:  # compare values; "is not" checked identity and restarted ffmpeg every poll
+ if proc:
+ proc.terminate()
+ if active_stream != "":
+ proc = subprocess.Popen(["ffmpeg", "-re", "-i", f"rtmp://host.containers.internal:1935/{active_stream}", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"])
+ else:
+ proc = subprocess.Popen(["ffmpeg", "-f", "lavfi", "-i", "anullsrc", "-c:a", "libmp3lame", "-f", "flv", "rtmp://host.containers.internal:1936/active-input"])
+ old_active_stream = active_stream
diff --git a/active-stream-proxy/requirements.txt b/active-stream-proxy/requirements.txt
new file mode 100644
index 0000000..f229360
--- /dev/null
+++ b/active-stream-proxy/requirements.txt
@@ -0,0 +1 @@
+requests
diff --git a/backend/Dockerfile b/backend/Dockerfile
new file mode 100644
index 0000000..07d7839
--- /dev/null
+++ b/backend/Dockerfile
@@ -0,0 +1,23 @@
+FROM python:3.12-slim
+
+EXPOSE 8000
+
+WORKDIR /usr/src/app
+
+COPY requirements.txt ./
+
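+# The apt package pulls in the native libraries (libGL, glib) that the pip opencv wheel needs at runtime.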
+RUN apt-get update && apt-get install -y python3-opencv && rm -rf /var/lib/apt/lists/*
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY schema.prisma .
+
+COPY migrations ./migrations
+
+RUN prisma generate
+
+COPY main.py .
+
+CMD [ "uvicorn", "main:api", "--log-level", "warning", "--workers", "4", "--host", "0.0.0.0", "--port", "8000" ]
diff --git a/backend/main.py b/backend/main.py
index 71f0030..cede227 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -1,35 +1,101 @@
+import hashlib
+import hmac
import json
import os
from contextlib import asynccontextmanager
-from random import choice
-from secrets import token_hex
+from datetime import datetime
+from secrets import choice, token_hex
from typing import Dict, List
+import cv2
import httpx
+import uvicorn
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.interval import IntervalTrigger
+from cryptography.fernet import Fernet
from dotenv import load_dotenv
-from fastapi import FastAPI, Request, Response
+from fastapi import FastAPI, HTTPException, Request, Response
from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import HTMLResponse, RedirectResponse
from prisma import Prisma
from slack_bolt.adapter.fastapi.async_handler import AsyncSlackRequestHandler
from slack_bolt.async_app import AsyncAck, AsyncApp
+from yarl import URL
-load_dotenv()
+load_dotenv(dotenv_path="./.env")
active_stream: Dict[str, str | bool] = {}
active_streams: List[Dict[str, str | bool]] = []
scheduler = AsyncIOScheduler()
+FERNET_KEY = Fernet.generate_key()
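+# Slack user IDs with a GitHub verification in flight; the key is only rotated when this is empty.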
+FERNET_KEY_USERS = []
+
+
+FERNET = Fernet(FERNET_KEY)
+
+
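+# Rotating the key invalidates any outstanding OAuth state tokens, hence the guard below.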
+async def rotate_fernet_key():
+ global FERNET_KEY
+ global FERNET
+    if not FERNET_KEY_USERS:
+ FERNET_KEY = Fernet.generate_key()
+ FERNET = Fernet(FERNET_KEY)
+ else:
+ print("not rotating key since we have a pending verification")
+
+
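+# Derive a recording's length in whole minutes from its frame count and FPS.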
+def get_recording_duration(timestamp, stream_key):
+ vid = cv2.VideoCapture(
+ f"/home/onboard/recordings/{stream_key}/{datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ').strftime('%Y-%m-%d_%H-%M-%S-%f')}.mp4"
+ )
+ return int(
+ (vid.get(cv2.CAP_PROP_FRAME_COUNT) / vid.get(cv2.CAP_PROP_FPS)) / 60
+ ) # seconds to minutes
+
+
+def verify_gh_signature(payload_body, secret_token, signature_header):
+ """Verify that the payload was sent from GitHub by validating SHA256.
+
+ Raise and return 403 if not authorized.
+
+ Args:
+ payload_body: original request body to verify (request.body())
+ secret_token: GitHub app webhook token (WEBHOOK_SECRET)
+ signature_header: header received from GitHub (x-hub-signature-256)
+ """
+ if not signature_header:
+ raise HTTPException(
+ status_code=403, detail="x-hub-signature-256 header is missing!"
+ )
+ hash_object = hmac.new(
+ secret_token.encode("utf-8"), msg=payload_body, digestmod=hashlib.sha256
+ )
+ expected_signature = "sha256=" + hash_object.hexdigest()
+ if not hmac.compare_digest(expected_signature, signature_header):
+ raise HTTPException(status_code=403, detail="Request signatures didn't match!")
+
+
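+# Ask MediaMTX for a stream's recorded segments and return their start timestamps.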
+async def get_recording_list(stream_key: str) -> List[str]:
+ async with httpx.AsyncClient() as client:
+ return [
+ recording["start"]
+ for recording in (
+ await client.get(
+ f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/recordings/get/{stream_key}"
+ )
+ ).json()["segments"]
+ ]
+
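+# Every 5 minutes, pick a new focus stream at random and notify the old and new streamers.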
async def update_active():
global active_stream
global active_streams
async with httpx.AsyncClient() as client:
- streams_raw = (await client.get("http://localhost:9997/v3/paths/list")).json()[
- "items"
- ]
+ streams_raw = (
+ await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list")
+ ).json()["items"]
streams = []
for stream in streams_raw:
streams.append({"name": stream["name"], "ready": stream["ready"]})
@@ -37,44 +103,52 @@ async def update_active():
if stream["ready"] and stream not in active_streams:
active_streams.append(stream)
if len(active_streams) == 0:
- print("No active streams")
return
if active_stream == {}:
- print("No current active stream, picking new one...")
active_stream = choice(active_streams)
return
if len(active_streams) == 1:
return
- print(
- f"starting to pick new active stream (switching away from {active_stream['name']})"
- )
new_stream = choice(active_streams)
while new_stream["name"] == active_stream["name"]:
- print(
- f"re-attemppting to pick active stream since we picked {new_stream} again"
- )
new_stream = choice(active_streams)
- print(f"found new stream to make active: {new_stream}")
- try:
- await db.connect()
- except Exception as e:
- print(e)
- print(f"trying to find user associated with stream {active_stream['name']}")
- old_active_stream_user = await db.user.find_first(where={"id": (await db.stream.find_first(where={"key": str(active_stream["name"])})).user_id}) # type: ignore
- await bolt.client.chat_postMessage(channel="C07ERCGG989", text=f"Hey <@{old_active_stream_user.slack_id}>, you're no longer in focus!") # type: ignore
+ old_active_stream_user = await db.user.find_first(
+ where={
+ "id": (
+ await db.stream.find_first(
+ where={"key": str(active_stream["name"])}
+ )
+ ).user_id # type: ignore
+ }
+ )
+ await bolt.client.chat_postMessage(
+ channel="C07ERCGG989",
+ text=f"Hey <@{old_active_stream_user.slack_id}>, you're no longer in focus!", # type: ignore
+ )
active_stream = new_stream
- active_stream_user = await db.user.find_first(where={"id": (await db.stream.find_first(where={"key": str(active_stream["name"])})).user_id}) # type: ignore
- await bolt.client.chat_postMessage(channel="C07ERCGG989", text=f"Hey <@{active_stream_user.slack_id}>, you're in focus! Make sure to tell us what you're working on!") # type: ignore
- await db.disconnect()
+ active_stream_user = await db.user.find_first(
+ where={
+ "id": (
+ await db.stream.find_first(
+ where={"key": str(active_stream["name"])}
+ )
+ ).user_id # type: ignore
+ }
+ )
+ await bolt.client.chat_postMessage(
+ channel="C07ERCGG989",
+ text=f"Hey <@{active_stream_user.slack_id}>, you're in focus! Make sure to tell us what you're working on!", # type: ignore
+ )
+ return True
async def check_for_new():
global active_stream
global active_streams
async with httpx.AsyncClient() as client:
- streams_raw = (await client.get("http://localhost:9997/v3/paths/list")).json()[
- "items"
- ]
+ streams_raw = (
+ await client.get(f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/paths/list")
+ ).json()["items"]
streams_simple = []
for stream in streams_raw:
if stream["ready"]:
@@ -94,29 +168,27 @@ async def check_for_new():
if stream not in active_streams_simple:
active_streams.append({"name": stream, "ready": True})
if len(active_streams) == 0:
- print("No active streams")
active_stream = {}
@asynccontextmanager
-async def lifespan(app: FastAPI):
- await update_active()
- scheduler.start()
- scheduler.add_job(update_active, IntervalTrigger(seconds=5 * 60))
- scheduler.add_job(check_for_new, IntervalTrigger(seconds=3))
- try:
- await db.connect()
- except Exception:
- pass
+async def lifespan(app: FastAPI):  # FastAPI invokes the lifespan handler with the app instance
+ await db.connect()
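+    # Re-register every known stream key with MediaMTX on startup.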
async with httpx.AsyncClient() as client:
for stream in await db.stream.find_many():
await client.post(
- "http://127.0.0.1:9997/v3/config/paths/add/" + stream.key,
+ f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/"
+ + stream.key,
json={"name": stream.key},
)
- await db.disconnect()
+ scheduler.start()
+ scheduler.add_job(update_active, IntervalTrigger(minutes=5))
+ scheduler.add_job(check_for_new, IntervalTrigger(seconds=3))
+ scheduler.add_job(rotate_fernet_key, IntervalTrigger(minutes=30))
+ await rotate_fernet_key()
yield
scheduler.shutdown()
+ await db.disconnect()
api = FastAPI(lifespan=lifespan) # type: ignore
@@ -137,22 +209,215 @@ bolt = AsyncApp(
bolt_handler = AsyncSlackRequestHandler(bolt)
+@api.get("/auth/github/login")
+async def github_redirect(request: Request):
+ return RedirectResponse(
+ str(
+ URL.build(
+ scheme="https",
+ host="github.com",
+ path="/login/oauth/authorize",
+ query={
+ "client_id": os.environ["GH_CLIENT_ID"],
+ "redirect_uri": "https://live.onboard.hackclub.com/auth/github/callback",
+ "scopes": "read:user",
+ "state": request.query_params["state"],
+ },
+ )
+ )
+ )
+
+
+@api.get("/auth/github/callback")
+async def github_callback(request: Request):
+ code: str = request.query_params["code"]
+ state: str = request.query_params["state"]
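+    # state is a hex-encoded Fernet token carrying "slack_user_id+pr_id".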
+ user_id, pr_id = FERNET.decrypt(bytes.fromhex(state)).decode().split("+")
+ if user_id in FERNET_KEY_USERS:
+ FERNET_KEY_USERS.remove(user_id)
+ db_user = await db.user.find_first_or_raise(where={"slack_id": user_id})
+ user_stream_key = (
+ await db.stream.find_first_or_raise(where={"user_id": db_user.id})
+ ).key
+ db_pr = await db.pullrequest.find_first_or_raise(where={"github_id": int(pr_id)})
+ async with httpx.AsyncClient() as client:
+ token = (
+ await client.post(
+ "https://github.com/login/oauth/access_token",
+ json={
+ "client_id": os.environ["GH_CLIENT_ID"],
+ "client_secret": os.environ["GH_CLIENT_SECRET"],
+ "code": code,
+ "redirect_uri": "https://live.onboard.hackclub.com/auth/github/callback",
+ },
+ headers={"Accept": "application/json"},
+ )
+ ).json()["access_token"]
+
+ gh_user: int = (
+ await client.get(
+ "https://api.github.com/user",
+ headers={
+ "Accept": "application/vnd.github.v3+json",
+ "Authorization": f"Bearer {token}",
+ },
+ )
+ ).json()["id"]
+ if gh_user == db_pr.gh_user_id:
+ await db.pullrequest.update(
+ {"user": {"connect": {"id": db_user.id}}, "gh_user_id": gh_user},
+ {"id": db_pr.id},
+ )
+ stream_recs = await get_recording_list(user_stream_key)
+        if not stream_recs:
+            return HTMLResponse(
+                "<p>You don't have any sessions to submit! Please DM @mra on Slack if you think this is a mistake.</p>"
+            )
+ await bolt.client.chat_postMessage(
+ channel=user_id,
+ text="Select your OnBoard Live sessions!",
+ blocks=[
+ {
+ "type": "header",
+ "text": {
+ "type": "plain_text",
+ "text": "Select your sessions for review!\nCopy and paste the lines of sessions that you want associated with this PR into the box!",
+ "emoji": True,
+ },
+ },
+ {
+ "block_id": "session-checks",
+ "type": "section",
+ "text": {
+ "type": "mrkdwn",
+ "text": f"Here are all your sessions. Select the ones associated with OnBoard pull request #{pr_id}:",
+ },
+ },
+ {
+ "type": "section",
+ "text": {
+ "type": "mrkdwn",
+ "text": "\n".join(
+ [
+ recording
+ + " for "
+ + str(
+ get_recording_duration(
+ recording, user_stream_key
+ )
+ )
+                            + " minutes"
+ for recording in stream_recs
+ ]
+ ), # type: ignore
+ },
+ },
+ {
+ "type": "input",
+ "block_id": "session-input",
+ "element": {
+ "type": "plain_text_input",
+ "multiline": True,
+ "action_id": "plain_text_input-action",
+ },
+ "label": {
+ "type": "plain_text",
+ "text": "Paste the lines here (DO NOT EDIT THEM, ONE ON EACH LINE)",
+ "emoji": False,
+ },
+ },
+ # "block_id": "session-checks",
+ # "type": "section",
+ # "text": {
+ # "type": "mrkdwn",
+ # "text": f"Here are all your sessions. Select the ones associated with OnBoard pull request #{pr_id}:",
+ # },
+ # "accessory": {
+ # "type": "checkboxes",
+ # "options": [
+ # json.loads(
+ # """{{"text": {{ "type": "mrkdwn", "text": "Your session on {pretty_time}"}}, "description": {{"type": "mrkdwn", "text": "You streamed for {length} {minute_or_minutes}"}}, "value": "checkbox-{filename}"}}""".format(
+ # pretty_time=recording,
+ # length=get_recording_duration(
+ # recording, user_stream_key
+ # ),
+ # minute_or_minutes=(
+ # "minute"
+ # if get_recording_duration(
+ # recording, user_stream_key
+ # )
+ # == 1
+ # else "minutes"
+ # ),
+ # filename=recording,
+ # )
+ # )
+ # for recording in stream_recs
+ # ],
+ # "action_id": "checkboxes",
+ # },
+ # },
+ {
+ "type": "actions",
+ "elements": [
+ {
+ "type": "button",
+ "text": {
+ "type": "plain_text",
+ "emoji": True,
+ "text": "Submit",
+ },
+ "style": "primary",
+ "value": "submit_sessions",
+ "action_id": "submit_sessions",
+ },
+ ],
+ },
+ ],
+ )
+ return HTMLResponse(
+ "Success! Your PR has been linked to your Slack account. Check your Slack DMs for the next steps!
"
+ )
+ return HTMLResponse(
+ "Looks like something went wrong! DM @mra on slack.
",
+ status_code=500,
+ )
+
+
+@api.post("/api/v1/github/pr_event")
+async def pr_event(request: Request):
+ verify_gh_signature(
+ await request.body(),
+ os.environ["GH_HOOK_SECRET"],
+ request.headers.get("x-hub-signature-256"),
+ )
+ body = json.loads(await request.body())
+ if body["action"] == "labeled":
+ if body["label"]["id"] == 7336079497:
+ await db.pullrequest.create(
+ {
+ "github_id": body["pull_request"]["number"],
+ "gh_user_id": body["pull_request"]["user"]["id"],
+ }
+ )
+ return
+
+
@api.get("/api/v1/stream_key/{stream_key}")
async def get_stream_by_key(stream_key: str):
- await db.connect()
stream = await db.stream.find_first(where={"key": stream_key})
- await db.disconnect()
return (
stream if stream else Response(status_code=404, content="404: Stream not found")
)
+
@api.get("/api/v1/active_stream")
async def get_active_stream():
return active_stream["name"] if "name" in active_stream else ""
@bolt.event("app_home_opened")
-async def handle_app_home_opened_events(body, logger, event, client):
+async def handle_app_home_opened_events(event, client):
await client.views_publish(
user_id=event["user"],
# the view object that appears in the app home
@@ -173,20 +438,49 @@ async def handle_app_home_opened_events(body, logger, event, client):
)
+@bolt.action("submit_sessions")
+async def submit_sessions(ack: AsyncAck, body):
+ await ack()
+ selected_sessions_ts: List[str] = []
+ print(body["state"]["values"])
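+    # Each pasted line reads "<timestamp> for <n> minutes"; keep only the timestamp.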
+ for session in body["state"]["values"]["session-input"]["plain_text_input-action"][
+ "value"
+ ].split("\n"):
+ selected_sessions_ts.append(session.split(" for ")[0])
+
+ pr_id = int(
+ body["message"]["blocks"][1]["text"]["text"].split("#")[1].split(":")[0]
+ ) # don't tell my mom she raised a monster
+ db_pr = await db.pullrequest.find_first_or_raise(where={"github_id": pr_id})
+ if db_pr.user_id:
+ stream_key = (
+ await db.stream.find_first_or_raise(where={"user_id": db_pr.user_id})
+ ).key
+ for session in selected_sessions_ts:
+ await db.session.create(
+ {
+ "pull": {"connect": {"id": db_pr.id}},
+ "timestamp": session,
+ "filename": f"/home/onboard/recordings/{stream_key}/{datetime.strptime(session, '%Y-%m-%dT%H:%M:%S.%fZ').strftime('%Y-%m-%d_%H-%M-%S-%f')}.mp4",
+ "duration": get_recording_duration(session, stream_key),
+ }
+ )
+ await bolt.client.chat_delete(
+ channel=body["container"]["channel_id"], ts=body["message"]["ts"]
+ )
+ print(pr_id, selected_sessions_ts)
+
+
@bolt.action("deny")
async def deny(ack, body):
await ack()
message = body["message"]
applicant_slack_id = message["blocks"][len(message) - 3]["text"]["text"].split(
": "
- )[
- 1
- ] # I hate it. You hate it. We all hate it. Carry on.
+ )[1] # I hate it. You hate it. We all hate it. Carry on.
applicant_name = message["blocks"][len(message) - 7]["text"]["text"].split(
"Name: "
- )[
- 1
- ] # oops i did it again
+ )[1] # oops i did it again
await bolt.client.chat_delete(
channel=body["container"]["channel_id"], ts=message["ts"]
)
@@ -199,21 +493,13 @@ async def deny(ack, body):
@bolt.action("approve")
async def approve(ack, body):
await ack()
- try:
- await db.connect()
- except Exception:
- pass
message = body["message"]
applicant_slack_id = message["blocks"][len(message) - 3]["text"]["text"].split(
": "
- )[
- 1
- ] # I hate it. You hate it. We all hate it. Carry on.
+ )[1] # I hate it. You hate it. We all hate it. Carry on.
applicant_name = message["blocks"][len(message) - 7]["text"]["text"].split(
"Name: "
- )[
- 1
- ] # oops i did it again
+ )[1] # oops i did it again
await bolt.client.chat_delete(
channel=body["container"]["channel_id"], ts=message["ts"]
)
@@ -234,14 +520,14 @@ async def approve(ack, body):
)
async with httpx.AsyncClient() as client:
await client.post(
- "http://127.0.0.1:9997/v3/config/paths/add/" + new_stream.key,
+ f"http://{os.environ['MEDIAMTX_IP']}:9997/v3/config/paths/add/"
+ + new_stream.key,
json={"name": new_stream.key},
)
await bolt.client.chat_postMessage(
channel=sumbitter_convo["channel"]["id"],
text=f"Welcome to OnBoard Live! Your stream key is {new_stream.key}. To use your stream key the easy way, go to . You can also use it in OBS with the server URL of rtmp://live.onboard.hackclub.com:1935",
)
- await db.disconnect()
@bolt.view("apply")
@@ -266,9 +552,6 @@ async def handle_application_submission(ack, body):
channel=sumbitter_convo["channel"]["id"],
text=f"Your application has been submitted! We will review it shortly. Please do not send another application - If you haven't heard back in over 48 hours, or you forgot something in your application, please message <@{os.environ['ADMIN_SLACK_ID']}>! Here's a copy of your responses for your reference:\nSome info on your project(s): {body['view']['state']['values']['project-info']['project-info-body']['value']}\n{f'Please fill out ! We can only approve your application once this is done.' if not user_verified else ''}",
)
- admin_convo = await bolt.client.conversations_open(
- users=os.environ["ADMIN_SLACK_ID"], return_im=True
- )
will_behave = True
# boxes = body["view"]["state"]["values"]["kAgeY"]["checkboxes"]["selected_options"]
# if len(boxes) == 1 and boxes[0]["value"] == "value-1":
@@ -355,6 +638,29 @@ async def handle_application_submission(ack, body):
)
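+# Slash command that starts PR verification: checks the PR exists, then replies with a GitHub auth link.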
+@bolt.command("/onboard-live-submit")
+async def submit(ack: AsyncAck, command):
+ await ack()
+ user_id = command["user_id"]
+ channel_id = command["channel_id"]
+ text = command["text"]
+ db_pr = await db.pullrequest.find_first(where={"github_id": int(text)})
+ if db_pr is None:
+ await bolt.client.chat_postEphemeral(
+ channel=channel_id,
+ user=user_id,
+ text="There doesn't seem to be a PR open with that ID! If this seems like a mistake, please message <@U05C64XMMHV> about it!",
+ )
+ return
+ if user_id not in FERNET_KEY_USERS:
+ FERNET_KEY_USERS.append(user_id)
+ await bolt.client.chat_postEphemeral(
+ channel=channel_id,
+ user=user_id,
+ text=f"Please to authenticate with GitHub. This helps us verify that this is your PR!",
+ )
+
+
@bolt.command("/onboard-live-apply")
async def apply(ack: AsyncAck, command):
await ack()
@@ -536,10 +842,22 @@ async def apply(ack: AsyncAck, command):
@bolt.action("checkboxes")
-async def handle_some_action(ack):
+async def checkboxes(ack):
+ """
+ AFAICT there needs to be *an* action for the checkboxes, but I process their data elsewhere (on submit)
+ To avoid warnings in Slack, I'm just ACKing it here and doing nothing :)
+ """
await ack()
@api.post("/slack/events")
async def slack_event_endpoint(req: Request):
return await bolt_handler.handle(req)
+
+
+def main():
+ uvicorn.run(api)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/backend/migrations/20240815175119_pls_work/migration.sql b/backend/migrations/20240815175119_pls_work/migration.sql
new file mode 100644
index 0000000..0d0ad12
--- /dev/null
+++ b/backend/migrations/20240815175119_pls_work/migration.sql
@@ -0,0 +1,41 @@
+/*
+ Warnings:
+
+ - You are about to drop the column `active` on the `Stream` table. All the data in the column will be lost.
+ - You are about to drop the column `focused` on the `Stream` table. All the data in the column will be lost.
+ - The primary key for the `User` table will be changed. If it partially fails, the table could be left without primary key constraint.
+ - You are about to drop the column `slackId` on the `User` table. All the data in the column will be lost.
+ - Added the required column `user_id` to the `Stream` table without a default value. This is not possible if the table is not empty.
+ - The required column `id` was added to the `User` table with a prisma-level default value. This is not possible if the table is not empty. Please add this column as optional, then populate it before making it required.
+ - Added the required column `slack_id` to the `User` table without a default value. This is not possible if the table is not empty.
+
+*/
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_Stream" (
+ "id" TEXT NOT NULL PRIMARY KEY,
+ "created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "is_live" BOOLEAN NOT NULL DEFAULT false,
+ "is_focused" BOOLEAN NOT NULL DEFAULT false,
+ "key" TEXT NOT NULL,
+ "user_id" TEXT NOT NULL,
+ CONSTRAINT "Stream_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
+);
+INSERT INTO "new_Stream" ("id", "key") SELECT "id", "key" FROM "Stream";
+DROP TABLE "Stream";
+ALTER TABLE "new_Stream" RENAME TO "Stream";
+CREATE UNIQUE INDEX "Stream_key_key" ON "Stream"("key");
+CREATE UNIQUE INDEX "Stream_user_id_key" ON "Stream"("user_id");
+CREATE TABLE "new_User" (
+ "id" TEXT NOT NULL PRIMARY KEY,
+ "created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "slack_id" TEXT NOT NULL,
+ "name" TEXT NOT NULL
+);
+INSERT INTO "new_User" ("name") SELECT "name" FROM "User";
+DROP TABLE "User";
+ALTER TABLE "new_User" RENAME TO "User";
+CREATE UNIQUE INDEX "User_slack_id_key" ON "User"("slack_id");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240816163601_add_pr_model/migration.sql b/backend/migrations/20240816163601_add_pr_model/migration.sql
new file mode 100644
index 0000000..28b85a1
--- /dev/null
+++ b/backend/migrations/20240816163601_add_pr_model/migration.sql
@@ -0,0 +1,10 @@
+-- CreateTable
+CREATE TABLE "PullRequest" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "userId" TEXT NOT NULL,
+ "token" TEXT NOT NULL,
+ CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
+);
+
+-- CreateIndex
+CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");
diff --git a/backend/migrations/20240816164220_make_pr_model_optional/migration.sql b/backend/migrations/20240816164220_make_pr_model_optional/migration.sql
new file mode 100644
index 0000000..e1751ed
--- /dev/null
+++ b/backend/migrations/20240816164220_make_pr_model_optional/migration.sql
@@ -0,0 +1,15 @@
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_PullRequest" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "userId" TEXT,
+ "token" TEXT NOT NULL,
+ CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+INSERT INTO "new_PullRequest" ("id", "token", "userId") SELECT "id", "token", "userId" FROM "PullRequest";
+DROP TABLE "PullRequest";
+ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
+CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240816164920_add_pr_number_field/migration.sql b/backend/migrations/20240816164920_add_pr_number_field/migration.sql
new file mode 100644
index 0000000..de12636
--- /dev/null
+++ b/backend/migrations/20240816164920_add_pr_number_field/migration.sql
@@ -0,0 +1,23 @@
+/*
+ Warnings:
+
+ - Added the required column `github_id` to the `PullRequest` table without a default value. This is not possible if the table is not empty.
+
+*/
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_PullRequest" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "github_id" INTEGER NOT NULL,
+ "userId" TEXT,
+ "token" TEXT NOT NULL,
+ CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+INSERT INTO "new_PullRequest" ("id", "token", "userId") SELECT "id", "token", "userId" FROM "PullRequest";
+DROP TABLE "PullRequest";
+ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
+CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
+CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240817173150_add_sec_token/migration.sql b/backend/migrations/20240817173150_add_sec_token/migration.sql
new file mode 100644
index 0000000..f9cdd01
--- /dev/null
+++ b/backend/migrations/20240817173150_add_sec_token/migration.sql
@@ -0,0 +1,25 @@
+/*
+ Warnings:
+
+ - The required column `secondary_token` was added to the `PullRequest` table with a prisma-level default value. This is not possible if the table is not empty. Please add this column as optional, then populate it before making it required.
+
+*/
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_PullRequest" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "github_id" INTEGER NOT NULL,
+ "userId" TEXT,
+ "token" TEXT NOT NULL,
+ "secondary_token" TEXT NOT NULL,
+ CONSTRAINT "PullRequest_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+INSERT INTO "new_PullRequest" ("github_id", "id", "token", "userId") SELECT "github_id", "id", "token", "userId" FROM "PullRequest";
+DROP TABLE "PullRequest";
+ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
+CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
+CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");
+CREATE UNIQUE INDEX "PullRequest_secondary_token_key" ON "PullRequest"("secondary_token");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql b/backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql
new file mode 100644
index 0000000..b58ceeb
--- /dev/null
+++ b/backend/migrations/20240817193412_refactor_pull_user_relation/migration.sql
@@ -0,0 +1,39 @@
+/*
+ Warnings:
+
+ - You are about to drop the column `userId` on the `PullRequest` table. All the data in the column will be lost.
+
+*/
+-- CreateTable
+CREATE TABLE "_PullRequestToPossibleUser" (
+ "A" INTEGER NOT NULL,
+ "B" TEXT NOT NULL,
+ CONSTRAINT "_PullRequestToPossibleUser_A_fkey" FOREIGN KEY ("A") REFERENCES "PullRequest" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
+ CONSTRAINT "_PullRequestToPossibleUser_B_fkey" FOREIGN KEY ("B") REFERENCES "User" ("id") ON DELETE CASCADE ON UPDATE CASCADE
+);
+
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_PullRequest" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "github_id" INTEGER NOT NULL,
+ "known_user_id" TEXT,
+ "token" TEXT NOT NULL,
+ "secondary_token" TEXT NOT NULL,
+ CONSTRAINT "PullRequest_known_user_id_fkey" FOREIGN KEY ("known_user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+INSERT INTO "new_PullRequest" ("github_id", "id", "secondary_token", "token") SELECT "github_id", "id", "secondary_token", "token" FROM "PullRequest";
+DROP TABLE "PullRequest";
+ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
+CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
+CREATE UNIQUE INDEX "PullRequest_token_key" ON "PullRequest"("token");
+CREATE UNIQUE INDEX "PullRequest_secondary_token_key" ON "PullRequest"("secondary_token");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
+
+-- CreateIndex
+CREATE UNIQUE INDEX "_PullRequestToPossibleUser_AB_unique" ON "_PullRequestToPossibleUser"("A", "B");
+
+-- CreateIndex
+CREATE INDEX "_PullRequestToPossibleUser_B_index" ON "_PullRequestToPossibleUser"("B");
diff --git a/backend/migrations/20240821135629_prep_for_oauth/migration.sql b/backend/migrations/20240821135629_prep_for_oauth/migration.sql
new file mode 100644
index 0000000..4b1ef4e
--- /dev/null
+++ b/backend/migrations/20240821135629_prep_for_oauth/migration.sql
@@ -0,0 +1,10 @@
+/*
+ Warnings:
+
+ - You are about to drop the `_PullRequestToPossibleUser` table. If the table is not empty, all the data it contains will be lost.
+
+*/
+-- DropTable
+PRAGMA foreign_keys=off;
+DROP TABLE "_PullRequestToPossibleUser";
+PRAGMA foreign_keys=on;
diff --git a/backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql b/backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql
new file mode 100644
index 0000000..09deab9
--- /dev/null
+++ b/backend/migrations/20240823141450_refactor_schema_for_oauth/migration.sql
@@ -0,0 +1,26 @@
+/*
+ Warnings:
+
+ - You are about to drop the column `known_user_id` on the `PullRequest` table. All the data in the column will be lost.
+ - You are about to drop the column `secondary_token` on the `PullRequest` table. All the data in the column will be lost.
+ - You are about to drop the column `token` on the `PullRequest` table. All the data in the column will be lost.
+
+*/
+-- AlterTable
+ALTER TABLE "User" ADD COLUMN "github_user_id" TEXT;
+
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_PullRequest" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "github_id" INTEGER NOT NULL,
+ "user_id" TEXT,
+ CONSTRAINT "PullRequest_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+INSERT INTO "new_PullRequest" ("github_id", "id") SELECT "github_id", "id" FROM "PullRequest";
+DROP TABLE "PullRequest";
+ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
+CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240823141852_add_gh_token_field/migration.sql b/backend/migrations/20240823141852_add_gh_token_field/migration.sql
new file mode 100644
index 0000000..994913c
--- /dev/null
+++ b/backend/migrations/20240823141852_add_gh_token_field/migration.sql
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "User" ADD COLUMN "github_token" TEXT;
diff --git a/backend/migrations/20240823145449_more_pr_schema_work/migration.sql b/backend/migrations/20240823145449_more_pr_schema_work/migration.sql
new file mode 100644
index 0000000..cce7526
--- /dev/null
+++ b/backend/migrations/20240823145449_more_pr_schema_work/migration.sql
@@ -0,0 +1,25 @@
+/*
+ Warnings:
+
+ - You are about to drop the column `github_token` on the `User` table. All the data in the column will be lost.
+
+*/
+-- AlterTable
+ALTER TABLE "PullRequest" ADD COLUMN "gh_user_id" TEXT;
+
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_User" (
+ "id" TEXT NOT NULL PRIMARY KEY,
+ "created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "slack_id" TEXT NOT NULL,
+ "name" TEXT NOT NULL,
+ "github_user_id" TEXT
+);
+INSERT INTO "new_User" ("created_at", "github_user_id", "id", "name", "slack_id") SELECT "created_at", "github_user_id", "id", "name", "slack_id" FROM "User";
+DROP TABLE "User";
+ALTER TABLE "new_User" RENAME TO "User";
+CREATE UNIQUE INDEX "User_slack_id_key" ON "User"("slack_id");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240823151408_remove_duplicated_field/migration.sql b/backend/migrations/20240823151408_remove_duplicated_field/migration.sql
new file mode 100644
index 0000000..815c31e
--- /dev/null
+++ b/backend/migrations/20240823151408_remove_duplicated_field/migration.sql
@@ -0,0 +1,21 @@
+/*
+ Warnings:
+
+ - You are about to drop the column `github_user_id` on the `User` table. All the data in the column will be lost.
+
+*/
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_User" (
+ "id" TEXT NOT NULL PRIMARY KEY,
+ "created_at" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "slack_id" TEXT NOT NULL,
+ "name" TEXT NOT NULL
+);
+INSERT INTO "new_User" ("created_at", "id", "name", "slack_id") SELECT "created_at", "id", "name", "slack_id" FROM "User";
+DROP TABLE "User";
+ALTER TABLE "new_User" RENAME TO "User";
+CREATE UNIQUE INDEX "User_slack_id_key" ON "User"("slack_id");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240823151722_make_gh_user_id_required/migration.sql b/backend/migrations/20240823151722_make_gh_user_id_required/migration.sql
new file mode 100644
index 0000000..57b2450
--- /dev/null
+++ b/backend/migrations/20240823151722_make_gh_user_id_required/migration.sql
@@ -0,0 +1,22 @@
+/*
+ Warnings:
+
+ - Made the column `gh_user_id` on table `PullRequest` required. This step will fail if there are existing NULL values in that column.
+
+*/
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_PullRequest" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "github_id" INTEGER NOT NULL,
+ "user_id" TEXT,
+ "gh_user_id" TEXT NOT NULL,
+ CONSTRAINT "PullRequest_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+INSERT INTO "new_PullRequest" ("gh_user_id", "github_id", "id", "user_id") SELECT "gh_user_id", "github_id", "id", "user_id" FROM "PullRequest";
+DROP TABLE "PullRequest";
+ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
+CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql b/backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql
new file mode 100644
index 0000000..ccfb293
--- /dev/null
+++ b/backend/migrations/20240823152458_make_gh_user_id_an_int/migration.sql
@@ -0,0 +1,22 @@
+/*
+ Warnings:
+
+ - You are about to alter the column `gh_user_id` on the `PullRequest` table. The data in that column could be lost. The data in that column will be cast from `String` to `Int`.
+
+*/
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_PullRequest" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "github_id" INTEGER NOT NULL,
+ "user_id" TEXT,
+ "gh_user_id" INTEGER NOT NULL,
+ CONSTRAINT "PullRequest_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE
+);
+INSERT INTO "new_PullRequest" ("gh_user_id", "github_id", "id", "user_id") SELECT "gh_user_id", "github_id", "id", "user_id" FROM "PullRequest";
+DROP TABLE "PullRequest";
+ALTER TABLE "new_PullRequest" RENAME TO "PullRequest";
+CREATE UNIQUE INDEX "PullRequest_github_id_key" ON "PullRequest"("github_id");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240831153915_add_session_model/migration.sql b/backend/migrations/20240831153915_add_session_model/migration.sql
new file mode 100644
index 0000000..8fece81
--- /dev/null
+++ b/backend/migrations/20240831153915_add_session_model/migration.sql
@@ -0,0 +1,14 @@
+-- CreateTable
+CREATE TABLE "Session" (
+ "id" TEXT NOT NULL PRIMARY KEY,
+ "user_id" TEXT NOT NULL,
+ "timestamp" TEXT NOT NULL,
+ "filename" TEXT NOT NULL,
+ "duration" INTEGER NOT NULL,
+ "reviewed" BOOLEAN NOT NULL DEFAULT false,
+ "approved" BOOLEAN NOT NULL DEFAULT false,
+ CONSTRAINT "Session_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "User" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
+);
+
+-- CreateIndex
+CREATE UNIQUE INDEX "Session_user_id_key" ON "Session"("user_id");
diff --git a/backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql b/backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql
new file mode 100644
index 0000000..64e8757
--- /dev/null
+++ b/backend/migrations/20240831155249_relate_session_model_to_pr_not_user/migration.sql
@@ -0,0 +1,26 @@
+/*
+ Warnings:
+
+ - You are about to drop the column `user_id` on the `Session` table. All the data in the column will be lost.
+ - Added the required column `pr_id` to the `Session` table without a default value. This is not possible if the table is not empty.
+
+*/
+-- RedefineTables
+PRAGMA defer_foreign_keys=ON;
+PRAGMA foreign_keys=OFF;
+CREATE TABLE "new_Session" (
+ "id" TEXT NOT NULL PRIMARY KEY,
+ "pr_id" INTEGER NOT NULL,
+ "timestamp" TEXT NOT NULL,
+ "filename" TEXT NOT NULL,
+ "duration" INTEGER NOT NULL,
+ "reviewed" BOOLEAN NOT NULL DEFAULT false,
+ "approved" BOOLEAN NOT NULL DEFAULT false,
+ CONSTRAINT "Session_pr_id_fkey" FOREIGN KEY ("pr_id") REFERENCES "PullRequest" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
+);
+INSERT INTO "new_Session" ("approved", "duration", "filename", "id", "reviewed", "timestamp") SELECT "approved", "duration", "filename", "id", "reviewed", "timestamp" FROM "Session";
+DROP TABLE "Session";
+ALTER TABLE "new_Session" RENAME TO "Session";
+CREATE UNIQUE INDEX "Session_pr_id_key" ON "Session"("pr_id");
+PRAGMA foreign_keys=ON;
+PRAGMA defer_foreign_keys=OFF;
diff --git a/backend/migrations/20240831222739_remove_unique_constraint/migration.sql b/backend/migrations/20240831222739_remove_unique_constraint/migration.sql
new file mode 100644
index 0000000..cf824ee
--- /dev/null
+++ b/backend/migrations/20240831222739_remove_unique_constraint/migration.sql
@@ -0,0 +1,2 @@
+-- DropIndex
+DROP INDEX "Session_pr_id_key";
diff --git a/backend/migrations/20240831222937_add_different_unique_constraint/migration.sql b/backend/migrations/20240831222937_add_different_unique_constraint/migration.sql
new file mode 100644
index 0000000..25d4f10
--- /dev/null
+++ b/backend/migrations/20240831222937_add_different_unique_constraint/migration.sql
@@ -0,0 +1,8 @@
+/*
+ Warnings:
+
+ - A unique constraint covering the columns `[filename]` on the table `Session` will be added. If there are existing duplicate values, this will fail.
+
+*/
+-- CreateIndex
+CREATE UNIQUE INDEX "Session_filename_key" ON "Session"("filename");
diff --git a/backend/requirements.txt b/backend/requirements.txt
new file mode 100644
index 0000000..47b78db
--- /dev/null
+++ b/backend/requirements.txt
@@ -0,0 +1,79 @@
+aiofiles==24.1.0
+aiohttp==3.9.5
+aiosignal==1.3.1
+annotated-types==0.7.0
+anyio==4.4.0
+APScheduler==3.10.4
+attrs==23.2.0
+black==24.4.2
+build==1.2.1
+certifi==2024.7.4
+cffi==1.17.0
+charset-normalizer==3.3.2
+click==8.1.7
+cryptography==43.0.0
+defusedxml==0.8.0rc2
+dnspython==2.6.1
+ecdsa==0.19.0
+email_validator==2.2.0
+fastapi==0.112.0
+fastapi-cli==0.0.4
+fastapi-oauth2==1.0.0
+fastapi-utils==0.7.0
+frozenlist==1.4.1
+h11==0.14.0
+httpcore==1.0.5
+httptools==0.6.1
+httpx==0.27.0
+idna==3.7
+Jinja2==3.1.4
+markdown-it-py==3.0.0
+MarkupSafe==2.1.5
+mdurl==0.1.2
+multidict==6.0.5
+mypy==1.11.0
+mypy-extensions==1.0.0
+nodeenv==1.9.1
+numpy==2.1.0
+oauthlib==3.2.2
+opencv-python==4.10.0.84
+packaging==24.1
+pathspec==0.12.1
+platformdirs==4.2.2
+prisma==0.14.0
+psutil==5.9.8
+pyasn1==0.6.0
+pycparser==2.22
+pydantic==2.8.2
+pydantic_core==2.20.1
+Pygments==2.18.0
+PyJWT==2.9.0
+pyproject_hooks==1.1.0
+python-dotenv==1.0.1
+python-jose==3.3.0
+python-multipart==0.0.9
+python3-openid==3.2.0
+pytz==2024.1
+PyYAML==6.0.1
+requests==2.32.3
+requests-oauthlib==2.0.0
+rich==13.7.1
+rsa==4.9
+shellingham==1.5.4
+six==1.16.0
+slack_bolt==1.20.0
+slack_sdk==3.31.0
+sniffio==1.3.1
+social-auth-core==4.5.4
+starlette==0.37.2
+tomlkit==0.13.0
+typer==0.12.3
+typing-inspect==0.9.0
+typing_extensions==4.12.2
+tzlocal==5.2
+urllib3==2.2.2
+uvicorn==0.30.6
+uvloop==0.19.0
+watchfiles==0.22.0
+websockets==12.0
+yarl==1.9.4
diff --git a/backend/schema.prisma b/backend/schema.prisma
index a4ff3c8..dca2c2e 100644
--- a/backend/schema.prisma
+++ b/backend/schema.prisma
@@ -1,20 +1,21 @@
generator client {
provider = "prisma-client-py"
+ recursive_type_depth = "5"
interface = "asyncio"
- recursive_type_depth = 5
}
datasource db {
provider = "sqlite"
- url = "file:./dev.db"
+ url = "file:./db/dev.db"
}
model User {
- id String @id @default(cuid())
- created_at DateTime @default(now())
- slack_id String @unique
- name String
- stream Stream?
+ id String @id @default(cuid())
+ created_at DateTime @default(now())
+ slack_id String @unique
+ name String
+ pull_requests PullRequest[] @relation("PullRequestToUser")
+ stream Stream?
}
model Stream {
@@ -23,6 +24,26 @@ model Stream {
is_live Boolean @default(false)
is_focused Boolean @default(false)
key String @unique @default(uuid())
- user User @relation(fields: [user_id], references: [id])
user_id String @unique
+ user User @relation(fields: [user_id], references: [id])
+}
+
+model PullRequest {
+ id Int @id @default(autoincrement())
+ github_id Int @unique
+ user_id String?
+ gh_user_id Int
+ user User? @relation("PullRequestToUser", fields: [user_id], references: [id])
+ sessions Session[]
+}
+
+model Session {
+ id String @id @default(cuid())
+ pr_id Int
+ pull PullRequest @relation(fields: [pr_id], references: [id])
+ timestamp String
+ filename String @unique
+ duration Int // in minutes
+ reviewed Boolean @default(false)
+ approved Boolean @default(false)
}
diff --git a/caddy/Caddyfile b/caddy/Caddyfile
new file mode 100644
index 0000000..127ced7
--- /dev/null
+++ b/caddy/Caddyfile
@@ -0,0 +1,12 @@
+live.onboard.hackclub.com {
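+	# Default to the MediaMTX WebRTC front end; Slack, GitHub, and OAuth routes go to the backend.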
+ reverse_proxy host.containers.internal:8889
+ handle /slack/* {
+ reverse_proxy host.containers.internal:8000
+ }
+ handle /api/v1/github/* {
+ reverse_proxy host.containers.internal:8000
+ }
+ handle /auth/* {
+ reverse_proxy host.containers.internal:8000
+ }
+}
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..158a698
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,71 @@
+services:
+ mediamtx:
+ restart: unless-stopped
+ network_mode: "host"
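+    # Host networking lets MediaMTX bind its RTMP/WebRTC/API ports directly; the port list below is kept for reference.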
+ build:
+ context: ./mediamtx
+ dockerfile: Dockerfile
+ volumes:
+ - mediamtx_recordings:/recordings
+ # ports:
+ # - "8889:8889"
+ # - "1935:1935"
+ # - "9997:9997"
+ web-frontend:
+ ports:
+ - "4173:4173"
+ build:
+ context: ./tiling-frontend
+ dockerfile: Dockerfile
+ volumes:
+ - tiling_frontend_build:/usr/src/app/dist
+ live-stream:
+ deploy:
+ resources:
+ limits:
+ memory: 8192M
+ reservations:
+ memory: 8192M
+ network_mode: "host"
+ shm_size: '8gb'
+ restart: unless-stopped
+ env_file: .stream.env
+ depends_on:
+ backend:
+ condition: service_started
+ web-frontend:
+ condition: service_started
+ build:
+ context: ./live-stream
+ dockerfile: Dockerfile
+ volumes:
+ - tiling_frontend_build:/html
+ backend:
+ network_mode: "host"
+ restart: unless-stopped
+ env_file: .backend.env
+ build:
+ context: ./backend
+ dockerfile: Dockerfile
+ volumes:
+ - ./backend/db:/usr/src/app/db
+    depends_on:
+      mediamtx:
+        condition: service_started
+ caddy:
+ image: docker.io/caddy:alpine
+ restart: unless-stopped
+ cap_add:
+ - NET_ADMIN
+ ports:
+ - "80:80"
+ - "443:443"
+ - "443:443/udp"
+ volumes:
+ - $PWD/caddy/Caddyfile:/etc/caddy/Caddyfile
+ - caddy_data:/data
+      - caddy_config:/config
+volumes:
+ mediamtx_recordings:
+ tiling_frontend_build:
+ caddy_data:
+ caddy_config:
diff --git a/live-stream/Dockerfile b/live-stream/Dockerfile
new file mode 100644
index 0000000..ba4c3b8
--- /dev/null
+++ b/live-stream/Dockerfile
@@ -0,0 +1,25 @@
+FROM ubuntu:latest
+
+RUN apt update
+
+RUN apt install -y ffmpeg xvfb software-properties-common dbus-x11 pulseaudio
+
+RUN add-apt-repository -y ppa:xtradeb/apps
+
+RUN apt update
+
+RUN apt install -y chromium
+
+RUN apt install -y sudo
+
+RUN rm -rf /var/lib/apt/lists/*
+
+RUN apt clean
+
+RUN useradd -ms /bin/bash stream
+
+COPY run.sh ./
+
+COPY user_run.sh /home/stream
+
+ENTRYPOINT ["./run.sh"]
diff --git a/live-stream/run.sh b/live-stream/run.sh
new file mode 100755
index 0000000..bac6bb7
--- /dev/null
+++ b/live-stream/run.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+dbus-daemon --config-file=/usr/share/dbus-1/system.conf &
+
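+# Stash the stream key where the unprivileged "stream" user can read it.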
+echo "$YT_STREAM_KEY" > /home/stream/key.txt
+
+chown stream /home/stream/key.txt
+
+chown stream /home/stream/user_run.sh
+
+sudo -i -u stream bash /home/stream/user_run.sh
diff --git a/live-stream/user_run.sh b/live-stream/user_run.sh
new file mode 100755
index 0000000..c60d68c
--- /dev/null
+++ b/live-stream/user_run.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+pulseaudio -D &
+sleep 2
+
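+# Create a virtual audio sink so ffmpeg can capture Chromium's audio output.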
+pacmd load-module module-null-sink sink_name=VirtSink
+pacmd update-sink-proplist VirtSink device.description=VirtSink
+
+export CHROMIUM_FLAGS="--disable-software-rasterizer"
+export LIBGL_ALWAYS_INDIRECT=1
+
+
+bash -c "DISPLAY=:99 xvfb-run \
+ --server-num 99 \
+ -s \"-nocursor -ac -screen 0 1920x1080x24\" \
+ dbus-launch chromium \
+ --temp-profile \
+ --window-size=1920,1080 \
+ --disable-gpu \
+ --window-position=0,0 \
+ --hide-scrollbars \
+ --autoplay-policy=no-user-gesture-required \
+ --app=http://localhost:4173" & disown
+
+bash -c "sleep 3 && DISPLAY=:99 ffmpeg -f x11grab -r 60 -s 1920x1080 -draw_mouse 0 -i :99.0 -f pulse -ac 2 -i default -vcodec libx264 -preset medium -b:v 7000k -framerate 60 -g 2 -pix_fmt yuv420p -acodec aac -f flv rtmp://x.rtmp.youtube.com/live2/$(cat /home/stream/key.txt)"
diff --git a/mediamtx/Dockerfile b/mediamtx/Dockerfile
new file mode 100644
index 0000000..ce4ca1c
--- /dev/null
+++ b/mediamtx/Dockerfile
@@ -0,0 +1,5 @@
+FROM docker.io/bluenviron/mediamtx
+
+COPY . /
+
+ENTRYPOINT ["/mediamtx"]
diff --git a/mediamtx/mediamtx.yml b/mediamtx/mediamtx.yml
new file mode 100644
index 0000000..db3e986
--- /dev/null
+++ b/mediamtx/mediamtx.yml
@@ -0,0 +1,25 @@
+playback: yes
+playbackAddress: :9996
+playbackTrustedProxies: [ '127.0.0.1' ]
+api: yes
+pathDefaults:
+ record: yes
+ # Path of recording segments.
+ # Extension is added automatically.
+ # Available variables are %path (path name), %Y %m %d %H %M %S %f %s (time in strftime format)
+ recordPath: /recordings/%path/%Y-%m-%d_%H-%M-%S-%f
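+  # 0s disables automatic deletion of recorded segments.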
+ recordDeleteAfter: 0s
+webrtcICEServers2:
+ - url: stun:stun.l.google.com:19302
+authInternalUsers:
+ # Username. 'any' means any user, including anonymous ones.
+- user: any
+ # Password. Not used in case of 'any' user.
+ pass:
+ # IPs or networks allowed to use this user. An empty list means any IP.
+ ips: []
+ permissions:
+ - action: api
+ - action: publish
+ - action: playback
+ - action: read
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index b5a1063..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-fastapi
-uvicorn[standard]
-slack-bolt
-requests
-python-dotenv
-prisma
-fastapi-utils
-httpx
\ No newline at end of file
diff --git a/tiling-frontend/Dockerfile b/tiling-frontend/Dockerfile
new file mode 100644
index 0000000..e766ca8
--- /dev/null
+++ b/tiling-frontend/Dockerfile
@@ -0,0 +1,18 @@
+FROM docker.io/oven/bun:slim AS base
+WORKDIR /usr/src/app
+
+FROM base AS install
+RUN mkdir -p /temp/dev
+COPY package.json bun.lockb /temp/dev/
+RUN cd /temp/dev && bun install
+
+RUN mkdir -p /temp/prod
+COPY package.json bun.lockb /temp/prod/
+RUN cd /temp/prod && bun install --production
+
+FROM base AS release
+COPY --from=install /temp/dev/node_modules node_modules
+COPY . .
+RUN bun --bun run build
+ENTRYPOINT ["bun", "--bun", "run", "preview", "--host"]
+
diff --git a/tiling-frontend/bun.lockb b/tiling-frontend/bun.lockb
index ac9ea20..cb12324 100755
Binary files a/tiling-frontend/bun.lockb and b/tiling-frontend/bun.lockb differ
diff --git a/tiling-frontend/src/App.svelte b/tiling-frontend/src/App.svelte
index 417eaa8..9cea327 100644
--- a/tiling-frontend/src/App.svelte
+++ b/tiling-frontend/src/App.svelte
@@ -51,7 +51,7 @@
pathData = newData;
setTimeout(() => {
for (const video in videos) {
- const hlsInstance = new hls({ progressive: false });
+ const hlsInstance = new hls({ backBufferLength: 2 });
hlsInstance.loadSource(
`http://localhost:8888/${video}/index.m3u8`,
);