initial implementation of recordings table
hunterjm authored and blakeblackshear committed Jun 10, 2021
1 parent 8dfff83 · commit 055bd22
Showing 8 changed files with 233 additions and 48 deletions.
12 changes: 10 additions & 2 deletions frigate/app.py
@@ -20,7 +20,7 @@
from frigate.events import EventProcessor, EventCleanup
from frigate.http import create_app
from frigate.log import log_process, root_configurer
from frigate.models import Event
from frigate.models import Event, Recordings
from frigate.mqtt import create_mqtt_client
from frigate.object_processing import TrackedObjectProcessor
from frigate.record import RecordingMaintainer
@@ -134,6 +134,14 @@ def init_queues(self):
        )

    def init_database(self):
        # Migrate DB location
        old_db_path = os.path.join(CLIPS_DIR, "frigate.db")
        if not os.path.isfile(self.config.database.path) and os.path.isfile(
            old_db_path
        ):
            os.rename(old_db_path, self.config.database.path)

        # Migrate DB schema
        migrate_db = SqliteExtDatabase(self.config.database.path)

        # Run migrations
@@ -144,7 +152,7 @@ def init_database(self):
        migrate_db.close()

        self.db = SqliteQueueDatabase(self.config.database.path)
        models = [Event]
        models = [Event, Recordings]
        self.db.bind(models)

    def init_stats(self):
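The new init_database first relocates an existing database from the old clips directory to the configured path, then runs schema migrations against it. A minimal standalone sketch of the relocation pattern (migrate_db_location is a hypothetical helper; the paths mirror the old and new defaults):

import os

OLD_DB_PATH = "/media/frigate/clips/frigate.db"  # old default location (CLIPS_DIR)
NEW_DB_PATH = "/media/frigate/frigate.db"  # new default location (BASE_DIR)


def migrate_db_location(old_path: str, new_path: str) -> None:
    # Move the file only when the new location is empty and the old file
    # exists, so an existing database is never overwritten.
    if not os.path.isfile(new_path) and os.path.isfile(old_path):
        os.rename(old_path, new_path)


migrate_db_location(OLD_DB_PATH, NEW_DB_PATH)

The ordering matters: the file is moved before SqliteExtDatabase opens the configured path, so migrations always run against the relocated database.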
4 changes: 2 additions & 2 deletions frigate/config.py
@@ -13,7 +13,7 @@
import voluptuous as vol
import yaml

from frigate.const import RECORD_DIR, CLIPS_DIR, CACHE_DIR
from frigate.const import BASE_DIR, RECORD_DIR, CLIPS_DIR, CACHE_DIR
from frigate.util import create_mask

logger = logging.getLogger(__name__)
@@ -873,7 +873,7 @@ def to_dict(self) -> Dict[str, Any]:
FRIGATE_CONFIG_SCHEMA = vol.Schema(
    {
        vol.Optional("database", default={}): {
            vol.Optional("path", default=os.path.join(CLIPS_DIR, "frigate.db")): str
            vol.Optional("path", default=os.path.join(BASE_DIR, "frigate.db")): str
        },
        vol.Optional("model", default={"width": 320, "height": 320}): {
            vol.Required("width"): int,
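This schema change only swaps the default database location; an explicit path in the user config still wins. A small sketch of how the voluptuous default resolves (assuming voluptuous is installed; BASE_DIR is inlined here for self-containment):

import os

import voluptuous as vol

BASE_DIR = "/media/frigate"  # mirrors frigate/const.py

schema = vol.Schema(
    {
        vol.Optional("database", default={}): {
            vol.Optional("path", default=os.path.join(BASE_DIR, "frigate.db")): str
        }
    }
)

print(schema({}))
# {'database': {'path': '/media/frigate/frigate.db'}}
print(schema({"database": {"path": "/config/frigate.db"}}))
# {'database': {'path': '/config/frigate.db'}}  -- user override wins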
5 changes: 3 additions & 2 deletions frigate/const.py
@@ -1,3 +1,4 @@
CLIPS_DIR = "/media/frigate/clips"
RECORD_DIR = "/media/frigate/recordings"
BASE_DIR = "/media/frigate"
CLIPS_DIR = f"{BASE_DIR}/clips"
RECORD_DIR = f"{BASE_DIR}/recordings"
CACHE_DIR = "/tmp/cache"
110 changes: 75 additions & 35 deletions frigate/http.py
@@ -27,7 +27,7 @@
from playhouse.shortcuts import model_to_dict

from frigate.const import CLIPS_DIR, RECORD_DIR
from frigate.models import Event
from frigate.models import Event, Recordings
from frigate.stats import stats_snapshot
from frigate.util import calculate_region
from frigate.version import VERSION
@@ -453,26 +453,45 @@ def latest_frame(camera_name):

@bp.route("/<camera_name>/recordings")
def recordings(camera_name):
files = glob.glob(f"{RECORD_DIR}/*/*/*/{camera_name}")
dates = OrderedDict()

if len(files) == 0:
return jsonify([])
# Retrieve all recordings for this camera
recordings = (
Recordings.select()
.where(Recordings.camera == camera_name)
.order_by(Recordings.start_time.asc())
)

files.sort()
last_end = 0
recording: Recordings
for recording in recordings:
date = datetime.fromtimestamp(recording.start_time)
key = date.strftime("%Y-%m-%d")
hour = date.strftime("%H")

dates = OrderedDict()
for path in files:
first = glob.glob(f"{path}/00.*.mp4")
delay = 0
if len(first) > 0:
delay = int(first[0].strip(path).split(".")[1])
search = re.search(r".+/(\d{4}[-]\d{2})/(\d{2})/(\d{2}).+", path)
if not search:
continue
date = f"{search.group(1)}-{search.group(2)}"
if date not in dates:
dates[date] = OrderedDict()
dates[date][search.group(3)] = {"delay": delay, "events": []}
# Create Day Record
if key not in dates:
dates[key] = OrderedDict()

# Create Hour Record
if hour not in dates[key]:
dates[key][hour] = {"delay": {}, "events": []}

# Check for delay
the_hour = datetime.strptime(f"{key} {hour}", "%Y-%m-%d %H").timestamp()
# diff current recording start time and the greater of the previous end time or top of the hour
diff = recording.start_time - max(last_end, the_hour)
# Determine seconds into recording
seconds = 0
if datetime.fromtimestamp(last_end).strftime("%H") == hour:
seconds = int(last_end - the_hour)
# Determine the delay
delay = min(int(diff), 3600 - seconds)
if delay > 1:
# Add an offset for any delay greater than a second
dates[key][hour]["delay"][seconds] = delay

last_end = recording.end_time

# Packing intervals to return all events with same label and overlapping times as one row.
# See: https://blogs.solidq.com/en/sqlserver/packing-intervals/
@@ -511,15 +530,15 @@ def recordings(camera_name):
        camera_name,
    )

    e: Event
    for e in events:
        date = datetime.fromtimestamp(e.start_time)
    event: Event
    for event in events:
        date = datetime.fromtimestamp(event.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")
        if key in dates and hour in dates[key]:
            dates[key][hour]["events"].append(
                model_to_dict(
                    e,
                    event,
                    exclude=[
                        Event.false_positive,
                        Event.zones,
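The new loop above replaces the old single per-hour delay (derived from the first file's name) with a map of gap offsets: for each recording, the gap is measured from the later of the previous recording's end time or the top of the hour, and capped so it never extends past the hour. A standalone sketch of that arithmetic with illustrative timestamps (plain tuples stand in for Recordings rows; output shown for a UTC timezone):

from datetime import datetime

# (start_time, end_time) pairs in epoch seconds; values are illustrative
recordings = [
    (1623340810.0, 1623340870.0),  # starts 10s into the hour
    (1623340990.0, 1623341050.0),  # 120s gap after the previous clip ends
]

dates = {}
last_end = 0
for start_time, end_time in recordings:
    date = datetime.fromtimestamp(start_time)
    key = date.strftime("%Y-%m-%d")
    hour = date.strftime("%H")
    dates.setdefault(key, {}).setdefault(hour, {"delay": {}, "events": []})

    # Top of the current hour as an epoch timestamp
    the_hour = datetime.strptime(f"{key} {hour}", "%Y-%m-%d %H").timestamp()
    # Gap between this start and the previous end (or the top of the hour)
    diff = start_time - max(last_end, the_hour)
    # Seconds of the hour already covered when the previous clip ended
    seconds = 0
    if datetime.fromtimestamp(last_end).strftime("%H") == hour:
        seconds = int(last_end - the_hour)
    delay = min(int(diff), 3600 - seconds)
    if delay > 1:
        dates[key][hour]["delay"][seconds] = delay
    last_end = end_time

print(dates)
# {'2021-06-10': {'16': {'delay': {0: 10, 70: 120}, 'events': []}}}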
@@ -547,29 +566,50 @@

@bp.route("/vod/<path:path>")
def vod(path):
# Make sure we actually have recordings
if not os.path.isdir(f"{RECORD_DIR}/{path}"):
return "Recordings not found.", 404

files = glob.glob(f"{RECORD_DIR}/{path}/*.mp4")
files.sort()
# Break up path
parts = path.split("/")
start_date = datetime.strptime(f"{parts[0]}-{parts[1]} {parts[2]}", "%Y-%m-%d %H")
end_date = start_date + timedelta(hours=1)
start_ts = start_date.timestamp()
end_ts = end_date.timestamp()
camera = parts[3]

# Select all recordings where either the start or end dates fall in the requested hour
recordings = (
Recordings.select()
.where(
(Recordings.start_time.between(start_ts, end_ts))
| (Recordings.end_time.between(start_ts, end_ts))
)
.where(Recordings.camera == camera)
.order_by(Recordings.start_time.asc())
)

clips = []
durations = []
for filename in files:
clips.append({"type": "source", "path": filename})
video = cv2.VideoCapture(filename)
duration = int(
video.get(cv2.CAP_PROP_FRAME_COUNT) / video.get(cv2.CAP_PROP_FPS) * 1000
)
durations.append(duration)

# Should we cache?
parts = path.split("/", 4)
date = datetime.strptime(f"{parts[0]}-{parts[1]} {parts[2]}", "%Y-%m-%d %H")
recording: Recordings
for recording in recordings:
clip = {"type": "source", "path": recording.path}
duration = int(recording.duration * 1000)
# Determine if offset is needed for first clip
if recording.start_time < start_ts:
offset = int((start_ts - recording.start_time) * 1000)
clip["clipFrom"] = offset
duration -= offset
# Determine if we need to end the last clip early
if recording.end_time > end_ts:
duration -= int((recording.end_time - end_ts) * 1000)
clips.append(clip)
durations.append(duration)

return jsonify(
{
"cache": datetime.now() - timedelta(hours=2) > date,
"cache": datetime.now() - timedelta(hours=1) > start_date,
"discontinuity": False,
"durations": durations,
"sequences": [{"clips": clips}],
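With durations stored in the recordings table, /vod no longer opens every file with cv2.VideoCapture to count frames; it only trims the first and last clips to the requested hour. A standalone sketch of the trimming arithmetic (a plain dict stands in for a Recordings row; all values are illustrative):

from datetime import datetime, timedelta

start_date = datetime.strptime("2021-06-10 16", "%Y-%m-%d %H")
end_date = start_date + timedelta(hours=1)
start_ts, end_ts = start_date.timestamp(), end_date.timestamp()

# A clip that started 30s before the requested hour and runs 90s into it
recording = {
    "path": "/media/frigate/recordings/2021-06/10/15/back/59.30.mp4",
    "start_time": start_ts - 30,
    "end_time": start_ts + 90,
    "duration": 120.0,
}

clip = {"type": "source", "path": recording["path"]}
duration = int(recording["duration"] * 1000)

# Trim the head of a clip that started before the hour
if recording["start_time"] < start_ts:
    offset = int((start_ts - recording["start_time"]) * 1000)
    clip["clipFrom"] = offset
    duration -= offset

# Trim the tail of a clip that runs past the hour
if recording["end_time"] > end_ts:
    duration -= int((recording["end_time"] - end_ts) * 1000)

print(clip["clipFrom"], duration)  # 30000 90000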
10 changes: 10 additions & 0 deletions frigate/models.py
@@ -1,3 +1,4 @@
from numpy import unique
from peewee import *
from playhouse.sqlite_ext import *

@@ -14,3 +15,12 @@ class Event(Model):
    thumbnail = TextField()
    has_clip = BooleanField(default=True)
    has_snapshot = BooleanField(default=True)


class Recordings(Model):
    id = CharField(null=False, primary_key=True, max_length=30)
    camera = CharField(index=True, max_length=20)
    path = CharField(unique=True)
    start_time = DateTimeField()
    end_time = DateTimeField()
    duration = FloatField()
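A minimal sketch of exercising the new Recordings model on its own against an in-memory SQLite database (peewee only; in Frigate itself the model is bound to the shared SqliteQueueDatabase in FrigateApp.init_database, and the row values below are illustrative):

from peewee import CharField, DateTimeField, FloatField, Model, SqliteDatabase


class Recordings(Model):
    id = CharField(null=False, primary_key=True, max_length=30)
    camera = CharField(index=True, max_length=20)
    path = CharField(unique=True)
    start_time = DateTimeField()
    end_time = DateTimeField()
    duration = FloatField()


db = SqliteDatabase(":memory:")
db.bind([Recordings])
db.create_tables([Recordings])

Recordings.create(
    id="1623340810.0-abc123",  # start timestamp plus random suffix
    camera="back",
    path="/media/frigate/recordings/2021-06/10/16/back/00.10.mp4",
    start_time=1623340810.0,
    end_time=1623340870.0,
    duration=60.0,
)

# The same ordered per-camera query the /recordings endpoint now issues
for rec in (
    Recordings.select()
    .where(Recordings.camera == "back")
    .order_by(Recordings.start_time.asc())
):
    print(rec.path, rec.duration)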
26 changes: 20 additions & 6 deletions frigate/record.py
@@ -1,19 +1,18 @@
import datetime
import itertools
import json
import logging
import os
import queue
import random
import string
import subprocess as sp
import threading
import time
from collections import defaultdict
from pathlib import Path

import psutil

from frigate.config import FrigateConfig
from frigate.const import RECORD_DIR, CLIPS_DIR, CACHE_DIR
from frigate.const import RECORD_DIR
from frigate.models import Recordings

logger = logging.getLogger(__name__)

@@ -84,6 +83,7 @@ def move_files(self):
            p = sp.run(ffprobe_cmd, capture_output=True)
            if p.returncode == 0:
                duration = float(p.stdout.decode().strip())
                end_time = start_time + datetime.timedelta(seconds=duration)
            else:
                logger.info(f"bad file: {f}")
                os.remove(os.path.join(RECORD_DIR, f))
@@ -97,8 +97,21 @@ def move_files(self):
                os.makedirs(directory)

            file_name = f"{start_time.strftime('%M.%S.mp4')}"
            file_path = os.path.join(directory, file_name)

            os.rename(os.path.join(RECORD_DIR, f), os.path.join(directory, file_name))
            os.rename(os.path.join(RECORD_DIR, f), file_path)

            rand_id = "".join(
                random.choices(string.ascii_lowercase + string.digits, k=6)
            )
            Recordings.create(
                id=f"{start_time.timestamp()}-{rand_id}",
                camera=camera,
                path=file_path,
                start_time=start_time.timestamp(),
                end_time=end_time.timestamp(),
                duration=duration,
            )

    def expire_files(self):
        delete_before = {}
@@ -112,6 +125,7 @@ def expire_files(self):
            if not p.parent.name in delete_before:
                continue
            if p.stat().st_mtime < delete_before[p.parent.name]:
                Recordings.delete().where(Recordings.path == str(p)).execute()
                p.unlink(missing_ok=True)

    def run(self):
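The primary key combines the segment's start timestamp with a short random suffix, so two cameras producing segments with identical start times still get unique ids, and expire_files deletes the matching row before unlinking the file so the table never points at missing media. A small sketch of the id scheme (make_recording_id is a hypothetical helper name):

import random
import string
from datetime import datetime


def make_recording_id(start_time: datetime) -> str:
    # Start timestamp plus a 6-character random suffix, as in move_files above
    rand_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))
    return f"{start_time.timestamp()}-{rand_id}"


print(make_recording_id(datetime(2021, 6, 10, 16, 0, 10)))
# e.g. '1623340810.0-k3v9qz' (the timestamp value depends on the local timezone)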