From 45414876b34fe6d81673b4ad9d3335c59a44d10b Mon Sep 17 00:00:00 2001 From: Guzman Lopez Date: Wed, 23 Oct 2024 14:03:30 +0100 Subject: [PATCH] add basic formatting and remove unused imports --- api/__init__.py | 2 - api/deckhand.py | 19 +- config/defaults.py | 22 +- db.py | 1 - dbdumps/create_dump.sh | 33 +- dbdumps/reset_schema.sh | 36 +- dbdumps/restore_dump.sh | 45 +- dbdumps/seed_db.py | 35 +- edge_http.py | 57 +- gps_fetch.py | 84 +- migrations/env.py | 30 +- .../versions/1d1b10e054af_add_timezones.py | 36 +- misc/data/port_locations.sql | 28 +- misc/gpsdata_scratchpad.sql | 180 +- misc/scratchpad_gst-launch_transcoding.sh | 7 +- ...r_data_as_cloudwatch_metrics_experiment.py | 65 +- model/__init__.py | 22 +- model/aifishdata.py | 43 +- model/boatschedule.py | 26 +- model/deckhandeventraw.py | 8 +- model/deckhandeventview.py | 16 +- model/gpsdata.py | 39 +- model/internetdata.py | 64 +- model/ondeckdata.py | 45 +- model/riskvector.py | 65 +- model/test.py | 77 +- model/track.py | 25 +- model/videofiles.py | 48 +- notebooks/catchcount_vector.ipynb | 485 ++- notebooks/edge_integration_charts.ipynb | 331 +- notebooks/elog_analysis.ipynb | 798 ++-- notebooks/helper_functions/aggregations.py | 78 +- notebooks/helper_functions/data_readers.py | 275 +- notebooks/key_event_detection.ipynb | 1493 ++++--- notebooks/timeseries_classifier_model.ipynb | 944 +++-- notebooks/tnc-edge-catch-plots.ipynb | 235 +- notebooks/tnc-edge-data-integration.ipynb | 912 +++-- notebooks/tnc-edge-gps-speed.ipynb | 81 +- notebooks/tnc-edge-network-uptime.ipynb | 128 +- notebooks/tnc-edge-ondeck-ops-df.ipynb | 41 +- notebooks/tnc-edge-system-uptime.ipynb | 820 ++-- notebooks/tnc-edge-vectorprocessing.ipynb | 834 ++-- notebooks/tnc_edge_bv_excel_parsing.ipynb | 317 +- reencode.py | 140 +- requirements.txt | 14 - run_aifish.py | 417 +- run_ondeck.py | 469 ++- s3_uploader.py | 283 +- scripts/adduser_aifish.sh | 8 +- scripts/adduser_ondeck.sh | 16 +- scripts/app-install.sh | 20 +- scripts/box_dot_com/box_reupload.py | 571 ++- scripts/box_dot_com/box_reupload.sh | 2 +- scripts/box_dot_com/box_reupload2.sh | 4 +- scripts/box_dot_com/boxapiexamples.sh | 7 +- scripts/gh_setup.sh | 23 +- scripts/netplan-autoswitcher.sh | 38 +- scripts/purge-video.sh | 4 +- scripts/system-install.sh | 431 +- scripts/video_bulk_copy/local_to_s3_upload.sh | 18 +- scripts/video_bulk_copy/local_to_usbstick.sh | 5 +- .../thalos_all_files_psql_to_usbstick.sh | 4 +- .../thalos_specific_files_to_usbstick.sh | 2 - .../usbdrive_ensure_encrypted.sh | 1 - scripts/vpn-install.sh | 12 +- tests/ondeck_json_to_tracks.py | 77 +- tests/onetimetests/test.py | 18 +- tests/onetimetests/test2.py | 28 +- tests/onetimetests/test3.py | 3215 +++++++-------- tests/onetimetests/test4.py | 3617 +++++++++-------- vector/__init__.py | 18 +- vector/catchcountA.py | 200 +- vector/elogtimegaps.py | 79 +- vector/equipment_outage_agg.py | 73 +- vector/fish_ai.py | 130 +- vector/gps.py | 175 +- vector/internet.py | 87 +- vector/tegrastats.py | 68 +- vector/thalos_mount.py | 62 +- vector/thalos_vids_exist.py | 107 +- vector_schedule.py | 82 +- video_fetch.py | 203 +- 82 files changed, 11029 insertions(+), 8629 deletions(-) delete mode 100644 requirements.txt diff --git a/api/__init__.py b/api/__init__.py index 4e6663d..f6fd8d2 100644 --- a/api/__init__.py +++ b/api/__init__.py @@ -1,3 +1 @@ - from .deckhand import blueprint as deckhand - diff --git a/api/deckhand.py b/api/deckhand.py index 1ab5c4b..ae2eae1 100644 --- a/api/deckhand.py +++ b/api/deckhand.py @@ -1,22 +1,20 @@ 
+import json from http.client import BAD_REQUEST -from flask import Blueprint, make_response, request, g - +from flask import Blueprint, g, make_response, request from sqlalchemy.orm import scoped_session, sessionmaker -from model import DeckhandEventRaw - -import json - from db import db +from model import DeckhandEventRaw -blueprint = Blueprint('DeckhandApi', __name__) +blueprint = Blueprint("DeckhandApi", __name__) # ORM Session # orm_session = scoped_session(sessionmaker()) -@blueprint.route('/', methods=['PUT', 'POST']) + +@blueprint.route("/", methods=["PUT", "POST"]) def event(): d = request.get_json() @@ -25,7 +23,4 @@ def event(): db.session.add(event) db.session.commit() - # for i in r: - # print(i); - - return make_response(('', 200)) + return make_response(("", 200)) diff --git a/config/defaults.py b/config/defaults.py index 5477e5e..cffa69a 100644 --- a/config/defaults.py +++ b/config/defaults.py @@ -1,11 +1,11 @@ -SECRET_KEY='not_so_secret' -DEBUG=True -DBUSER="edge" -DBNAME="edge" -THALOS_VIDEO_DIR="/thalos/videos" -THALOS_CAM_NAME='cam1' -VIDEO_OUTPUT_DIR='/videos' -VIDEO_PASSPHRASE_FILE='/dev/null' -THALOS_VIDEO_SUFFIX='.avi.done' -BOAT_NAME='' -DB_TABLES_VERSION='v1' +SECRET_KEY = "not_so_secret" +DEBUG = True +DBUSER = "edge" +DBNAME = "edge" +THALOS_VIDEO_DIR = "/thalos/videos" +THALOS_CAM_NAME = "cam1" +VIDEO_OUTPUT_DIR = "/videos" +VIDEO_PASSPHRASE_FILE = "/dev/null" +THALOS_VIDEO_SUFFIX = ".avi.done" +BOAT_NAME = "" +DB_TABLES_VERSION = "v1" diff --git a/db.py b/db.py index 21631f6..f0b13d6 100644 --- a/db.py +++ b/db.py @@ -1,4 +1,3 @@ - from flask_sqlalchemy import SQLAlchemy db = SQLAlchemy() diff --git a/dbdumps/create_dump.sh b/dbdumps/create_dump.sh index b718390..efab64c 100644 --- a/dbdumps/create_dump.sh +++ b/dbdumps/create_dump.sh @@ -1,32 +1,29 @@ +#!/bin/bash SCRIPTNAME="$0" -SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")" +SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")" function help { - echo "usage: $SCRIPTNAME [--dbuser USER] [--dbname NAME] " - exit 1 + echo "usage: $SCRIPTNAME [--dbuser USER] [--dbname NAME] " + exit 1 } DBNAME=edge DBUSER=edge -while (( "$#" )); do +while (("$#")); do case $1 in - --dbuser) - shift && DBUSER="$1" || help - ;; - --dbname) - shift && DBNAME="$1" || help - ;; - *) - help - ;; + --dbuser) + shift && DBUSER="$1" || help + ;; + --dbname) + shift && DBNAME="$1" || help + ;; + *) + help + ;; esac shift done - -pg_dump --clean -U "$DBUSER" "$DBNAME" > "$SCRIPTDIR/$(date -u -Iseconds | cut -f1 -d + )Z.pgdump" - - - +pg_dump --clean -U "$DBUSER" "$DBNAME" >"$SCRIPTDIR/$(date -u -Iseconds | cut -f1 -d +)Z.pgdump" diff --git a/dbdumps/reset_schema.sh b/dbdumps/reset_schema.sh index 935142a..c7b5728 100644 --- a/dbdumps/reset_schema.sh +++ b/dbdumps/reset_schema.sh @@ -1,37 +1,35 @@ +#!/bin/bash SCRIPTNAME="$0" -SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")" - +SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")" DBNAME=edge DBUSER=edge -while (( "$#" )); do +while (("$#")); do case $1 in - --dbuser) - shift && DBUSER="$1" || help - ;; - --dbname) - shift && DBNAME="$1" || help - ;; - *) - help - ;; + --dbuser) + shift && DBUSER="$1" || help + ;; + --dbname) + shift && DBNAME="$1" || help + ;; + *) + help + ;; esac shift done - cd "$SCRIPTDIR/.." 
-if [ "$VIRTUAL_ENV" != "$(pwd)/venv" ] ; then - if [ "x$VIRTUAL_ENV" != "x" ] ; then - deactivate - fi - source venv/bin/activate +if [ "$VIRTUAL_ENV" != "$(pwd)/venv" ]; then + if [ "x$VIRTUAL_ENV" != "x" ]; then + deactivate + fi + source venv/bin/activate fi - python -c 'from sqlalchemy import create_engine; \ from model import Base; \ engine = create_engine("postgresql+psycopg2://'$DBUSER'@/'$DBNAME'", echo=True); \ diff --git a/dbdumps/restore_dump.sh b/dbdumps/restore_dump.sh index 093b651..286546d 100644 --- a/dbdumps/restore_dump.sh +++ b/dbdumps/restore_dump.sh @@ -1,11 +1,12 @@ +#!/bin/bash SCRIPTNAME="$0" -SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")" +SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")" function help { - echo "usage: $SCRIPTNAME [--dbuser USER] [--dbname NAME] [DUMPFILE]" - echo " DBDUMP_FILENAME defaults to the latest dumpfile, sorted by filename" - exit 1 + echo "usage: $SCRIPTNAME [--dbuser USER] [--dbname NAME] [DUMPFILE]" + echo " DBDUMP_FILENAME defaults to the latest dumpfile, sorted by filename" + exit 1 } DBNAME=edge @@ -13,28 +14,24 @@ DBUSER=edge DUMPFILE="$(ls $SCRIPTDIR/*.pgdump | sort | tail -n 1)" -while (( "$#" )); do +while (("$#")); do case $1 in - --dbuser) - shift && DBUSER="$1" || help - ;; - --dbname) - shift && DBNAME="$1" || help - ;; - *) - if [ -e "$1" ] ; then - DUMPFILE="$1" - else - echo "file does not exist" - exit 1 - fi - ;; + --dbuser) + shift && DBUSER="$1" || help + ;; + --dbname) + shift && DBNAME="$1" || help + ;; + *) + if [ -e "$1" ]; then + DUMPFILE="$1" + else + echo "file does not exist" + exit 1 + fi + ;; esac shift done - -psql -U "$DBUSER" "$DBNAME" < $DUMPFILE - - - +psql -U "$DBUSER" "$DBNAME" <$DUMPFILE diff --git a/dbdumps/seed_db.py b/dbdumps/seed_db.py index 84a0e02..7c11a01 100644 --- a/dbdumps/seed_db.py +++ b/dbdumps/seed_db.py @@ -1,28 +1,11 @@ -# from flask import Flask -# from flask_admin import Admin import click - from sqlalchemy import create_engine, select from sqlalchemy.orm import Session, sessionmaker -import os - -from model import Base, RiskVector, Test, T - -import sqlite3 +from model import Base, FishAiData, GpsData, InternetData, RiskVector, Test from model.internetdata import InternetData -from model import FishAiData, InternetData, GpsData - -# app = Flask(__name__) -# app.config.from_object('config.defaults') - -# if 'ENVIRONMENT' in os.environ: -# app.config.from_envvar('ENVIRONMENT') -# set optional bootswatch theme -# app.config['FLASK_ADMIN_SWATCH'] = 'cerulean' - def clear_db(session: Session): result = session.execute(select(Test)) for t in result: @@ -48,20 +31,20 @@ def clear_db(session: Session): @click.command() -@click.option('--cleardb', default=False, is_flag=True) -@click.option('--dbname', default="edge") -@click.option('--dbuser', default="edge") -@click.option('--force', default=False, is_flag=True) +@click.option("--cleardb", default=False, is_flag=True) +@click.option("--dbname", default="edge") +@click.option("--dbuser", default="edge") +@click.option("--force", default=False, is_flag=True) def cli(cleardb, dbname, dbuser, force): - - if not force : + if not force: import sys + print("This script is deprecated! 
run `venv/bin/alembic upgrade head` instead.") print("if you really want to run this script, rerun with --force") sys.exit(1) - + # engine = create_engine("sqlite:///db.db", echo=True) - engine = create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + engine = create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) SessionMaker = sessionmaker(engine) session = SessionMaker() if cleardb: diff --git a/edge_http.py b/edge_http.py index 9ca5f67..d29d613 100644 --- a/edge_http.py +++ b/edge_http.py @@ -1,28 +1,28 @@ -import click +import os -from flask import Flask, g +import click +from flask import Flask from flask_admin import Admin from flask_admin.contrib.sqla import ModelView - from sqlalchemy import text -from sqlalchemy.orm import scoped_session, sessionmaker -import os from db import db app = Flask(__name__) -app.config.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - app.config.from_envvar('ENVIRONMENT') +app.config.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + app.config.from_envvar("ENVIRONMENT") -# set optional bootswatch theme -app.config['FLASK_ADMIN_SWATCH'] = 'cerulean' +# Set optional bootswatch theme +app.config["FLASK_ADMIN_SWATCH"] = "cerulean" -app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql+psycopg2://%s@/%s"%(app.config['DBUSER'], app.config['DBNAME']) +app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql+psycopg2://%s@/%s" % ( + app.config["DBUSER"], + app.config["DBNAME"], +) # engine = create_engine("postgresql+psycopg2://%s@/%s"%(app.config['DBUSER'], app.config['DBNAME']), echo=True) - # SessionMaker = scoped_session(sessionmaker(bind=engine)) db.init_app(app) @@ -32,26 +32,38 @@ with app.app_context(): # Base.metadata.create_all(engine) db.metadata.create_all(db.engine) - + from alembic import command, config + cfg = config.Config("alembic.ini") command.upgrade(cfg, "head") with db.engine.begin() as connection: - connection.execute(text("SELECT setval('aifishdata_id_seq', (SELECT MAX(id) FROM aifishdata));")) - connection.execute(text("SELECT setval('boatschedules_id_seq', (SELECT MAX(id) FROM boatschedules));")) - connection.execute(text("SELECT setval('deckhandevents_id_seq', (SELECT MAX(id) FROM deckhandevents));")) - connection.execute(text("SELECT setval('internetdata_id_seq', (SELECT MAX(id) FROM internetdata));")) - connection.execute(text("SELECT setval('ondeckdata_id_seq', (SELECT MAX(id) FROM ondeckdata));")) + connection.execute( + text("SELECT setval('aifishdata_id_seq', (SELECT MAX(id) FROM aifishdata));") + ) + connection.execute( + text("SELECT setval('boatschedules_id_seq', (SELECT MAX(id) FROM boatschedules));") + ) + connection.execute( + text("SELECT setval('deckhandevents_id_seq', (SELECT MAX(id) FROM deckhandevents));") + ) + connection.execute( + text("SELECT setval('internetdata_id_seq', (SELECT MAX(id) FROM internetdata));") + ) + connection.execute( + text("SELECT setval('ondeckdata_id_seq', (SELECT MAX(id) FROM ondeckdata));") + ) connection.execute(text("SELECT setval('tests_id_seq', (SELECT MAX(id) FROM tests));")) connection.execute(text("SELECT setval('tracks_id_seq', (SELECT MAX(id) FROM tracks));")) connection.execute(text("SELECT setval('vectors_id_seq', (SELECT MAX(id) FROM vectors));")) from api import deckhand -app.register_blueprint(deckhand, url_prefix='/deckhand') +app.register_blueprint(deckhand, url_prefix="/deckhand") -admin = Admin(app, name='Risk Assesment', template_mode='bootstrap3') + +admin = Admin(app, name="Risk Assesment", 
template_mode="bootstrap3") # work with session @@ -68,9 +80,10 @@ @click.command() -@click.option('--port', default=50000) +@click.option("--port", default=50000) def serve(port): app.run(host="0.0.0.0", port=port) -if __name__ == '__main__': + +if __name__ == "__main__": serve() diff --git a/gps_fetch.py b/gps_fetch.py index 6ac9c1d..e0bed05 100644 --- a/gps_fetch.py +++ b/gps_fetch.py @@ -1,41 +1,39 @@ - -from datetime import datetime,timezone -from dateutil.parser import isoparse -import click -import codecs import os +import re +import time +from datetime import datetime, timezone from pathlib import Path + +import click import psycopg2 -from psycopg2.pool import SimpleConnectionPool -import re import schedule -import subprocess -import time +from dateutil.parser import isoparse +from flask.config import Config as FlaskConfig +from psycopg2.pool import SimpleConnectionPool +flaskconfig = FlaskConfig(root_path="") -from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') def thalos_gps_filename_date(filename: str) -> datetime: - m = re.match('.*(\d{8}).?(\d{6})\.txt', filename) + m = re.match(".*(\d{8}).?(\d{6})\.txt", filename) if not m: return None return isoparse(m[1] + " " + m[2] + "+00:00") -def gps_fetch(cpool: SimpleConnectionPool, thalos_dir: Path): +def gps_fetch(cpool: SimpleConnectionPool, thalos_dir: Path): conn: psycopg2.connection = cpool.getconn() gps_files = [x for x in thalos_dir.iterdir()] dt_index = {} for gps_file in gps_files: - m = re.match('.*(\d{8}).?(\d{6})\.txt', gps_file.name) + m = re.match(".*(\d{8}).?(\d{6})\.txt", gps_file.name) if not m: continue - dt = datetime.strptime(m[1] + " " + m[2] + "Z", '%Y%m%d %H%M%S%z') + dt = datetime.strptime(m[1] + " " + m[2] + "Z", "%Y%m%d %H%M%S%z") dt_index[dt] = gps_file new_dts = [] @@ -44,37 +42,44 @@ def gps_fetch(cpool: SimpleConnectionPool, thalos_dir: Path): if len(dt_index.keys()) > 0: try: with conn.cursor() as cur: - args = ','.join( - cur.mogrify("(%s)", [dt]).decode('utf-8') - for dt in dt_index.keys() - ) - cur.execute("""WITH t (file_dt) AS ( VALUES """ + args + """ ) - SELECT t.file_dt FROM t + args = ",".join(cur.mogrify("(%s)", [dt]).decode("utf-8") for dt in dt_index.keys()) + cur.execute( + """WITH t (file_dt) AS ( VALUES """ + + args + + """ ) + SELECT t.file_dt FROM t LEFT JOIN gpsdata ON t.file_dt = gpsdata.gps_datetime - WHERE gpsdata.gps_datetime IS NULL;""") + WHERE gpsdata.gps_datetime IS NULL;""" + ) # print(cur.query) # print(cur.description) rows = cur.fetchall() new_dts.extend(col for cols in rows for col in cols) - insert_tuples=[] + insert_tuples = [] for new_dt in new_dts: new_file: Path = dt_index[new_dt.astimezone(timezone.utc)] with new_file.open() as data: line = data.readline() - m = re.match('([+-]?(\d+(\.\d*)?|\.\d+)).*,.*?([+-]?(\d+(\.\d*)?|\.\d+))', line) + m = re.match("([+-]?(\d+(\.\d*)?|\.\d+)).*,.*?([+-]?(\d+(\.\d*)?|\.\d+))", line) if m: lat = m[1] lon = m[4] - insert_tuples.append((new_dt, lat, lon,)) + insert_tuples.append( + ( + new_dt, + lat, + lon, + ) + ) if len(insert_tuples) > 0: - click.echo('inserting {} new gps coords'.format(len(insert_tuples))) + click.echo("inserting {} new gps coords".format(len(insert_tuples))) with conn.cursor() as cur: cur.executemany( "INSERT INTO gpsdata (gps_datetime, lat, 
lon) VALUES (%s, %s, %s);", - insert_tuples + insert_tuples, ) # print(cur.query) conn.commit() @@ -83,22 +88,20 @@ def gps_fetch(cpool: SimpleConnectionPool, thalos_dir: Path): @click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) -@click.option('--thalos_gps_dir', default=flaskconfig.get('THALOS_GPS_DIR')) +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) +@click.option("--thalos_gps_dir", default=flaskconfig.get("THALOS_GPS_DIR")) def main(dbname, dbuser, thalos_gps_dir): - thalos_gps_dir = Path(thalos_gps_dir) cpool = SimpleConnectionPool(1, 1, database=dbname, user=dbuser) - - def runonce(cpool, thalos_gps_dir ): + + def runonce(cpool, thalos_gps_dir): gps_fetch(cpool, thalos_gps_dir) return schedule.CancelJob - schedule.every(1).seconds.do(runonce, cpool, thalos_gps_dir ) - schedule.every(15).minutes.do(gps_fetch, cpool, thalos_gps_dir ) - + schedule.every(1).seconds.do(runonce, cpool, thalos_gps_dir) + schedule.every(15).minutes.do(gps_fetch, cpool, thalos_gps_dir) while 1: n = schedule.idle_seconds() @@ -112,5 +115,6 @@ def runonce(cpool, thalos_gps_dir ): time.sleep(n) schedule.run_pending() -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/migrations/env.py b/migrations/env.py index 96bc046..f6f523a 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -1,9 +1,11 @@ +import os from logging.config import fileConfig -from sqlalchemy import engine_from_config -from sqlalchemy import pool - from alembic import context +from flask.config import Config as FlaskConfig +from sqlalchemy import engine_from_config, pool + +from model import Base # this is the Alembic Config object, which provides # access to the values within the .ini file in use. @@ -18,7 +20,8 @@ # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata -from model import Base + + target_metadata = Base.metadata # other values from the config, defined by the needs of env.py, @@ -26,15 +29,14 @@ # my_important_option = config.get_main_option("my_important_option") # ... etc. 
-import os -from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') +flaskconfig = FlaskConfig(root_path="") + +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -url = "postgresql+psycopg2://%s@/%s"%(flaskconfig['DBUSER'], flaskconfig['DBNAME']) +url = "postgresql+psycopg2://%s@/%s" % (flaskconfig["DBUSER"], flaskconfig["DBNAME"]) def run_migrations_offline() -> None: @@ -69,7 +71,7 @@ def run_migrations_online() -> None: """ alembicconfig = config.get_section(config.config_ini_section, {}) - alembicconfig['sqlalchemy.url'] = url + alembicconfig["sqlalchemy.url"] = url connectable = engine_from_config( alembicconfig, prefix="sqlalchemy.", @@ -77,9 +79,7 @@ def run_migrations_online() -> None: ) with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() diff --git a/migrations/versions/1d1b10e054af_add_timezones.py b/migrations/versions/1d1b10e054af_add_timezones.py index f67e3eb..28041d4 100644 --- a/migrations/versions/1d1b10e054af_add_timezones.py +++ b/migrations/versions/1d1b10e054af_add_timezones.py @@ -5,26 +5,26 @@ Create Date: 2023-07-12 14:59:24.746444 """ -from alembic import op -import sqlalchemy as sa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. -revision = '1d1b10e054af' -down_revision = '58dd42108a22' +revision = "1d1b10e054af" +down_revision = "58dd42108a22" branch_labels = None depends_on = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### - op.execute('ALTER TABLE boatschedules ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE deckhandevents ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE fishaidata ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE gpsdata ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE internetdata ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE ondeckdata ALTER COLUMN datetime TYPE timestamp with time zone;') - op.execute('ALTER TABLE tests ALTER COLUMN datetime TYPE timestamp with time zone;') + op.execute("ALTER TABLE boatschedules ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE deckhandevents ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE fishaidata ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE gpsdata ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE internetdata ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE ondeckdata ALTER COLUMN datetime TYPE timestamp with time zone;") + op.execute("ALTER TABLE tests ALTER COLUMN datetime TYPE timestamp with time zone;") # op.execute('ALTER TABLE tests ALTER COLUMN datetime_from TYPE timestamp with time zone;') # op.execute('ALTER TABLE tests ALTER COLUMN datetime_to TYPE timestamp with time zone;') # op.execute('ALTER TABLE video_files ALTER COLUMN last_modified TYPE timestamp with time zone;') @@ -35,11 +35,11 @@ def upgrade() -> None: def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - op.execute('ALTER TABLE boatschedules ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE deckhandevents ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE fishaidata ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE gpsdata ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE internetdata ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE ondeckdata ALTER COLUMN datetime TYPE timestamp without time zone;') - op.execute('ALTER TABLE tests ALTER COLUMN datetime TYPE timestamp without time zone;') + op.execute("ALTER TABLE boatschedules ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE deckhandevents ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE fishaidata ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE gpsdata ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE internetdata ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE ondeckdata ALTER COLUMN datetime TYPE timestamp without time zone;") + op.execute("ALTER TABLE tests ALTER COLUMN datetime TYPE timestamp without time zone;") # ### end Alembic commands ### diff --git a/misc/data/port_locations.sql b/misc/data/port_locations.sql index f1694da..f8b9379 100644 --- a/misc/data/port_locations.sql +++ b/misc/data/port_locations.sql @@ -1,13 +1,27 @@ - -- puntarenas -truncate port_location; -insert into port_location (port_location) values (point(9.8106338, -84.875245)); - +TRUNCATE port_location; +INSERT INTO + port_location (port_location) +VALUES + (point(9.8106338, -84.875245)); -- QUEPOS -truncate port_location; -insert into 
port_location (port_location) values (point(9.4241879,-84.1833372)); +TRUNCATE port_location; +INSERT INTO + port_location (port_location) +VALUES + (point(9.4241879, -84.1833372)); -select *, 'dep' from port_departures union select *, 'arr' from port_arrivals; +SELECT + *, + 'dep' +FROM + port_departures +UNION +SELECT + *, + 'arr' +FROM + port_arrivals; \ No newline at end of file diff --git a/misc/gpsdata_scratchpad.sql b/misc/gpsdata_scratchpad.sql index 3f3416d..51bd605 100644 --- a/misc/gpsdata_scratchpad.sql +++ b/misc/gpsdata_scratchpad.sql @@ -1,60 +1,130 @@ -with t as ( - select *, ROW_NUMBER() OVER (ORDER BY gps_datetime) from gpsdata -) select -t1.gps_datetime, t2.gps_datetime, -t1.lat, t1.lon, t2.lat, t2.lon, -(t1.lat - t2.lat)*110.574 as latkmdiff, -cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320 as lonkmdiff, -sqrt( - ((t1.lat - t2.lat)*110.574)^2 + - (cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320)^2 -) as distance, -sqrt( - ((t1.lat - t2.lat)*110.574)^2 + - (cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320)^2 -)/(extract(epoch from t1.gps_datetime - t2.gps_datetime)/3600) as kph -from t t1 -join t t2 on t2.row_number = t1.row_number-1 -where t1.row_number > 200 -limit 500; +WITH t AS ( + SELECT + *, + ROW_NUMBER() OVER ( + ORDER BY + gps_datetime + ) + FROM + gpsdata +) +SELECT + t1.gps_datetime, + t2.gps_datetime, + t1.lat, + t1.lon, + t2.lat, + t2.lon, + (t1.lat - t2.lat) * 110.574 AS latkmdiff, + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 AS lonkmdiff, + sqrt( + ((t1.lat - t2.lat) * 110.574) ^ 2 + ( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 + ) ^ 2 + ) AS distance, + sqrt( + ((t1.lat - t2.lat) * 110.574) ^ 2 + ( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 + ) ^ 2 + ) /( + extract( + epoch + FROM + t1.gps_datetime - t2.gps_datetime + ) / 3600 + ) AS kph +FROM + t t1 + JOIN t t2 ON t2.row_number = t1.row_number -1 +WHERE + t1.row_number > 200 +LIMIT + 500; +WITH t AS ( + SELECT + *, + ROW_NUMBER() OVER ( + ORDER BY + gps_datetime + ) + FROM + gpsdata +) +SELECT + -- (t1.lat - t2.lat)*100000 as latdiff, + -- (t1.lon - t2.lon)*100000 as londiff, + t1.gps_datetime, + sqrt( + ((t1.lat - t2.lat) * 110.574) ^ 2 + ( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 + ) ^ 2 + ) /( + extract( + epoch + FROM + t1.gps_datetime - t2.gps_datetime + ) / 3600 + ) AS kph, + CASE + WHEN t1.lon - t2.lon = 0 THEN 0 + ELSE ( + 6 - sign(t1.lon - t2.lon) *( + sign(t1.lon - t2.lon) * atan( + (t1.lat - t2.lat) /( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) + ) + ) / 3.14159 + 0.5 + ) * 6 + ) + END AS clockheading +FROM + t t1 + JOIN t t2 ON t2.row_number = t1.row_number -1 +LIMIT + 10; -with t as ( - select *, ROW_NUMBER() OVER (ORDER BY gps_datetime) from gpsdata -) select --- (t1.lat - t2.lat)*100000 as latdiff, --- (t1.lon - t2.lon)*100000 as londiff, -t1.gps_datetime, -sqrt( - ((t1.lat - t2.lat)*110.574)^2 + - (cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320)^2 -)/(extract(epoch from t1.gps_datetime - t2.gps_datetime)/3600) as kph, - case - when t1.lon-t2.lon = 0 then 0 - else (6 - sign(t1.lon-t2.lon)*(sign(t1.lon-t2.lon)*atan((t1.lat - t2.lat)/(cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)))/3.14159 + 0.5 ) * 6 ) - end as clockheading -from t t1 -join t t2 on t2.row_number = t1.row_number-1 -limit 10; ; copy ( - with t as ( - select *, ROW_NUMBER() OVER (ORDER BY gps_datetime) from gpsdata - ) select - -- (t1.lat - t2.lat)*100000 as latdiff, - -- (t1.lon - t2.lon)*100000 as londiff, - t1.gps_datetime, - 
sqrt( - ((t1.lat - t2.lat)*110.574)^2 + - (cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)*111.320)^2 - )/(extract(epoch from t1.gps_datetime - t2.gps_datetime)/3600) as kph, - case - when t1.lon-t2.lon = 0 then 0 - else (6 - sign(t1.lon-t2.lon)*(sign(t1.lon-t2.lon)*atan((t1.lat - t2.lat)/(cos(t1.lat*3.14159265/180)*(t1.lon-t2.lon)))/3.14159 + 0.5 ) * 6 ) - end as clockheading - from t t1 - join t t2 on t2.row_number = t1.row_number-1 - -- limit 10 -) to stdout csv header -; + WITH t AS ( + SELECT + *, + ROW_NUMBER() OVER ( + ORDER BY + gps_datetime + ) + FROM + gpsdata + ) + SELECT + -- (t1.lat - t2.lat)*100000 as latdiff, + -- (t1.lon - t2.lon)*100000 as londiff, + t1.gps_datetime, + sqrt( + ((t1.lat - t2.lat) * 110.574) ^ 2 + ( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) * 111.320 + ) ^ 2 + ) /( + extract( + epoch + FROM + t1.gps_datetime - t2.gps_datetime + ) / 3600 + ) AS kph, + CASE + WHEN t1.lon - t2.lon = 0 THEN 0 + ELSE ( + 6 - sign(t1.lon - t2.lon) *( + sign(t1.lon - t2.lon) * atan( + (t1.lat - t2.lat) /( + cos(t1.lat * 3.14159265 / 180) *(t1.lon - t2.lon) + ) + ) / 3.14159 + 0.5 + ) * 6 + ) + END AS clockheading + FROM + t t1 + JOIN t t2 ON t2.row_number = t1.row_number -1 -- limit 10 +) TO stdout csv header; \ No newline at end of file diff --git a/misc/scratchpad_gst-launch_transcoding.sh b/misc/scratchpad_gst-launch_transcoding.sh index ba9349a..10ff606 100644 --- a/misc/scratchpad_gst-launch_transcoding.sh +++ b/misc/scratchpad_gst-launch_transcoding.sh @@ -1,3 +1,4 @@ +#!/bin/bash # see these documentation websites for each filter: # https://gstreamer.freedesktop.org/documentation/avi/avidemux.html @@ -56,7 +57,7 @@ gst-launch-1.0 filesrc location="/videos/20240308T124000Z_cam1_reenc.mkv" ! matr 20230912T133500Z_cam2_ondeck -mkdir ./frames || rm ./frames/* +mkdir ./frames || rm ./frames/* gst-launch-1.0 filesrc location="$V" ! avidemux ! multifilesink index=1 location=./frames/%d.jpg @@ -108,10 +109,8 @@ python3 -m http.server -for i in 12-09-2023-15-05.avi.done 12-09-2023-15-10.avi.done 12-09-2023-15-15.avi.done 12-09-2023-15-20.avi.done 12-09-2023-15-25.avi.done 12-09-2023-15-30.avi.done 12-09-2023-15-35.avi.done 12-09-2023-15-40.avi.done 12-09-2023-15-55.avi.done ; do +for i in 12-09-2023-15-05.avi.done 12-09-2023-15-10.avi.done 12-09-2023-15-15.avi.done 12-09-2023-15-20.avi.done 12-09-2023-15-25.avi.done 12-09-2023-15-30.avi.done 12-09-2023-15-35.avi.done 12-09-2023-15-40.avi.done 12-09-2023-15-55.avi.done ; do rm /videos/frames/* gst-launch-1.0 filesrc location="/thalos/brancol/videos/cam2/12-09-2023/15/$i" ! avidemux ! 
multifilesink index=1 location=/videos/frames/%d.jpg tar czf ~/$i.tar.gz -C /videos/frames {1,480,960,1440,1920,2400,2880,3360,3840,4320}.jpg done - - diff --git a/misc/vector_data_as_cloudwatch_metrics_experiment.py b/misc/vector_data_as_cloudwatch_metrics_experiment.py index 3125c70..8a21087 100644 --- a/misc/vector_data_as_cloudwatch_metrics_experiment.py +++ b/misc/vector_data_as_cloudwatch_metrics_experiment.py @@ -1,11 +1,11 @@ -import boto3 import time -from datetime import datetime + +import boto3 from dateutil.parser import parse as dateparse -athena = boto3.client('athena') +athena = boto3.client("athena") -custommetrics = boto3.client('cloudwatch') +custommetrics = boto3.client("cloudwatch") def has_query_succeeded(execution_id): @@ -28,19 +28,20 @@ def has_query_succeeded(execution_id): return False + def gen_put_metric_requests(vector_id, value_timestamp_pairs): request = None - for (value, timestamp) in value_timestamp_pairs: - - + for value, timestamp in value_timestamp_pairs: if request is None: - request = {'Namespace': 'tnc_edge_brancol_v1', 'MetricData': []} - request['MetricData'].append({ - 'MetricName': 'vector_{}'.format(vector_id), - 'Value': value, - 'Timestamp': timestamp, - }) - if len(request['MetricData']) >= 1000: + request = {"Namespace": "tnc_edge_brancol_v1", "MetricData": []} + request["MetricData"].append( + { + "MetricName": "vector_{}".format(vector_id), + "Value": value, + "Timestamp": timestamp, + } + ) + if len(request["MetricData"]) >= 1000: yield request request = None if request: @@ -51,8 +52,7 @@ def main(): # 5. Query Athena table query = f"SELECT vector_id, score, datetime from tnc_edge.brancol_v1_tests" response = athena.start_query_execution( - QueryString=query, - ResultConfiguration={"OutputLocation": "s3://51-gema-dev-athena/"} + QueryString=query, ResultConfiguration={"OutputLocation": "s3://51-gema-dev-athena/"} ) execution_id = response["QueryExecutionId"] @@ -61,46 +61,45 @@ def main(): query_status = has_query_succeeded(execution_id=execution_id) print(f"Query state: {query_status}") - paginator = athena.get_paginator('get_query_results') - page_iterator = paginator.paginate( - QueryExecutionId=execution_id - ) + paginator = athena.get_paginator("get_query_results") + page_iterator = paginator.paginate(QueryExecutionId=execution_id) def gen_results(): for page in page_iterator: - if len(page['ResultSet']['Rows']) > 1: - for row in page['ResultSet']['Rows'][1:]: + if len(page["ResultSet"]["Rows"]) > 1: + for row in page["ResultSet"]["Rows"][1:]: yield row - + grouped = {} for row in gen_results(): - vector_id = row['Data'][0]['VarCharValue'] + vector_id = row["Data"][0]["VarCharValue"] if vector_id not in grouped.keys(): grouped[vector_id] = [] - value = row['Data'][1].get('VarCharValue') + value = row["Data"][1].get("VarCharValue") try: value = float(value) except: continue - timestamp = row['Data'][2].get('VarCharValue') + timestamp = row["Data"][2].get("VarCharValue") if timestamp is None: continue timestamp = dateparse(timestamp) - if timestamp <= dateparse('2023-10-20 23:00:00Z'): + if timestamp <= dateparse("2023-10-20 23:00:00Z"): continue - grouped[vector_id].append( (value, timestamp) ) - - for (vector_id, value_timestamp_pairs) in grouped.items(): + grouped[vector_id].append((value, timestamp)) + + for vector_id, value_timestamp_pairs in grouped.items(): if int(vector_id) == 3: continue # metric_name = 'tnc_edge_brancol_v1_vector_{}'.format(vector_id) - for request in gen_put_metric_requests(vector_id=vector_id, 
value_timestamp_pairs=value_timestamp_pairs): - print('putting {} values on ') + for request in gen_put_metric_requests( + vector_id=vector_id, value_timestamp_pairs=value_timestamp_pairs + ): + print("putting {} values on ") response = custommetrics.put_metric_data(**request) print(response) - if __name__ == "__main__": main() diff --git a/model/__init__.py b/model/__init__.py index b4c02fb..404bb0b 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,20 +1,12 @@ - +from .aifishdata import AifishData from .base import Base - -from .riskvector import RiskVector, RiskVectorModelView - -from .test import Test, T, TestModelView - -from .gpsdata import GpsData - +from .boatschedule import BoatSchedule from .deckhandeventraw import DeckhandEventRaw from .deckhandeventview import DeckhandEventView - -from .aifishdata import AifishData -from .ondeckdata import OndeckData - +from .gpsdata import GpsData from .internetdata import InternetData, InternetDataView - -from .boatschedule import BoatSchedule -from .videofiles import VideoFile +from .ondeckdata import OndeckData +from .riskvector import RiskVector, RiskVectorModelView +from .test import T, Test, TestModelView from .track import Track +from .videofiles import VideoFile diff --git a/model/aifishdata.py b/model/aifishdata.py index c6ad7cb..fdddb6d 100644 --- a/model/aifishdata.py +++ b/model/aifishdata.py @@ -1,11 +1,12 @@ +from sqlalchemy import REAL, Column, DateTime, ForeignKey, Integer, String, text +from sqlalchemy.orm import relationship + from .base import Base from .videofiles import VideoFile -from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, text, REAL -from sqlalchemy.orm import relationship class AifishData(Base): - __tablename__ = 'aifishdata' + __tablename__ = "aifishdata" id = Column(Integer, primary_key=True) video_uri = Column(String, ForeignKey("video_files.decrypted_path"), unique=True) @@ -19,18 +20,24 @@ class AifishData(Base): status = Column(String) def __str__(self) -> str: - return 'AifishData(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'video_uri', - # 'video_file', - 'processing_uri', - 'output_uri', - 'datetime', - 'count', - 'runtimems', - 'detection_confidence', - 'status', - - ]]) + ')' - + return ( + "AifishData(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "video_uri", + # 'video_file', + "processing_uri", + "output_uri", + "datetime", + "count", + "runtimems", + "detection_confidence", + "status", + ] + ] + ) + + ")" + ) diff --git a/model/boatschedule.py b/model/boatschedule.py index 85557ea..1357671 100644 --- a/model/boatschedule.py +++ b/model/boatschedule.py @@ -1,20 +1,26 @@ +from sqlalchemy import Column, DateTime, Integer, String, text + from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, text class BoatSchedule(Base): - __tablename__ = 'boatschedules' + __tablename__ = "boatschedules" id = Column(Integer, primary_key=True) sentence = Column(String) datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) def __str__(self) -> str: - return 'BoatSchedule(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'sentence', - ]]) + ')' - - - + return ( + "BoatSchedule(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "sentence", + ] + ] + ) + + ")" + ) diff --git a/model/deckhandeventraw.py b/model/deckhandeventraw.py index 46dd4fd..af9787c 100644 --- a/model/deckhandeventraw.py +++ 
b/model/deckhandeventraw.py @@ -1,15 +1,11 @@ +from sqlalchemy import Column, DateTime, Integer, String, text from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, text - class DeckhandEventRaw(Base): - - __tablename__ = 'deckhandevents' + __tablename__ = "deckhandevents" id = Column(Integer, primary_key=True) jsonblob = Column(String) datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) - - \ No newline at end of file diff --git a/model/deckhandeventview.py b/model/deckhandeventview.py index 8c4b0d4..40a6715 100644 --- a/model/deckhandeventview.py +++ b/model/deckhandeventview.py @@ -1,17 +1,15 @@ +from sqlalchemy import Column, DateTime, Integer, String, text from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, text - class DeckhandEventView(Base): - - __tablename__ = 'deckhandevents_mostrecentlonglineevent_jsonextracted' + __tablename__ = "deckhandevents_mostrecentlonglineevent_jsonextracted" id = Column(Integer, primary_key=True) # jsonblob = Column(String) datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) - + bycatchcount = Column(Integer) catchcount = Column(Integer) systemstartsetdatetime = Column(DateTime(timezone=True)) @@ -26,7 +24,7 @@ class DeckhandEventView(Base): systemendhauldatetime = Column(DateTime(timezone=True)) systemendhaullatitude = Column(Integer) systemendhaullongitude = Column(Integer) - - -if __name__ == '__main__': - pass \ No newline at end of file + + +if __name__ == "__main__": + pass diff --git a/model/gpsdata.py b/model/gpsdata.py index 9675be0..99b3223 100644 --- a/model/gpsdata.py +++ b/model/gpsdata.py @@ -1,9 +1,10 @@ +from sqlalchemy import Column, DateTime, Float, text + from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, text, Float class GpsData(Base): - __tablename__ = 'gpsdata' + __tablename__ = "gpsdata" gps_datetime = Column(DateTime(timezone=True), primary_key=True) lat = Column(Float(), nullable=False) @@ -11,17 +12,27 @@ class GpsData(Base): datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) def __str__(self) -> str: - return 'GpsData(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'gps_datetime', - 'lat', - 'lon', - 'datetime', - ]]) + ')' -example_gps_data = ''' -$ cat /mnt/thalos/brancol/export_gps/brancol_20230601_145918.txt -+47.7411535°,-3.4073535° edge@edge1:~$ + return ( + "GpsData(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "gps_datetime", + "lat", + "lon", + "datetime", + ] + ] + ) + + ")" + ) + + +example_gps_data = """ +$ cat /mnt/thalos/brancol/export_gps/brancol_20230601_145918.txt ++47.7411535°,-3.4073535° edge@edge1:~$ edge@edge1:~$ cat /mnt/thalos/brancol/export_gps/brancol_20230601_145918.txt | xxd 00000000: 2b34 372e 3734 3131 3533 35c2 b02c 2d33 +47.7411535..,-3 -00000010: 2e34 3037 3335 3335 c2b0 20 .4073535.. -''' +00000010: 2e34 3037 3335 3335 c2b0 20 .4073535.. 
+""" diff --git a/model/internetdata.py b/model/internetdata.py index a6db90d..63976d8 100644 --- a/model/internetdata.py +++ b/model/internetdata.py @@ -1,10 +1,10 @@ -from .base import Base +from sqlalchemy import Column, DateTime, Float, Integer, String, text -from sqlalchemy import Column, Integer, String, Float, DateTime, text +from .base import Base class InternetData(Base): - __tablename__ = 'internetdata' + __tablename__ = "internetdata" id = Column(Integer, primary_key=True) traceroute = Column(String) @@ -15,27 +15,57 @@ class InternetData(Base): # fk = ForeignKeyConstraint(['id'], [RiskVector.id]) def __str__(self) -> str: - return 'InternetData(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'traceroute', - 'ping', - 'packetloss', - 'returncode', - 'datetime', - ]]) + ')' + return ( + "InternetData(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "traceroute", + "ping", + "packetloss", + "returncode", + "datetime", + ] + ] + ) + + ")" + ) from flask_admin.contrib.sqla import ModelView + class InternetDataView(ModelView): - def __init__(self, session, name=None, category=None, endpoint=None, url=None, static_folder=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None): - super().__init__(InternetData, session, name, category, endpoint, url, static_folder, menu_class_name, menu_icon_type, menu_icon_value) + def __init__( + self, + session, + name=None, + category=None, + endpoint=None, + url=None, + static_folder=None, + menu_class_name=None, + menu_icon_type=None, + menu_icon_value=None, + ): + super().__init__( + InternetData, + session, + name, + category, + endpoint, + url, + static_folder, + menu_class_name, + menu_icon_type, + menu_icon_value, + ) + can_delete = True column_display_pk = True column_hide_backrefs = False - column_list = ["id","traceroute", "ping", "packetloss", "returncode", "datetime"] + column_list = ["id", "traceroute", "ping", "packetloss", "returncode", "datetime"] # column_searchable_list = ["name"] # inline_models = (RiskVector,) - - diff --git a/model/ondeckdata.py b/model/ondeckdata.py index 457c43c..61d8908 100644 --- a/model/ondeckdata.py +++ b/model/ondeckdata.py @@ -1,11 +1,12 @@ +from sqlalchemy import REAL, Column, DateTime, ForeignKey, Integer, String, text +from sqlalchemy.orm import relationship + from .base import Base from .videofiles import VideoFile -from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, text, REAL -from sqlalchemy.orm import relationship class OndeckData(Base): - __tablename__ = 'ondeckdata' + __tablename__ = "ondeckdata" id = Column(Integer, primary_key=True) video_uri = Column(String, ForeignKey("video_files.decrypted_path"), unique=True) @@ -16,22 +17,28 @@ class OndeckData(Base): overallruntimems = Column(REAL) tracked_confidence = Column(REAL) status = Column(String) - overallcatches = Column(Integer) - overalldiscards = Column(Integer) + overallcatches = Column(Integer) + overalldiscards = Column(Integer) detection_confidence = Column(REAL) def __str__(self) -> str: - return 'OndeckData(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'video_uri', - # 'video_file', - 'cocoannotations_uri', - 'datetime', - 'overallcount', - 'overallruntimems', - 'tracked_confidence', - 'status', - - ]]) + ')' - + return ( + "OndeckData(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "video_uri", + # 'video_file', + "cocoannotations_uri", + "datetime", + 
"overallcount", + "overallruntimems", + "tracked_confidence", + "status", + ] + ] + ) + + ")" + ) diff --git a/model/riskvector.py b/model/riskvector.py index c76c78c..0817f15 100644 --- a/model/riskvector.py +++ b/model/riskvector.py @@ -1,12 +1,12 @@ -from .base import Base - -from sqlalchemy.orm import relationship +from flask_admin.model.template import EndpointLinkRowAction from sqlalchemy import Column, Integer, String +from sqlalchemy.orm import relationship + +from .base import Base -from flask_admin.model.template import EndpointLinkRowAction, LinkRowAction class RiskVector(Base): - __tablename__ = 'vectors' + __tablename__ = "vectors" id = Column(Integer, primary_key=True) name = Column(String) @@ -14,23 +14,48 @@ class RiskVector(Base): configblob = Column(String) tests = relationship("Test", back_populates="vector") - def __str__(self) -> str: - return 'RiskVector(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'name', - 'schedule_string', - 'configblob' - ]]) + ')' - + return ( + "RiskVector(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in ["id", "name", "schedule_string", "configblob"] + ] + ) + + ")" + ) from flask_admin.contrib.sqla import ModelView + class RiskVectorModelView(ModelView): - def __init__(self, session, name=None, category=None, endpoint=None, url=None, static_folder=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None): - super().__init__(RiskVector, session, name, category, endpoint, url, static_folder, menu_class_name, menu_icon_type, menu_icon_value) + def __init__( + self, + session, + name=None, + category=None, + endpoint=None, + url=None, + static_folder=None, + menu_class_name=None, + menu_icon_type=None, + menu_icon_value=None, + ): + super().__init__( + RiskVector, + session, + name, + category, + endpoint, + url, + static_folder, + menu_class_name, + menu_icon_type, + menu_icon_value, + ) + can_delete = True column_display_pk = True column_hide_backrefs = False @@ -39,6 +64,10 @@ def __init__(self, session, name=None, category=None, endpoint=None, url=None, s # inline_models = (RiskVector,) # column_select_related_list = ["tests.vector_id"] column_extra_row_actions = [ - EndpointLinkRowAction('glyphicon glyphicon-arrow-right', 'test.index_view', title="Go to Tests ➡️", id_arg="flt1_0") + EndpointLinkRowAction( + "glyphicon glyphicon-arrow-right", + "test.index_view", + title="Go to Tests ➡️", + id_arg="flt1_0", + ) ] - diff --git a/model/test.py b/model/test.py index 068478b..fe16085 100644 --- a/model/test.py +++ b/model/test.py @@ -1,11 +1,10 @@ -from .base import Base - -from .riskvector import RiskVector - from enum import Enum as PyEnum +from sqlalchemy import Column, DateTime, Enum, Float, ForeignKey, Integer, String, text from sqlalchemy.orm import relationship -from sqlalchemy import Column, Integer, String, Enum, ForeignKey, Float, DateTime, text + +from .base import Base +from .riskvector import RiskVector class T(PyEnum): @@ -15,7 +14,7 @@ class T(PyEnum): class Test(Base): - __tablename__ = 'tests' + __tablename__ = "tests" id = Column(Integer, primary_key=True) name = Column(String) @@ -30,28 +29,68 @@ class Test(Base): # fk = ForeignKeyConstraint(['id'], [RiskVector.id]) def __str__(self) -> str: - return 'Test(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - 'name', - 'type', - 'vector_id', - 'datetime', - ]]) + ')' + return ( + "Test(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + "name", + 
"type", + "vector_id", + "datetime", + ] + ] + ) + + ")" + ) from flask_admin.contrib.sqla import ModelView + class TestModelView(ModelView): - def __init__(self, session, name=None, category=None, endpoint=None, url=None, static_folder=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None): - super().__init__(Test, session, name, category, endpoint, url, static_folder, menu_class_name, menu_icon_type, menu_icon_value) + def __init__( + self, + session, + name=None, + category=None, + endpoint=None, + url=None, + static_folder=None, + menu_class_name=None, + menu_icon_type=None, + menu_icon_value=None, + ): + super().__init__( + Test, + session, + name, + category, + endpoint, + url, + static_folder, + menu_class_name, + menu_icon_type, + menu_icon_value, + ) + can_delete = True column_display_pk = True column_hide_backrefs = False - column_list = ["id","name","type","vector", "score", "detail", "datetime_from", "datetime_to", "datetime"] + column_list = [ + "id", + "name", + "type", + "vector", + "score", + "detail", + "datetime_from", + "datetime_to", + "datetime", + ] column_searchable_list = ["name"] column_filters = ["vector_id", "datetime"] # column_select_related_list=['vector'] # inline_models = (RiskVector,) - - diff --git a/model/track.py b/model/track.py index 0f23929..006acad 100644 --- a/model/track.py +++ b/model/track.py @@ -1,11 +1,10 @@ +from sqlalchemy import ARRAY, REAL, Column, DateTime, Integer, String, text + from .base import Base -from .videofiles import VideoFile -from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, text, REAL, ARRAY -from sqlalchemy.orm import relationship class Track(Base): - __tablename__ = 'tracks' + __tablename__ = "tracks" id = Column(Integer, primary_key=True) video_uri = Column(String) @@ -19,13 +18,19 @@ class Track(Base): last_framenum = Column(Integer) confidences = Column(ARRAY(REAL)) - datetime = Column(DateTime(timezone=True), server_default=text("CURRENT_TIMESTAMP")) # detection_confidence = Column(REAL) def __str__(self) -> str: - return 'Track(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - 'id', - - ]]) + ')' + return ( + "Track(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "id", + ] + ] + ) + + ")" + ) diff --git a/model/videofiles.py b/model/videofiles.py index 6ba483d..09de3c6 100644 --- a/model/videofiles.py +++ b/model/videofiles.py @@ -1,11 +1,11 @@ +from sqlalchemy import VARCHAR, Column, DateTime + from .base import Base -from sqlalchemy import Column, Integer, String, DateTime, VARCHAR, text, PrimaryKeyConstraint -# from sqlalchemy.orm import relationship class VideoFile(Base): - __tablename__ = 'video_files' - + __tablename__ = "video_files" + original_path = Column(VARCHAR(), primary_key=True, autoincrement=False, nullable=False) last_modified = Column(DateTime(timezone=True), autoincrement=False, nullable=False) start_datetime = Column(DateTime(timezone=True), autoincrement=False, nullable=False) @@ -18,22 +18,30 @@ class VideoFile(Base): reencoded_stdout = Column(VARCHAR(), autoincrement=False, nullable=True) reencoded_stderr = Column(VARCHAR(), autoincrement=False, nullable=True) cam_name = Column(VARCHAR(), nullable=True) - + # ondeckdata = relationship("OndeckData", back_populates="video_file") def __str__(self) -> str: - return 'VideoFile(' + ', '.join( - [n + '='+ str(self.__getattribute__(n)) for n in [ - "original_path", - "last_modified", - "start_datetime", - "decrypted_path", - "decrypted_datetime", - "stdout", - "stderr", 
- "reencoded_path", - "reencoded_datetime", - "reencoded_stdout", - "reencoded_stderr", - "cam_name", - ]]) + ')' + return ( + "VideoFile(" + + ", ".join( + [ + n + "=" + str(self.__getattribute__(n)) + for n in [ + "original_path", + "last_modified", + "start_datetime", + "decrypted_path", + "decrypted_datetime", + "stdout", + "stderr", + "reencoded_path", + "reencoded_datetime", + "reencoded_stdout", + "reencoded_stderr", + "cam_name", + ] + ] + ) + + ")" + ) diff --git a/notebooks/catchcount_vector.ipynb b/notebooks/catchcount_vector.ipynb index acb81de..4e6d169 100644 --- a/notebooks/catchcount_vector.ipynb +++ b/notebooks/catchcount_vector.ipynb @@ -39,17 +39,19 @@ "from sklearn.linear_model import LinearRegression\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", + "import itertools\n", "import matplotlib.gridspec as gridspec\n", "\n", "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange, AutoDateLocator\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "import json\n", "from tsai.all import *\n", "from IPython.display import display, Markdown\n", @@ -95,13 +97,13 @@ ], "source": [ "# Brancol Trips\n", - "Brancol1data = get_data(boat = 'brancol', trip_no = 0)\n", - "Brancol2data = get_data(boat = 'brancol', trip_no = 1)\n", - "Brancol3data = get_data(boat = 'brancol', trip_no = 2)\n", + "Brancol1data = get_data(boat=\"brancol\", trip_no=0)\n", + "Brancol2data = get_data(boat=\"brancol\", trip_no=1)\n", + "Brancol3data = get_data(boat=\"brancol\", trip_no=2)\n", "\n", "# ST Patrick Trips\n", - "StPatrick1data = get_data(boat = 'stpatrick', trip_no = 0)\n", - "StPatrick2data = get_data(boat = 'stpatrick', trip_no = 1)" + "StPatrick1data = get_data(boat=\"stpatrick\", trip_no=0)\n", + "StPatrick2data = get_data(boat=\"stpatrick\", trip_no=1)" ] }, { @@ -111,30 +113,29 @@ "metadata": {}, "outputs": [], "source": [ + "counts_Brancol1 = Brancol1data[\"all_counts\"]\n", + "counts_Brancol2 = Brancol2data[\"all_counts\"]\n", + "counts_Brancol3 = Brancol3data[\"all_counts\"]\n", "\n", - "counts_Brancol1 = Brancol1data['all_counts']\n", - "counts_Brancol2 = Brancol2data['all_counts']\n", - "counts_Brancol3 = Brancol3data['all_counts']\n", - "\n", - "brancol1_elog = Brancol1data['elogs']\n", - "brancol2_elog = Brancol2data['elogs']\n", - "brancol3_elog = Brancol3data['elogs']\n", + "brancol1_elog = Brancol1data[\"elogs\"]\n", + "brancol2_elog = Brancol2data[\"elogs\"]\n", + "brancol3_elog = Brancol3data[\"elogs\"]\n", "\n", - "brancol1_bv_sets = Brancol1data['bv_sets']\n", - "brancol2_bv_sets = Brancol2data['bv_sets']\n", - "brancol3_bv_sets = Brancol3data['bv_sets']\n", + "brancol1_bv_sets = Brancol1data[\"bv_sets\"]\n", + "brancol2_bv_sets = Brancol2data[\"bv_sets\"]\n", + "brancol3_bv_sets = Brancol3data[\"bv_sets\"]\n", "\n", - "brancol1_bv_set_counts = Brancol1data['bv_set_counts']\n", - "brancol2_bv_set_counts = Brancol2data['bv_set_counts']\n", - "brancol3_bv_set_counts = Brancol3data['bv_set_counts']\n", + "brancol1_bv_set_counts = Brancol1data[\"bv_set_counts\"]\n", + "brancol2_bv_set_counts = Brancol2data[\"bv_set_counts\"]\n", + "brancol3_bv_set_counts = Brancol3data[\"bv_set_counts\"]\n", "\n", - "aiCounts_Brancol1 = Brancol1data['ai_sets']\n", - "aiCounts_Brancol2 = Brancol2data['ai_sets']\n", - "aiCounts_Brancol3 = Brancol3data['ai_sets']\n", + 
"aiCounts_Brancol1 = Brancol1data[\"ai_sets\"]\n", + "aiCounts_Brancol2 = Brancol2data[\"ai_sets\"]\n", + "aiCounts_Brancol3 = Brancol3data[\"ai_sets\"]\n", "\n", - "brancol1trip = Brancol1data['trip_info']\n", - "brancol2trip = Brancol2data['trip_info']\n", - "brancol3trip = Brancol3data['trip_info']" + "brancol1trip = Brancol1data[\"trip_info\"]\n", + "brancol2trip = Brancol2data[\"trip_info\"]\n", + "brancol3trip = Brancol3data[\"trip_info\"]" ] }, { @@ -144,22 +145,20 @@ "metadata": {}, "outputs": [], "source": [ + "counts_StPatrick1 = StPatrick1data[\"all_counts\"]\n", + "counts_StPatrick2 = StPatrick2data[\"all_counts\"]\n", "\n", + "stpatrick1_elog = StPatrick1data[\"elogs\"]\n", + "stpatrick2_elog = StPatrick2data[\"elogs\"]\n", "\n", - "counts_StPatrick1 = StPatrick1data['all_counts']\n", - "counts_StPatrick2 = StPatrick2data['all_counts']\n", - "\n", - "stpatrick1_elog = StPatrick1data['elogs']\n", - "stpatrick2_elog = StPatrick2data['elogs']\n", - "\n", - "stpatrick1_bv_set_counts = StPatrick1data['bv_set_counts']\n", - "stpatrick2_bv_set_counts = StPatrick2data['bv_set_counts']\n", + "stpatrick1_bv_set_counts = StPatrick1data[\"bv_set_counts\"]\n", + "stpatrick2_bv_set_counts = StPatrick2data[\"bv_set_counts\"]\n", "\n", - "aiCounts_StPatrick1 = StPatrick1data['ai_sets']\n", - "aiCounts_StPatrick2 = StPatrick2data['ai_sets']\n", + "aiCounts_StPatrick1 = StPatrick1data[\"ai_sets\"]\n", + "aiCounts_StPatrick2 = StPatrick2data[\"ai_sets\"]\n", "\n", - "stpatrick2trip = StPatrick2data['trip_info']\n", - "stpatrick1trip = StPatrick1data['trip_info']" + "stpatrick2trip = StPatrick2data[\"trip_info\"]\n", + "stpatrick1trip = StPatrick1data[\"trip_info\"]" ] }, { @@ -179,13 +178,12 @@ "vectorData_StPatrick2 = {}\n", "\n", "for vector in vectors:\n", - " \n", - " vectorData_Brancol1[f'vector_{vector}'] = get_vector_data('brancol',vector, brancol1trip)\n", - " vectorData_Brancol2[f'vector_{vector}'] = get_vector_data('brancol',vector, brancol2trip)\n", - " vectorData_Brancol3[f'vector_{vector}'] = get_vector_data('brancol',vector, brancol3trip)\n", - " \n", - " vectorData_StPatrick1[f'vector_{vector}'] = get_vector_data('stpatrick',vector, stpatrick1trip)\n", - " vectorData_StPatrick2[f'vector_{vector}'] = get_vector_data('stpatrick',vector, stpatrick2trip)" + " vectorData_Brancol1[f\"vector_{vector}\"] = get_vector_data(\"brancol\", vector, brancol1trip)\n", + " vectorData_Brancol2[f\"vector_{vector}\"] = get_vector_data(\"brancol\", vector, brancol2trip)\n", + " vectorData_Brancol3[f\"vector_{vector}\"] = get_vector_data(\"brancol\", vector, brancol3trip)\n", + "\n", + " vectorData_StPatrick1[f\"vector_{vector}\"] = get_vector_data(\"stpatrick\", vector, stpatrick1trip)\n", + " vectorData_StPatrick2[f\"vector_{vector}\"] = get_vector_data(\"stpatrick\", vector, stpatrick2trip)" ] }, { @@ -195,20 +193,20 @@ "metadata": {}, "outputs": [], "source": [ - "vector7_brancol1 = vectorData_Brancol1['vector_7']\n", - "vector7_brancol1['datetime'] = pd.to_datetime(vector7_brancol1['datetime'])\n", + "vector7_brancol1 = vectorData_Brancol1[\"vector_7\"]\n", + "vector7_brancol1[\"datetime\"] = pd.to_datetime(vector7_brancol1[\"datetime\"])\n", "\n", - "vector7_brancol2 = vectorData_Brancol2['vector_7']\n", - "vector7_brancol2['datetime'] = pd.to_datetime(vector7_brancol2['datetime'])\n", + "vector7_brancol2 = vectorData_Brancol2[\"vector_7\"]\n", + "vector7_brancol2[\"datetime\"] = pd.to_datetime(vector7_brancol2[\"datetime\"])\n", "\n", - "vector7_brancol3 = vectorData_Brancol3['vector_7']\n", - 
"vector7_brancol3['datetime'] = pd.to_datetime(vector7_brancol3['datetime'])\n", + "vector7_brancol3 = vectorData_Brancol3[\"vector_7\"]\n", + "vector7_brancol3[\"datetime\"] = pd.to_datetime(vector7_brancol3[\"datetime\"])\n", "\n", - "vector7_stpatrick1 = vectorData_StPatrick1['vector_7']\n", - "vector7_stpatrick1['datetime'] = pd.to_datetime(vector7_stpatrick1['datetime'])\n", + "vector7_stpatrick1 = vectorData_StPatrick1[\"vector_7\"]\n", + "vector7_stpatrick1[\"datetime\"] = pd.to_datetime(vector7_stpatrick1[\"datetime\"])\n", "\n", - "vector7_stpatrick2 = vectorData_StPatrick2['vector_7']\n", - "vector7_stpatrick2['datetime'] = pd.to_datetime(vector7_stpatrick2['datetime'])" + "vector7_stpatrick2 = vectorData_StPatrick2[\"vector_7\"]\n", + "vector7_stpatrick2[\"datetime\"] = pd.to_datetime(vector7_stpatrick2[\"datetime\"])" ] }, { @@ -237,13 +235,12 @@ "outputs": [], "source": [ "def get_ai_counts_elog(ai_counts, elog_hauls):\n", - " conn = sqlite3.connect(':memory:')\n", - " \n", + " conn = sqlite3.connect(\":memory:\")\n", + "\n", + " # write the tables to add set_number\n", + " ai_counts.to_sql(\"ai_counts\", conn, index=False)\n", + " elog_hauls.to_sql(\"hauls\", conn, index=False)\n", "\n", - " #write the tables to add set_number\n", - " ai_counts.to_sql('ai_counts', conn, index=False)\n", - " elog_hauls.to_sql('hauls', conn, index=False)\n", - " \n", " query = \"\"\"\n", " select\n", " distinct\n", @@ -252,17 +249,15 @@ " hauls.id is not null as elog_haul\n", " from ai_counts\n", " left join hauls on ai_counts.utc_start_datetime between hauls.systemstarthauldatetime and hauls.systemendhauldatetime and hauls.id!= '140'\n", - " \n", + "\n", " \"\"\"\n", " ai_counts_elog = pd.read_sql_query(query, conn)\n", "\n", - " ai_counts_elog['utc_end_datetime'] = pd.to_datetime(ai_counts_elog['utc_end_datetime'])\n", - " ai_counts_elog['utc_start_datetime'] = pd.to_datetime(ai_counts_elog['utc_start_datetime'])\n", - "\n", - " elog_hauls['network_delay'] =elog_hauls['datetime']-elog_hauls['systemendhauldatetime']\n", - " elog_hauls['large_delay'] =elog_hauls['network_delay']> pd.Timedelta('4 hours')\n", + " ai_counts_elog[\"utc_end_datetime\"] = pd.to_datetime(ai_counts_elog[\"utc_end_datetime\"])\n", + " ai_counts_elog[\"utc_start_datetime\"] = pd.to_datetime(ai_counts_elog[\"utc_start_datetime\"])\n", "\n", - " \n", + " elog_hauls[\"network_delay\"] = elog_hauls[\"datetime\"] - elog_hauls[\"systemendhauldatetime\"]\n", + " elog_hauls[\"large_delay\"] = elog_hauls[\"network_delay\"] > pd.Timedelta(\"4 hours\")\n", "\n", " return ai_counts_elog, elog_hauls" ] @@ -275,25 +270,28 @@ "outputs": [], "source": [ "def identify_excluded_elogs(elog_hauls, df_vector, ai_counts_elog):\n", - " elog_hauls.sort_values(by = 'systemendhauldatetime', inplace = True)\n", - " df_vector['window_start'] = df_vector['datetime']-pd.Timedelta('12 hours')\n", + " elog_hauls.sort_values(by=\"systemendhauldatetime\", inplace=True)\n", + " df_vector[\"window_start\"] = df_vector[\"datetime\"] - pd.Timedelta(\"12 hours\")\n", "\n", " elog_vectors = {}\n", " for idx, haul in elog_hauls.iterrows():\n", " considered_vectors = []\n", - " received = haul['datetime']\n", - " haul_end = haul['systemendhauldatetime']\n", - " df_relevant_vectors = df_vector.loc[(haul_end df_vector['window_start'])]\n", + " received = haul[\"datetime\"]\n", + " haul_end = haul[\"systemendhauldatetime\"]\n", + " df_relevant_vectors = df_vector.loc[\n", + " (haul_end < df_vector[\"datetime\"]) & (haul_end > df_vector[\"window_start\"])\n", + " 
]\n", " for idx, vector in df_relevant_vectors.iterrows():\n", - " vector_time = vector['datetime']\n", + " vector_time = vector[\"datetime\"]\n", " if received < vector_time:\n", - " considered_vectors.append(vector['id'])\n", - " elog_vectors[haul['id']] = considered_vectors\n", - " \n", - " \n", - " excluded_elogs = [key for key, value in elog_vectors.items() if len(value)==0]\n", - " \n", - " ai_counts_elog['unused_elog'] = ai_counts_elog['elog_id'].apply(lambda x: True if x in excluded_elogs else False)\n", + " considered_vectors.append(vector[\"id\"])\n", + " elog_vectors[haul[\"id\"]] = considered_vectors\n", + "\n", + " excluded_elogs = [key for key, value in elog_vectors.items() if len(value) == 0]\n", + "\n", + " ai_counts_elog[\"unused_elog\"] = ai_counts_elog[\"elog_id\"].apply(\n", + " lambda x: True if x in excluded_elogs else False\n", + " )\n", "\n", " return ai_counts_elog, elog_vectors" ] @@ -313,43 +311,43 @@ "metadata": {}, "outputs": [], "source": [ - "def annotate_notes(ax, df, text_col = 'network_delay', text_xy = (4, 4), var = 4):\n", - " arrowprops=dict(arrowstyle=\"->\",connectionstyle=\"arc3,rad=0\", color = 'black')\n", + "def annotate_notes(ax, df, text_col=\"network_delay\", text_xy=(4, 4), var=4):\n", + " arrowprops = dict(arrowstyle=\"->\", connectionstyle=\"arc3,rad=0\", color=\"black\")\n", "\n", " annots = []\n", - " bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + " bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "\n", " text_x, text_y = text_xy\n", "\n", - "\n", - " for idx, row in df.loc[df['large_delay']].iterrows():\n", + " for idx, row in df.loc[df[\"large_delay\"]].iterrows():\n", " # y_var = x_vars[n]\n", " components = row[text_col].components\n", " if components.days > 0:\n", - " days = f'{components.days}d '\n", + " days = f\"{components.days}d \"\n", " else:\n", - " days = ''\n", - " \n", - " text = f'{days}{components.hours}h delay'\n", - " data_xy = (row['systemendhauldatetime'], 1)\n", + " days = \"\"\n", + "\n", + " text = f\"{days}{components.hours}h delay\"\n", + " data_xy = (row[\"systemendhauldatetime\"], 1)\n", "\n", - " \n", " an = ax.annotate(\n", " text,\n", - " xy=data_xy, xycoords='data',\n", - " xytext=(text_x, text_y), textcoords='offset points',\n", + " xy=data_xy,\n", + " xycoords=\"data\",\n", + " xytext=(text_x, text_y),\n", + " textcoords=\"offset points\",\n", " arrowprops=arrowprops,\n", - " size = 9,\n", - " horizontalalignment='left',\n", - " verticalalignment='bottom',\n", + " size=9,\n", + " horizontalalignment=\"left\",\n", + " verticalalignment=\"bottom\",\n", " # bbox = bbox_args,\n", - " color = 'black'\n", + " color=\"black\",\n", " )\n", - " \n", + "\n", " annots.append(an)\n", - " \n", - " text_y+=var\n", - " var = var*-1\n", + "\n", + " text_y += var\n", + " var = var * -1\n", "\n", " return annots" ] @@ -372,106 +370,147 @@ } ], "source": [ - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", - "plt.rc('xtick',labelsize=8)\n", - "plt.rc('ytick',labelsize=8)\n", - "plt.rc('axes', labelsize = 9)\n", - "\n", - "def vector_score_plot(elog_hauls, ai_counts_elog, catch_col, df_vector, figsize, hpad, savefig = None):\n", - " bv_color = '#a2c662'\n", - " ai_color = '#184EAD'\n", - " elog_color = '#117347'\n", - " vector_color = 'red'\n", - " \n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", + "plt.rc(\"xtick\", labelsize=8)\n", + "plt.rc(\"ytick\", labelsize=8)\n", + "plt.rc(\"axes\", labelsize=9)\n", + "\n", + "\n", + "def vector_score_plot(\n", 
+ " elog_hauls, ai_counts_elog, catch_col, df_vector, figsize, hpad, savefig=None\n", + "):\n", + " bv_color = \"#a2c662\"\n", + " ai_color = \"#184EAD\"\n", + " elog_color = \"#117347\"\n", + " vector_color = \"red\"\n", + "\n", " # predictions_color = '#43aa99'\n", - " \n", - " \n", - " \n", - " fig, axes = plt.subplots(3, 1, figsize = figsize, sharex = True)\n", + "\n", + " fig, axes = plt.subplots(3, 1, figsize=figsize, sharex=True)\n", " plt.tight_layout()\n", - " plt.subplots_adjust(hspace = hpad)\n", - " \n", + " plt.subplots_adjust(hspace=hpad)\n", + "\n", " ax = axes[0]\n", " ax2 = axes[1]\n", " ax3 = axes[2]\n", " # twin1 = ax.twinx()\n", " # twin2 = ax.twinx()\n", - " \n", + "\n", " # Offset the right spine of twin2. The ticks and label have already been\n", " # placed on the right by twinx above.\n", " # twin2.spines.right.set_position((\"axes\", 1.1))\n", " # twin2.set_ylim(-0.1,2)\n", " # twin2.set_yticks([0,1])\n", - " \n", - " annots = annotate_notes(ax ,elog_hauls, text_xy = (10, 10), var = 10 )\n", - " \n", - " # TOP ax = Elog plot \n", - " sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog.loc[ai_counts_elog['unused_elog']==False], ax = ax, label = 'Elog Included', color =elog_color, lw = 1, )\n", - " sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog.loc[(ai_counts_elog['unused_elog']) | (ai_counts_elog['elog_haul']==0)], ax = ax, label = 'Elog Excluded', linestyle = '--', color =elog_color, lw = 1)\n", + "\n", + " annots = annotate_notes(ax, elog_hauls, text_xy=(10, 10), var=10)\n", + "\n", + " # TOP ax = Elog plot\n", + " sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"elog_haul\",\n", + " data=ai_counts_elog.loc[ai_counts_elog[\"unused_elog\"] == False],\n", + " ax=ax,\n", + " label=\"Elog Included\",\n", + " color=elog_color,\n", + " lw=1,\n", + " )\n", + " sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"elog_haul\",\n", + " data=ai_counts_elog.loc[\n", + " (ai_counts_elog[\"unused_elog\"]) | (ai_counts_elog[\"elog_haul\"] == 0)\n", + " ],\n", + " ax=ax,\n", + " label=\"Elog Excluded\",\n", + " linestyle=\"--\",\n", + " color=elog_color,\n", + " lw=1,\n", + " )\n", " # sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog, ax = ax, label = 'Elog Excluded', linestyle = '--', color =elog_color, lw = 1)\n", - " ax.set_ylabel('Elog Haul')\n", + " ax.set_ylabel(\"Elog Haul\")\n", " ax.set_ylim(-0.1, 2.5)\n", - " ax.set_yticks([0,1])\n", - " \n", - " \n", + " ax.set_yticks([0, 1])\n", + "\n", " # MIDDLE ax2 = AI catches\n", - " sns.lineplot(x = 'utc_start_datetime', y = catch_col, data = ai_counts_elog, ax = ax2, label = 'AI Counts', color =ai_color, lw = 1)\n", - " ax2.set_ylabel('AI Count')\n", - " \n", + " sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=catch_col,\n", + " data=ai_counts_elog,\n", + " ax=ax2,\n", + " label=\"AI Counts\",\n", + " color=ai_color,\n", + " lw=1,\n", + " )\n", + " ax2.set_ylabel(\"AI Count\")\n", + "\n", " # BOTTOM ax3 = vector schore\n", " # sns.lineplot(x = 'datetime', y= 'score', data = df_vector, ax = ax3, label = 'vector_score', color = vector_color, marker = 'o', markersize = 5, lw = 1)\n", - " ax3.set_ylabel('Vector Score')\n", - "\n", - " df_vector_nulls = df_vector.loc[df_vector['score'].isna()]\n", - " \n", - " markerline, stemlines, baseline = ax3.stem(df_vector['datetime'], df_vector['score'], linefmt = vector_color, label = 'Vector Score')\n", - " stemlines.set_linewidths(.5)\n", - " baseline.set_linewidth(.2)\n", - " 
markerline.set_markersize(.3)\n", - "\n", - " markerline2, stemlines2, baseline2 = ax3.stem(df_vector_nulls['datetime'], [.2]*len(df_vector_nulls) , linefmt = 'grey', label = 'Null Score')\n", - " stemlines2.set_linewidths(.5)\n", - " baseline2.set_linewidth(.2)\n", + " ax3.set_ylabel(\"Vector Score\")\n", + "\n", + " df_vector_nulls = df_vector.loc[df_vector[\"score\"].isna()]\n", + "\n", + " markerline, stemlines, baseline = ax3.stem(\n", + " df_vector[\"datetime\"], df_vector[\"score\"], linefmt=vector_color, label=\"Vector Score\"\n", + " )\n", + " stemlines.set_linewidths(0.5)\n", + " baseline.set_linewidth(0.2)\n", + " markerline.set_markersize(0.3)\n", + "\n", + " markerline2, stemlines2, baseline2 = ax3.stem(\n", + " df_vector_nulls[\"datetime\"],\n", + " [0.2] * len(df_vector_nulls),\n", + " linefmt=\"grey\",\n", + " label=\"Null Score\",\n", + " )\n", + " stemlines2.set_linewidths(0.5)\n", + " baseline2.set_linewidth(0.2)\n", " markerline2.set_markersize(0)\n", - " \n", + "\n", " # ax3.vlines(df_vector_nulls['datetime'], ymin=-.01, ymax=.2, color = 'grey', linewidth = .3)\n", - " ax3.set_ylabel('Vector Score')\n", - " \n", - " \n", + " ax3.set_ylabel(\"Vector Score\")\n", + "\n", " # formatting x axis dates\n", - " locator = DayLocator(interval = 2)\n", + " locator = DayLocator(interval=2)\n", " # locator = AutoDateLocator(minticks = 14)\n", " formatter = mdates.ConciseDateFormatter(locator)\n", " ax3.xaxis.set_major_locator(locator)\n", " ax3.xaxis.set_major_formatter(formatter)\n", - " ax3.set_xlabel('Datetime (UTC)')\n", - " \n", - " \n", - " \n", + " ax3.set_xlabel(\"Datetime (UTC)\")\n", + "\n", " # creating legend\n", " h1, l1 = ax.get_legend_handles_labels()\n", " h2, l2 = ax2.get_legend_handles_labels()\n", " h3, l3 = ax3.get_legend_handles_labels()\n", - " # line = Line2D([0], [0], color='grey', marker='|', \n", + " # line = Line2D([0], [0], color='grey', marker='|',\n", " # markersize=10, markeredgewidth=1.5, label='Vertical line')\n", "\n", " # h3.extend([line])\n", - " \n", + "\n", " # ax.legend(h1+h2+ h3 , l1+l2+ l3, fontsize = 9,loc='upper center', bbox_to_anchor=(1.15, .5), frameon = False)\n", - " ax.legend(frameon = False, fontsize = 9)\n", - " ax2.legend(frameon = False, fontsize = 9, loc = 'upper left')\n", - " ax3.legend(frameon = False, fontsize = 9)\n", + " ax.legend(frameon=False, fontsize=9)\n", + " ax2.legend(frameon=False, fontsize=9, loc=\"upper left\")\n", + " ax3.legend(frameon=False, fontsize=9)\n", " # ax3.get_legend().remove()\n", " # ax2.get_legend().remove()\n", - " \n", - " sns.despine(trim=True, \n", - " right = False,\n", - " # left=True\n", - " )\n", + "\n", + " sns.despine(\n", + " trim=True,\n", + " right=False,\n", + " # left=True\n", + " )\n", " if savefig:\n", - " plt.savefig(savefig,bbox_inches='tight', dpi = 150)\n", + " plt.savefig(savefig, bbox_inches=\"tight\", dpi=150)\n", "\n", - "prep_and_plot(ai_counts = aiCounts_StPatrick2,elog_hauls = stpatrick2_elog, df_vector = vector7_stpatrick2.copy(), count_col = \"overallcatches\", figsize = (7, 3),hpad = 0, savefig = 'vector7_stpatrick2.png', )" + "\n", + "prep_and_plot(\n", + " ai_counts=aiCounts_StPatrick2,\n", + " elog_hauls=stpatrick2_elog,\n", + " df_vector=vector7_stpatrick2.copy(),\n", + " count_col=\"overallcatches\",\n", + " figsize=(7, 3),\n", + " hpad=0,\n", + " savefig=\"vector7_stpatrick2.png\",\n", + ")" ] }, { @@ -492,7 +531,7 @@ } ], "source": [ - "1040/150" + "1040 / 150" ] }, { @@ -513,7 +552,7 @@ } ], "source": [ - "446/150" + "446 / 150" ] }, { @@ -531,10 +570,20 @@ 
"metadata": {}, "outputs": [], "source": [ - "def prep_and_plot(ai_counts,elog_hauls, df_vector, count_col, figsize = (7, 3), hpad = .5, savefig = None):\n", - " ai_counts_elog, elog_hauls = get_ai_counts_elog(ai_counts,elog_hauls)\n", + "def prep_and_plot(\n", + " ai_counts, elog_hauls, df_vector, count_col, figsize=(7, 3), hpad=0.5, savefig=None\n", + "):\n", + " ai_counts_elog, elog_hauls = get_ai_counts_elog(ai_counts, elog_hauls)\n", " ai_counts_elog, elog_vectors = identify_excluded_elogs(elog_hauls, df_vector, ai_counts_elog)\n", - " vector_score_plot(elog_hauls, ai_counts_elog, count_col, df_vector, figsize = figsize, hpad = hpad, savefig = savefig)" + " vector_score_plot(\n", + " elog_hauls,\n", + " ai_counts_elog,\n", + " count_col,\n", + " df_vector,\n", + " figsize=figsize,\n", + " hpad=hpad,\n", + " savefig=savefig,\n", + " )" ] }, { @@ -563,7 +612,15 @@ } ], "source": [ - "prep_and_plot(ai_counts = aiCounts_StPatrick2,elog_hauls = stpatrick2_elog, df_vector = vector7_stpatrick2.copy(), count_col = \"overallcatches\", figsize = (7, 3),hpad = 0, savefig = 'vector7_stpatrick2.png', )" + "prep_and_plot(\n", + " ai_counts=aiCounts_StPatrick2,\n", + " elog_hauls=stpatrick2_elog,\n", + " df_vector=vector7_stpatrick2.copy(),\n", + " count_col=\"overallcatches\",\n", + " figsize=(7, 3),\n", + " hpad=0,\n", + " savefig=\"vector7_stpatrick2.png\",\n", + ")" ] }, { @@ -584,7 +641,13 @@ } ], "source": [ - "prep_and_plot(ai_counts = aiCounts_StPatrick1,elog_hauls = stpatrick1_elog, df_vector = vector7_stpatrick1.copy(), count_col = \"overallcatches\", figsize = (7, 3))" + "prep_and_plot(\n", + " ai_counts=aiCounts_StPatrick1,\n", + " elog_hauls=stpatrick1_elog,\n", + " df_vector=vector7_stpatrick1.copy(),\n", + " count_col=\"overallcatches\",\n", + " figsize=(7, 3),\n", + ")" ] }, { @@ -605,7 +668,13 @@ } ], "source": [ - "prep_and_plot(ai_counts = aiCounts_Brancol1.copy(),elog_hauls = brancol1_elog.copy(), df_vector = vector7_brancol1.copy(), count_col = \"count\", figsize = (7, 3))" + "prep_and_plot(\n", + " ai_counts=aiCounts_Brancol1.copy(),\n", + " elog_hauls=brancol1_elog.copy(),\n", + " df_vector=vector7_brancol1.copy(),\n", + " count_col=\"count\",\n", + " figsize=(7, 3),\n", + ")" ] }, { @@ -634,7 +703,13 @@ } ], "source": [ - "prep_and_plot(ai_counts = aiCounts_Brancol2,elog_hauls = brancol2_elog, df_vector = vector7_brancol2.copy(), count_col = \"count\", figsize = (7, 3))" + "prep_and_plot(\n", + " ai_counts=aiCounts_Brancol2,\n", + " elog_hauls=brancol2_elog,\n", + " df_vector=vector7_brancol2.copy(),\n", + " count_col=\"count\",\n", + " figsize=(7, 3),\n", + ")" ] }, { @@ -655,7 +730,13 @@ } ], "source": [ - "prep_and_plot(ai_counts = aiCounts_Brancol3,elog_hauls = brancol3_elog, df_vector = vector7_brancol3.copy(), count_col = \"count\", figsize = (7, 3))" + "prep_and_plot(\n", + " ai_counts=aiCounts_Brancol3,\n", + " elog_hauls=brancol3_elog,\n", + " df_vector=vector7_brancol3.copy(),\n", + " count_col=\"count\",\n", + " figsize=(7, 3),\n", + ")" ] }, { @@ -678,7 +759,7 @@ } ], "source": [ - "df_vector_nulls['detail'].value_counts()" + "df_vector_nulls[\"detail\"].value_counts()" ] }, { @@ -691,7 +772,7 @@ "ai_counts = aiCounts_Brancol2.copy()\n", "elog_hauls = brancol2_elog.copy()\n", "df_vector = vector7_brancol2.copy()\n", - "ai_counts_elog, elog_hauls = get_ai_counts_elog(ai_counts,elog_hauls)\n", + "ai_counts_elog, elog_hauls = get_ai_counts_elog(ai_counts, elog_hauls)\n", "ai_counts_elog, elog_vectors = identify_excluded_elogs(elog_hauls, df_vector, ai_counts_elog)" ] 
},
@@ -850,7 +931,7 @@
 }
 ],
 "source": [
- "elog_hauls.set_index('id').drop(index = '140').head()"
+ "elog_hauls.set_index(\"id\").drop(index=\"140\").head()"
 ]
 },
 {
@@ -860,7 +941,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "df_vector.sort_values('datetime', inplace = True)"
+ "df_vector.sort_values(\"datetime\", inplace=True)"
 ]
 },
 {
@@ -1206,7 +1287,7 @@
 }
 ],
 "source": [
- "df_vector.set_index('datetime').loc['2024-02-07':'2024-02-08']"
+ "df_vector.set_index(\"datetime\").loc[\"2024-02-07\":\"2024-02-08\"]"
 ]
 },
 {
@@ -1237,16 +1318,15 @@
 }
 ],
 "source": [
- "bv_color = '#a2c662'\n",
- "ai_color = '#184EAD'\n",
- "elog_color = '#117347'\n",
- "vector_color = 'red'\n",
+ "bv_color = \"#a2c662\"\n",
+ "ai_color = \"#184EAD\"\n",
+ "elog_color = \"#117347\"\n",
+ "vector_color = \"red\"\n",
 "\n",
 "# predictions_color = '#43aa99'\n",
 "\n",
 "\n",
- "\n",
- "fig, axes = plt.subplots(3, 1, figsize = (7,3), sharex = True)\n",
+ "fig, axes = plt.subplots(3, 1, figsize=(7, 3), sharex=True)\n",
 "\n",
 "ax = axes[0]\n",
 "ax2 = axes[1]\n",
@@ -1260,9 +1340,9 @@
 "# twin2.set_ylim(-0.1,2)\n",
 "# twin2.set_yticks([0,1])\n",
 "\n",
- "annots = annotate_notes(ax ,elog_hauls, text_xy = (-40, 7), var = 10 )\n",
+ "annots = annotate_notes(ax, elog_hauls, text_xy=(-40, 7), var=10)\n",
 "\n",
- "# # TOP ax = Elog plot \n",
+ "# # TOP ax = Elog plot\n",
 "# sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog.loc[ai_counts_elog['unused_elog']==False], ax = ax, label = 'Elog Included', color =elog_color, lw = 1, )\n",
 "# sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog.loc[(ai_counts_elog['unused_elog']) | (ai_counts_elog['elog_haul']==0)], ax = ax, label = 'Elog Excluded', linestyle = '--', color =elog_color, lw = 1)\n",
 "# # sns.lineplot(x = 'utc_start_datetime', y = 'elog_haul', data = ai_counts_elog, ax = ax, label = 'Elog Excluded', linestyle = '--', color =elog_color, lw = 1)\n",
@@ -1277,12 +1357,21 @@
 "\n",
 "# BOTTOM ax3 = vector score\n",
 "# sns.lineplot(x = 'datetime', y= 'score', data = df_vector, ax = ax3, label = 'vector_score', color = vector_color, marker = 'o', markersize = 5, lw = 1)\n",
- "ax3.vlines(df_vector_nulls['datetime'], ymin=-.01, ymax=.2, color = 'grey', linewidth = .3, label = 'Null Score')\n",
- "markerline, stemlines, baseline = ax3.stem(df_vector['datetime'], df_vector['score'], linefmt = vector_color, label = 'Vector Score')\n",
- "stemlines.set_linewidths(.5)\n",
- "baseline.set_linewidth(.2)\n",
- "markerline.set_markersize(.3)\n",
- "ax3.set_ylabel('Vector Score')"
+ "ax3.vlines(\n",
+ " df_vector_nulls[\"datetime\"],\n",
+ " ymin=-0.01,\n",
+ " ymax=0.2,\n",
+ " color=\"grey\",\n",
+ " linewidth=0.3,\n",
+ " label=\"Null Score\",\n",
+ ")\n",
+ "markerline, stemlines, baseline = ax3.stem(\n",
+ " df_vector[\"datetime\"], df_vector[\"score\"], linefmt=vector_color, label=\"Vector Score\"\n",
+ ")\n",
+ "stemlines.set_linewidths(0.5)\n",
+ "baseline.set_linewidth(0.2)\n",
+ "markerline.set_markersize(0.3)\n",
+ "ax3.set_ylabel(\"Vector Score\")"
 ]
 },
 {
@@ -1292,7 +1381,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "df_vector_nulls = df_vector.loc[df_vector['score'].isna()]"
+ "df_vector_nulls = df_vector.loc[df_vector[\"score\"].isna()]"
 ]
 },
 {
@@ -1324,7 +1413,7 @@
 }
 ],
 "source": [
- "df_vector['score'].isna()"
+ "df_vector[\"score\"].isna()"
 ]
 }
],
diff --git a/notebooks/edge_integration_charts.ipynb b/notebooks/edge_integration_charts.ipynb
index 5d212cf..36c9e81 100644
--- a/notebooks/edge_integration_charts.ipynb
+++ b/notebooks/edge_integration_charts.ipynb @@ -41,17 +41,19 @@ "from sklearn.linear_model import LinearRegression\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", + "import itertools\n", "import matplotlib.gridspec as gridspec\n", "\n", "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "from tsai.all import *\n", "\n", @@ -66,7 +68,7 @@ "outputs": [], "source": [ "# read in ai & video status csv\n", - "df = pd.read_pickle('../data/integration_state_evaluations_videocopy_ai.pickle')" + "df = pd.read_pickle(\"../data/integration_state_evaluations_videocopy_ai.pickle\")" ] }, { @@ -270,20 +272,84 @@ "metadata": {}, "outputs": [], "source": [ - "report_colors = sns.color_palette(['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99'])\n", + "report_colors = sns.color_palette(\n", + " [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", + ")\n", "\n", - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + "color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]\n", "\n", "%matplotlib inline\n", - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + "color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]\n", + "\n", + "\n", "def show_color_pallete(pallete):\n", - "# fig, ax = plt.subplots()\n", - " \n", + " # fig, ax = plt.subplots()\n", + "\n", " sns.palplot(pallete, size=2)\n", " ax = plt.gca()\n", " for i, name in enumerate(pallete):\n", - " label = f'[{i}]'\n", - " ax.text(i, -.57, label,horizontalalignment='center', fontsize = 10) \n", + " label = f\"[{i}]\"\n", + " ax.text(i, -0.57, label, horizontalalignment=\"center\", fontsize=10)\n", " plt.show()" ] }, @@ -325,30 +391,44 @@ } ], "source": [ - "#create a dictionary of statuses and values\n", - "video_cols = ['videocopy_no_video', 
'videocopy_found_unable_to_copy','videocopy_late', 'videocopy_ok']\n", - "ai_cols = ['ai_vidok_but_did_not_try','ai_crash_no_output', 'ai_outputed_but_cant_parse','ai_crash_output_too_fast', 'ai_ok']\n", + "# create a dictionary of statuses and values\n", + "video_cols = [\n", + " \"videocopy_no_video\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_late\",\n", + " \"videocopy_ok\",\n", + "]\n", + "ai_cols = [\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + "]\n", "video_status = {}\n", "ai_status = {}\n", "for video_col in video_cols:\n", " value_counts = df[video_col].value_counts().to_dict()\n", " if True in value_counts.keys():\n", - " video_status[video_col] = {'value': value_counts[True]}\n", + " video_status[video_col] = {\"value\": value_counts[True]}\n", "\n", "\n", "for ai_col in ai_cols:\n", " value_counts = df[ai_col].value_counts().to_dict()\n", " # print(value_counts)\n", " if True in value_counts.keys():\n", - " ai_status[ai_col] = {'value': value_counts[True]}\n", + " ai_status[ai_col] = {\"value\": value_counts[True]}\n", "\n", "\n", - "ai_status['no_video'] = {'value': video_status['videocopy_no_video']['value']+ video_status['videocopy_found_unable_to_copy']['value']}\n", + "ai_status[\"no_video\"] = {\n", + " \"value\": video_status[\"videocopy_no_video\"][\"value\"]\n", + " + video_status[\"videocopy_found_unable_to_copy\"][\"value\"]\n", + "}\n", "\n", "print(video_status)\n", "print(ai_status)\n", "# print(statusDF['status'].value_counts().to_dict())\n", - "print(' ')\n" + "print(\" \")\n" ] }, { @@ -360,49 +440,22 @@ "source": [ "# dictionaries of column name and desired display label and color for ai status\n", "ai_dict = {\n", - " 'no_video': {\n", - " 'label': 'No Video',\n", - " 'color':'#949494' \n", - " },\n", - " 'ai_vidok_but_did_not_try': {\n", - " 'label': 'Video ok but did not try',\n", - " 'color':report_colors[1]\n", - " } ,\n", - " 'ai_crash_no_output': {\n", - " 'label': 'Crash, no output',\n", - " 'color': report_colors[10]\n", - " }, \n", - " 'ai_outputed_but_cant_parse': {\n", - " 'label': 'Outputted, cannot parse',\n", - " 'color':report_colors[13]\n", - " },\n", - " 'ai_crash_output_too_fast': {\n", - " 'label': 'Empty Output',\n", - " 'color':report_colors[6]\n", - " },\n", - " 'ai_ok': {\n", - " 'label': 'Ok',\n", - " 'color': report_colors[15]\n", - " }\n", + " \"no_video\": {\"label\": \"No Video\", \"color\": \"#949494\"},\n", + " \"ai_vidok_but_did_not_try\": {\"label\": \"Video ok but did not try\", \"color\": report_colors[1]},\n", + " \"ai_crash_no_output\": {\"label\": \"Crash, no output\", \"color\": report_colors[10]},\n", + " \"ai_outputed_but_cant_parse\": {\"label\": \"Outputted, cannot parse\", \"color\": report_colors[13]},\n", + " \"ai_crash_output_too_fast\": {\"label\": \"Empty Output\", \"color\": report_colors[6]},\n", + " \"ai_ok\": {\"label\": \"Ok\", \"color\": report_colors[15]},\n", "}\n", "\n", "video_dict = {\n", - " 'videocopy_no_video': {\n", - " 'label': 'No Video',\n", - " 'color': '#949494'\n", - " },\n", - " 'videocopy_found_unable_to_copy': {\n", - " 'label': 'Video found, unable to copy',\n", - " 'color': report_colors[10]\n", - " } ,\n", - " 'videocopy_late': {\n", - " 'label': 'Late',\n", - " 'color':report_colors[13]\n", - " }, \n", - " 'videocopy_ok': {\n", - " 'label': 'Ok',\n", - " 'color': report_colors[15]\n", + " \"videocopy_no_video\": {\"label\": \"No 
Video\", \"color\": \"#949494\"},\n", + " \"videocopy_found_unable_to_copy\": {\n", + " \"label\": \"Video found, unable to copy\",\n", + " \"color\": report_colors[10],\n", " },\n", + " \"videocopy_late\": {\"label\": \"Late\", \"color\": report_colors[13]},\n", + " \"videocopy_ok\": {\"label\": \"Ok\", \"color\": report_colors[15]},\n", "}" ] }, @@ -458,26 +511,22 @@ "outputs": [], "source": [ "gps_status = {\n", - " 'integration_errors': {\n", - " 'value':int_errs,\n", - " 'label': 'Integration error',\n", - " 'color': report_colors[10]\n", + " \"integration_errors\": {\n", + " \"value\": int_errs,\n", + " \"label\": \"Integration error\",\n", + " \"color\": report_colors[10],\n", + " },\n", + " \"int_errs_pwr\": {\n", + " \"value\": int_errs_pwr,\n", + " \"label\": \"Integration error at power boundary\",\n", + " \"color\": report_colors[13],\n", " },\n", - " 'int_errs_pwr': {\n", - " 'value':int_errs_pwr,\n", - " 'label': 'Integration error at power boundary',\n", - " 'color': report_colors[13]\n", - " } ,\n", - " 'out_of_disk_space': {\n", - " 'value':out_of_disk_space,\n", - " 'label': 'Out of disk space',\n", - " 'color': report_colors[5]\n", - " }, \n", - " 'gps_ok': {\n", - " 'value':ok,\n", - " 'label': 'Ok',\n", - " 'color': report_colors[15]\n", - " }\n", + " \"out_of_disk_space\": {\n", + " \"value\": out_of_disk_space,\n", + " \"label\": \"Out of disk space\",\n", + " \"color\": report_colors[5],\n", + " },\n", + " \"gps_ok\": {\"value\": ok, \"label\": \"Ok\", \"color\": report_colors[15]},\n", "}" ] }, @@ -507,9 +556,21 @@ } ], "source": [ - "video_col_order = ['videocopy_no_video', 'videocopy_found_unable_to_copy','videocopy_late', 'videocopy_ok']\n", - "ai_col_order = ['no_video', 'ai_vidok_but_did_not_try','ai_crash_no_output', 'ai_outputed_but_cant_parse','ai_crash_output_too_fast', 'ai_ok']\n", - "gps_order = ['integration_errors', 'int_errs_pwr','out_of_disk_space', 'gps_ok']\n", + "video_col_order = [\n", + " \"videocopy_no_video\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_late\",\n", + " \"videocopy_ok\",\n", + "]\n", + "ai_col_order = [\n", + " \"no_video\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + "]\n", + "gps_order = [\"integration_errors\", \"int_errs_pwr\", \"out_of_disk_space\", \"gps_ok\"]\n", "\n", "waffle_dict = {}\n", "\n", @@ -521,29 +582,33 @@ "plot_dict = {}\n", "\n", "\n", - "for col in video_col_order: \n", - " \n", - " value = video_status[col]['value']\n", - " color = video_status[col]['color']\n", - " label = video_status[col]['label'] \n", + "for col in video_col_order:\n", + " value = video_status[col][\"value\"]\n", + " color = video_status[col][\"color\"]\n", + " label = video_status[col][\"label\"]\n", " n = 1\n", "\n", - " percent = (value/len(df))*100\n", + " percent = (value / len(df)) * 100\n", " percent_formatted = f\"{percent:.1f}\\\\%\"\n", - " formatted_label = label + r' $\\bf{{{}}}$'.format(percent_formatted.replace(' ', r'\\;'))\n", - " \n", - " values.append(value/scale)\n", + " formatted_label = label + r\" $\\bf{{{}}}$\".format(percent_formatted.replace(\" \", r\"\\;\"))\n", + "\n", + " values.append(value / scale)\n", " labels.append(formatted_label)\n", " colors.append(color)\n", "\n", - "plot_dict['values'] = values\n", - "plot_dict['labels'] = labels\n", - "plot_dict['legend'] = {'loc': 'upper left', 'bbox_to_anchor': (1.05, 1.1), 'fontsize': 8, 'frameon':False}\n", - 
"plot_dict['title'] = {'label': \"Video Integration Status\", 'loc': 'left', 'fontsize': 9}\n", - "plot_dict['colors'] = colors\n", + "plot_dict[\"values\"] = values\n", + "plot_dict[\"labels\"] = labels\n", + "plot_dict[\"legend\"] = {\n", + " \"loc\": \"upper left\",\n", + " \"bbox_to_anchor\": (1.05, 1.1),\n", + " \"fontsize\": 8,\n", + " \"frameon\": False,\n", + "}\n", + "plot_dict[\"title\"] = {\"label\": \"Video Integration Status\", \"loc\": \"left\", \"fontsize\": 9}\n", + "plot_dict[\"colors\"] = colors\n", + "\n", + "waffle_dict[310 + n] = plot_dict\n", "\n", - "waffle_dict[310+n] = plot_dict\n", - " \n", "# ai status waffle\n", "values = []\n", "labels = []\n", @@ -551,27 +616,31 @@ "plot_dict = {}\n", "\n", "for col in ai_col_order:\n", - " \n", - " label = ai_status[col]['label']\n", - " color = ai_status[col]['color']\n", - " value = ai_status[col]['value']\n", + " label = ai_status[col][\"label\"]\n", + " color = ai_status[col][\"color\"]\n", + " value = ai_status[col][\"value\"]\n", " n = 2\n", - " \n", - " percent = (value/len(df))*100\n", + "\n", + " percent = (value / len(df)) * 100\n", " percent_formatted = f\"{percent:.1f}\\\\%\"\n", - " formatted_label = label + r' $\\bf{{{}}}$'.format(percent_formatted.replace(' ', r'\\;'))\n", - " \n", - " values.append(value/scale)\n", + " formatted_label = label + r\" $\\bf{{{}}}$\".format(percent_formatted.replace(\" \", r\"\\;\"))\n", + "\n", + " values.append(value / scale)\n", " labels.append(formatted_label)\n", " colors.append(color)\n", "\n", - "plot_dict['values'] = values\n", - "plot_dict['labels'] = labels\n", - "plot_dict['legend'] = {'loc': 'upper left', 'bbox_to_anchor': (1.05, 1.1), 'fontsize': 8, 'frameon':False}\n", - "plot_dict['title'] = {'label': \"AI Integration Status\", 'loc': 'left', 'fontsize': 9}\n", - "plot_dict['colors'] = colors\n", + "plot_dict[\"values\"] = values\n", + "plot_dict[\"labels\"] = labels\n", + "plot_dict[\"legend\"] = {\n", + " \"loc\": \"upper left\",\n", + " \"bbox_to_anchor\": (1.05, 1.1),\n", + " \"fontsize\": 8,\n", + " \"frameon\": False,\n", + "}\n", + "plot_dict[\"title\"] = {\"label\": \"AI Integration Status\", \"loc\": \"left\", \"fontsize\": 9}\n", + "plot_dict[\"colors\"] = colors\n", "\n", - "waffle_dict[310+n] = plot_dict\n", + "waffle_dict[310 + n] = plot_dict\n", "\n", "\n", "# gps status waffle\n", @@ -580,39 +649,43 @@ "values = []\n", "labels = []\n", "plot_dict = {}\n", - "colors =[]\n", + "colors = []\n", "\n", - "for col in gps_order: \n", - " value = gps_status[col]['value']\n", - " label = gps_status[col]['label']\n", - " color = gps_status[col]['color']\n", + "for col in gps_order:\n", + " value = gps_status[col][\"value\"]\n", + " label = gps_status[col][\"label\"]\n", + " color = gps_status[col][\"color\"]\n", " n = 1\n", "\n", - " percent = (value/gps_len)*100\n", + " percent = (value / gps_len) * 100\n", " percent_formatted = f\"{percent:.1f}\\\\%\"\n", - " formatted_label = label + r' $\\bf{{{}}}$'.format(percent_formatted.replace(' ', r'\\;'))\n", - " \n", - " values.append(value/scale)\n", + " formatted_label = label + r\" $\\bf{{{}}}$\".format(percent_formatted.replace(\" \", r\"\\;\"))\n", + "\n", + " values.append(value / scale)\n", " labels.append(formatted_label)\n", " colors.append(color)\n", "\n", - "plot_dict['values'] = values\n", - "plot_dict['labels'] = labels\n", - "plot_dict['legend'] = {'loc': 'upper left', 'bbox_to_anchor': (1.05, 1.1), 'fontsize': 8, 'frameon':False}\n", - "plot_dict['title'] = {'label': \"GPS Integration Status\", 'loc': 
'left', 'fontsize': 9}\n", - "plot_dict['colors'] = colors\n", + "plot_dict[\"values\"] = values\n", + "plot_dict[\"labels\"] = labels\n", + "plot_dict[\"legend\"] = {\n", + " \"loc\": \"upper left\",\n", + " \"bbox_to_anchor\": (1.05, 1.1),\n", + " \"fontsize\": 8,\n", + " \"frameon\": False,\n", + "}\n", + "plot_dict[\"title\"] = {\"label\": \"GPS Integration Status\", \"loc\": \"left\", \"fontsize\": 9}\n", + "plot_dict[\"colors\"] = colors\n", "\n", "waffle_dict[313] = plot_dict\n", "\n", "\n", - "\n", "fig = plt.figure(\n", " FigureClass=Waffle,\n", " plots=waffle_dict,\n", - " rows=10, # Outside parameter\n", + " rows=10, # Outside parameter\n", " # cmap_name=\"\", # Change color with cmap\n", - " rounding_rule='ceil', # Change rounding rule, so value less than 1000 will still have at least 1 block\n", - " figsize=(8, 4.89)\n", + " rounding_rule=\"ceil\", # Change rounding rule, so value less than 1000 will still have at least 1 block\n", + " figsize=(8, 4.89),\n", ")\n", "# fig.supxlabel(f'1 block = {scale} instances, Each instance occurs at a 5 minute interval',\n", "# fontsize=10,\n", @@ -621,7 +694,7 @@ "# )\n", "# Display the chart\n", "\n", - "plt.savefig('ai_video_status.png', bbox_inches='tight')" + "plt.savefig(\"ai_video_status.png\", bbox_inches=\"tight\")" ] } ], diff --git a/notebooks/elog_analysis.ipynb b/notebooks/elog_analysis.ipynb index a3eabb3..af67184 100644 --- a/notebooks/elog_analysis.ipynb +++ b/notebooks/elog_analysis.ipynb @@ -17,7 +17,6 @@ "metadata": {}, "outputs": [], "source": [ - " \n", "%autoreload 2" ] }, @@ -48,18 +47,21 @@ "from sklearn.linear_model import LinearRegression\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", + "import itertools\n", "# import matplotlib.gridspec as gridspec\n", "\n", "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "import json\n", + "\n", "# from tsai.all import *\n", "from IPython.display import display, Markdown\n", "import json\n", @@ -101,7 +103,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "# import matplotlib.ticker as ticker" ] }, @@ -120,7 +121,29 @@ "metadata": {}, "outputs": [], "source": [ - "colors = sns.color_palette(['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99'])" + "colors = sns.color_palette(\n", + " [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", + ")" ] }, { @@ -141,14 +164,36 @@ "outputs": [], "source": [ "%matplotlib inline\n", + "\n", + "\n", "def show_color_pallete():\n", - "# fig, ax = plt.subplots()\n", - " color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + " # fig, ax = plt.subplots()\n", + " color_list = [\n", + " \"#184EAD\",\n", + " 
\"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", " sns.palplot(color_list, size=2)\n", " ax = plt.gca()\n", " for i, name in enumerate(color_list):\n", - " label = f'[{i}] {name}'\n", - " ax.text(i, -.57, label,horizontalalignment='center', fontsize = 10) \n", + " label = f\"[{i}] {name}\"\n", + " ax.text(i, -0.57, label, horizontalalignment=\"center\", fontsize=10)\n", " plt.show()" ] }, @@ -178,22 +223,22 @@ ], "source": [ "# ST Patrick Trips\n", - "StPatrick1data = get_data(boat = 'stpatrick', trip_no = 0)\n", - "StPatrick2data = get_data(boat = 'stpatrick', trip_no = 1)\n", - "StPatrick3data = get_data(boat = 'stpatrick', trip_no = 2)\n", + "StPatrick1data = get_data(boat=\"stpatrick\", trip_no=0)\n", + "StPatrick2data = get_data(boat=\"stpatrick\", trip_no=1)\n", + "StPatrick3data = get_data(boat=\"stpatrick\", trip_no=2)\n", "\n", - "counts_StPatrick1 = StPatrick1data['all_counts']\n", - "counts_StPatrick2 = StPatrick2data['all_counts']\n", - "counts_StPatrick3 = StPatrick3data['all_counts']\n", + "counts_StPatrick1 = StPatrick1data[\"all_counts\"]\n", + "counts_StPatrick2 = StPatrick2data[\"all_counts\"]\n", + "counts_StPatrick3 = StPatrick3data[\"all_counts\"]\n", "\n", - "stpatrick1_elog = StPatrick1data['elogs']\n", - "stpatrick2_elog = StPatrick2data['elogs']\n", - "stpatrick3_elog = StPatrick3data['elogs']\n", + "stpatrick1_elog = StPatrick1data[\"elogs\"]\n", + "stpatrick2_elog = StPatrick2data[\"elogs\"]\n", + "stpatrick3_elog = StPatrick3data[\"elogs\"]\n", "\n", "\n", - "stpatrick1_bv_sets = StPatrick1data['bv_set_counts']\n", - "stpatrick2_bv_sets = StPatrick2data['bv_set_counts']\n", - "stpatrick3_bv_sets = StPatrick3data['bv_set_counts']" + "stpatrick1_bv_sets = StPatrick1data[\"bv_set_counts\"]\n", + "stpatrick2_bv_sets = StPatrick2data[\"bv_set_counts\"]\n", + "stpatrick3_bv_sets = StPatrick3data[\"bv_set_counts\"]" ] }, { @@ -214,27 +259,27 @@ ], "source": [ "# Brancol Trips\n", - "Brancol1data = get_data(boat = 'brancol', trip_no = 0)\n", - "Brancol2data = get_data(boat = 'brancol', trip_no = 1)\n", - "Brancol3data = get_data(boat = 'brancol', trip_no = 2)\n", + "Brancol1data = get_data(boat=\"brancol\", trip_no=0)\n", + "Brancol2data = get_data(boat=\"brancol\", trip_no=1)\n", + "Brancol3data = get_data(boat=\"brancol\", trip_no=2)\n", "\n", "\n", - "counts_Brancol1 = Brancol1data['all_counts']\n", - "counts_Brancol2 = Brancol2data['all_counts']\n", - "counts_Brancol3 = Brancol3data['all_counts']\n", + "counts_Brancol1 = Brancol1data[\"all_counts\"]\n", + "counts_Brancol2 = Brancol2data[\"all_counts\"]\n", + "counts_Brancol3 = Brancol3data[\"all_counts\"]\n", "\n", - "brancol1_elog = Brancol1data['elogs']\n", - "brancol2_elog = Brancol2data['elogs']\n", - "brancol3_elog = Brancol3data['elogs']\n", + "brancol1_elog = Brancol1data[\"elogs\"]\n", + "brancol2_elog = Brancol2data[\"elogs\"]\n", + "brancol3_elog = Brancol3data[\"elogs\"]\n", "\n", - "brancol1_bv_sets = Brancol1data['bv_set_counts']\n", - "brancol2_bv_sets = Brancol2data['bv_set_counts']\n", - "brancol3_bv_sets = Brancol3data['bv_set_counts']\n", + "brancol1_bv_sets = Brancol1data[\"bv_set_counts\"]\n", + "brancol2_bv_sets = Brancol2data[\"bv_set_counts\"]\n", + 
"brancol3_bv_sets = Brancol3data[\"bv_set_counts\"]\n", "\n", "\n", - "brancol1trip = Brancol1data['trip_info']\n", - "brancol2trip = Brancol2data['trip_info']\n", - "brancol3trip = Brancol3data['trip_info']" + "brancol1trip = Brancol1data[\"trip_info\"]\n", + "brancol2trip = Brancol2data[\"trip_info\"]\n", + "brancol3trip = Brancol3data[\"trip_info\"]" ] }, { @@ -307,10 +352,10 @@ "# haul_start_col = 'haul_start_datetime'\n", "# haul_end_col = 'haul_end_datetime'\n", "# y_val = 0\n", - " \n", - "# colors = color_dict[source] \n", - " \n", - " \n", + "\n", + "# colors = color_dict[source]\n", + "\n", + "\n", "# #plot_hauling\n", "# plot_hlines(ax, df,y_val, haul_start_col, haul_end_col, 12, colors['haul'], 'haul')\n", "\n", @@ -327,9 +372,9 @@ "# text = f'count: {row[count_col]}'\n", "# x_value = row[x_col]\n", "# ax.text( x_value,y_value , text, fontsize=10, horizontalalignment='right', bbox=props)\n", - " \n", - " \n", - "# # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes) " + "\n", + "\n", + "# # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes)" ] }, { @@ -340,35 +385,34 @@ "outputs": [], "source": [ "def plot_set_bars(ax, df, source, color_dict):\n", - " if source == 'elog':\n", - " set_start_col = 'systemstartsetdatetime'\n", - " set_end_col = 'systemendsetdatetime'\n", - " haul_start_col = 'systemstarthauldatetime'\n", - " haul_end_col = 'systemendhauldatetime'\n", + " if source == \"elog\":\n", + " set_start_col = \"systemstartsetdatetime\"\n", + " set_end_col = \"systemendsetdatetime\"\n", + " haul_start_col = \"systemstarthauldatetime\"\n", + " haul_end_col = \"systemendhauldatetime\"\n", " y_val = 1.7\n", - " \n", - " elif source == 'bv':\n", - " set_start_col = 'set_start_datetime'\n", - " set_end_col = 'set_end_datetime'\n", - " haul_start_col = 'haul_start_datetime'\n", - " haul_end_col = 'haul_end_datetime'\n", - " y_val = .7\n", - "\n", - " df['set_duration'] = df[set_end_col] - df[set_start_col]\n", - " df['haul_duration'] = df[haul_end_col] - df[haul_start_col]\n", - " df['mid_duration'] = df[haul_start_col] - df[set_end_col]\n", - " \n", - " set_x = list(zip(df[set_start_col], df['set_duration']))\n", - " haul_x = list(zip(df[haul_start_col], df['haul_duration']))\n", - " mid_x = list(zip(df[set_end_col], df['mid_duration']))\n", - " \n", - " y = (y_val, .6)\n", + "\n", + " elif source == \"bv\":\n", + " set_start_col = \"set_start_datetime\"\n", + " set_end_col = \"set_end_datetime\"\n", + " haul_start_col = \"haul_start_datetime\"\n", + " haul_end_col = \"haul_end_datetime\"\n", + " y_val = 0.7\n", + "\n", + " df[\"set_duration\"] = df[set_end_col] - df[set_start_col]\n", + " df[\"haul_duration\"] = df[haul_end_col] - df[haul_start_col]\n", + " df[\"mid_duration\"] = df[haul_start_col] - df[set_end_col]\n", + "\n", + " set_x = list(zip(df[set_start_col], df[\"set_duration\"]))\n", + " haul_x = list(zip(df[haul_start_col], df[\"haul_duration\"]))\n", + " mid_x = list(zip(df[set_end_col], df[\"mid_duration\"]))\n", + "\n", + " y = (y_val, 0.6)\n", "\n", " colors = color_dict[source]\n", - " ax.broken_barh(mid_x, y, facecolors = colors['mid'], edgecolor = 'face')\n", - " ax.broken_barh(haul_x, y, facecolors = colors['haul'], edgecolor = 'face')\n", - " ax.broken_barh(set_x, y, facecolors = colors['set'], edgecolor = 'face')\n", - " " + " ax.broken_barh(mid_x, y, facecolors=colors[\"mid\"], edgecolor=\"face\")\n", + " ax.broken_barh(haul_x, y, facecolors=colors[\"haul\"], 
edgecolor=\"face\")\n", + " ax.broken_barh(set_x, y, facecolors=colors[\"set\"], edgecolor=\"face\")\n" ] }, { @@ -378,48 +422,42 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_elog_comparisons(ax, dfElog, dfBV, title, legend = True, annotate_counts = False, display_axis= True):\n", - "\n", + "def plot_elog_comparisons(\n", + " ax, dfElog, dfBV, title, legend=True, annotate_counts=False, display_axis=True\n", + "):\n", " # ax[0].autofmt_xdate()\n", "\n", - " \n", - " ax.set_yticks([1,2],('actual','elogs'))\n", + " ax.set_yticks([1, 2], (\"actual\", \"elogs\"))\n", " # ax.set_yticks([0,1,2],('bv','elogs',' '))\n", - " fig.suptitle(titles['main'], fontsize = 20)\n", - " \n", + " fig.suptitle(titles[\"main\"], fontsize=20)\n", + "\n", " # df1 = brancol1_elog\n", " # df1sets =brancol1_bv_sets\n", "\n", - " \n", - "\n", " if annotate_counts:\n", - " dfElog['totalcount'] = dfElog['bycatchcount'].astype(int) + dfElog['catchcount'].astype(int)\n", - " dfBV['retained_count'] = dfBV['retained_count'].astype('Int64')\n", - " annotate_counts(ax, dfElog, 'totalcount', 'systemstarthauldatetime', 1.2)\n", - " annotate_counts(ax, dfBV, 'retained_count', 'haul_start_datetime', 0.2)\n", - " \n", + " dfElog[\"totalcount\"] = dfElog[\"bycatchcount\"].astype(int) + dfElog[\"catchcount\"].astype(int)\n", + " dfBV[\"retained_count\"] = dfBV[\"retained_count\"].astype(\"Int64\")\n", + " annotate_counts(ax, dfElog, \"totalcount\", \"systemstarthauldatetime\", 1.2)\n", + " annotate_counts(ax, dfBV, \"retained_count\", \"haul_start_datetime\", 0.2)\n", "\n", - " plot_set_bars(ax, dfElog, 'elog', color_dict)\n", - " plot_set_bars(ax, dfBV, 'bv', color_dict)\n", + " plot_set_bars(ax, dfElog, \"elog\", color_dict)\n", + " plot_set_bars(ax, dfBV, \"bv\", color_dict)\n", "\n", - " \n", + " ax.set_title(title, x=0.1, y=1, fontsize=9)\n", "\n", - " ax.set_title(title,x = .1, y = 1, fontsize = 9)\n", - " \n", " # ax.autoscale()\n", " # ax[0].set_ylim(-.5,1.5)\n", " # ax[0].tick_params(axis='x', labelrotation=45)\n", "\n", " if legend:\n", " legend_elements = []\n", - " for label, color in color_dict['elog'].items():\n", - " \n", - " legend_elements.append(Patch(facecolor=color, edgecolor=color,\n", - " label=label))\n", - " ax.legend(handles = legend_elements, loc='center', bbox_to_anchor=(1.08, 1.1), ncol = 1, fontsize = 12)\n", + " for label, color in color_dict[\"elog\"].items():\n", + " legend_elements.append(Patch(facecolor=color, edgecolor=color, label=label))\n", + " ax.legend(\n", + " handles=legend_elements, loc=\"center\", bbox_to_anchor=(1.08, 1.1), ncol=1, fontsize=12\n", + " )\n", "\n", - " \n", - " #use consise date formater\n", + " # use consise date formater\n", "\n", " if display_axis:\n", " locator = DayLocator()\n", @@ -446,8 +484,12 @@ "outputs": [], "source": [ "color_dict = {\n", - " 'bv': {'set':'#40a018', 'mid':'#a2c662', 'haul':'#117347', },\n", - " 'elog': {'set':'#40a018', 'mid':'#a2c662', 'haul':'#117347'},\n", + " \"bv\": {\n", + " \"set\": \"#40a018\",\n", + " \"mid\": \"#a2c662\",\n", + " \"haul\": \"#117347\",\n", + " },\n", + " \"elog\": {\"set\": \"#40a018\", \"mid\": \"#a2c662\", \"haul\": \"#117347\"},\n", " # 'elog':{'set':'#648fff', 'haul':'#184EAD', 'mid':'#88ccee'}\n", "}" ] @@ -471,24 +513,24 @@ ], "source": [ "df1 = brancol1_elog\n", - "df1sets =brancol1_bv_sets\n", - "trip1 = Brancol1data['trip_info']\n", + "df1sets = brancol1_bv_sets\n", + "trip1 = Brancol1data[\"trip_info\"]\n", "\n", "df2 = brancol2_elog\n", "df2sets = brancol2_bv_sets\n", - "trip2 = 
Brancol2data['trip_info']\n", + "trip2 = Brancol2data[\"trip_info\"]\n", "\n", "trip1title = f'Brancol Trip 1: {trip1['trip_start_date']} to {trip1['trip_end_date']}'\n", "trip2title = f'Brancol Trip 2: {trip2['trip_start_date']} to {trip2['trip_end_date']}'\n", "\n", - "titles = {'main':'', 'plot1':trip1title, 'plot2':trip2title}\n", + "titles = {\"main\": \"\", \"plot1\": trip1title, \"plot2\": trip2title}\n", "\n", - "fig, ax = plt.subplots(2,1, figsize=(10,3))\n", + "fig, ax = plt.subplots(2, 1, figsize=(10, 3))\n", "# fig.tight_layout(pad=4.0)\n", - "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend = False)\n", - "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend = True)\n", - "plt.subplots_adjust(hspace=.7)\n", - "plt.savefig('elog_comparisons_brancol.png', bbox_inches='tight')" + "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend=False)\n", + "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend=True)\n", + "plt.subplots_adjust(hspace=0.7)\n", + "plt.savefig(\"elog_comparisons_brancol.png\", bbox_inches=\"tight\")" ] }, { @@ -510,24 +552,24 @@ ], "source": [ "df1 = stpatrick1_elog\n", - "df1sets =stpatrick1_bv_sets\n", - "trip1 = StPatrick1data['trip_info']\n", + "df1sets = stpatrick1_bv_sets\n", + "trip1 = StPatrick1data[\"trip_info\"]\n", "\n", "df2 = stpatrick2_elog\n", "df2sets = stpatrick2_bv_sets\n", - "trip2 = StPatrick2data['trip_info']\n", + "trip2 = StPatrick2data[\"trip_info\"]\n", "\n", "trip1title = f'St. Patrick Trip 1: {trip1['trip_start_date']} to {trip1['trip_end_date']}'\n", "trip2title = f'St. Patrick Trip 2: {trip2['trip_start_date']} to {trip2['trip_end_date']}'\n", "\n", - "titles = {'main':'', 'plot1':trip1title, 'plot2':trip2title}\n", + "titles = {\"main\": \"\", \"plot1\": trip1title, \"plot2\": trip2title}\n", "\n", - "fig, ax = plt.subplots(2,1, figsize=(10,3))\n", + "fig, ax = plt.subplots(2, 1, figsize=(10, 3))\n", "# fig.tight_layout(pad=4.0)\n", - "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend = False)\n", - "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend = True)\n", - "plt.subplots_adjust(hspace=.7)\n", - "plt.savefig('elog_comparisons_stpatrick.png',bbox_inches='tight')" + "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend=False)\n", + "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend=True)\n", + "plt.subplots_adjust(hspace=0.7)\n", + "plt.savefig(\"elog_comparisons_stpatrick.png\", bbox_inches=\"tight\")" ] }, { @@ -547,11 +589,11 @@ "metadata": {}, "outputs": [], "source": [ - "stpatrick2_elog.sort_values(by = 'systemstartsetdatetime', inplace = True)\n", + "stpatrick2_elog.sort_values(by=\"systemstartsetdatetime\", inplace=True)\n", "\n", - "missing_set = stpatrick2_elog.sort_values(by = 'systemstartsetdatetime').iloc[[0]]\n", + "missing_set = stpatrick2_elog.sort_values(by=\"systemstartsetdatetime\").iloc[[0]]\n", "\n", - "stpatrick2_elog.drop(stpatrick2_elog.index[0], axis = 0, inplace = True)\n", + "stpatrick2_elog.drop(stpatrick2_elog.index[0], axis=0, inplace=True)\n", "\n", "stpatrick1_elog = pd.concat([stpatrick1_elog, missing_set])" ] @@ -575,23 +617,23 @@ ], "source": [ "df1 = brancol3_elog\n", - "df1sets =brancol3_bv_sets\n", - "trip1 = Brancol3data['trip_info']\n", + "df1sets = brancol3_bv_sets\n", + "trip1 = Brancol3data[\"trip_info\"]\n", "\n", "df2 = stpatrick3_elog\n", "df2sets = stpatrick3_bv_sets\n", - "trip2 = StPatrick3data['trip_info']\n", + "trip2 = StPatrick3data[\"trip_info\"]\n", "\n", "trip1title = f'St. 
Patrick Trip 1: {trip1['trip_start_date']} to {trip1['trip_end_date']}'\n",
 "trip2title = f'St. Patrick Trip 2: {trip2['trip_start_date']} to {trip2['trip_end_date']}'\n",
 "\n",
- "titles = {'main':'', 'plot1':trip1title, 'plot2':trip2title}\n",
+ "titles = {\"main\": \"\", \"plot1\": trip1title, \"plot2\": trip2title}\n",
 "\n",
- "fig, ax = plt.subplots(2,1, figsize=(8,4))\n",
+ "fig, ax = plt.subplots(2, 1, figsize=(8, 4))\n",
 "fig.tight_layout(pad=4.0)\n",
- "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend = False)\n",
- "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend = True)\n",
- "plt.subplots_adjust(wspace=0, hspace=.1)"
+ "plot_elog_comparisons(ax[0], df1, df1sets, trip1title, legend=False)\n",
+ "plot_elog_comparisons(ax[1], df2, df2sets, trip2title, legend=True)\n",
+ "plt.subplots_adjust(wspace=0, hspace=0.1)"
 ]
 },
 {
@@ -604,9 +646,11 @@
 "# a typo discovered in BV notes, mix up of AM and PM, changing to intended value\n",
 "brancol2_bv_sets_adjusted = brancol2_bv_sets.copy()\n",
 "\n",
- "brancol2_bv_sets_adjusted.loc[brancol2_bv_sets_adjusted['set_number'] == '2','haul_end_datetime'] = brancol2_bv_sets_adjusted.loc[brancol2_bv_sets_adjusted['set_number'] == '2']['haul_end_datetime'] - pd.to_timedelta('12 hours')\n",
- "\n",
- "\n"
+ "brancol2_bv_sets_adjusted.loc[\n",
+ " brancol2_bv_sets_adjusted[\"set_number\"] == \"2\", \"haul_end_datetime\"\n",
+ "] = brancol2_bv_sets_adjusted.loc[brancol2_bv_sets_adjusted[\"set_number\"] == \"2\"][\n",
+ " \"haul_end_datetime\"\n",
+ "] - pd.to_timedelta(\"12 hours\")\n"
 ]
 },
 {
@@ -645,15 +689,20 @@
 "outputs": [],
 "source": [
 "def get_elog_bv_deltas(dfMerged):\n",
- " dfMerged['set_start_delta_minutes'] = (dfMerged['systemstartsetdatetime'] - dfMerged['set_start_datetime']).dt.total_seconds()/60\n",
- " dfMerged['set_end_delta_minutes'] = (dfMerged['systemendsetdatetime'] - dfMerged['set_end_datetime']).dt.total_seconds()/60\n",
- " dfMerged['haul_start_delta_minutes'] = (dfMerged['systemstarthauldatetime'] - dfMerged['haul_start_datetime']).dt.total_seconds()/60\n",
- " dfMerged['haul_end_delta_minutes'] = (dfMerged['systemendhauldatetime'] - dfMerged['haul_end_datetime']).dt.total_seconds()/60\n",
- " dfMerged['catch_count_delta'] = dfMerged['elog_total_count'] - dfMerged['bv_retained_count'] \n",
+ " dfMerged[\"set_start_delta_minutes\"] = (\n",
+ " dfMerged[\"systemstartsetdatetime\"] - dfMerged[\"set_start_datetime\"]\n",
+ " ).dt.total_seconds() / 60\n",
+ " dfMerged[\"set_end_delta_minutes\"] = (\n",
+ " dfMerged[\"systemendsetdatetime\"] - dfMerged[\"set_end_datetime\"]\n",
+ " ).dt.total_seconds() / 60\n",
+ " dfMerged[\"haul_start_delta_minutes\"] = (\n",
+ " dfMerged[\"systemstarthauldatetime\"] - dfMerged[\"haul_start_datetime\"]\n",
+ " ).dt.total_seconds() / 60\n",
+ " dfMerged[\"haul_end_delta_minutes\"] = (\n",
+ " dfMerged[\"systemendhauldatetime\"] - dfMerged[\"haul_end_datetime\"]\n",
+ " ).dt.total_seconds() / 60\n",
+ " dfMerged[\"catch_count_delta\"] = dfMerged[\"elog_total_count\"] - dfMerged[\"bv_retained_count\"]\n",
 "\n",
- "\n",
- "\n",
- " \n",
 " # dfMerged['set_start_delta'].\n"
 ]
 },
@@ -664,30 +713,67 @@
 "outputs": [],
 "source": [
- "def merge_bv_elog_sets(dfElog, dfBV, delta = '1 hour'):\n",
- " dfElog.sort_values(by = 'systemstartsetdatetime', inplace = True)\n",
- " dfBV.sort_values(by = 'set_start_datetime', inplace = True)\n",
- " elog_columns = ['id','bycatchcount', 
'catchcount','systemstartsetdatetime','systemendsetdatetime','systemstarthauldatetime','systemendhauldatetime']\n", - " bv_columns = ['set_number','set_start_datetime','set_end_datetime', 'haul_start_datetime','haul_end_datetime', 'retained_count']\n", - " dfMerged = pd.merge_asof(dfElog[elog_columns], dfBV[bv_columns], left_on = 'systemstartsetdatetime', right_on = 'set_start_datetime', tolerance=pd.Timedelta(delta), direction = 'nearest')\n", - "\n", - " col_order = ['id','set_number', \n", - " 'bycatchcount', 'catchcount', \n", - " 'retained_count',\n", - " 'systemstartsetdatetime','set_start_datetime',\n", - " 'systemendsetdatetime', 'set_end_datetime',\n", - " 'systemstarthauldatetime', 'haul_start_datetime',\n", - " 'systemendhauldatetime', 'haul_end_datetime']\n", + "def merge_bv_elog_sets(dfElog, dfBV, delta=\"1 hour\"):\n", + " dfElog.sort_values(by=\"systemstartsetdatetime\", inplace=True)\n", + " dfBV.sort_values(by=\"set_start_datetime\", inplace=True)\n", + " elog_columns = [\n", + " \"id\",\n", + " \"bycatchcount\",\n", + " \"catchcount\",\n", + " \"systemstartsetdatetime\",\n", + " \"systemendsetdatetime\",\n", + " \"systemstarthauldatetime\",\n", + " \"systemendhauldatetime\",\n", + " ]\n", + " bv_columns = [\n", + " \"set_number\",\n", + " \"set_start_datetime\",\n", + " \"set_end_datetime\",\n", + " \"haul_start_datetime\",\n", + " \"haul_end_datetime\",\n", + " \"retained_count\",\n", + " ]\n", + " dfMerged = pd.merge_asof(\n", + " dfElog[elog_columns],\n", + " dfBV[bv_columns],\n", + " left_on=\"systemstartsetdatetime\",\n", + " right_on=\"set_start_datetime\",\n", + " tolerance=pd.Timedelta(delta),\n", + " direction=\"nearest\",\n", + " )\n", + "\n", + " col_order = [\n", + " \"id\",\n", + " \"set_number\",\n", + " \"bycatchcount\",\n", + " \"catchcount\",\n", + " \"retained_count\",\n", + " \"systemstartsetdatetime\",\n", + " \"set_start_datetime\",\n", + " \"systemendsetdatetime\",\n", + " \"set_end_datetime\",\n", + " \"systemstarthauldatetime\",\n", + " \"haul_start_datetime\",\n", + " \"systemendhauldatetime\",\n", + " \"haul_end_datetime\",\n", + " ]\n", " dfMerged = dfMerged[col_order]\n", - " \n", - " \n", "\n", - " dfMerged.rename(columns = {'retained_count':'bv_retained_count','catchcount':'elog_target_count','bycatchcount':'elog_bycatch_count'}, inplace = True)\n", + " dfMerged.rename(\n", + " columns={\n", + " \"retained_count\": \"bv_retained_count\",\n", + " \"catchcount\": \"elog_target_count\",\n", + " \"bycatchcount\": \"elog_bycatch_count\",\n", + " },\n", + " inplace=True,\n", + " )\n", "\n", - " dfMerged['elog_total_count'] = dfMerged['elog_target_count'].astype(int) + dfMerged['elog_bycatch_count'].astype(int)\n", + " dfMerged[\"elog_total_count\"] = dfMerged[\"elog_target_count\"].astype(int) + dfMerged[\n", + " \"elog_bycatch_count\"\n", + " ].astype(int)\n", "\n", " get_elog_bv_deltas(dfMerged)\n", - " \n", + "\n", " return dfMerged" ] }, @@ -699,7 +785,12 @@ "outputs": [], "source": [ "def get_combined_delta_means(dfs):\n", - " delta_cols = ['set_start_delta_minutes','set_end_delta_minutes','haul_start_delta_minutes','haul_end_delta_minutes']\n", + " delta_cols = [\n", + " \"set_start_delta_minutes\",\n", + " \"set_end_delta_minutes\",\n", + " \"haul_start_delta_minutes\",\n", + " \"haul_end_delta_minutes\",\n", + " ]\n", " for col in delta_cols:\n", " combined_list = []\n", " for df in dfs:\n", @@ -708,16 +799,17 @@ " # print(combined_list)\n", " avg = np.mean(combined_list)\n", " # print(stats.describe(combined_list))\n", - " \n", - " \n", - 
" print(f'{col} Mean: {avg} min | Min: {np.min(combined_list)} | Max: {np.max(combined_list)}')\n", - " q = [10,25,50,75,80,90]\n", - " percentile_str = ''\n", - " percentiles = np.percentile(combined_list, q = q)\n", + "\n", + " print(\n", + " f\"{col} Mean: {avg} min | Min: {np.min(combined_list)} | Max: {np.max(combined_list)}\"\n", + " )\n", + " q = [10, 25, 50, 75, 80, 90]\n", + " percentile_str = \"\"\n", + " percentiles = np.percentile(combined_list, q=q)\n", " for p, percentile in zip(q, percentiles):\n", - " percentile_str += f'\\033[1m{p}%:\\033[0m {percentile} '\n", - " print(percentile_str) \n", - " print(' ')\n", + " percentile_str += f\"\\033[1m{p}%:\\033[0m {percentile} \"\n", + " print(percentile_str)\n", + " print(\" \")\n", " # print(f'number of logged sets: {len(combined_list)}')" ] }, @@ -729,10 +821,15 @@ "outputs": [], "source": [ "def get_delta_means(dfMerged):\n", - " delta_cols = ['set_start_delta_minutes','set_end_delta_minutes','haul_start_delta_minutes','haul_end_delta_minutes']\n", + " delta_cols = [\n", + " \"set_start_delta_minutes\",\n", + " \"set_end_delta_minutes\",\n", + " \"haul_start_delta_minutes\",\n", + " \"haul_end_delta_minutes\",\n", + " ]\n", " for col in delta_cols:\n", " avg = np.mean(dfMerged[col].abs())\n", - " print(f'{col}: {avg} min')" + " print(f\"{col}: {avg} min\")" ] }, { @@ -744,7 +841,12 @@ "source": [ "def get_combined_deltas(dfs):\n", " deltas = {}\n", - " delta_cols = ['set_start_delta_minutes','set_end_delta_minutes','haul_start_delta_minutes','haul_end_delta_minutes']\n", + " delta_cols = [\n", + " \"set_start_delta_minutes\",\n", + " \"set_end_delta_minutes\",\n", + " \"haul_start_delta_minutes\",\n", + " \"haul_end_delta_minutes\",\n", + " ]\n", " for col in delta_cols:\n", " combined_list = []\n", " for df in dfs:\n", @@ -765,7 +867,7 @@ "outputs": [], "source": [ "def get_combined_catch_deltas(dfs):\n", - " col = 'catch_count_delta'\n", + " col = \"catch_count_delta\"\n", " delta_list = []\n", " for df in dfs:\n", " df_list = df[col].to_list()\n", @@ -789,12 +891,12 @@ "outputs": [], "source": [ "dfBrancol1_merged = merge_bv_elog_sets(brancol1_elog, brancol1_bv_sets)\n", - "dfBrancol2_merged = merge_bv_elog_sets(brancol2_elog, brancol2_bv_sets_adjusted, '2 hours')\n", - "dfBrancol3_merged = merge_bv_elog_sets(brancol3_elog, brancol3_bv_sets, '2 hours')\n", + "dfBrancol2_merged = merge_bv_elog_sets(brancol2_elog, brancol2_bv_sets_adjusted, \"2 hours\")\n", + "dfBrancol3_merged = merge_bv_elog_sets(brancol3_elog, brancol3_bv_sets, \"2 hours\")\n", "\n", - "dfStPatrick1_merged = merge_bv_elog_sets(stpatrick1_elog, stpatrick1_bv_sets, '2 hours')\n", - "dfStPatrick2_merged = merge_bv_elog_sets(stpatrick2_elog, stpatrick2_bv_sets, '5 hours')\n", - "dfStPatrick3_merged = merge_bv_elog_sets(stpatrick3_elog, stpatrick3_bv_sets, '2 hours')" + "dfStPatrick1_merged = merge_bv_elog_sets(stpatrick1_elog, stpatrick1_bv_sets, \"2 hours\")\n", + "dfStPatrick2_merged = merge_bv_elog_sets(stpatrick2_elog, stpatrick2_bv_sets, \"5 hours\")\n", + "dfStPatrick3_merged = merge_bv_elog_sets(stpatrick3_elog, stpatrick3_bv_sets, \"2 hours\")" ] }, { @@ -812,7 +914,14 @@ "metadata": {}, "outputs": [], "source": [ - "all_merged_dfs = [dfBrancol1_merged, dfBrancol2_merged,dfBrancol3_merged, dfStPatrick1_merged, dfStPatrick2_merged, dfStPatrick3_merged]" + "all_merged_dfs = [\n", + " dfBrancol1_merged,\n", + " dfBrancol2_merged,\n", + " dfBrancol3_merged,\n", + " dfStPatrick1_merged,\n", + " dfStPatrick2_merged,\n", + " dfStPatrick3_merged,\n", + "]" ] }, 
{ @@ -841,8 +950,16 @@ } ], "source": [ - "\n", - "get_combined_delta_means([dfBrancol1_merged, dfBrancol2_merged,dfBrancol3_merged, dfStPatrick1_merged, dfStPatrick2_merged, dfStPatrick3_merged])" + "get_combined_delta_means(\n", + " [\n", + " dfBrancol1_merged,\n", + " dfBrancol2_merged,\n", + " dfBrancol3_merged,\n", + " dfStPatrick1_merged,\n", + " dfStPatrick2_merged,\n", + " dfStPatrick3_merged,\n", + " ]\n", + ")" ] }, { @@ -864,11 +981,17 @@ "source": [ "df = pd.DataFrame()\n", "for col, values in deltas.items():\n", - " df_col = pd.DataFrame({'column': np.repeat(col, len(values)), 'value': values})\n", + " df_col = pd.DataFrame({\"column\": np.repeat(col, len(values)), \"value\": values})\n", " df = pd.concat([df, df_col])\n", "\n", - "label_dict = {'set_start_delta_minutes': 'Set Start', 'set_end_delta_minutes':'Set End', 'haul_start_delta_minutes':'Haul Start', 'haul_end_delta_minutes':'Haul End','catch_count_delta': 'Catch Count' }\n", - "df['label'] = df['column'].map(label_dict)" + "label_dict = {\n", + " \"set_start_delta_minutes\": \"Set Start\",\n", + " \"set_end_delta_minutes\": \"Set End\",\n", + " \"haul_start_delta_minutes\": \"Haul Start\",\n", + " \"haul_end_delta_minutes\": \"Haul End\",\n", + " \"catch_count_delta\": \"Catch Count\",\n", + "}\n", + "df[\"label\"] = df[\"column\"].map(label_dict)" ] }, { @@ -892,38 +1015,36 @@ "# Create the plot\n", "sns.set_theme(style=\"ticks\")\n", "fig, ax = plt.subplots(figsize=(8, 2.5)) # Increased the height to 4 for better spacing\n", - "sns.boxplot(y='label', x='value', data=df, hue='label', ax=ax, palette=colors4, width=0.5)\n", + "sns.boxplot(y=\"label\", x=\"value\", data=df, hue=\"label\", ax=ax, palette=colors4, width=0.5)\n", "ax.yaxis.label.set_visible(False)\n", "ax.set_xlim(-1, 40)\n", - "ax.set_xlabel('Time Delta (Absolute Value in Minutes)', size = 11)\n", + "ax.set_xlabel(\"Time Delta (Absolute Value in Minutes)\", size=11)\n", "\n", "# Calculate medians\n", - "medians = df.groupby('label')['value'].median()\n", + "medians = df.groupby(\"label\")[\"value\"].median()\n", "\n", "# Annotate medians above the boxplots with arrows pointing to the median line\n", "for label in medians.index:\n", " median_value = medians[label]\n", - " y_pos = df['label'].unique().tolist().index(label)\n", + " y_pos = df[\"label\"].unique().tolist().index(label)\n", " ax.annotate(\n", - " f'{median_value:.2f}', \n", - " xy=(median_value, y_pos), \n", + " f\"{median_value:.2f}\",\n", + " xy=(median_value, y_pos),\n", " xytext=(median_value, y_pos - 0.25), # Position the text above the boxplot\n", - " va='bottom', \n", - " ha='center', \n", - " color='black', \n", - " fontsize=8, \n", + " va=\"bottom\",\n", + " ha=\"center\",\n", + " color=\"black\",\n", + " fontsize=8,\n", " # fontweight='bold',\n", " # arrowprops=dict(facecolor='black', edgecolor='black', shrink=0.05, headwidth=5, headlength=5, width=.7), # Arrow properties\n", " # bbox=dict(facecolor='white', edgecolor='none', boxstyle='round,pad=0.3')\n", " )\n", "ax.xaxis.grid(True)\n", - "ax.tick_params(axis='x', labelsize=10) # Change x ticks font size to 12\n", - "ax.tick_params(axis='y', labelsize=10) # Change y ticks font size to 12\n", + "ax.tick_params(axis=\"x\", labelsize=10) # Change x ticks font size to 12\n", + "ax.tick_params(axis=\"y\", labelsize=10) # Change y ticks font size to 12\n", "# ax.set(ylabel=\"\")\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", - "plt.savefig('elog_timedeltas.png', bbox_inches = 'tight', dpi = 150)\n", + 
"sns.despine(trim=True, left=True)\n", + "plt.savefig(\"elog_timedeltas.png\", bbox_inches=\"tight\", dpi=150)\n", "plt.tight_layout()\n", "plt.show()" ] @@ -955,7 +1076,7 @@ "metadata": {}, "outputs": [], "source": [ - "catch_deltas_inverted = [x*-1 for x in catch_deltas]" + "catch_deltas_inverted = [x * -1 for x in catch_deltas]" ] }, { @@ -977,14 +1098,16 @@ "source": [ "avg = np.mean(catch_deltas)\n", "\n", - "print(f'Mean: {avg} min | Median: {np.median(catch_deltas)} | Min: {np.min(catch_deltas)} | Max: {np.max(catch_deltas)}')\n", - "q = [10,25,50,75,80,90]\n", - "percentile_str = ''\n", - "percentiles = np.percentile(catch_deltas, q = q)\n", + "print(\n", + " f\"Mean: {avg} min | Median: {np.median(catch_deltas)} | Min: {np.min(catch_deltas)} | Max: {np.max(catch_deltas)}\"\n", + ")\n", + "q = [10, 25, 50, 75, 80, 90]\n", + "percentile_str = \"\"\n", + "percentiles = np.percentile(catch_deltas, q=q)\n", "for p, percentile in zip(q, percentiles):\n", - " percentile_str += f'\\033[1m{p}%:\\033[0m {percentile} '\n", - "print(percentile_str) \n", - "print(' ')" + " percentile_str += f\"\\033[1m{p}%:\\033[0m {percentile} \"\n", + "print(percentile_str)\n", + "print(\" \")" ] }, { @@ -1010,17 +1133,17 @@ "\n", "colors4 = [colors[15], colors[16], colors[17], colors[18]]\n", "\n", - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", - "plt.rc('xtick',labelsize=8)\n", - "plt.rc('ytick',labelsize=8)\n", - "plt.rc('axes', labelsize = 8)\n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", + "plt.rc(\"xtick\", labelsize=8)\n", + "plt.rc(\"ytick\", labelsize=8)\n", + "plt.rc(\"axes\", labelsize=8)\n", "\n", "sns.set_theme(style=\"ticks\")\n", - "fig, ax = plt.subplots(figsize=(3.5, .8)) # Increased the height to 4 for better spacing\n", - "sns.boxplot( x=catch_deltas, ax=ax, palette=colors4)\n", + "fig, ax = plt.subplots(figsize=(3.5, 0.8)) # Increased the height to 4 for better spacing\n", + "sns.boxplot(x=catch_deltas, ax=ax, palette=colors4)\n", "ax.yaxis.label.set_visible(False)\n", "# ax.set_xlim(-30, 30)\n", - "ax.set_xlabel('Catch Count Delta')\n", + "ax.set_xlabel(\"Catch Count Delta\")\n", "\n", "# Calculate medians\n", "# medians = df.loc[df['column']=='catch_count_delta'].groupby('column')['value'].median()\n", @@ -1030,25 +1153,23 @@ "# median_value = medians[label]\n", "# y_pos = df['column'].unique().tolist().index(label)\n", "# ax.annotate(\n", - "# f'Median: {median_value:.0f}', \n", - "# xy=(0, median_value), \n", + "# f'Median: {median_value:.0f}',\n", + "# xy=(0, median_value),\n", "# xytext=(0, median_value), # Position the text above the boxplot\n", - "# va='bottom', \n", - "# ha='center', \n", - "# color='black', \n", - "# fontsize=9, \n", + "# va='bottom',\n", + "# ha='center',\n", + "# color='black',\n", + "# fontsize=9,\n", "\n", "# )\n", "\n", "\n", "ax.xaxis.grid(True)\n", - "ax.tick_params(axis='x', labelsize=9) # Change x ticks font size to 12\n", - "ax.tick_params(axis='y', labelsize=9, left = False) # Change y ticks font size to 12\n", + "ax.tick_params(axis=\"x\", labelsize=9) # Change x ticks font size to 12\n", + "ax.tick_params(axis=\"y\", labelsize=9, left=False) # Change y ticks font size to 12\n", "\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", - "plt.savefig('elog_countdeltas2.png', bbox_inches = 'tight', dpi = 150)\n", + "sns.despine(trim=True, left=True)\n", + "plt.savefig(\"elog_countdeltas2.png\", bbox_inches=\"tight\", dpi=150)\n", "plt.tight_layout()\n" ] }, @@ -1075,19 +1196,49 @@ "metadata": {}, "outputs": [], 
"source": [ - "def compare_elog_vector(vectors, df, dfsets, triptitle, rerun = False, figsize = (10, 4), titlesize = 12, lw = .5, ylim = None, markersize = 3):\n", + "def compare_elog_vector(\n", + " vectors,\n", + " df,\n", + " dfsets,\n", + " triptitle,\n", + " rerun=False,\n", + " figsize=(10, 4),\n", + " titlesize=12,\n", + " lw=0.5,\n", + " ylim=None,\n", + " markersize=3,\n", + "):\n", + " fig, ax = plt.subplots(\n", + " 2, 1, figsize=figsize, sharex=True, gridspec_kw={\"height_ratios\": [2, 1]}\n", + " )\n", "\n", - " fig, ax = plt.subplots(2,1,figsize=figsize, sharex = True, gridspec_kw={'height_ratios': [2, 1]})\n", - " \n", - " sns.lineplot(x = 'datetime', y = 'score', data = vectors, ax = ax[0], marker = \"o\", markersize = markersize, label = 'Original Vector \\n Score', lw = lw)\n", + " sns.lineplot(\n", + " x=\"datetime\",\n", + " y=\"score\",\n", + " data=vectors,\n", + " ax=ax[0],\n", + " marker=\"o\",\n", + " markersize=markersize,\n", + " label=\"Original Vector \\n Score\",\n", + " lw=lw,\n", + " )\n", " if rerun:\n", - " sns.lineplot(x = 'datetime', y = 'rerunscore', data = vectors, ax = ax[0], marker = \"o\",markersize = markersize, label = 'Point-in-time \\n Score', lw = lw)\n", - " plot_elog_comparisons(ax[1], df, dfsets, '', legend = False, display_axis = False)\n", + " sns.lineplot(\n", + " x=\"datetime\",\n", + " y=\"rerunscore\",\n", + " data=vectors,\n", + " ax=ax[0],\n", + " marker=\"o\",\n", + " markersize=markersize,\n", + " label=\"Point-in-time \\n Score\",\n", + " lw=lw,\n", + " )\n", + " plot_elog_comparisons(ax[1], df, dfsets, \"\", legend=False, display_axis=False)\n", " locator = DayLocator()\n", " formatter = mdates.ConciseDateFormatter(locator)\n", " ax[0].xaxis.set_major_locator(locator)\n", " ax[0].xaxis.set_major_formatter(formatter)\n", - " ax[0].set_title(triptitle,x = .5, y = 1, fontsize = titlesize)\n", + " ax[0].set_title(triptitle, x=0.5, y=1, fontsize=titlesize)\n", " # ax[0].set_xticks(locator)\n", " # plt.tick_params(axis='both', which='both')\n", " return fig, ax" @@ -1127,23 +1278,30 @@ } ], "source": [ - "set_dfs = {'brancol1': brancol1_bv_sets, 'brancol2': brancol2_bv_sets, 'brancol3': brancol3_bv_sets,\n", - " 'stpatrick1':stpatrick1_bv_sets, 'stpatrick2': stpatrick2_bv_sets, 'stpatrick3': stpatrick3_bv_sets\n", - " }\n", + "set_dfs = {\n", + " \"brancol1\": brancol1_bv_sets,\n", + " \"brancol2\": brancol2_bv_sets,\n", + " \"brancol3\": brancol3_bv_sets,\n", + " \"stpatrick1\": stpatrick1_bv_sets,\n", + " \"stpatrick2\": stpatrick2_bv_sets,\n", + " \"stpatrick3\": stpatrick3_bv_sets,\n", + "}\n", "\n", "set_delta_dfs = []\n", "\n", "for trip, df in set_dfs.items():\n", - " df.sort_values(by = 'set_start_datetime', inplace = True)\n", - " df['trip'] = trip\n", - " df['last_haul_end'] = df['haul_end_datetime'].shift(1)\n", - " df['last_haul_delta'] = df['haul_end_datetime']-df['last_haul_end']\n", - " set_delta_dfs.append(df[['trip','set_start_datetime','haul_end_datetime', 'last_haul_end', 'last_haul_delta']])\n", + " df.sort_values(by=\"set_start_datetime\", inplace=True)\n", + " df[\"trip\"] = trip\n", + " df[\"last_haul_end\"] = df[\"haul_end_datetime\"].shift(1)\n", + " df[\"last_haul_delta\"] = df[\"haul_end_datetime\"] - df[\"last_haul_end\"]\n", + " set_delta_dfs.append(\n", + " df[[\"trip\", \"set_start_datetime\", \"haul_end_datetime\", \"last_haul_end\", \"last_haul_delta\"]]\n", + " )\n", "\n", "\n", "set_deltas = pd.concat(set_delta_dfs)\n", "\n", - "set_deltas['last_haul_delta'].describe()" + 
"set_deltas[\"last_haul_delta\"].describe()" ] }, { @@ -1184,7 +1342,7 @@ } ], "source": [ - "sns.displot(set_deltas[\"last_haul_delta\"]/pd.Timedelta(\"1 hour\"),kind = \"ecdf\")" + "sns.displot(set_deltas[\"last_haul_delta\"] / pd.Timedelta(\"1 hour\"), kind=\"ecdf\")" ] }, { @@ -1194,7 +1352,7 @@ "metadata": {}, "outputs": [], "source": [ - "x = (set_deltas[\"last_haul_delta\"]/pd.Timedelta(\"1 hour\")).dropna().sort_values()\n", + "x = (set_deltas[\"last_haul_delta\"] / pd.Timedelta(\"1 hour\")).dropna().sort_values()\n", "\n", "params = st.lognorm.fit(x)\n", "# Separate parts of parameters\n", @@ -1212,9 +1370,9 @@ "source": [ "# sigmoid function used by Vector 6, time gap vector\n", "def vector_6_algorithim(x):\n", - " k = -0.15;\n", - " b = 60.0;\n", - " return 1.0/(1.0+math.exp(k*(x-b)))" + " k = -0.15\n", + " b = 60.0\n", + " return 1.0 / (1.0 + math.exp(k * (x - b)))" ] }, { @@ -1236,10 +1394,10 @@ ], "source": [ "x_data = np.linspace(0, max(x), 100)\n", - "cdf = stats.lognorm.cdf(x_data,loc=loc, scale=scale, *arg)\n", + "cdf = stats.lognorm.cdf(x_data, loc=loc, scale=scale, *arg)\n", "x_vector_6 = [vector_6_algorithim(x_val) for x_val in x_data]\n", - "plt.plot(x_data, cdf, label = 'cdf')\n", - "plt.plot(x_data,x_vector_6, label = 'vector 6')\n", + "plt.plot(x_data, cdf, label=\"cdf\")\n", + "plt.plot(x_data, x_vector_6, label=\"vector 6\")\n", "plt.legend()\n", "plt.show()\n" ] @@ -1259,9 +1417,9 @@ "metadata": {}, "outputs": [], "source": [ - "vector_rerun_brancol = pd.read_csv('../data/vector_id_6_rerun_brancol.csv')\n", - "vector_rerun_brancol['datetime'] = pd.to_datetime(vector_rerun_brancol['datetime'], utc=True)\n", - "vector_rerun_brancol['datetime'] = vector_rerun_brancol['datetime'].dt.tz_convert(None)" + "vector_rerun_brancol = pd.read_csv(\"../data/vector_id_6_rerun_brancol.csv\")\n", + "vector_rerun_brancol[\"datetime\"] = pd.to_datetime(vector_rerun_brancol[\"datetime\"], utc=True)\n", + "vector_rerun_brancol[\"datetime\"] = vector_rerun_brancol[\"datetime\"].dt.tz_convert(None)" ] }, { @@ -1272,10 +1430,9 @@ "outputs": [], "source": [ "def get_last_haul_delta(df, set_start_col, haul_end_col):\n", - " df.sort_values(by = set_start_col, inplace = True)\n", - " df['last_haul_end'] = df[haul_end_col].shift(1)\n", - " df['last_haul_delta'] = df[haul_end_col]-df['last_haul_end']\n", - "\n" + " df.sort_values(by=set_start_col, inplace=True)\n", + " df[\"last_haul_end\"] = df[haul_end_col].shift(1)\n", + " df[\"last_haul_delta\"] = df[haul_end_col] - df[\"last_haul_end\"]\n" ] }, { @@ -1286,9 +1443,9 @@ "outputs": [], "source": [ "# define trip details\n", - "trip = Brancol2data['trip_info']\n", - "trip_start_date =pd.to_datetime(trip['trip_start_date'])\n", - "trip_end_date = pd.to_datetime(trip['trip_end_date'])" + "trip = Brancol2data[\"trip_info\"]\n", + "trip_start_date = pd.to_datetime(trip[\"trip_start_date\"])\n", + "trip_end_date = pd.to_datetime(trip[\"trip_end_date\"])" ] }, { @@ -1300,27 +1457,28 @@ "source": [ "# create dataset of results using cdf function\n", "\n", + "\n", "def most_recent(haul_endings, test):\n", - " return max(haul for haul in haul_endings if haul<=test)\n", - " \n", + " return max(haul for haul in haul_endings if haul <= test)\n", + "\n", + "\n", "# create array of datetime at 4 hr interval\n", - "test_intervals= pd.date_range(trip_start_date, trip_end_date, freq = '4h')\n", - "haul_endings = brancol2_elog['systemendhauldatetime'].tolist()\n", + "test_intervals = pd.date_range(trip_start_date, trip_end_date, freq=\"4h\")\n", + 
"haul_endings = brancol2_elog[\"systemendhauldatetime\"].tolist()\n", "haul_endings.append(trip_start_date)\n", "\n", "tests = []\n", "for test in test_intervals:\n", " test_haul = {}\n", " last_haul = most_recent(haul_endings, test)\n", - " test_haul['test_datetime'] = test\n", - " test_haul['last_haul'] = last_haul\n", - " test_haul['last_haul_delta'] = test - last_haul\n", + " test_haul[\"test_datetime\"] = test\n", + " test_haul[\"last_haul\"] = last_haul\n", + " test_haul[\"last_haul_delta\"] = test - last_haul\n", "\n", - " \n", " tests.append(test_haul)\n", - "testsDF = pd.DataFrame(tests) \n", + "testsDF = pd.DataFrame(tests)\n", "\n", - "x_deltas = testsDF[\"last_haul_delta\"]/pd.Timedelta(\"1 hour\")\n", + "x_deltas = testsDF[\"last_haul_delta\"] / pd.Timedelta(\"1 hour\")\n", "cdf = stats.lognorm.cdf(x_deltas, loc=loc, scale=scale, *arg)" ] }, @@ -1342,50 +1500,60 @@ } ], "source": [ + "plt.rc(\"xtick\", labelsize=9)\n", + "plt.rc(\"ytick\", labelsize=9)\n", "\n", - "\n", - "plt.rc('xtick',labelsize=9)\n", - "plt.rc('ytick',labelsize=9)\n", - "\n", - "plt.rc('axes', labelsize = 9)\n", - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", + "plt.rc(\"axes\", labelsize=9)\n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", "\n", "df = brancol2_elog\n", "\n", - "get_last_haul_delta(df, 'systemstartsetdatetime', 'systemendhauldatetime')\n", + "get_last_haul_delta(df, \"systemstartsetdatetime\", \"systemendhauldatetime\")\n", "\n", - "dfsets =brancol2_bv_sets\n", + "dfsets = brancol2_bv_sets\n", "\n", "\n", - "mask = (vector_rerun_brancol['datetime'] > trip_start_date) & (vector_rerun_brancol['datetime'] <= trip_end_date)\n", - "vectors =vector_rerun_brancol.loc[mask]\n", - "triptitle = r\"$\\bf{Elog\\ Time\\ Gap\\ Vectors}$ | Brancol Trip 1: \" +f'{trip['trip_start_date']} to {trip['trip_end_date']}'\n", + "mask = (vector_rerun_brancol[\"datetime\"] > trip_start_date) & (\n", + " vector_rerun_brancol[\"datetime\"] <= trip_end_date\n", + ")\n", + "vectors = vector_rerun_brancol.loc[mask]\n", + "triptitle = (\n", + " r\"$\\bf{Elog\\ Time\\ Gap\\ Vectors}$ | Brancol Trip 1: \"\n", + " + f'{trip['trip_start_date']} to {trip['trip_end_date']}'\n", + ")\n", "\n", "\n", - "brancol1_fig, brancol1_ax = compare_elog_vector(vectors, df, dfsets, None, rerun = True, figsize = (7.5,3.5),titlesize = 10, lw = .8)\n", - "sns.lineplot(x = testsDF['test_datetime'], y = cdf, ax = brancol1_ax[0], label = 'Recalculated Vector \\n Score (CDF)', lw = .8)\n", + "brancol1_fig, brancol1_ax = compare_elog_vector(\n", + " vectors, df, dfsets, None, rerun=True, figsize=(7.5, 3.5), titlesize=10, lw=0.8\n", + ")\n", + "sns.lineplot(\n", + " x=testsDF[\"test_datetime\"],\n", + " y=cdf,\n", + " ax=brancol1_ax[0],\n", + " label=\"Recalculated Vector \\n Score (CDF)\",\n", + " lw=0.8,\n", + ")\n", "# brancol1_ax[0].legend().remove()\n", - "brancol1_ax[1].hlines([.25, .75],.01,.99, transform=brancol1_ax[1].transAxes, colors = 'grey', lw = .2, zorder = 0)\n", + "brancol1_ax[1].hlines(\n", + " [0.25, 0.75], 0.01, 0.99, transform=brancol1_ax[1].transAxes, colors=\"grey\", lw=0.2, zorder=0\n", + ")\n", "\n", "\n", "# plt.subplots_adjust(wspace=0, hspace=.1)\n", "legend_elements = []\n", - "for label, color in color_dict['elog'].items():\n", - " \n", - " legend_elements.append(Patch(facecolor=color, edgecolor=color,\n", - " label=label))\n", + "for label, color in color_dict[\"elog\"].items():\n", + " legend_elements.append(Patch(facecolor=color, edgecolor=color, label=label))\n", "\n", "# labels = 
['Original vector \\n score', 'Point-in-time \\n score', 'Recalculated Vector \\n Score (CDF)']\n", - "brancol1_ax[0].legend( bbox_to_anchor=(.95, .9), ncol = 1, \n", - " loc = 'upper left', fontsize = 9, frameon = False)\n", - "brancol1_ax[1].legend(handles = legend_elements, bbox_to_anchor=(1, .9), \n", - " loc = 'upper left', fontsize = 9, frameon = False\n", - " )\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", + "brancol1_ax[0].legend(\n", + " bbox_to_anchor=(0.95, 0.9), ncol=1, loc=\"upper left\", fontsize=9, frameon=False\n", + ")\n", + "brancol1_ax[1].legend(\n", + " handles=legend_elements, bbox_to_anchor=(1, 0.9), loc=\"upper left\", fontsize=9, frameon=False\n", + ")\n", + "sns.despine(trim=True, left=True)\n", "brancol1_fig.show()\n", - "brancol1_fig.savefig('brancol1_elog_vector.png', bbox_inches='tight')\n", + "brancol1_fig.savefig(\"brancol1_elog_vector.png\", bbox_inches=\"tight\")\n", "\n", "# testsDF.head()" ] @@ -1414,7 +1582,7 @@ "outputs": [], "source": [ "# def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' ):\n", - " \n", + "\n", "# if duration:\n", "# x_duration = np.full(len(df), pd.Timedelta(duration))\n", "# elif end_col:\n", @@ -1422,13 +1590,11 @@ "# x_duration = df['duration']\n", "# elif duration_col:\n", "# x_duration = df[duration_col]\n", - " \n", + "\n", "# x = list(zip(df[datetime_col], x_duration))\n", "# y = (y_val, y_height)\n", "\n", - "# ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n", - "\n", - " " + "# ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n" ] }, { @@ -1441,7 +1607,7 @@ "# def plot_event_vspan(df_events, ax, color_dict):\n", "# for category, color in color_dict.items():\n", "# df_category = df_events.loc[df_events['category']==category]\n", - " \n", + "\n", "# for idx, row in df_category.iterrows():\n", "# ax.axvspan(*mdates.date2num([row['start_time'], row['end_time']]), color=color, edgecolor = 'face',alpha=0.5)" ] @@ -1462,11 +1628,11 @@ "# trip_end_date = trip_info['trip_end_date']\n", "\n", "# sql = f\"\"\"\n", - "# SELECT \n", - "# v.start_datetime, \n", + "# SELECT\n", + "# v.start_datetime,\n", "# v.cam_name\n", - " \n", - "# from {vessel}_v1_video_files v \n", + "\n", + "# from {vessel}_v1_video_files v\n", "# where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}'\n", "# \"\"\"\n", "# video_df = wr.athena.read_sql_query(sql, database=\"tnc_edge\")\n", @@ -1492,7 +1658,7 @@ "# # y_var = x_vars[n]\n", "# text = row[text_col]\n", "# data_xy = (row['start_time'], 1.7)\n", - " \n", + "\n", "# an = ax.annotate(\n", "# text,\n", "# xy=data_xy, xycoords='data',\n", @@ -1501,14 +1667,14 @@ "# bbox = bbox_args,\n", "# color = 'white'\n", "# )\n", - " \n", + "\n", "# annots.append(an)\n", - " \n", + "\n", "# x, y = text_xy\n", - " \n", + "\n", "# y = y+y_var\n", "# y_var = y_var * -1\n", - " \n", + "\n", "# text_xy = (x,y)\n", "\n", "# return annots" @@ -1548,8 +1714,8 @@ "# for idx, (category, color) in enumerate(category_color_dict.items()):\n", "# df_category = df_events.loc[df_events['category']==category].copy()\n", "# y_val = y_vals[idx]\n", - " \n", - " \n", + "\n", + "\n", "# plot_event_bars(df_category, ax,category, 'start_time',end_col = 'end_time', y_val=y_val, y_height = y_height, color= color )" ] }, @@ -1561,7 +1727,7 @@ "outputs": [], "source": [ "# def plot_event_bars(df, ax,label, 
datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' ):\n", - " \n", + "\n", "# if duration:\n", "# x_duration = np.full(len(df), pd.Timedelta(duration))\n", "# elif end_col:\n", @@ -1571,13 +1737,11 @@ "# x_duration = df[duration_col]\n", "# else:\n", "# x_duration = np.full(len(df), 2)\n", - " \n", + "\n", "# x = list(zip(df[datetime_col], x_duration))\n", "# y = (y_val, y_height)\n", "\n", - "# ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n", - "\n", - " " + "# ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n" ] }, { diff --git a/notebooks/helper_functions/aggregations.py b/notebooks/helper_functions/aggregations.py index fa61f63..a8e5683 100644 --- a/notebooks/helper_functions/aggregations.py +++ b/notebooks/helper_functions/aggregations.py @@ -1,94 +1,100 @@ import scipy.stats as stats -import itertools +import itertools import pandas as pd import numpy as np from sklearn.linear_model import LinearRegression + def aggregate_by_interval(df, freq, agg_list, agg_cols): agg_dict = {} for col in agg_cols: agg_dict[col] = agg_list - - data = df.groupby(pd.Grouper(key = 'utc_start_datetime', freq = f'{freq}min')).agg(agg_dict).reset_index() - data.columns = ['_'.join(col).strip() for col in data.columns.values] + + data = ( + df.groupby(pd.Grouper(key="utc_start_datetime", freq=f"{freq}min")) + .agg(agg_dict) + .reset_index() + ) + data.columns = ["_".join(col).strip() for col in data.columns.values] return data + def aggregate_concat(dfs, freq, agg_list, agg_cols): datas = [] for df in dfs: - data = aggregate_by_interval(df = df, freq = freq, agg_list = agg_list, agg_cols= agg_cols) - data.fillna(0, inplace = True) + data = aggregate_by_interval(df=df, freq=freq, agg_list=agg_list, agg_cols=agg_cols) + data.fillna(0, inplace=True) datas.append(data) concat_data = pd.concat(datas) return concat_data + def compare_aggregation_correlations(dfs, intervals, agg_list, x_col, y_col): results = {} for x_agg in agg_list: # print(f'x_agg: {x_agg}') x_results = [] for i in intervals: - data = aggregate_concat(dfs, freq = i, agg_list= agg_list, agg_cols=[x_col, y_col]) + data = aggregate_concat(dfs, freq=i, agg_list=agg_list, agg_cols=[x_col, y_col]) for y_agg in agg_list: y_results = {} - slope, intercept, rvalue, pvalue, stderr = stats.linregress(x=data[f'{x_col}_{x_agg}'], y=data[f'{y_col}_{y_agg}']) - r2 = rvalue ** 2 - y_results['interval'] = i - y_results['y_agg'] = y_agg - y_results['r2'] = r2 - + slope, intercept, rvalue, pvalue, stderr = stats.linregress( + x=data[f"{x_col}_{x_agg}"], y=data[f"{y_col}_{y_agg}"] + ) + r2 = rvalue**2 + y_results["interval"] = i + y_results["y_agg"] = y_agg + y_results["r2"] = r2 + x_results.append(y_results) - - + df_Xresults = pd.DataFrame(x_results) # print(df_Xresults.head()) results[x_agg] = df_Xresults return results + def compare_aggregation_correlation_columns(dfs, intervals, agg_list, x_col, y_cols): results = [] - + for i in intervals: - - data = aggregate_concat(dfs, freq = i, agg_list= agg_list, agg_cols=[x_col]+ y_cols) + data = aggregate_concat(dfs, freq=i, agg_list=agg_list, agg_cols=[x_col] + y_cols) # result['interval'] = i for y_col, x_agg, y_agg in itertools.product(y_cols, agg_list, agg_list): result = {} - result['interval'] = i - result['x_agg'] = x_agg - result['y_agg'] = y_agg - result['y_col'] = y_col + result["interval"] = i + result["x_agg"] = x_agg + result["y_agg"] = y_agg + result["y_col"] = 
y_col # get r2 value - x = np.array(data[f'{x_col}_{x_agg}']).reshape((-1, 1)) - y = np.array(data[f'{y_col}_{y_agg}']).reshape((-1, 1)) + x = np.array(data[f"{x_col}_{x_agg}"]).reshape((-1, 1)) + y = np.array(data[f"{y_col}_{y_agg}"]).reshape((-1, 1)) model = LinearRegression() - model.fit(x,y) - r2 = model.score(x,y) + model.fit(x, y) + r2 = model.score(x, y) - result['r2'] = r2 + result["r2"] = r2 results.append(result) - - - + df_results = pd.DataFrame(results) - return df_results + def add_rolling_aggregates(df, win, agg_dict, keep_cols): - rolling_df = df.rolling(win, center = True).agg(agg_dict) + rolling_df = df.rolling(win, center=True).agg(agg_dict) new_cols = {} for col in agg_dict.keys(): - new_cols[col]= f'rolling_{col}' - - rolling_df.rename(columns = new_cols, inplace = True) + new_cols[col] = f"rolling_{col}" + + rolling_df.rename(columns=new_cols, inplace=True) _df = pd.merge(df[keep_cols], rolling_df, left_index=True, right_index=True) - return _df.reset_index() \ No newline at end of file + return _df.reset_index() diff --git a/notebooks/helper_functions/data_readers.py b/notebooks/helper_functions/data_readers.py index c5c35d2..eae8794 100644 --- a/notebooks/helper_functions/data_readers.py +++ b/notebooks/helper_functions/data_readers.py @@ -3,207 +3,233 @@ import sqlite3 import numpy as np -def get_data(boat, trip_no): +def get_data(boat, trip_no): trip_data = {} - - trip_info = get_trip_info(boat = boat, trip_no=trip_no) - trip_data['trip_info'] = trip_info + + trip_info = get_trip_info(boat=boat, trip_no=trip_no) + trip_data["trip_info"] = trip_info print(trip_info) - bv_sets = get_bv_sets(boat=boat, trip_id = trip_info['trip_id']) - trip_data['bv_sets'] = bv_sets + bv_sets = get_bv_sets(boat=boat, trip_id=trip_info["trip_id"]) + trip_data["bv_sets"] = bv_sets - bv_fish = get_bv_fish(boat = boat, trip_id = trip_info['trip_id']) - trip_data['bv_fish'] = bv_fish - + bv_fish = get_bv_fish(boat=boat, trip_id=trip_info["trip_id"]) + trip_data["bv_fish"] = bv_fish - ai_df= get_ai_counts(boat=boat, trip_info=trip_info) - trip_data['ai_df'] = ai_df + ai_df = get_ai_counts(boat=boat, trip_info=trip_info) + trip_data["ai_df"] = ai_df all_counts = get_bv_counts(ai_df, bv_fish) - trip_data['all_counts'] = all_counts + trip_data["all_counts"] = all_counts ai_sets = join_bv_sets(bv_sets, ai_df) - trip_data['ai_sets'] = ai_sets + trip_data["ai_sets"] = ai_sets - df_elog = get_elog_data(boat, trip_info['trip_start_date'], trip_info['trip_end_date']) - trip_data['elogs'] = df_elog + df_elog = get_elog_data(boat, trip_info["trip_start_date"], trip_info["trip_end_date"]) + trip_data["elogs"] = df_elog bv_set_counts = get_bv_set_counts(bv_fish, bv_sets) - trip_data['bv_set_counts'] = bv_set_counts + trip_data["bv_set_counts"] = bv_set_counts - return trip_data - def get_trip_info(boat, trip_no): trip_df = wr.athena.read_sql_query(f"SELECT * FROM {boat}_v1_bv_trips", database="tnc_edge") - - trip_df.sort_values(by = 'trip_end_date', ascending= True, inplace= True) - - trip_id = trip_df['trip_id'].values[trip_no] - - trip_start_date = trip_df['trip_start_date'].values[trip_no] - trip_end_date = trip_df['trip_end_date'].values[trip_no] + + trip_df.sort_values(by="trip_end_date", ascending=True, inplace=True) + + trip_id = trip_df["trip_id"].values[trip_no] + + trip_start_date = trip_df["trip_start_date"].values[trip_no] + trip_end_date = trip_df["trip_end_date"].values[trip_no] trip_info = {} - trip_info['trip_id'] = trip_id - trip_info['trip_start_date'] = trip_start_date - 
trip_info['trip_end_date'] = trip_end_date
+    trip_info["trip_id"] = trip_id
+    trip_info["trip_start_date"] = trip_start_date
+    trip_info["trip_end_date"] = trip_end_date
 
     return trip_info
-    
+
+
 def get_bv_sets(boat, trip_id):
-    
-    bv_sets = wr.athena.read_sql_query(f"SELECT * FROM {boat}_v1_bv_sets where trip_id = '{trip_id}'", database = "tnc_edge")
+    bv_sets = wr.athena.read_sql_query(
+        f"SELECT * FROM {boat}_v1_bv_sets where trip_id = '{trip_id}'", database="tnc_edge"
+    )
     return bv_sets
 
-def get_bv_fish(boat, trip_id):
-    bv_fish = wr.athena.read_sql_query(f"SELECT fish.* FROM {boat}_v1_bv_fish fish left join {boat}_v1_bv_sets sets on sets.set_id = fish.set_id where sets.trip_id = '{trip_id}'", database = "tnc_edge")
+
+def get_bv_fish(boat, trip_id):
+    bv_fish = wr.athena.read_sql_query(
+        f"SELECT fish.* FROM {boat}_v1_bv_fish fish left join {boat}_v1_bv_sets sets on sets.set_id = fish.set_id where sets.trip_id = '{trip_id}'",
+        database="tnc_edge",
+    )
     return bv_fish
 
+
 def get_bv_set_counts(bv_fish, bv_sets):
-    datetime_cols = ['set_start_datetime','set_end_datetime','haul_start_datetime','haul_end_datetime']
+    datetime_cols = [
+        "set_start_datetime",
+        "set_end_datetime",
+        "haul_start_datetime",
+        "haul_end_datetime",
+    ]
     for col in datetime_cols:
-        bv_sets[col] = pd.to_datetime(bv_sets[col], utc = True)
+        bv_sets[col] = pd.to_datetime(bv_sets[col], utc=True)
         bv_sets[col] = bv_sets[col].dt.tz_convert(None)
-        
-    retained_bv_counts = bv_fish[bv_fish['future'] == 'retained'].groupby('set_id').agg({'fish_id':'count'}).reset_index().rename(columns = {'fish_id':'retained_count'})
-    joined_bv_sets = pd.merge(bv_sets, retained_bv_counts, how = 'left', on = 'set_id')
-    
+
+    retained_bv_counts = (
+        bv_fish[bv_fish["future"] == "retained"]
+        .groupby("set_id")
+        .agg({"fish_id": "count"})
+        .reset_index()
+        .rename(columns={"fish_id": "retained_count"})
+    )
+    joined_bv_sets = pd.merge(bv_sets, retained_bv_counts, how="left", on="set_id")
+
     return joined_bv_sets
 
+
 # read in catch counts
 def get_ai_counts(boat, trip_info):
-    
-    model = 'ondeck' if boat == 'stpatrick' else 'aifish' if boat == 'brancol' else None
-    if model == 'ondeck':
+    model = "ondeck" if boat == "stpatrick" else "aifish" if boat == "brancol" else None
+    if model == "ondeck":
         count_column = None
-        number_columns = ['overallcatches', 'overallcount','overalldiscards', 'detection_confidence','count']
-    elif model == 'aifish':
-        count_column = 'count'
-        number_columns = ['count', 'detection_confidence']
-    
-    trip_start_date = trip_info['trip_start_date']
-    trip_end_date = trip_info['trip_end_date']
-    
+        number_columns = [
+            "overallcatches",
+            "overallcount",
+            "overalldiscards",
+            "detection_confidence",
+            "count",
+        ]
+    elif model == "aifish":
+        count_column = "count"
+        number_columns = ["count", "detection_confidence"]
+
+    trip_start_date = trip_info["trip_start_date"]
+    trip_end_date = trip_info["trip_end_date"]
+
     aifish_sql = f"""
-    SELECT 
+    SELECT
         aifd.id,
         aifd.{count_column} as count,
         aifd.detection_confidence,
-        v.start_datetime 
-    FROM {boat}_v1_{model}data aifd 
-    join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path 
+        v.start_datetime
+    FROM {boat}_v1_{model}data aifd
+    join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path
     where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}'
     """
 
     ondeck_sql = f"""
-    SELECT 
+    SELECT
         aifd.id,
         aifd.overallcount,
         aifd.overallcatches,
         aifd.overalldiscards,
         (cast(aifd.overallcatches as DOUBLE) - cast(aifd.overalldiscards as DOUBLE)) as count,
aifd.detection_confidence, - v.start_datetime - FROM {boat}_v1_{model}data aifd - join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path + v.start_datetime + FROM {boat}_v1_{model}data aifd + join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}' - + """ - sql = ondeck_sql if model == 'ondeck' else aifish_sql if model == 'aifish' else None + sql = ondeck_sql if model == "ondeck" else aifish_sql if model == "aifish" else None ai_df = wr.athena.read_sql_query(sql, database="tnc_edge") - ai_df.start_datetime = pd.to_datetime(ai_df.start_datetime, utc = True) - ai_df['utc_start_datetime'] = ai_df['start_datetime'].dt.tz_convert(None) - ai_df['utc_end_datetime'] = ai_df['utc_start_datetime'] + pd.Timedelta(minutes = 5) + ai_df.start_datetime = pd.to_datetime(ai_df.start_datetime, utc=True) + ai_df["utc_start_datetime"] = ai_df["start_datetime"].dt.tz_convert(None) + ai_df["utc_end_datetime"] = ai_df["utc_start_datetime"] + pd.Timedelta(minutes=5) for col in number_columns: - ai_df[col] = pd.to_numeric(ai_df[col], errors='coerce') - - ai_df['weighted_count'] = ai_df['detection_confidence'] * ai_df['count'] + ai_df[col] = pd.to_numeric(ai_df[col], errors="coerce") + + ai_df["weighted_count"] = ai_df["detection_confidence"] * ai_df["count"] return ai_df -def join_bv_sets(bv_sets, ai_df): +def join_bv_sets(bv_sets, ai_df): # join aif_df to sets - df_hauls = bv_sets.loc[:,['set_id','set_number','haul_start_datetime','haul_end_datetime']] - df_hauls['haul_start_datetime'] = pd.to_datetime(df_hauls['haul_start_datetime']) - df_hauls['haul_end_datetime'] = pd.to_datetime(df_hauls['haul_end_datetime']) - df_hauls['haul_start_datetime'] = df_hauls['haul_start_datetime'].dt.tz_convert(None) - df_hauls['haul_end_datetime'] = df_hauls['haul_end_datetime'].dt.tz_convert(None) - - #Make the db in memory - conn = sqlite3.connect(':memory:') - - #write the tables - ai_df.to_sql('ai_counts', conn, index=False) - df_hauls.to_sql('hauls', conn, index=False) - + df_hauls = bv_sets.loc[:, ["set_id", "set_number", "haul_start_datetime", "haul_end_datetime"]] + df_hauls["haul_start_datetime"] = pd.to_datetime(df_hauls["haul_start_datetime"]) + df_hauls["haul_end_datetime"] = pd.to_datetime(df_hauls["haul_end_datetime"]) + df_hauls["haul_start_datetime"] = df_hauls["haul_start_datetime"].dt.tz_convert(None) + df_hauls["haul_end_datetime"] = df_hauls["haul_end_datetime"].dt.tz_convert(None) + + # Make the db in memory + conn = sqlite3.connect(":memory:") + + # write the tables + ai_df.to_sql("ai_counts", conn, index=False) + df_hauls.to_sql("hauls", conn, index=False) + query = """ select * - + from ai_counts left join hauls on ai_counts.utc_start_datetime between hauls.haul_start_datetime and hauls.haul_end_datetime - + """ df = pd.read_sql_query(query, conn) # convert datatypes - df['count'] = pd.to_numeric(df['count']) - df['haul_start_datetime'] = pd.to_datetime(df['haul_start_datetime']) - df['haul_end_datetime'] = pd.to_datetime(df['haul_end_datetime']) + df["count"] = pd.to_numeric(df["count"]) + df["haul_start_datetime"] = pd.to_datetime(df["haul_start_datetime"]) + df["haul_end_datetime"] = pd.to_datetime(df["haul_end_datetime"]) df.start_datetime = pd.to_datetime(df.start_datetime) - df['utc_start_datetime'] = pd.to_datetime(df['utc_start_datetime']) + df["utc_start_datetime"] = pd.to_datetime(df["utc_start_datetime"]) # get flags for haul not haul - df['is_haul_bool'] = df['set_number'].notnull() - 
df['is_haul'] = df['is_haul_bool'].apply(lambda x: 1 if x else 0) + df["is_haul_bool"] = df["set_number"].notnull() + df["is_haul"] = df["is_haul_bool"].apply(lambda x: 1 if x else 0) + + df.sort_values(by="utc_start_datetime", inplace=True) - df.sort_values(by = 'utc_start_datetime', inplace= True) - return df + def get_bv_counts(ai_df, bv_fish): - #join bv counts to ai counts - #Make the db in memory - conn = sqlite3.connect(':memory:') - - #write the tables - ai_df.to_sql('ai_counts', conn, index=False) - bv_fish.to_sql('bv_fish', conn, index=False) - + # join bv counts to ai counts + # Make the db in memory + conn = sqlite3.connect(":memory:") + + # write the tables + ai_df.to_sql("ai_counts", conn, index=False) + bv_fish.to_sql("bv_fish", conn, index=False) + query = """ select ai_counts.id, ai_counts.utc_start_datetime, bv_fish.* from ai_counts - join bv_fish on bv_fish.catch_datetime >= ai_counts.utc_start_datetime + join bv_fish on bv_fish.catch_datetime >= ai_counts.utc_start_datetime and bv_fish.catch_datetime < ai_counts.utc_end_datetime """ bv_ai_df = pd.read_sql_query(query, conn) - bv_counts = bv_ai_df.groupby('id').fish_id.agg('count').reset_index().rename(columns = {'fish_id':'bv_count'}) - - df_all_counts = pd.merge(ai_df, bv_counts, how = 'left', on = 'id') - df_all_counts.sort_values(by = 'utc_start_datetime', inplace = True) - df_all_counts['bv_count'].fillna(0, inplace= True) + bv_counts = ( + bv_ai_df.groupby("id") + .fish_id.agg("count") + .reset_index() + .rename(columns={"fish_id": "bv_count"}) + ) + + df_all_counts = pd.merge(ai_df, bv_counts, how="left", on="id") + df_all_counts.sort_values(by="utc_start_datetime", inplace=True) + df_all_counts["bv_count"].fillna(0, inplace=True) return df_all_counts def get_elog_data(vessel, trip_start_date, trip_end_date): - #elog data - + # elog data + sql = f""" select elogs.id, @@ -214,34 +240,39 @@ def get_elog_data(vessel, trip_start_date, trip_end_date): elogs.systemendsetdatetime, elogs.systemstarthauldatetime, elogs.systemendhauldatetime - + from {vessel}_v1_deckhandevents_mostrecentlonglineevent_jsonextracted elogs where datetime > '{trip_start_date}' and datetime < '{trip_end_date}' """ - df_elog = wr.athena.read_sql_query( - sql, - database="tnc_edge") - - datetime_cols = ['datetime','systemstartsetdatetime','systemendsetdatetime','systemstarthauldatetime','systemendhauldatetime'] + df_elog = wr.athena.read_sql_query(sql, database="tnc_edge") + + datetime_cols = [ + "datetime", + "systemstartsetdatetime", + "systemendsetdatetime", + "systemstarthauldatetime", + "systemendhauldatetime", + ] for col in datetime_cols: - df_elog[col] = pd.to_datetime(df_elog[col], utc = True) + df_elog[col] = pd.to_datetime(df_elog[col], utc=True) df_elog[col] = df_elog[col].dt.tz_convert(None) return df_elog + def get_vector_data(vessel, vector, trip_info): - trip_start_date = trip_info['trip_start_date'] - trip_end_date = trip_info['trip_end_date'] - + trip_start_date = trip_info["trip_start_date"] + trip_end_date = trip_info["trip_end_date"] + sql = f""" select id, score, datetime, detail, name from {vessel}_v1_tests - where vector_id = '{vector}' and datetime > '{trip_start_date}' and datetime < '{trip_end_date}' + where vector_id = '{vector}' and datetime > '{trip_start_date}' and datetime < '{trip_end_date}' """ df_vector = wr.athena.read_sql_query(sql, database="tnc_edge") - df_vector['datetime'] = pd.to_datetime(df_vector['datetime'], utc=True) - df_vector['datetime'] = df_vector['datetime'].dt.tz_convert(None) - 
df_vector['score'] = pd.to_numeric(df_vector['score']) - - return df_vector \ No newline at end of file + df_vector["datetime"] = pd.to_datetime(df_vector["datetime"], utc=True) + df_vector["datetime"] = df_vector["datetime"].dt.tz_convert(None) + df_vector["score"] = pd.to_numeric(df_vector["score"]) + + return df_vector diff --git a/notebooks/key_event_detection.ipynb b/notebooks/key_event_detection.ipynb index b960c46..130a828 100644 --- a/notebooks/key_event_detection.ipynb +++ b/notebooks/key_event_detection.ipynb @@ -17,7 +17,6 @@ "metadata": {}, "outputs": [], "source": [ - " \n", "%autoreload 2" ] }, @@ -74,17 +73,19 @@ "from sklearn.linear_model import LinearRegression\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", + "import itertools\n", "import matplotlib.gridspec as gridspec\n", "from scipy.stats import chi2_contingency\n", "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "import json\n", "from tsai.all import *\n", "from IPython.display import display, Markdown\n", @@ -107,7 +108,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "import matplotlib.ticker as ticker" ] }, @@ -126,20 +126,84 @@ "metadata": {}, "outputs": [], "source": [ - "colors = sns.color_palette(['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99'])\n", - "\n", - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + "colors = sns.color_palette(\n", + " [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", + ")\n", + "\n", + "color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]\n", "\n", "%matplotlib inline\n", - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + "color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]\n", + "\n", + "\n", "def show_color_pallete(pallete):\n", - "# fig, ax = 
plt.subplots()\n",
-    "    \n",
+    "    # fig, ax = plt.subplots()\n",
+    "\n",
     "    sns.palplot(pallete, size=2)\n",
     "    ax = plt.gca()\n",
     "    for i, name in enumerate(pallete):\n",
-    "        label = f'[{i}]'\n",
-    "        ax.text(i, -.57, label,horizontalalignment='center', fontsize = 10) \n",
+    "        label = f\"[{i}]\"\n",
+    "        ax.text(i, -0.57, label, horizontalalignment=\"center\", fontsize=10)\n",
     "    plt.show()"
    ]
   },
@@ -229,12 +293,12 @@
     "# r2 = model.score(x,y)\n",
     "# coefficients = model.coef_\n",
     "# intercept = model.intercept_\n",
-    "    \n",
+    "\n",
     "\n",
     "# beta0 = r'$intercept = \\hat\\beta_0 =$' + str(round(intercept[0],2))\n",
-    "    \n",
+    "\n",
     "# beta1 = r'$slope = \\hat\\beta_1 =$' + str(round(coefficients[0][0],2))\n",
-    "    \n",
+    "\n",
     "# r_squared = r'$R^2 =$' + str(round(r2,2))\n",
     "\n",
     "# textstr = '\\n'.join((\n",
@@ -279,17 +343,17 @@
    ],
   "source": [
     "# ST Patrick Trips\n",
-    "StPatrick1data = get_data(boat = 'stpatrick', trip_no = 0)\n",
-    "StPatrick2data = get_data(boat = 'stpatrick', trip_no = 1)\n",
+    "StPatrick1data = get_data(boat=\"stpatrick\", trip_no=0)\n",
+    "StPatrick2data = get_data(boat=\"stpatrick\", trip_no=1)\n",
     "\n",
-    "counts_StPatrick1 = StPatrick1data['all_counts']\n",
-    "counts_StPatrick2 = StPatrick2data['all_counts']\n",
+    "counts_StPatrick1 = StPatrick1data[\"all_counts\"]\n",
+    "counts_StPatrick2 = StPatrick2data[\"all_counts\"]\n",
     "\n",
-    "stpatrick1_elog = StPatrick1data['elogs']\n",
-    "stpatrick2_elog = StPatrick2data['elogs']\n",
+    "stpatrick1_elog = StPatrick1data[\"elogs\"]\n",
+    "stpatrick2_elog = StPatrick2data[\"elogs\"]\n",
     "\n",
-    "stpatrick1_bv_set_counts = StPatrick1data['bv_set_counts']\n",
-    "stpatrick1_bv_set_counts = StPatrick2data['bv_set_counts']"
+    "stpatrick1_bv_set_counts = StPatrick1data[\"bv_set_counts\"]\n",
+    "stpatrick2_bv_set_counts = StPatrick2data[\"bv_set_counts\"]"
    ]
   },
  {
@@ -299,8 +363,8 @@
   "metadata": {},
   "outputs": [],
   "source": [
-    "stpatrick1_bv_sets = StPatrick1data['bv_sets']\n",
-    "stpatrick2_bv_sets = StPatrick2data['bv_sets']"
+    "stpatrick1_bv_sets = StPatrick1data[\"bv_sets\"]\n",
+    "stpatrick2_bv_sets = StPatrick2data[\"bv_sets\"]"
    ]
   },
  {
@@ -478,30 +542,30 @@
    ],
   "source": [
     "# Brancol Trips\n",
-    "Brancol1data = get_data(boat = 'brancol', trip_no = 0)\n",
-    "Brancol2data = get_data(boat = 'brancol', trip_no = 1)\n",
-    "Brancol3data = get_data(boat = 'brancol', trip_no = 2)\n",
+    "Brancol1data = get_data(boat=\"brancol\", trip_no=0)\n",
+    "Brancol2data = get_data(boat=\"brancol\", trip_no=1)\n",
+    "Brancol3data = get_data(boat=\"brancol\", trip_no=2)\n",
     "\n",
     "\n",
-    "counts_Brancol1 = Brancol1data['all_counts']\n",
-    "counts_Brancol2 = Brancol2data['all_counts']\n",
-    "counts_Brancol3 = Brancol3data['all_counts']\n",
+    "counts_Brancol1 = Brancol1data[\"all_counts\"]\n",
+    "counts_Brancol2 = Brancol2data[\"all_counts\"]\n",
+    "counts_Brancol3 = Brancol3data[\"all_counts\"]\n",
     "\n",
-    "brancol1_elog = Brancol1data['elogs']\n",
-    "brancol2_elog = Brancol2data['elogs']\n",
-    "brancol3_elog = Brancol3data['elogs']\n",
+    "brancol1_elog = Brancol1data[\"elogs\"]\n",
+    "brancol2_elog = Brancol2data[\"elogs\"]\n",
+    "brancol3_elog = Brancol3data[\"elogs\"]\n",
     "\n",
-    "brancol2_bv_sets = Brancol2data['bv_sets']\n",
-    "brancol1_bv_sets = Brancol1data['bv_sets']\n",
-    "brancol3_bv_sets = Brancol3data['bv_sets']\n",
+    "brancol2_bv_sets = Brancol2data[\"bv_sets\"]\n",
+    "brancol1_bv_sets = Brancol1data[\"bv_sets\"]\n",
+    "brancol3_bv_sets = Brancol3data[\"bv_sets\"]\n",
     "\n",
-    "brancol1_bv_set_counts = Brancol1data['bv_set_counts']\n",
-
"brancol2_bv_set_counts = Brancol2data['bv_set_counts']\n", - "brancol3_bv_set_counts = Brancol3data['bv_set_counts']\n", + "brancol1_bv_set_counts = Brancol1data[\"bv_set_counts\"]\n", + "brancol2_bv_set_counts = Brancol2data[\"bv_set_counts\"]\n", + "brancol3_bv_set_counts = Brancol3data[\"bv_set_counts\"]\n", "\n", - "brancol1trip = Brancol1data['trip_info']\n", - "brancol2trip = Brancol2data['trip_info']\n", - "brancol3trip = Brancol3data['trip_info']" + "brancol1trip = Brancol1data[\"trip_info\"]\n", + "brancol2trip = Brancol2data[\"trip_info\"]\n", + "brancol3trip = Brancol3data[\"trip_info\"]" ] }, { @@ -560,10 +624,10 @@ "# haul_start_col = 'haul_start_datetime'\n", "# haul_end_col = 'haul_end_datetime'\n", "# y_val = 0\n", - " \n", - "# colors = color_dict[source] \n", - " \n", - " \n", + "\n", + "# colors = color_dict[source]\n", + "\n", + "\n", "# #plot_hauling\n", "# plot_hlines(ax, df,y_val, haul_start_col, haul_end_col, 12, colors['haul'], 'haul')\n", "\n", @@ -580,9 +644,9 @@ "# text = f'count: {row[count_col]}'\n", "# x_value = row[x_col]\n", "# ax.text( x_value,y_value , text, fontsize=10, horizontalalignment='right', bbox=props)\n", - " \n", - " \n", - "# # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes) " + "\n", + "\n", + "# # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes)" ] }, { @@ -599,7 +663,7 @@ "# haul_start_col = 'systemstarthauldatetime'\n", "# haul_end_col = 'systemendhauldatetime'\n", "# y_val = 1.7\n", - " \n", + "\n", "# elif source == 'bv':\n", "# set_start_col = 'set_start_datetime'\n", "# set_end_col = 'set_end_datetime'\n", @@ -610,18 +674,17 @@ "# df['set_duration'] = df[set_end_col] - df[set_start_col]\n", "# df['haul_duration'] = df[haul_end_col] - df[haul_start_col]\n", "# df['mid_duration'] = df[haul_start_col] - df[set_end_col]\n", - " \n", + "\n", "# set_x = list(zip(df[set_start_col], df['set_duration']))\n", "# haul_x = list(zip(df[haul_start_col], df['haul_duration']))\n", "# mid_x = list(zip(df[set_end_col], df['mid_duration']))\n", - " \n", + "\n", "# y = (y_val, .6)\n", "\n", "# colors = color_dict[source]\n", "# ax.broken_barh(mid_x, y, facecolors = colors['mid'], edgecolor = 'face')\n", "# ax.broken_barh(haul_x, y, facecolors = colors['haul'], edgecolor = 'face')\n", - "# ax.broken_barh(set_x, y, facecolors = colors['set'], edgecolor = 'face')\n", - " " + "# ax.broken_barh(set_x, y, facecolors = colors['set'], edgecolor = 'face')\n" ] }, { @@ -635,30 +698,28 @@ "\n", "# # ax[0].autofmt_xdate()\n", "\n", - " \n", + "\n", "# ax.set_yticks([1,2],('bv','elogs'))\n", "# # ax.set_yticks([0,1,2],('bv','elogs',' '))\n", "# fig.suptitle(titles['main'], fontsize = 20)\n", - " \n", + "\n", "# # df1 = brancol1_elog\n", "# # df1sets =brancol1_bv_sets\n", "\n", - " \n", "\n", "# if annotate_counts:\n", "# dfElog['totalcount'] = dfElog['bycatchcount'].astype(int) + dfElog['catchcount'].astype(int)\n", "# dfBV['retained_count'] = dfBV['retained_count'].astype('Int64')\n", "# annotate_counts(ax, dfElog, 'totalcount', 'systemstarthauldatetime', 1.2)\n", "# annotate_counts(ax, dfBV, 'retained_count', 'haul_start_datetime', 0.2)\n", - " \n", + "\n", "\n", "# plot_set_bars(ax, dfElog, 'elog', color_dict)\n", "# plot_set_bars(ax, dfBV, 'bv', color_dict)\n", "\n", - " \n", "\n", "# ax.set_title(title,x = .1, y = 1, fontsize = 9)\n", - " \n", + "\n", "# # ax.autoscale()\n", "# # ax[0].set_ylim(-.5,1.5)\n", "# # ax[0].tick_params(axis='x', labelrotation=45)\n", 
@@ -666,12 +727,12 @@ "# if legend:\n", "# legend_elements = []\n", "# for label, color in color_dict['elog'].items():\n", - " \n", + "\n", "# legend_elements.append(Patch(facecolor=color, edgecolor=color,\n", "# label=label))\n", "# ax.legend(handles = legend_elements, loc='center', bbox_to_anchor=(.5, -1), ncol = 3, fontsize = 8)\n", "\n", - " \n", + "\n", "# #use consise date formater\n", "\n", "# if display_axis:\n", @@ -691,8 +752,8 @@ "outputs": [], "source": [ "color_dict = {\n", - " 'bv': {'set':'#40a018', 'haul':'#117347', 'mid':'#a2c662'},\n", - " 'elog': {'set':'#40a018', 'haul':'#117347', 'mid':'#a2c662'},\n", + " \"bv\": {\"set\": \"#40a018\", \"haul\": \"#117347\", \"mid\": \"#a2c662\"},\n", + " \"elog\": {\"set\": \"#40a018\", \"haul\": \"#117347\", \"mid\": \"#a2c662\"},\n", " # 'elog':{'set':'#648fff', 'haul':'#184EAD', 'mid':'#88ccee'}\n", "}" ] @@ -724,27 +785,31 @@ "source": [ "# metrics.ConfusionMatrixDisplay(cm).plot(cmap = 'Blues',ax = ax)\n", "def plot_confusion_matrix(cm, ax, interp, title):\n", - "\n", - " ax.imshow(cm, interpolation='nearest', cmap = 'Blues')\n", + " ax.imshow(cm, interpolation=\"nearest\", cmap=\"Blues\")\n", " tick_marks = np.arange(len(interp.vocab))\n", - " alt_labels = ['haul', 'no haul']\n", + " alt_labels = [\"haul\", \"no haul\"]\n", " # ax.set_xticks(tick_marks, interp.vocab, rotation=0)\n", " ax.set_xticks(tick_marks, alt_labels, rotation=0)\n", " # ax.set_yticks(tick_marks, interp.vocab, va = 'center', rotation=90)\n", - " ax.tick_params(axis='both', which='both', length=0, pad = 3)\n", - " ax.set_yticks(tick_marks, alt_labels, va = 'center', rotation=90)\n", - " ax.set_xlabel('Predicted', fontweight='bold')\n", - " ax.set_ylabel('Actual', fontweight='bold')\n", - " ax.set_ylim(len(interp.vocab)-.5,-.5)\n", + " ax.tick_params(axis=\"both\", which=\"both\", length=0, pad=3)\n", + " ax.set_yticks(tick_marks, alt_labels, va=\"center\", rotation=90)\n", + " ax.set_xlabel(\"Predicted\", fontweight=\"bold\")\n", + " ax.set_ylabel(\"Actual\", fontweight=\"bold\")\n", + " ax.set_ylim(len(interp.vocab) - 0.5, -0.5)\n", " ax.grid(False)\n", - " \n", - " thresh = cm.max() / 2.\n", + "\n", + " thresh = cm.max() / 2.0\n", " for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n", - " coeff = f'{cm[i, j]}'\n", - " ax.text(j, i, coeff, \n", - " fontsize = 10,\n", - " horizontalalignment=\"center\", verticalalignment=\"center\", color=\"white\"\n", - " if cm[i, j] > thresh else \"black\")" + " coeff = f\"{cm[i, j]}\"\n", + " ax.text(\n", + " j,\n", + " i,\n", + " coeff,\n", + " fontsize=10,\n", + " horizontalalignment=\"center\",\n", + " verticalalignment=\"center\",\n", + " color=\"white\" if cm[i, j] > thresh else \"black\",\n", + " )" ] }, { @@ -755,14 +820,14 @@ "outputs": [], "source": [ "def prep_data(df):\n", - " df.sort_values(by = 'utc_start_datetime', inplace = True)\n", - " X = df.drop(columns = 'is_haul')\n", - " y = df['is_haul'].astype('int').to_numpy()\n", + " df.sort_values(by=\"utc_start_datetime\", inplace=True)\n", + " X = df.drop(columns=\"is_haul\")\n", + " y = df[\"is_haul\"].astype(\"int\").to_numpy()\n", "\n", - " X.loc[:,'utc_start_datetime'] = X.loc[:,'utc_start_datetime'].astype('int64')\n", - " X = np.atleast_3d(X).transpose(0,2,1)\n", + " X.loc[:, \"utc_start_datetime\"] = X.loc[:, \"utc_start_datetime\"].astype(\"int64\")\n", + " X = np.atleast_3d(X).transpose(0, 2, 1)\n", "\n", - " haul_map = {1:'haul', 0:'no_haul'}\n", + " haul_map = {1: \"haul\", 0: \"no_haul\"}\n", " labeler = ReLabeler(haul_map)\n", 
" y = labeler(y)\n", " return X, y" @@ -809,7 +874,7 @@ } ], "source": [ - "Brancol1data['ai_sets'].dtypes" + "Brancol1data[\"ai_sets\"].dtypes" ] }, { @@ -821,15 +886,15 @@ "source": [ "# create copies of training and testing dataframes, set utc_start_datetime as index\n", "# training set\n", - "dfAiSets_Brancol1 = Brancol1data['ai_sets'].copy()\n", - "dfAiSets_Brancol1.set_index('utc_start_datetime', inplace = True)\n", + "dfAiSets_Brancol1 = Brancol1data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol1.set_index(\"utc_start_datetime\", inplace=True)\n", "\n", "# testing sets\n", - "dfAiSets_Brancol2 = Brancol2data['ai_sets'].copy()\n", - "dfAiSets_Brancol2.set_index('utc_start_datetime', inplace = True)\n", + "dfAiSets_Brancol2 = Brancol2data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol2.set_index(\"utc_start_datetime\", inplace=True)\n", "\n", - "dfAiSets_Brancol3 = Brancol3data['ai_sets'].copy()\n", - "dfAiSets_Brancol3.set_index('utc_start_datetime', inplace = True)" + "dfAiSets_Brancol3 = Brancol3data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol3.set_index(\"utc_start_datetime\", inplace=True)" ] }, { @@ -988,7 +1053,7 @@ } ], "source": [ - "Brancol2data['ai_sets'].head()" + "Brancol2data[\"ai_sets\"].head()" ] }, { @@ -998,14 +1063,14 @@ "metadata": {}, "outputs": [], "source": [ - "win = '2h'\n", - "agg_dict = {'weighted_count':'sum','count':'sum'}\n", - "keep_cols = ['weighted_count','detection_confidence','count','is_haul', 'id']\n", + "win = \"2h\"\n", + "agg_dict = {\"weighted_count\": \"sum\", \"count\": \"sum\"}\n", + "keep_cols = [\"weighted_count\", \"detection_confidence\", \"count\", \"is_haul\", \"id\"]\n", "\n", "# add_rolling aggregates creates columns using rolling window functions to aggregate the columns in the agg_dict\n", - "df_train = add_rolling_aggregates(dfAiSets_Brancol1, '2h', agg_dict, keep_cols)\n", - "df_test = add_rolling_aggregates(dfAiSets_Brancol2, '2h', agg_dict, keep_cols)\n", - "df_test3 = add_rolling_aggregates(dfAiSets_Brancol3, '2h', agg_dict, keep_cols)" + "df_train = add_rolling_aggregates(dfAiSets_Brancol1, \"2h\", agg_dict, keep_cols)\n", + "df_test = add_rolling_aggregates(dfAiSets_Brancol2, \"2h\", agg_dict, keep_cols)\n", + "df_test3 = add_rolling_aggregates(dfAiSets_Brancol3, \"2h\", agg_dict, keep_cols)" ] }, { @@ -1016,10 +1081,10 @@ "outputs": [], "source": [ "# set id to integer\n", - "df_train['id'] = df_train['id'].astype(int)\n", + "df_train[\"id\"] = df_train[\"id\"].astype(int)\n", "\n", - "df_test['id'] = df_test['id'].astype(int)\n", - "df_test3['id'] = df_test['id'].astype(int)" + "df_test[\"id\"] = df_test[\"id\"].astype(int)\n", + "df_test3[\"id\"] = df_test[\"id\"].astype(int)" ] }, { @@ -1030,9 +1095,9 @@ "outputs": [], "source": [ "# drop nan values\n", - "df_train.dropna(inplace = True)\n", - "df_test.dropna(inplace = True)\n", - "df_test3.dropna(inplace = True)" + "df_train.dropna(inplace=True)\n", + "df_test.dropna(inplace=True)\n", + "df_test3.dropna(inplace=True)" ] }, { @@ -1071,7 +1136,7 @@ "metadata": {}, "outputs": [], "source": [ - "X,y = prep_data(df_train)\n", + "X, y = prep_data(df_train)\n", "X_test, y_test = prep_data(df_test)\n", "X_test3, y_test3 = prep_data(df_test3)" ] @@ -1117,16 +1182,18 @@ ], "source": [ "## train, test, validation splits to load into the model\n", - "splits = get_splits(y, \n", - " n_splits=1, \n", - " valid_size=0.3, \n", - " test_size=0.1, \n", - " shuffle=True, \n", - " balance=True, \n", - " stratify=True,\n", - " random_state=42, \n", - " show_plot=True, \n", - " verbose=True)\n", 
+ "splits = get_splits(\n", + " y,\n", + " n_splits=1,\n", + " valid_size=0.3,\n", + " test_size=0.1,\n", + " shuffle=True,\n", + " balance=True,\n", + " stratify=True,\n", + " random_state=42,\n", + " show_plot=True,\n", + " verbose=True,\n", + ")\n", "splits" ] }, @@ -1138,11 +1205,11 @@ "outputs": [], "source": [ "## dataset and loaders\n", - "tfms = [None, [Categorize()]]\n", + "tfms = [None, [Categorize()]]\n", "dsets = TSDatasets(X, y, tfms=tfms, splits=splits)\n", - " \n", + "\n", "bs = 10\n", - "dls = TSDataLoaders.from_dsets(dsets.train, dsets.valid, bs=[bs, bs*2])" + "dls = TSDataLoaders.from_dsets(dsets.train, dsets.valid, bs=[bs, bs * 2])" ] }, { @@ -1153,7 +1220,7 @@ "outputs": [], "source": [ "# set up architecture for model\n", - "arch, k = (RNNPlus, {'n_layers':4, 'bidirectional': True})\n", + "arch, k = (RNNPlus, {\"n_layers\": 4, \"bidirectional\": True})\n", "model = create_model(arch, dls=dls, **k)" ] }, @@ -1165,7 +1232,7 @@ "outputs": [], "source": [ "# load pre-trained model into architecture\n", - "load_model('models/rnn_plus_haul_classifier.pth', model, opt = None, with_opt = False)" + "load_model(\"models/rnn_plus_haul_classifier.pth\", model, opt=None, with_opt=False)" ] }, { @@ -1211,7 +1278,7 @@ "outputs": [], "source": [ "# use training data to create the learner\n", - "learner = Learner(dls, model, metrics=accuracy)" + "learner = Learner(dls, model, metrics=accuracy)" ] }, { @@ -1232,7 +1299,7 @@ } ], "source": [ - "learner.load('rnn_plus_haul_classifier')" + "learner.load(\"rnn_plus_haul_classifier\")" ] }, { @@ -1250,7 +1317,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "# adding new test dataset\n", "valid_dl = dls.valid\n", "test_ds = valid_dl.dataset.add_test(X_test, y_test)\n", @@ -1314,7 +1380,9 @@ ], "source": [ "# get predictions for trip 2\n", - "_, temp_targets, temp_preds = learner.get_preds(dl=test_dl, with_decoded=True, save_preds=None, save_targs=None)\n", + "_, temp_targets, temp_preds = learner.get_preds(\n", + " dl=test_dl, with_decoded=True, save_preds=None, save_targs=None\n", + ")\n", "\n", "# decode predictions\n", "vocab = learner.dls.vocab\n", @@ -1367,7 +1435,9 @@ ], "source": [ "# get predictions for trip 3\n", - "_, temp_targets3, temp_preds3 = learner.get_preds(dl=test3_dl, with_decoded=True, save_preds=None, save_targs=None)\n", + "_, temp_targets3, temp_preds3 = learner.get_preds(\n", + " dl=test3_dl, with_decoded=True, save_preds=None, save_targs=None\n", + ")\n", "\n", "# decode predictions\n", "vocab = learner.dls.vocab\n", @@ -1525,14 +1595,14 @@ ], "source": [ "# creating a df out of trip 2 predictions to join to the original trip 2 test data set\n", - "pre_dict = {\"predictions\":decoded_preds}\n", + "pre_dict = {\"predictions\": decoded_preds}\n", "df_predict = pd.DataFrame(pre_dict)\n", "\n", - "df_results = pd.concat([df_test.reset_index(drop = True), df_predict.reset_index(drop = True)], axis = 1)\n", - "df_results['utc_start_datetime'] = pd.to_datetime(df_results['utc_start_datetime'])\n", - "df_results['utc_end_datetime'] = df_results['utc_start_datetime'] + pd.Timedelta(minutes = 5)\n", - "df_results['haul'] = df_results['is_haul'].map({1:'haul',0:'no_haul'})\n", - "df_results['predict_haul'] = df_results['predictions'].map({'haul':1,'no_haul':0})\n", + "df_results = pd.concat([df_test.reset_index(drop=True), df_predict.reset_index(drop=True)], axis=1)\n", + "df_results[\"utc_start_datetime\"] = pd.to_datetime(df_results[\"utc_start_datetime\"])\n", + "df_results[\"utc_end_datetime\"] = 
df_results[\"utc_start_datetime\"] + pd.Timedelta(minutes=5)\n", + "df_results[\"haul\"] = df_results[\"is_haul\"].map({1: \"haul\", 0: \"no_haul\"})\n", + "df_results[\"predict_haul\"] = df_results[\"predictions\"].map({\"haul\": 1, \"no_haul\": 0})\n", "\n", "df_results.head()" ] @@ -1688,14 +1758,16 @@ ], "source": [ "# creating a df out of trip 3 predictions to join to the original trip 3 test data set\n", - "pre_dict3 = {\"predictions\":decoded_preds3}\n", + "pre_dict3 = {\"predictions\": decoded_preds3}\n", "df_predict3 = pd.DataFrame(pre_dict3)\n", "\n", - "df_results3 = pd.concat([df_test3.reset_index(drop = True), df_predict3.reset_index(drop = True)], axis = 1)\n", - "df_results3['utc_start_datetime'] = pd.to_datetime(df_results3['utc_start_datetime'])\n", - "df_results3['utc_end_datetime'] = df_results3['utc_start_datetime'] + pd.Timedelta(minutes = 5)\n", - "df_results3['haul'] = df_results3['is_haul'].map({1:'haul',0:'no_haul'})\n", - "df_results3['predict_haul'] = df_results3['predictions'].map({'haul':1,'no_haul':0})\n", + "df_results3 = pd.concat(\n", + " [df_test3.reset_index(drop=True), df_predict3.reset_index(drop=True)], axis=1\n", + ")\n", + "df_results3[\"utc_start_datetime\"] = pd.to_datetime(df_results3[\"utc_start_datetime\"])\n", + "df_results3[\"utc_end_datetime\"] = df_results3[\"utc_start_datetime\"] + pd.Timedelta(minutes=5)\n", + "df_results3[\"haul\"] = df_results3[\"is_haul\"].map({1: \"haul\", 0: \"no_haul\"})\n", + "df_results3[\"predict_haul\"] = df_results3[\"predictions\"].map({\"haul\": 1, \"no_haul\": 0})\n", "\n", "df_results3.head()" ] @@ -1925,15 +1997,15 @@ "outputs": [], "source": [ "def get_metrics(cm):\n", - " TP = cm[0][0] # true positives\n", - " FN = cm[0][1] # false negatives\n", - " FP = cm[1][0] # false positives\n", - " TN = cm[1][1] # true negatives\n", - " recall = TP/sum(cm[0])\n", - " precision = TP/(TP + FP)\n", - " accuracy = (TP + TN)/sum(sum(cm))\n", + " TP = cm[0][0] # true positives\n", + " FN = cm[0][1] # false negatives\n", + " FP = cm[1][0] # false positives\n", + " TN = cm[1][1] # true negatives\n", + " recall = TP / sum(cm[0])\n", + " precision = TP / (TP + FP)\n", + " accuracy = (TP + TN) / sum(sum(cm))\n", "\n", - " print(f'recall: {recall} | precision: {precision} | accuracy: {accuracy}')\n", + " print(f\"recall: {recall} | precision: {precision} | accuracy: {accuracy}\")\n", "\n", " return recall, precision, accuracy" ] @@ -1989,7 +2061,7 @@ } ], "source": [ - "recall, precision, accuracy = get_metrics(cm2+cm3)" + "recall, precision, accuracy = get_metrics(cm2 + cm3)" ] }, { @@ -2010,32 +2082,37 @@ } ], "source": [ - "plt.rc('axes', labelsize = 9)\n", - "fig, ax = plt.subplots(1, 2, figsize = (3.6,1.8), dpi = 150)\n", + "plt.rc(\"axes\", labelsize=9)\n", + "fig, ax = plt.subplots(1, 2, figsize=(3.6, 1.8), dpi=150)\n", "plt.tight_layout()\n", "\n", - "plt.subplots_adjust(wspace=.6) # Increase the width padding between subplots\n", - "cm_dict = {'a': {'cm':cm2, 'interp':interp2}, 'b': {'cm':cm3, 'interp':interp3}}\n", + "plt.subplots_adjust(wspace=0.6) # Increase the width padding between subplots\n", + "cm_dict = {\"a\": {\"cm\": cm2, \"interp\": interp2}, \"b\": {\"cm\": cm3, \"interp\": interp3}}\n", "\n", "for i, (label, cm) in enumerate(cm_dict.items()):\n", - " plot_confusion_matrix(cm['cm'], ax[i], cm['interp'], 'confusion matrix')\n", + " plot_confusion_matrix(cm[\"cm\"], ax[i], cm[\"interp\"], \"confusion matrix\")\n", " # plot_confusion_matrix(cm3, ax[i], interp3, 'confusion matrix')\n", - " 
ax[i].tick_params(axis='x', labelsize=9) # Change x ticks font size to 12\n",
-    "    ax[i].tick_params(axis='y', labelsize=9, left = False) # Change y ticks font size to 12\n",
+    "    ax[i].tick_params(axis=\"x\", labelsize=9)  # Change x ticks font size to 9\n",
+    "    ax[i].tick_params(axis=\"y\", labelsize=9, left=False)  # Change y ticks font size to 9\n",
"\n",
-    "    ax[i].text(-0.3, 1.2, f'({label})', transform=ax[i].transAxes, fontsize=9, fontweight='bold', va='top', ha='left')\n",
+    "    ax[i].text(\n",
+    "        -0.3,\n",
+    "        1.2,\n",
+    "        f\"({label})\",\n",
+    "        transform=ax[i].transAxes,\n",
+    "        fontsize=9,\n",
+    "        fontweight=\"bold\",\n",
+    "        va=\"top\",\n",
+    "        ha=\"left\",\n",
+    "    )\n",
"\n",
"\n",
"# ax[1].tick_params(axis='x', labelsize=12) # Change x ticks font size to 12\n",
"# ax[1].tick_params(axis='y', labelsize=12, left = False) # Change y ticks font size to 12\n",
-    "sns.despine(trim=True, \n",
-    "            left=True,\n",
-    "            bottom = True\n",
-    "            )\n",
-    "\n",
+    "sns.despine(trim=True, left=True, bottom=True)\n",
"\n",
"\n",
-    "plt.savefig('chart_pngs/confusion_matrix2.png',bbox_inches = 'tight')"
+    "plt.savefig(\"chart_pngs/confusion_matrix2.png\", bbox_inches=\"tight\")"
]
},
{
@@ -2061,24 +2138,32 @@
"metadata": {},
"outputs": [],
"source": [
-    "def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' ):\n",
-    "    \n",
+    "def plot_event_bars(\n",
+    "    df,\n",
+    "    ax,\n",
+    "    label,\n",
+    "    datetime_col,\n",
+    "    duration=None,\n",
+    "    end_col=None,\n",
+    "    duration_col=None,\n",
+    "    y_val=0.7,\n",
+    "    y_height=0.6,\n",
+    "    color=\"#43aa99\",\n",
+    "):\n",
"    if duration:\n",
"        x_duration = np.full(len(df), pd.Timedelta(duration))\n",
"    elif end_col:\n",
-    "        df['duration'] = df[end_col]- df[datetime_col]\n",
-    "        x_duration = df['duration']\n",
+    "        df[\"duration\"] = df[end_col] - df[datetime_col]\n",
+    "        x_duration = df[\"duration\"]\n",
"    elif duration_col:\n",
"        x_duration = df[duration_col]\n",
-    "    \n",
+    "\n",
"    x = list(zip(df[datetime_col], x_duration))\n",
"    y = (y_val, y_height)\n",
"\n",
-    "    plot = ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n",
-    "\n",
-    "    return plot\n",
+    "    plot = ax.broken_barh(x, y, facecolors=color, edgecolor=\"face\", label=label, clip_on=False)\n",
"\n",
-    "    "
+    "    return plot\n"
]
},
{
@@ -2091,7 +2176,7 @@
"# def plot_event_vspan(df_events, ax, color_dict):\n",
"#     for category, color in color_dict.items():\n",
"#         df_category = df_events.loc[df_events['category']==category]\n",
-    "    \n",
+    "\n",
"#         for idx, row in df_category.iterrows():\n",
"#             ax.axvspan(*mdates.date2num([row['start_time'], row['end_time']]), color=color, edgecolor = 'face',alpha=0.5)"
]
},
{
@@ -2103,25 +2188,31 @@
"metadata": {},
"outputs": [],
"source": [
-    "def plot_set_vspan(df_sets, ax, color = '#a2c662'):\n",
+    "def plot_set_vspan(df_sets, ax, color=\"#a2c662\"):\n",
"    for idx, row in df_sets.iterrows():\n",
-    "        ax.axvspan(*mdates.date2num([row['haul_start_datetime'], row['haul_end_datetime']]), color=color, edgecolor = '#40a018',alpha=0.5)\n",
+    "        ax.axvspan(\n",
+    "            *mdates.date2num([row[\"haul_start_datetime\"], row[\"haul_end_datetime\"]]),\n",
+    "            color=color,\n",
+    "            edgecolor=\"#40a018\",\n",
+    "            alpha=0.5,\n",
+    "        )\n",
+    "\n",
"\n",
"def get_video_times(vessel, trip_info):\n",
-    "    trip_start_date = trip_info['trip_start_date']\n",
-    "    trip_end_date = trip_info['trip_end_date']\n",
+    "    trip_start_date = trip_info[\"trip_start_date\"]\n",
+    "    trip_end_date = 
trip_info[\"trip_end_date\"]\n", "\n", " sql = f\"\"\"\n", - " SELECT \n", - " v.start_datetime, \n", + " SELECT\n", + " v.start_datetime,\n", " v.cam_name\n", - " \n", - " from {vessel}_v1_video_files v \n", + "\n", + " from {vessel}_v1_video_files v\n", " where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}'\n", " \"\"\"\n", " video_df = wr.athena.read_sql_query(sql, database=\"tnc_edge\")\n", " video_df.start_datetime = pd.to_datetime(video_df.start_datetime)\n", - " video_df['utc_start_datetime'] = video_df['start_datetime'].dt.tz_convert(None)\n", + " video_df[\"utc_start_datetime\"] = video_df[\"start_datetime\"].dt.tz_convert(None)\n", " # video_df['utc_end_datetime'] = video_df['utc_start_datetime'] + pd.Timedelta(minutes = 5)\n", " return video_df" ] @@ -2142,7 +2233,7 @@ "# # y_var = x_vars[n]\n", "# text = row[text_col]\n", "# data_xy = (row['start_time'], 1.7)\n", - " \n", + "\n", "# an = ax.annotate(\n", "# text,\n", "# xy=data_xy, xycoords='data',\n", @@ -2151,14 +2242,14 @@ "# bbox = bbox_args,\n", "# color = 'white'\n", "# )\n", - " \n", + "\n", "# annots.append(an)\n", - " \n", + "\n", "# x, y = text_xy\n", - " \n", + "\n", "# y = y+y_var\n", "# y_var = y_var * -1\n", - " \n", + "\n", "# text_xy = (x,y)\n", "\n", "# return annots" @@ -2179,7 +2270,7 @@ "metadata": {}, "outputs": [], "source": [ - "sns.set_style(\"whitegrid\", {'axes.grid' : False})" + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})" ] }, { @@ -2189,7 +2280,7 @@ "metadata": {}, "outputs": [], "source": [ - "ai_countsBrancol2 = Brancol2data['ai_sets']" + "ai_countsBrancol2 = Brancol2data[\"ai_sets\"]" ] }, { @@ -2199,7 +2290,7 @@ "metadata": {}, "outputs": [], "source": [ - "bvCounts_Brancol2 = Brancol2data['all_counts']" + "bvCounts_Brancol2 = Brancol2data[\"all_counts\"]" ] }, { @@ -2265,13 +2356,13 @@ "# Trip 2 predictions plot\n", "\n", "# setting style, font sizes, and fig size\n", - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", "sns.set_theme(style=\"ticks\")\n", - "figsize = (7.5,2.5)\n", + "figsize = (7.5, 2.5)\n", "\n", - "plt.rc('xtick',labelsize=8)\n", - "plt.rc('ytick',labelsize=8)\n", - "plt.rc('axes', labelsize = 8)\n", + "plt.rc(\"xtick\", labelsize=8)\n", + "plt.rc(\"ytick\", labelsize=8)\n", + "plt.rc(\"axes\", labelsize=8)\n", "\n", "# set up datasets to be used\n", "bv_sets = brancol2_bv_sets\n", @@ -2280,35 +2371,71 @@ "\n", "\n", "# define hex codes for colors used\n", - "bv_color = '#a2c662'\n", - "ai_color = '#184EAD'\n", - "elog_color = '#117347'\n", - "predictions_color = '#43aa99'\n", + "bv_color = \"#a2c662\"\n", + "ai_color = \"#184EAD\"\n", + "elog_color = \"#117347\"\n", + "predictions_color = \"#43aa99\"\n", "\n", "\n", - "\n", - "fig, ax = plt.subplots(2,1,figsize=figsize, gridspec_kw={'height_ratios': [2, 1]},sharex = True)\n", + "fig, ax = plt.subplots(2, 1, figsize=figsize, gridspec_kw={\"height_ratios\": [2, 1]}, sharex=True)\n", "\n", "# hlines for the event bars (the horizonal lines)\n", - "yticks = [.175, .5, .825]\n", - "yheight = .24\n", - "ypos = [tick - (yheight/2) for tick in yticks]\n", - "ax[0].hlines([.175, .5, .825],.01,.99, transform=ax[0].transAxes, colors = 'grey', lw = .2, zorder = 0)\n", + "yticks = [0.175, 0.5, 0.825]\n", + "yheight = 0.24\n", + "ypos = [tick - (yheight / 2) for tick in yticks]\n", + "ax[0].hlines(\n", + " [0.175, 0.5, 0.825], 0.01, 0.99, transform=ax[0].transAxes, colors=\"grey\", lw=0.2, zorder=0\n", + ")\n", "\n", "# ticks for the 
event bar positions and labels\n", - "ax[0].set_yticks([.175,.5,.825],('Predicted Hauls','Elog Hauls','Analyst Hauls'))\n", + "ax[0].set_yticks([0.175, 0.5, 0.825], (\"Predicted Hauls\", \"Elog Hauls\", \"Analyst Hauls\"))\n", "ax[0].set_ylim([0, 1])\n", "\n", "\n", "# plotting event bars\n", - "plot_event_bars(bv_sets, ax[0], 'Analyst Hauls', 'haul_start_datetime', end_col = 'haul_end_datetime', y_val = ypos[2], y_height = yheight, color = bv_color) #y_val = .675,\n", - "plot_event_bars(elog, ax[0], 'elog_hauls','systemstarthauldatetime', end_col = 'systemendhauldatetime', color = elog_color, y_val = ypos[1], y_height = yheight) # y_val = .35\n", - "plot_event_bars(results.loc[results.predict_haul ==1], ax[0], 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = ypos[0] , y_height = yheight, color = predictions_color) #y_val = .025\n", + "plot_event_bars(\n", + " bv_sets,\n", + " ax[0],\n", + " \"Analyst Hauls\",\n", + " \"haul_start_datetime\",\n", + " end_col=\"haul_end_datetime\",\n", + " y_val=ypos[2],\n", + " y_height=yheight,\n", + " color=bv_color,\n", + ") # y_val = .675,\n", + "plot_event_bars(\n", + " elog,\n", + " ax[0],\n", + " \"elog_hauls\",\n", + " \"systemstarthauldatetime\",\n", + " end_col=\"systemendhauldatetime\",\n", + " color=elog_color,\n", + " y_val=ypos[1],\n", + " y_height=yheight,\n", + ") # y_val = .35\n", + "plot_event_bars(\n", + " results.loc[results.predict_haul == 1],\n", + " ax[0],\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=ypos[0],\n", + " y_height=yheight,\n", + " color=predictions_color,\n", + ") # y_val = .025\n", "\n", "\n", "# plotting the ai counts\n", - "ln1 = sns.lineplot(x = 'utc_start_datetime', y = 'count', data = results, ax = ax[1], label = 'AI Counts', color =ai_color, clip_on=False, lw = .5)\n", - "\n", + "ln1 = sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"count\",\n", + " data=results,\n", + " ax=ax[1],\n", + " label=\"AI Counts\",\n", + " color=ai_color,\n", + " clip_on=False,\n", + " lw=0.5,\n", + ")\n", "\n", "\n", "# formatting x axis dates\n", @@ -2317,11 +2444,10 @@ "\n", "ax[1].xaxis.set_major_locator(locator)\n", "ax[1].xaxis.set_major_formatter(formatter)\n", - "ax[1].set_xlabel('Datetime (UTC)')\n", + "ax[1].set_xlabel(\"Datetime (UTC)\")\n", "\n", "# labeling y axis for fish counts\n", - "ax[1].set_ylabel('AI Fish Count')\n", - "\n", + "ax[1].set_ylabel(\"AI Fish Count\")\n", "\n", "\n", "# creating custom legend\n", @@ -2329,8 +2455,8 @@ "from matplotlib.patches import Patch\n", "\n", "# set xlimits for the plot based on the top plot\n", - "x0,x1 = ax[0].get_xlim()\n", - "ax[0].set_xlim(x0, x1) \n", + "x0, x1 = ax[0].get_xlim()\n", + "ax[0].set_xlim(x0, x1)\n", "\n", "# remove legend\n", "ax[1].get_legend().remove()\n", @@ -2339,10 +2465,8 @@ "plt.subplots_adjust(wspace=0, hspace=0)\n", "\n", "# remove spines\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", - "plt.savefig('../chart_pngs/haul_detection2_report.png', bbox_inches='tight', dpi = 150)\n", + "sns.despine(trim=True, left=True)\n", + "plt.savefig(\"../chart_pngs/haul_detection2_report.png\", bbox_inches=\"tight\", dpi=150)\n", "\n", "plt.show()" ] @@ -2368,13 +2492,13 @@ "# Trip 3 predictions plot\n", "\n", "# setting style, font sizes, and fig size\n", - "sns.set_style(\"whitegrid\", {'axes.grid' : False})\n", + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})\n", "sns.set_theme(style=\"ticks\")\n", - "figsize = (7.5,2.5)\n", + "figsize = (7.5, 2.5)\n", "\n", - 
"plt.rc('xtick',labelsize=8)\n", - "plt.rc('ytick',labelsize=8)\n", - "plt.rc('axes', labelsize = 8)\n", + "plt.rc(\"xtick\", labelsize=8)\n", + "plt.rc(\"ytick\", labelsize=8)\n", + "plt.rc(\"axes\", labelsize=8)\n", "\n", "# set up datasets to be used\n", "bv_sets = brancol3_bv_sets\n", @@ -2383,35 +2507,71 @@ "\n", "\n", "# define hex codes for colors used\n", - "bv_color = '#a2c662'\n", - "ai_color = '#184EAD'\n", - "elog_color = '#117347'\n", - "predictions_color = '#43aa99'\n", + "bv_color = \"#a2c662\"\n", + "ai_color = \"#184EAD\"\n", + "elog_color = \"#117347\"\n", + "predictions_color = \"#43aa99\"\n", "\n", "\n", - "\n", - "fig, ax = plt.subplots(2,1,figsize=figsize, gridspec_kw={'height_ratios': [2, 1]},sharex = True)\n", + "fig, ax = plt.subplots(2, 1, figsize=figsize, gridspec_kw={\"height_ratios\": [2, 1]}, sharex=True)\n", "\n", "# hlines for the event bars (the horizonal lines)\n", - "yticks = [.175, .5, .825]\n", - "yheight = .24\n", - "ypos = [tick - (yheight/2) for tick in yticks]\n", - "ax[0].hlines([.175, .5, .825],.01,.99, transform=ax[0].transAxes, colors = 'grey', lw = .2, zorder = 0)\n", + "yticks = [0.175, 0.5, 0.825]\n", + "yheight = 0.24\n", + "ypos = [tick - (yheight / 2) for tick in yticks]\n", + "ax[0].hlines(\n", + " [0.175, 0.5, 0.825], 0.01, 0.99, transform=ax[0].transAxes, colors=\"grey\", lw=0.2, zorder=0\n", + ")\n", "\n", "# ticks for the event bar positions and labels\n", - "ax[0].set_yticks([.175,.5,.825],('Predicted Hauls','Elog Hauls','Analyst Hauls'))\n", + "ax[0].set_yticks([0.175, 0.5, 0.825], (\"Predicted Hauls\", \"Elog Hauls\", \"Analyst Hauls\"))\n", "ax[0].set_ylim([0, 1])\n", "\n", "\n", "# plotting event bars\n", - "plot_event_bars(bv_sets, ax[0], 'Analyst Hauls', 'haul_start_datetime', end_col = 'haul_end_datetime', y_val = ypos[2], y_height = yheight, color = bv_color) #y_val = .675,\n", - "plot_event_bars(elog, ax[0], 'elog_hauls','systemstarthauldatetime', end_col = 'systemendhauldatetime', color = elog_color, y_val = ypos[1], y_height = yheight) # y_val = .35\n", - "plot_event_bars(results.loc[results.predict_haul ==1], ax[0], 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = ypos[0] , y_height = yheight, color = predictions_color) #y_val = .025\n", + "plot_event_bars(\n", + " bv_sets,\n", + " ax[0],\n", + " \"Analyst Hauls\",\n", + " \"haul_start_datetime\",\n", + " end_col=\"haul_end_datetime\",\n", + " y_val=ypos[2],\n", + " y_height=yheight,\n", + " color=bv_color,\n", + ") # y_val = .675,\n", + "plot_event_bars(\n", + " elog,\n", + " ax[0],\n", + " \"elog_hauls\",\n", + " \"systemstarthauldatetime\",\n", + " end_col=\"systemendhauldatetime\",\n", + " color=elog_color,\n", + " y_val=ypos[1],\n", + " y_height=yheight,\n", + ") # y_val = .35\n", + "plot_event_bars(\n", + " results.loc[results.predict_haul == 1],\n", + " ax[0],\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=ypos[0],\n", + " y_height=yheight,\n", + " color=predictions_color,\n", + ") # y_val = .025\n", "\n", "\n", "# plotting the ai counts\n", - "ln1 = sns.lineplot(x = 'utc_start_datetime', y = 'count', data = results, ax = ax[1], label = 'AI Counts', color =ai_color, clip_on=False, lw = .5)\n", - "\n", + "ln1 = sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"count\",\n", + " data=results,\n", + " ax=ax[1],\n", + " label=\"AI Counts\",\n", + " color=ai_color,\n", + " clip_on=False,\n", + " lw=0.5,\n", + ")\n", "\n", "\n", "# formatting x axis dates\n", @@ -2420,11 +2580,10 @@ "\n", 
"ax[1].xaxis.set_major_locator(locator)\n", "ax[1].xaxis.set_major_formatter(formatter)\n", - "ax[1].set_xlabel('Datetime (UTC)')\n", + "ax[1].set_xlabel(\"Datetime (UTC)\")\n", "\n", "# labeling y axis for fish counts\n", - "ax[1].set_ylabel('AI Fish Count')\n", - "\n", + "ax[1].set_ylabel(\"AI Fish Count\")\n", "\n", "\n", "# creating custom legend\n", @@ -2432,8 +2591,8 @@ "from matplotlib.patches import Patch\n", "\n", "# set xlimits for the plot based on the top plot\n", - "x0,x1 = ax[0].get_xlim()\n", - "ax[0].set_xlim(x0, x1) \n", + "x0, x1 = ax[0].get_xlim()\n", + "ax[0].set_xlim(x0, x1)\n", "\n", "# remove legend\n", "ax[1].get_legend().remove()\n", @@ -2442,10 +2601,8 @@ "plt.subplots_adjust(wspace=0, hspace=0)\n", "\n", "# remove spines\n", - "sns.despine(trim=True, \n", - " left=True\n", - " )\n", - "plt.savefig('../chart_pngs/haul_detection3_report.png', bbox_inches='tight', dpi = 150)\n", + "sns.despine(trim=True, left=True)\n", + "plt.savefig(\"../chart_pngs/haul_detection3_report.png\", bbox_inches=\"tight\", dpi=150)\n", "\n", "plt.show()" ] @@ -2474,7 +2631,7 @@ "outputs": [], "source": [ "# def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' , alpha = 1):\n", - " \n", + "\n", "# if duration:\n", "# x_duration = np.full(len(df), pd.Timedelta(duration))\n", "# elif end_col:\n", @@ -2484,14 +2641,13 @@ "# x_duration = df[duration_col]\n", "# else:\n", "# x_duration = np.full(len(df), 2)\n", - " \n", + "\n", "# x = list(zip(df[datetime_col], x_duration))\n", "# y = (y_val, y_height)\n", "\n", "# plot = ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=True, alpha = alpha)\n", "\n", - "# return plot\n", - " " + "# return plot\n" ] }, { @@ -2501,15 +2657,23 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_event_category_bars(df_events, ax, category_color_dict, y_val_start, y_height ):\n", + "def plot_event_category_bars(df_events, ax, category_color_dict, y_val_start, y_height):\n", " n = len(category_color_dict)\n", " y_vals = create_array(n, y_val_start, y_height)\n", " for idx, (category, color) in enumerate(category_color_dict.items()):\n", - " df_category = df_events.loc[df_events['category']==category].copy()\n", - " y_val = y_vals[idx] - (y_height/2)\n", - " \n", - " \n", - " plot_event_bars(df_category, ax,category, 'start_time',end_col = 'end_time', y_val=y_val, y_height = y_height, color= color )" + " df_category = df_events.loc[df_events[\"category\"] == category].copy()\n", + " y_val = y_vals[idx] - (y_height / 2)\n", + "\n", + " plot_event_bars(\n", + " df_category,\n", + " ax,\n", + " category,\n", + " \"start_time\",\n", + " end_col=\"end_time\",\n", + " y_val=y_val,\n", + " y_height=y_height,\n", + " color=color,\n", + " )" ] }, { @@ -2548,13 +2712,13 @@ "metadata": {}, "outputs": [], "source": [ - "df_notes = pd.read_csv('../data/reviewer_notes_6-20.csv')\n", + "df_notes = pd.read_csv(\"../data/reviewer_notes_6-20.csv\")\n", "# df_notes = pd.read_csv('../data/reviewer_notes.csv')\n", - "df_notes['start_time'] = pd.to_datetime(df_notes['start_time'], format = 'mixed')\n", + "df_notes[\"start_time\"] = pd.to_datetime(df_notes[\"start_time\"], format=\"mixed\")\n", "\n", - "df_notes['end_time'] = pd.to_datetime(df_notes['end_time'], format = 'mixed')\n", + "df_notes[\"end_time\"] = pd.to_datetime(df_notes[\"end_time\"], format=\"mixed\")\n", "\n", - "df_notes['category'].value_counts()" + "df_notes[\"category\"].value_counts()" ] }, { 
@@ -2564,8 +2728,8 @@ "metadata": {}, "outputs": [], "source": [ - "eventsBrancol2 = df_notes.loc[(df_notes['vessel'] == 'Brancol') & (df_notes['trip_number']==2)]\n", - "eventsBrancol3 = df_notes.loc[(df_notes['vessel'] == 'Brancol') & (df_notes['trip_number']==3)]" + "eventsBrancol2 = df_notes.loc[(df_notes[\"vessel\"] == \"Brancol\") & (df_notes[\"trip_number\"] == 2)]\n", + "eventsBrancol3 = df_notes.loc[(df_notes[\"vessel\"] == \"Brancol\") & (df_notes[\"trip_number\"] == 3)]" ] }, { @@ -2594,7 +2758,7 @@ } ], "source": [ - "eventsBrancol2['category'].value_counts()" + "eventsBrancol2[\"category\"].value_counts()" ] }, { @@ -2754,7 +2918,12 @@ "outputs": [], "source": [ "# create dictionary of colors for each category\n", - "category_dict = {'Haul Stop':colors[13],'Camera Blocked':colors[10],'No Video': colors[6], 'Abnormal Catch':colors[8]}" + "category_dict = {\n", + " \"Haul Stop\": colors[13],\n", + " \"Camera Blocked\": colors[10],\n", + " \"No Video\": colors[6],\n", + " \"Abnormal Catch\": colors[8],\n", + "}" ] }, { @@ -2775,62 +2944,73 @@ } ], "source": [ - "eventsBrancol2_notna = eventsBrancol2[eventsBrancol2['end_time'].notna()]\n", + "eventsBrancol2_notna = eventsBrancol2[eventsBrancol2[\"end_time\"].notna()]\n", "\n", "\n", "# df_test.reset_index(inplace = True)\n", - "bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + "bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "# plt.subplots_adjust(wspace=0, hspace=-.2)\n", "# bbox_args = dict(boxstyle=\"round\", fc=\"0.8\")\n", "arrow_args = dict(arrowstyle=\"->\")\n", "# plt.tight_layout()\n", - "fig, ax = plt.subplots(1,1,figsize=(11,2), sharex = True\n", - " # , gridspec_kw={'height_ratios': [2, 1]}, \n", - " )\n", + "fig, ax = plt.subplots(\n", + " 1,\n", + " 1,\n", + " figsize=(11, 2),\n", + " sharex=True,\n", + " # , gridspec_kw={'height_ratios': [2, 1]},\n", + ")\n", "\n", "# trip2 = Brancol2data['trip_info']\n", "plot_set_vspan(brancol2_bv_sets, ax)\n", "\n", - "plot_event_bars(df_results.loc[df_results.predict_haul ==1], ax, 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = .95, y_height = .1)\n", - "plot_event_category_bars(eventsBrancol2_notna, ax, category_dict, 1.2, .1)\n", + "plot_event_bars(\n", + " df_results.loc[df_results.predict_haul == 1],\n", + " ax,\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=0.95,\n", + " y_height=0.1,\n", + ")\n", + "plot_event_category_bars(eventsBrancol2_notna, ax, category_dict, 1.2, 0.1)\n", "\n", "\n", "n = len(category_dict)\n", - "y_vals = [0,1.0] + list(create_array(n, 1.2, .1))\n", - "y_labels = ['','predicted_hauls'] + list(category_dict.keys())\n", + "y_vals = [0, 1.0] + list(create_array(n, 1.2, 0.1))\n", + "y_labels = [\"\", \"predicted_hauls\"] + list(category_dict.keys())\n", "\n", - "ax.set_yticks(y_vals,y_labels)\n", - "ax.set_ylim([.85, max(y_vals)+.2])\n", + "ax.set_yticks(y_vals, y_labels)\n", + "ax.set_ylim([0.85, max(y_vals) + 0.2])\n", "# ax[1].set_ylim([0, 50])\n", "locator = DayLocator()\n", "formatter = mdates.ConciseDateFormatter(locator)\n", - "ax.tick_params(axis = 'both', labelsize = 9)\n", + "ax.tick_params(axis=\"both\", labelsize=9)\n", "ax.xaxis.set_major_locator(locator)\n", "ax.xaxis.set_major_formatter(formatter)\n", "\n", - "ax.spines['bottom'].set_visible(False)\n", + "ax.spines[\"bottom\"].set_visible(False)\n", "# ax[1].spines['top'].set_visible(False)\n", "ax.legend()\n", "\n", "from matplotlib.lines import Line2D\n", "from 
matplotlib.patches import Patch\n", "\n", - "legend_elements = [Patch(facecolor='#a2c662', edgecolor='#40a018', alpha = .4,\n", - " label='BV Hauls'),\n", - " Patch(facecolor='#117347', edgecolor='#117347',\n", - " label='Elog Hauls'),\n", - " Patch(facecolor='#43aa99', edgecolor='#43aa99',\n", - " label='Predicted Hauls'),\n", - " Line2D([0], [0], color='#184EAD', lw=2, label='AI Counts')]\n", + "legend_elements = [\n", + " Patch(facecolor=\"#a2c662\", edgecolor=\"#40a018\", alpha=0.4, label=\"BV Hauls\"),\n", + " Patch(facecolor=\"#117347\", edgecolor=\"#117347\", label=\"Elog Hauls\"),\n", + " Patch(facecolor=\"#43aa99\", edgecolor=\"#43aa99\", label=\"Predicted Hauls\"),\n", + " Line2D([0], [0], color=\"#184EAD\", lw=2, label=\"AI Counts\"),\n", + "]\n", "\n", "# ax[0].legend(handles=legend_elements, loc='upper left', fontsize = 9)\n", "# ax[1].get_legend().remove()\n", "\n", - "plt.legend( bbox_to_anchor=(1.1, 1))\n", + "plt.legend(bbox_to_anchor=(1.1, 1))\n", "\n", - "plt.subplots_adjust(wspace=0, hspace=-.2)\n", + "plt.subplots_adjust(wspace=0, hspace=-0.2)\n", "\n", - "plt.savefig('../chart_pngs/reivewer_events_haul_detection.png')\n", + "plt.savefig(\"../chart_pngs/reivewer_events_haul_detection.png\")\n", "plt.show()" ] }, @@ -2859,9 +3039,13 @@ "metadata": {}, "outputs": [], "source": [ - "video_events = df_notes.loc[(df_notes['category'] == 'No Video') |(df_notes['category'] == 'Camera Blocked')]\n", + "video_events = df_notes.loc[\n", + " (df_notes[\"category\"] == \"No Video\") | (df_notes[\"category\"] == \"Camera Blocked\")\n", + "]\n", "\n", - "video_eventsBrancol2 = video_events.loc[(video_events['vessel'] == 'Brancol') & (video_events['trip_number']==2)]" + "video_eventsBrancol2 = video_events.loc[\n", + " (video_events[\"vessel\"] == \"Brancol\") & (video_events[\"trip_number\"] == 2)\n", + "]" ] }, { @@ -2879,11 +3063,15 @@ "metadata": {}, "outputs": [], "source": [ - "haul_stops = df_notes.loc[(df_notes['category'] == 'Haul Stop') |(df_notes['category'] == 'Gear Issue')]\n", + "haul_stops = df_notes.loc[\n", + " (df_notes[\"category\"] == \"Haul Stop\") | (df_notes[\"category\"] == \"Gear Issue\")\n", + "]\n", "\n", "haul_stops.dtypes\n", "\n", - "haul_stopsBrancol2 = haul_stops.loc[(haul_stops['vessel'] == 'Brancol') & (haul_stops['trip_number']==2)]" + "haul_stopsBrancol2 = haul_stops.loc[\n", + " (haul_stops[\"vessel\"] == \"Brancol\") & (haul_stops[\"trip_number\"] == 2)\n", + "]" ] }, { @@ -3194,13 +3382,13 @@ "outputs": [], "source": [ "# create dataframe that joins the hauls from bv sets to the predicted hauls (df_results) to get the set number for trip 2\n", - "conn = sqlite3.connect(':memory:')\n", - "bv_sets = Brancol2data['bv_sets']\n", - "df_hauls = bv_sets.loc[:,['set_number','haul_start_datetime','haul_end_datetime']] \n", + "conn = sqlite3.connect(\":memory:\")\n", + "bv_sets = Brancol2data[\"bv_sets\"]\n", + "df_hauls = bv_sets.loc[:, [\"set_number\", \"haul_start_datetime\", \"haul_end_datetime\"]]\n", "\n", - "#write the tables\n", - "df_results.to_sql('results', conn, index=False)\n", - "df_hauls.to_sql('hauls', conn, index=False)\n", + "# write the tables\n", + "df_results.to_sql(\"results\", conn, index=False)\n", + "df_hauls.to_sql(\"hauls\", conn, index=False)\n", "\n", "query = \"\"\"\n", "select\n", @@ -3212,8 +3400,10 @@ "\n", "\"\"\"\n", "df_results_setnumber = pd.read_sql_query(query, conn)\n", - "df_results_setnumber['utc_end_datetime'] = pd.to_datetime(df_results_setnumber['utc_end_datetime'])\n", - 
"df_results_setnumber['utc_start_datetime'] = pd.to_datetime(df_results_setnumber['utc_start_datetime'])" + "df_results_setnumber[\"utc_end_datetime\"] = pd.to_datetime(df_results_setnumber[\"utc_end_datetime\"])\n", + "df_results_setnumber[\"utc_start_datetime\"] = pd.to_datetime(\n", + " df_results_setnumber[\"utc_start_datetime\"]\n", + ")" ] }, { @@ -3385,13 +3575,13 @@ "outputs": [], "source": [ "# create dataframe that joins the hauls from bv sets to the predicted hauls (df_results) to get the set number for trip 3\n", - "conn = sqlite3.connect(':memory:')\n", - "bv_sets3 = Brancol3data['bv_sets']\n", - "df_hauls3 = bv_sets3.loc[:,['set_number','haul_start_datetime','haul_end_datetime']] \n", + "conn = sqlite3.connect(\":memory:\")\n", + "bv_sets3 = Brancol3data[\"bv_sets\"]\n", + "df_hauls3 = bv_sets3.loc[:, [\"set_number\", \"haul_start_datetime\", \"haul_end_datetime\"]]\n", "\n", - "#write the tables\n", - "df_results3.to_sql('results', conn, index=False)\n", - "df_hauls3.to_sql('hauls', conn, index=False)\n", + "# write the tables\n", + "df_results3.to_sql(\"results\", conn, index=False)\n", + "df_hauls3.to_sql(\"hauls\", conn, index=False)\n", "\n", "query = \"\"\"\n", "select\n", @@ -3403,8 +3593,12 @@ "\n", "\"\"\"\n", "df_results3_setnumber = pd.read_sql_query(query, conn)\n", - "df_results3_setnumber['utc_end_datetime'] = pd.to_datetime(df_results3_setnumber['utc_end_datetime'])\n", - "df_results3_setnumber['utc_start_datetime'] = pd.to_datetime(df_results3_setnumber['utc_start_datetime'])" + "df_results3_setnumber[\"utc_end_datetime\"] = pd.to_datetime(\n", + " df_results3_setnumber[\"utc_end_datetime\"]\n", + ")\n", + "df_results3_setnumber[\"utc_start_datetime\"] = pd.to_datetime(\n", + " df_results3_setnumber[\"utc_start_datetime\"]\n", + ")" ] }, { @@ -3434,58 +3628,106 @@ "\n", "%matplotlib inline\n", "# %matplotlib widget\n", - "fig, axes = plt.subplots(1, 4, figsize = (8,1), sharey = True)\n", - "label = 'haul stops'\n", - "chart_labels = ['a', 'b', 'c', 'd']\n", + "fig, axes = plt.subplots(1, 4, figsize=(8, 1), sharey=True)\n", + "label = \"haul stops\"\n", + "chart_labels = [\"a\", \"b\", \"c\", \"d\"]\n", "\n", - "df_hauls = results.loc[results.predict_haul ==1]\n", + "df_hauls = results.loc[results.predict_haul == 1]\n", "\n", "# hlines for the event bars\n", - "yticks = [.175, .5, .825]\n", - "yticks = [.3, .7, 1.825]\n", - "yheight = .4\n", - "ypos = [tick - (yheight/2) for tick in yticks]\n", + "yticks = [0.175, 0.5, 0.825]\n", + "yticks = [0.3, 0.7, 1.825]\n", + "yheight = 0.4\n", + "ypos = [tick - (yheight / 2) for tick in yticks]\n", "\n", "for i, set_n in enumerate(interest_sets):\n", " ax = axes[i]\n", - " ax.hlines(yticks ,.01,.99, transform=ax.transAxes, colors = 'grey', lw = .2, zorder = 0)\n", + " ax.hlines(yticks, 0.01, 0.99, transform=ax.transAxes, colors=\"grey\", lw=0.2, zorder=0)\n", " # ticks for the event bar positions and labels\n", - " ytick_labels = ['Predicted Hauls','Haul Stops','Analyst Hauls']\n", - " ax.set_yticks(yticks,ytick_labels)\n", + " ytick_labels = [\"Predicted Hauls\", \"Haul Stops\", \"Analyst Hauls\"]\n", + " ax.set_yticks(yticks, ytick_labels)\n", " ax.set_yticklabels([])\n", " ax.set_ylim([0, 1])\n", - " \n", + "\n", " # plotting event bars\n", - " p1 = plot_event_bars(brancol2_bv_sets.loc[brancol2_bv_sets['set_number'].astype(int) == set_n].copy(), ax, ytick_labels[2], 'haul_start_datetime', end_col = 'haul_end_datetime', y_val = ypos[2], y_height = yheight, color = bv_color)\n", - " p2 = 
plot_event_bars(haul_stopsBrancol2.loc[haul_stopsBrancol2['set_number'] == set_n].copy(), ax,ytick_labels[1], 'start_time',end_col = 'end_time', y_val=ypos[1], y_height = yheight, color= colors[13] )\n", + " p1 = plot_event_bars(\n", + " brancol2_bv_sets.loc[brancol2_bv_sets[\"set_number\"].astype(int) == set_n].copy(),\n", + " ax,\n", + " ytick_labels[2],\n", + " \"haul_start_datetime\",\n", + " end_col=\"haul_end_datetime\",\n", + " y_val=ypos[2],\n", + " y_height=yheight,\n", + " color=bv_color,\n", + " )\n", + " p2 = plot_event_bars(\n", + " haul_stopsBrancol2.loc[haul_stopsBrancol2[\"set_number\"] == set_n].copy(),\n", + " ax,\n", + " ytick_labels[1],\n", + " \"start_time\",\n", + " end_col=\"end_time\",\n", + " y_val=ypos[1],\n", + " y_height=yheight,\n", + " color=colors[13],\n", + " )\n", " xlim = p1.axes.get_xlim()\n", - " p3 = plot_event_bars(df_hauls, ax, ytick_labels[0], 'utc_start_datetime', duration = '5m', y_val = ypos[0], y_height = yheight)\n", - " \n", - " xmin = pd.Timestamp(xlim[0],unit = 'D')- pd.Timedelta('1h')\n", - " xmax = pd.Timestamp(xlim[1],unit = 'D')+ pd.Timedelta('1h')\n", + " p3 = plot_event_bars(\n", + " df_hauls,\n", + " ax,\n", + " ytick_labels[0],\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=ypos[0],\n", + " y_height=yheight,\n", + " )\n", + "\n", + " xmin = pd.Timestamp(xlim[0], unit=\"D\") - pd.Timedelta(\"1h\")\n", + " xmax = pd.Timestamp(xlim[1], unit=\"D\") + pd.Timedelta(\"1h\")\n", " ax.set_xlim(xmin, xmax)\n", " # p3.axes.set_xlim(xlim)\n", "\n", - " locator = DayLocator(bymonthday = 1)\n", - " hour_locator = HourLocator(interval = 2)\n", + " locator = DayLocator(bymonthday=1)\n", + " hour_locator = HourLocator(interval=2)\n", " # formatter = mdates.ConciseDateFormatter(locator)\n", - " hour_formatter = mdates.ConciseDateFormatter(hour_locator, formats = ['%Y', '%b', '%d', '%H h', '%H:%M', '%S.%f'], offset_formats = ['', '%Y', '%Y-%b', '%Y-%b-%d', '%Y-%b-%d', '%Y-%b-%d %H:%M'], usetex = True)\n", - " fmt = '%H h'\n", + " hour_formatter = mdates.ConciseDateFormatter(\n", + " hour_locator,\n", + " formats=[\"%Y\", \"%b\", \"%d\", \"%H h\", \"%H:%M\", \"%S.%f\"],\n", + " offset_formats=[\"\", \"%Y\", \"%Y-%b\", \"%Y-%b-%d\", \"%Y-%b-%d\", \"%Y-%b-%d %H:%M\"],\n", + " usetex=True,\n", + " )\n", + " fmt = \"%H h\"\n", "\n", " ax.xaxis.set_major_locator(hour_locator)\n", " ax.xaxis.set_major_formatter(hour_formatter)\n", - " ax.xaxis.get_offset_text().set_fontweight('bold')\n", - " ax.tick_params(axis='y', length=0)\n", - " sns.despine(trim=True, left=True )\n", - "\n", - " ax.text(0.02, 1.05, f'({chart_labels[i]})', transform=ax.transAxes, fontsize=9, fontweight='bold', va='top', ha='left')\n", + " ax.xaxis.get_offset_text().set_fontweight(\"bold\")\n", + " ax.tick_params(axis=\"y\", length=0)\n", + " sns.despine(trim=True, left=True)\n", + "\n", + " ax.text(\n", + " 0.02,\n", + " 1.05,\n", + " f\"({chart_labels[i]})\",\n", + " transform=ax.transAxes,\n", + " fontsize=9,\n", + " fontweight=\"bold\",\n", + " va=\"top\",\n", + " ha=\"left\",\n", + " )\n", "\n", "handles, labels = ax.get_legend_handles_labels()\n", - "fig.legend(handles[1:], labels[1:], loc='upper center', fontsize = 9, bbox_to_anchor=(0.5, 1.25), ncol=2, frameon = False)\n", + "fig.legend(\n", + " handles[1:],\n", + " labels[1:],\n", + " loc=\"upper center\",\n", + " fontsize=9,\n", + " bbox_to_anchor=(0.5, 1.25),\n", + " ncol=2,\n", + " frameon=False,\n", + ")\n", "\n", "# Adding bolded labels to the top left corner of each subplot\n", "\n", - 
"plt.savefig('haulstop_subsets.png', bbox_inches='tight')\n" + "plt.savefig(\"haulstop_subsets.png\", bbox_inches=\"tight\")\n" ] }, { @@ -3664,7 +3906,7 @@ "metadata": {}, "outputs": [], "source": [ - "categories = eventsBrancol2['category'].unique()\n", + "categories = eventsBrancol2[\"category\"].unique()\n", "\n", "# categories = np.delete(categories, -1)\n" ] @@ -3698,7 +3940,7 @@ "metadata": {}, "outputs": [], "source": [ - "categories = [category for category in categories if str(category) != 'nan']" + "categories = [category for category in categories if str(category) != \"nan\"]" ] }, { @@ -3708,7 +3950,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "df_results = df_results.loc[:, ~df_results.columns.duplicated()]\n", "# df_results.head()" ] @@ -3725,57 +3966,54 @@ "df_category_results = df_results_setnumber.copy()\n", "# iterate through each category\n", "select_columns = df_results.columns.to_list()\n", - "select_columns = ['results.'+x for x in select_columns]\n", + "select_columns = [\"results.\" + x for x in select_columns]\n", "for category in categories:\n", " # create a category df\n", - " df_category = df_events.loc[df_events['category']==category].copy()\n", - " df_category = df_category.loc[:,['start_time','end_time','category','set_number']]\n", - "\n", - " column_label = 'is_'+ '_'.join(category.lower().split(' '))\n", - " \n", - " #Make the db in memory\n", - " conn = sqlite3.connect(':memory:')\n", - " #write the tables\n", - " df_category_results.to_sql('results', conn, index=False)\n", - " df_category.to_sql('category', conn, index=False)\n", - "\n", - " \n", + " df_category = df_events.loc[df_events[\"category\"] == category].copy()\n", + " df_category = df_category.loc[:, [\"start_time\", \"end_time\", \"category\", \"set_number\"]]\n", + "\n", + " column_label = \"is_\" + \"_\".join(category.lower().split(\" \"))\n", + "\n", + " # Make the db in memory\n", + " conn = sqlite3.connect(\":memory:\")\n", + " # write the tables\n", + " df_category_results.to_sql(\"results\", conn, index=False)\n", + " df_category.to_sql(\"category\", conn, index=False)\n", + "\n", " query = f\"\"\"\n", " select\n", " {', '.join(select_columns)},\n", " category.category is not null as {column_label}\n", - " \n", + "\n", " from results\n", - " left join category on \n", - " results.utc_start_datetime between category.start_time and category.end_time or \n", - " category.start_time between results.utc_start_datetime and results.utc_end_datetime \n", + " left join category on\n", + " results.utc_start_datetime between category.start_time and category.end_time or\n", + " category.start_time between results.utc_start_datetime and results.utc_end_datetime\n", " \"\"\"\n", "\n", " df_category_results = pd.read_sql_query(query, conn)\n", " select_columns.append(column_label)\n", "\n", "\n", - "if 'set_number' not in df_category_results.columns.to_list():\n", - " conn = sqlite3.connect(':memory:')\n", - " bv_sets = Brancol2data['bv_sets']\n", - " df_hauls = bv_sets.loc[:,['set_number','haul_start_datetime','haul_end_datetime']] \n", - " \n", - " #write the tables to add set_number\n", - " df_category_results.to_sql('results', conn, index=False)\n", - " df_hauls.to_sql('hauls', conn, index=False)\n", - " \n", + "if \"set_number\" not in df_category_results.columns.to_list():\n", + " conn = sqlite3.connect(\":memory:\")\n", + " bv_sets = Brancol2data[\"bv_sets\"]\n", + " df_hauls = bv_sets.loc[:, [\"set_number\", \"haul_start_datetime\", \"haul_end_datetime\"]]\n", + "\n", + " # write 
the tables to add set_number\n", + " df_category_results.to_sql(\"results\", conn, index=False)\n", + " df_hauls.to_sql(\"hauls\", conn, index=False)\n", + "\n", " query = \"\"\"\n", " select\n", " results.*,\n", " hauls.set_number\n", - " \n", + "\n", " from results\n", " left join hauls on results.utc_start_datetime between hauls.haul_start_datetime and hauls.haul_end_datetime\n", - " \n", - " \"\"\"\n", - " df_category_results = pd.read_sql_query(query, conn)\n", "\n", - "\n" + " \"\"\"\n", + " df_category_results = pd.read_sql_query(query, conn)\n" ] }, { @@ -3807,10 +4045,10 @@ ], "source": [ "# looking for correlations with the no_haul predictions\n", - "df_category_only_hauls['predict_no_haul'] = df_category_only_hauls['predict_haul'].map({0:1, 1:0})\n", + "df_category_only_hauls[\"predict_no_haul\"] = df_category_only_hauls[\"predict_haul\"].map({0: 1, 1: 0})\n", "\n", "\n", - "df_category_only_hauls.corr()['predict_no_haul']" + "df_category_only_hauls.corr()[\"predict_no_haul\"]" ] }, { @@ -3835,21 +4073,21 @@ "\n", "# get a list of the results columns to use for the sql select statement\n", "select_columns = df_results_setnumber.columns.to_list()\n", - "select_columns = ['results.'+x for x in select_columns]\n", + "select_columns = [\"results.\" + x for x in select_columns]\n", "\n", "# creating a df of only HaulStop events\n", - "category = 'Haul Stop'\n", - "dfHaulStop = df_events.loc[df_events['category']==category].copy()\n", - "dfHaulStop = dfHaulStop.loc[:,['start_time','end_time','category','set_number']]\n", - "dfHaulStop = dfHaulStop.reset_index().rename(columns={'index':'haul_stop_id'})\n", + "category = \"Haul Stop\"\n", + "dfHaulStop = df_events.loc[df_events[\"category\"] == category].copy()\n", + "dfHaulStop = dfHaulStop.loc[:, [\"start_time\", \"end_time\", \"category\", \"set_number\"]]\n", + "dfHaulStop = dfHaulStop.reset_index().rename(columns={\"index\": \"haul_stop_id\"})\n", "\n", - "column_label = 'is_'+ '_'.join(category.lower().split(' '))\n", + "column_label = \"is_\" + \"_\".join(category.lower().split(\" \"))\n", "\n", - "#Make the db in memory\n", - "conn = sqlite3.connect(':memory:')\n", - "#write the tables\n", - "df_haul_stop_results.to_sql('results', conn, index=False)\n", - "dfHaulStop.to_sql('category', conn, index=False)\n", + "# Make the db in memory\n", + "conn = sqlite3.connect(\":memory:\")\n", + "# write the tables\n", + "df_haul_stop_results.to_sql(\"results\", conn, index=False)\n", + "dfHaulStop.to_sql(\"category\", conn, index=False)\n", "\n", "# sql query to join haul stop events to the results if the prediction window is between the haul stop start/end or if the haul stop start time is between the prediction window\n", "query = f\"\"\"\n", @@ -3859,26 +4097,41 @@ " category.haul_stop_id,\n", " category.start_time as haul_stop_start,\n", " category.end_time as haul_stop_end\n", - " \n", + "\n", " from results\n", - " left join category on \n", - " results.utc_start_datetime between category.start_time and category.end_time or \n", - " category.start_time between results.utc_start_datetime and results.utc_end_datetime \n", + " left join category on\n", + " results.utc_start_datetime between category.start_time and category.end_time or\n", + " category.start_time between results.utc_start_datetime and results.utc_end_datetime\n", " \"\"\"\n", "\n", "df_haul_stop_results = pd.read_sql_query(query, conn)\n", "# setting up a column of predict_no_haul that is opposite of predict_haul\n", - "df_haul_stop_results['predict_no_haul'] = 
df_haul_stop_results['predict_haul'].map({0:1, 1:0})\n", - "df_haul_stop_results = df_haul_stop_results.loc[df_haul_stop_results['is_haul']==1]\n", - "df_haul_stop_results = df_haul_stop_results[['utc_start_datetime', 'utc_end_datetime','is_haul', 'predict_haul', 'predict_no_haul','set_number','is_haul_stop','haul_stop_id', 'haul_stop_start', 'haul_stop_end']]\n", - "\n", - "#converting columns to datetime\n", - "df_haul_stop_results['utc_end_datetime'] = pd.to_datetime(df_haul_stop_results['utc_end_datetime'])\n", - "df_haul_stop_results['utc_start_datetime'] = pd.to_datetime(df_haul_stop_results['utc_start_datetime'])\n", - "df_haul_stop_results['haul_stop_end'] = pd.to_datetime(df_haul_stop_results['haul_stop_end'])\n", - "df_haul_stop_results['haul_stop_start'] = pd.to_datetime(df_haul_stop_results['haul_stop_start'])\n", - "dfHaulStop['end_time'] = pd.to_datetime(dfHaulStop['end_time'])\n", - "dfHaulStop['start_time'] = pd.to_datetime(dfHaulStop['start_time'])" + "df_haul_stop_results[\"predict_no_haul\"] = df_haul_stop_results[\"predict_haul\"].map({0: 1, 1: 0})\n", + "df_haul_stop_results = df_haul_stop_results.loc[df_haul_stop_results[\"is_haul\"] == 1]\n", + "df_haul_stop_results = df_haul_stop_results[\n", + " [\n", + " \"utc_start_datetime\",\n", + " \"utc_end_datetime\",\n", + " \"is_haul\",\n", + " \"predict_haul\",\n", + " \"predict_no_haul\",\n", + " \"set_number\",\n", + " \"is_haul_stop\",\n", + " \"haul_stop_id\",\n", + " \"haul_stop_start\",\n", + " \"haul_stop_end\",\n", + " ]\n", + "]\n", + "\n", + "# converting columns to datetime\n", + "df_haul_stop_results[\"utc_end_datetime\"] = pd.to_datetime(df_haul_stop_results[\"utc_end_datetime\"])\n", + "df_haul_stop_results[\"utc_start_datetime\"] = pd.to_datetime(\n", + " df_haul_stop_results[\"utc_start_datetime\"]\n", + ")\n", + "df_haul_stop_results[\"haul_stop_end\"] = pd.to_datetime(df_haul_stop_results[\"haul_stop_end\"])\n", + "df_haul_stop_results[\"haul_stop_start\"] = pd.to_datetime(df_haul_stop_results[\"haul_stop_start\"])\n", + "dfHaulStop[\"end_time\"] = pd.to_datetime(dfHaulStop[\"end_time\"])\n", + "dfHaulStop[\"start_time\"] = pd.to_datetime(dfHaulStop[\"start_time\"])" ] }, { @@ -3895,20 +4148,20 @@ "select_columns = df_results3_setnumber.columns.to_list()\n", "\n", "\n", - "select_columns = ['results.'+x for x in select_columns]\n", + "select_columns = [\"results.\" + x for x in select_columns]\n", "\n", - "category = 'Haul Stop'\n", - "dfHaulStop3 = df_events3.loc[df_events3['category']==category].copy()\n", - "dfHaulStop3 = dfHaulStop3.loc[:,['start_time','end_time','category','set_number']]\n", - "dfHaulStop3 = dfHaulStop3.reset_index().rename(columns={'index':'haul_stop_id'})\n", + "category = \"Haul Stop\"\n", + "dfHaulStop3 = df_events3.loc[df_events3[\"category\"] == category].copy()\n", + "dfHaulStop3 = dfHaulStop3.loc[:, [\"start_time\", \"end_time\", \"category\", \"set_number\"]]\n", + "dfHaulStop3 = dfHaulStop3.reset_index().rename(columns={\"index\": \"haul_stop_id\"})\n", "\n", - "column_label = 'is_'+ '_'.join(category.lower().split(' '))\n", + "column_label = \"is_\" + \"_\".join(category.lower().split(\" \"))\n", "\n", - "#Make the db in memory\n", - "conn = sqlite3.connect(':memory:')\n", - "#write the tables\n", - "df_haul_stop_results3.to_sql('results', conn, index=False)\n", - "dfHaulStop3.to_sql('category', conn, index=False)\n", + "# Make the db in memory\n", + "conn = sqlite3.connect(\":memory:\")\n", + "# write the tables\n", + "df_haul_stop_results3.to_sql(\"results\", conn, 
index=False)\n", + "dfHaulStop3.to_sql(\"category\", conn, index=False)\n", "\n", "\n", "query = f\"\"\"\n", @@ -3918,28 +4171,45 @@ " category.haul_stop_id,\n", " category.start_time as haul_stop_start,\n", " category.end_time as haul_stop_end\n", - " \n", + "\n", " from results\n", - " left join category on \n", - " results.utc_start_datetime between category.start_time and category.end_time or \n", - " category.start_time between results.utc_start_datetime and results.utc_end_datetime \n", + " left join category on\n", + " results.utc_start_datetime between category.start_time and category.end_time or\n", + " category.start_time between results.utc_start_datetime and results.utc_end_datetime\n", " \"\"\"\n", "\n", "print(query)\n", "\n", "df_haul_stop_results3 = pd.read_sql_query(query, conn)\n", - "df_haul_stop_results3['predict_no_haul'] = df_haul_stop_results3['predict_haul'].map({0:1, 1:0})\n", - "df_haul_stop_results3 = df_haul_stop_results3.loc[df_haul_stop_results3['is_haul']==1]\n", - "df_haul_stop_results3 = df_haul_stop_results3[['utc_start_datetime', 'utc_end_datetime','is_haul', 'predict_haul', 'predict_no_haul','set_number','is_haul_stop','haul_stop_id', 'haul_stop_start', 'haul_stop_end']]\n", - "\n", - "df_haul_stop_results3['utc_end_datetime'] = pd.to_datetime(df_haul_stop_results3['utc_end_datetime'])\n", - "df_haul_stop_results3['utc_start_datetime'] = pd.to_datetime(df_haul_stop_results3['utc_start_datetime'])\n", - "\n", - "df_haul_stop_results3['haul_stop_end'] = pd.to_datetime(df_haul_stop_results3['haul_stop_end'])\n", - "df_haul_stop_results3['haul_stop_start'] = pd.to_datetime(df_haul_stop_results3['haul_stop_start'])\n", - "\n", - "dfHaulStop3['end_time'] = pd.to_datetime(dfHaulStop3['end_time'])\n", - "dfHaulStop3['start_time'] = pd.to_datetime(dfHaulStop3['start_time'])" + "df_haul_stop_results3[\"predict_no_haul\"] = df_haul_stop_results3[\"predict_haul\"].map({0: 1, 1: 0})\n", + "df_haul_stop_results3 = df_haul_stop_results3.loc[df_haul_stop_results3[\"is_haul\"] == 1]\n", + "df_haul_stop_results3 = df_haul_stop_results3[\n", + " [\n", + " \"utc_start_datetime\",\n", + " \"utc_end_datetime\",\n", + " \"is_haul\",\n", + " \"predict_haul\",\n", + " \"predict_no_haul\",\n", + " \"set_number\",\n", + " \"is_haul_stop\",\n", + " \"haul_stop_id\",\n", + " \"haul_stop_start\",\n", + " \"haul_stop_end\",\n", + " ]\n", + "]\n", + "\n", + "df_haul_stop_results3[\"utc_end_datetime\"] = pd.to_datetime(\n", + " df_haul_stop_results3[\"utc_end_datetime\"]\n", + ")\n", + "df_haul_stop_results3[\"utc_start_datetime\"] = pd.to_datetime(\n", + " df_haul_stop_results3[\"utc_start_datetime\"]\n", + ")\n", + "\n", + "df_haul_stop_results3[\"haul_stop_end\"] = pd.to_datetime(df_haul_stop_results3[\"haul_stop_end\"])\n", + "df_haul_stop_results3[\"haul_stop_start\"] = pd.to_datetime(df_haul_stop_results3[\"haul_stop_start\"])\n", + "\n", + "dfHaulStop3[\"end_time\"] = pd.to_datetime(dfHaulStop3[\"end_time\"])\n", + "dfHaulStop3[\"start_time\"] = pd.to_datetime(dfHaulStop3[\"start_time\"])" ] }, { @@ -3962,22 +4232,32 @@ ], "source": [ "# get the total number of minutes of haul events recorded by bv for trip 3\n", - "print('trip 3 durations')\n", - "brancol3_bv_sets['duration_minutes'] = brancol3_bv_sets['duration'].apply(lambda x: x.total_seconds()/60)\n", + "print(\"trip 3 durations\")\n", + "brancol3_bv_sets[\"duration_minutes\"] = brancol3_bv_sets[\"duration\"].apply(\n", + " lambda x: x.total_seconds() / 60\n", + ")\n", "print(f'bv haul duration: 
{sum(brancol3_bv_sets['duration_minutes'])}')\n", "\n", - "df_haul_stop_results3['haul_duration'] = df_haul_stop_results3.apply(lambda x: (x['utc_end_datetime'] - x['utc_start_datetime']).total_seconds()/60, axis = 1)\n", + "df_haul_stop_results3[\"haul_duration\"] = df_haul_stop_results3.apply(\n", + " lambda x: (x[\"utc_end_datetime\"] - x[\"utc_start_datetime\"]).total_seconds() / 60, axis=1\n", + ")\n", "print(f'bv haul duration aligned with ai results: {sum(df_haul_stop_results3['haul_duration'])}')\n", "\n", "# get total duration of haul stops from the haul stops that have available results\n", - "only_stops3 = df_haul_stop_results3.loc[df_haul_stop_results3['is_haul_stop']==1].copy()\n", - "only_stops3 = only_stops3[['haul_stop_id','haul_stop_start','haul_stop_end']].drop_duplicates()\n", - "only_stops3['duration'] = only_stops3.apply(lambda x: (x['haul_stop_end'] - x['haul_stop_start']).total_seconds()/60, axis = 1)\n", - "haul_stop_total3 = sum(only_stops3['duration'])\n", - "print(f'haul stop total duration: {haul_stop_total3}')\n", + "only_stops3 = df_haul_stop_results3.loc[df_haul_stop_results3[\"is_haul_stop\"] == 1].copy()\n", + "only_stops3 = only_stops3[[\"haul_stop_id\", \"haul_stop_start\", \"haul_stop_end\"]].drop_duplicates()\n", + "only_stops3[\"duration\"] = only_stops3.apply(\n", + " lambda x: (x[\"haul_stop_end\"] - x[\"haul_stop_start\"]).total_seconds() / 60, axis=1\n", + ")\n", + "haul_stop_total3 = sum(only_stops3[\"duration\"])\n", + "print(f\"haul stop total duration: {haul_stop_total3}\")\n", "\n", - "haul_stops_group3 = df_haul_stop_results3.groupby(['haul_stop_id','haul_stop_start','haul_stop_end']).agg({'predict_no_haul':'max'}).reset_index()\n", - "print(f'number of haul stops that aligned with no haul prediction: {len(haul_stops_group3)}')" + "haul_stops_group3 = (\n", + " df_haul_stop_results3.groupby([\"haul_stop_id\", \"haul_stop_start\", \"haul_stop_end\"])\n", + " .agg({\"predict_no_haul\": \"max\"})\n", + " .reset_index()\n", + ")\n", + "print(f\"number of haul stops that aligned with no haul prediction: {len(haul_stops_group3)}\")" ] }, { @@ -4000,23 +4280,33 @@ ], "source": [ "# get the total number of minutes of haul events recorded by bv for trip 2\n", - "print('trip 2 durations')\n", - "brancol2_bv_sets['duration_minutes'] = brancol2_bv_sets['duration'].apply(lambda x: x.total_seconds()/60)\n", + "print(\"trip 2 durations\")\n", + "brancol2_bv_sets[\"duration_minutes\"] = brancol2_bv_sets[\"duration\"].apply(\n", + " lambda x: x.total_seconds() / 60\n", + ")\n", "print(f'bv total haul duration: {sum(brancol2_bv_sets['duration_minutes'])}')\n", "\n", "# get the total number of minutes of bv haul events where a bv haul aligned with the ai results (if there were no ai catch count results, then those times are excluded)\n", - "df_haul_stop_results['haul_duration'] = df_haul_stop_results.apply(lambda x: (x['utc_end_datetime'] - x['utc_start_datetime']).total_seconds()/60, axis = 1)\n", + "df_haul_stop_results[\"haul_duration\"] = df_haul_stop_results.apply(\n", + " lambda x: (x[\"utc_end_datetime\"] - x[\"utc_start_datetime\"]).total_seconds() / 60, axis=1\n", + ")\n", "print(f'bv haul duration aligned with ai results: {sum(df_haul_stop_results['haul_duration'])}')\n", "\n", "# get total duration of haul stops from the haul stops that have available results\n", - "only_stops = df_haul_stop_results.loc[df_haul_stop_results['is_haul_stop']==1].copy()\n", - "only_stops = 
only_stops[['haul_stop_id','haul_stop_start','haul_stop_end']].drop_duplicates()\n", - "only_stops['duration'] = only_stops.apply(lambda x: (x['haul_stop_end'] - x['haul_stop_start']).total_seconds()/60, axis = 1)\n", - "haul_stop_total = sum(only_stops['duration'])\n", - "print(f'haul stop total duration: {haul_stop_total}')\n", + "only_stops = df_haul_stop_results.loc[df_haul_stop_results[\"is_haul_stop\"] == 1].copy()\n", + "only_stops = only_stops[[\"haul_stop_id\", \"haul_stop_start\", \"haul_stop_end\"]].drop_duplicates()\n", + "only_stops[\"duration\"] = only_stops.apply(\n", + " lambda x: (x[\"haul_stop_end\"] - x[\"haul_stop_start\"]).total_seconds() / 60, axis=1\n", + ")\n", + "haul_stop_total = sum(only_stops[\"duration\"])\n", + "print(f\"haul stop total duration: {haul_stop_total}\")\n", "\n", - "haul_stops_group = df_haul_stop_results.groupby(['haul_stop_id','haul_stop_start','haul_stop_end']).agg({'predict_no_haul':'max'}).reset_index()\n", - "print(f'number of haul stops that aligned with no haul prediction: {len(haul_stops_group)}')" + "haul_stops_group = (\n", + " df_haul_stop_results.groupby([\"haul_stop_id\", \"haul_stop_start\", \"haul_stop_end\"])\n", + " .agg({\"predict_no_haul\": \"max\"})\n", + " .reset_index()\n", + ")\n", + "print(f\"number of haul stops that aligned with no haul prediction: {len(haul_stops_group)}\")" ] }, { @@ -4126,7 +4416,7 @@ } ], "source": [ - "dfHaulStop['duration'] = dfHaulStop['end_time']-dfHaulStop['start_time']\n", + "dfHaulStop[\"duration\"] = dfHaulStop[\"end_time\"] - dfHaulStop[\"start_time\"]\n", "dfHaulStop.head()" ] }, @@ -4139,10 +4429,10 @@ "source": [ "def find_overlap_duration(row):\n", " # finds the overlap time of haul stops with the predicted no_haul events\n", - " if row['is_haul_stop'] == 1:\n", - " latest_start = max(row['utc_start_datetime'],row['haul_stop_start'])\n", - " earliest_end = min(row['utc_end_datetime'],row['haul_stop_end'])\n", - " duration = (earliest_end - latest_start).total_seconds()/60\n", + " if row[\"is_haul_stop\"] == 1:\n", + " latest_start = max(row[\"utc_start_datetime\"], row[\"haul_stop_start\"])\n", + " earliest_end = min(row[\"utc_end_datetime\"], row[\"haul_stop_end\"])\n", + " duration = (earliest_end - latest_start).total_seconds() / 60\n", " else:\n", " duration = 0\n", " return duration" @@ -4155,8 +4445,9 @@ "metadata": {}, "outputs": [], "source": [ - "df_haul_stop_results['overlap_minutes'] = df_haul_stop_results.apply(lambda x: find_overlap_duration(x), axis = 1)\n", - "\n" + "df_haul_stop_results[\"overlap_minutes\"] = df_haul_stop_results.apply(\n", + " lambda x: find_overlap_duration(x), axis=1\n", + ")\n" ] }, { @@ -4174,7 +4465,7 @@ "metadata": {}, "outputs": [], "source": [ - "df_results3['duration'] = df_results3['utc_end_datetime']-df_results3['utc_start_datetime']\n", + "df_results3[\"duration\"] = df_results3[\"utc_end_datetime\"] - df_results3[\"utc_start_datetime\"]\n", "df_onlyhaul3 = df_results3.loc[df_results3.is_haul == 1].copy()" ] }, @@ -4196,8 +4487,8 @@ } ], "source": [ - "df_onlyhaul3['duration_minutes'] = df_results3['duration'].apply(lambda x: x.total_seconds()/60)\n", - "sum(df_onlyhaul3['duration_minutes'].loc[df_onlyhaul3['predict_haul']==0])/60" + "df_onlyhaul3[\"duration_minutes\"] = df_results3[\"duration\"].apply(lambda x: x.total_seconds() / 60)\n", + "sum(df_onlyhaul3[\"duration_minutes\"].loc[df_onlyhaul3[\"predict_haul\"] == 0]) / 60" ] }, { @@ -4219,11 +4510,19 @@ ], "source": [ "# get duration of overlap\n", - "haulstop_total2 = 
sum(df_haul_stop_results['overlap_minutes']) # total number of minutes where a haul stop over laps with a haul?\n",
-    "overlap_nohaul2 = sum(df_haul_stop_results.loc[df_haul_stop_results['predict_haul']==0]['overlap_minutes']) # haul stops where predicted haul is 0, \n",
-    "overlap_haul2 = sum(df_haul_stop_results.loc[df_haul_stop_results['predict_haul']==1]['overlap_minutes']) # hauls stops where there is a haul predicted\n",
-    "total_nohaul2 = sum(df_haul_stop_results.loc[df_haul_stop_results['predict_haul']==0]['haul_duration']) # total prediction of haul gaps\n",
-    "overlap_nohaul2/haulstop_total2"
+    "haulstop_total2 = sum(\n",
+    "    df_haul_stop_results[\"overlap_minutes\"]\n",
+    ")  # total minutes where haul stops overlap the prediction windows\n",
+    "overlap_nohaul2 = sum(\n",
+    "    df_haul_stop_results.loc[df_haul_stop_results[\"predict_haul\"] == 0][\"overlap_minutes\"]\n",
+    ")  # overlap minutes within haul stops where no haul was predicted\n",
+    "overlap_haul2 = sum(\n",
+    "    df_haul_stop_results.loc[df_haul_stop_results[\"predict_haul\"] == 1][\"overlap_minutes\"]\n",
+    ")  # overlap minutes within haul stops where a haul was predicted\n",
+    "total_nohaul2 = sum(\n",
+    "    df_haul_stop_results.loc[df_haul_stop_results[\"predict_haul\"] == 0][\"haul_duration\"]\n",
+    ")  # total duration of the predicted haul gaps\n",
+    "overlap_nohaul2 / haulstop_total2"
   ]
  },
  {
@@ -4245,13 +4544,23 @@
   ],
   "source": [
    "# get duration of overlap\n",
-    "df_haul_stop_results3['overlap_minutes'] = df_haul_stop_results3.apply(lambda x: find_overlap_duration(x), axis = 1)\n",
-    "overlap_total3 = sum(df_haul_stop_results3['overlap_minutes'])\n",
-    "haulstop_total3 = sum(df_haul_stop_results3['overlap_minutes'])\n",
-    "overlap_nohaul3 = sum(df_haul_stop_results3.loc[df_haul_stop_results3['predict_haul']==0]['overlap_minutes'])\n",
-    "overlap_haul3 = sum(df_haul_stop_results3.loc[df_haul_stop_results3['predict_haul']==1]['overlap_minutes'])\n",
-    "total_nohaul3 = sum(df_haul_stop_results3.loc[df_haul_stop_results3['predict_haul']==0]['haul_duration'])\n",
-    "overlap_nohaul3/haulstop_total3 #percentage of nohauls that are covered by haul stops?"
+    "df_haul_stop_results3[\"overlap_minutes\"] = df_haul_stop_results3.apply(\n",
+    "    lambda x: find_overlap_duration(x), axis=1\n",
+    ")\n",
+    "overlap_total3 = sum(df_haul_stop_results3[\"overlap_minutes\"])\n",
+    "haulstop_total3 = sum(df_haul_stop_results3[\"overlap_minutes\"])\n",
+    "overlap_nohaul3 = sum(\n",
+    "    df_haul_stop_results3.loc[df_haul_stop_results3[\"predict_haul\"] == 0][\"overlap_minutes\"]\n",
+    ")\n",
+    "overlap_haul3 = sum(\n",
+    "    df_haul_stop_results3.loc[df_haul_stop_results3[\"predict_haul\"] == 1][\"overlap_minutes\"]\n",
+    ")\n",
+    "total_nohaul3 = sum(\n",
+    "    df_haul_stop_results3.loc[df_haul_stop_results3[\"predict_haul\"] == 0][\"haul_duration\"]\n",
+    ")\n",
+    "overlap_nohaul3 / haulstop_total3  # share of haul-stop overlap minutes where no haul was predicted\n",
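+    "# haulstop_total3 equals overlap_total3 here (both sum the same overlap_minutes\n",
+    "# column); the duplicate name appears to be kept only to mirror haulstop_total2 above"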
] }, { @@ -4262,9 +4569,9 @@ "outputs": [], "source": [ "def format_hours_minutes(minutes):\n", - " hrs = minutes//60\n", - " mins = minutes%60\n", - " formatted = \"%dh %dm\" %(hrs,mins) \n", + " hrs = minutes // 60\n", + " mins = minutes % 60\n", + " formatted = \"%dh %dm\" % (hrs, mins)\n", " return formatted" ] }, @@ -4298,62 +4605,92 @@ "source": [ "# venn diagram for trip 2 and 3 combined\n", "from matplotlib_venn import venn2, venn2_circles\n", - "plt.figure(figsize=(3,3))\n", + "\n", + "plt.figure(figsize=(3, 3))\n", "# Use the venn2 function\n", "\n", "nohaul_color = colors[18]\n", "overlap_color = colors[17]\n", "haulstop_color = colors[13]\n", "\n", - "haulstop_id = '100'\n", - "overlap_id = '110'\n", - "nohaul_id = '010'\n", + "haulstop_id = \"100\"\n", + "overlap_id = \"110\"\n", + "nohaul_id = \"010\"\n", "\n", - "haulstop_total = haulstop_total2+haulstop_total3\n", - "total_nohaul = total_nohaul2+total_nohaul3\n", - "overlap_nohaul = overlap_nohaul2+overlap_nohaul3\n", + "haulstop_total = haulstop_total2 + haulstop_total3\n", + "total_nohaul = total_nohaul2 + total_nohaul3\n", + "overlap_nohaul = overlap_nohaul2 + overlap_nohaul3\n", "\n", "haulstop_total\n", - "v = venn2(subsets = (haulstop_total, total_nohaul, overlap_nohaul), set_labels = ('Haul Stops', 'Haul Prediction Gaps'))\n", + "v = venn2(\n", + " subsets=(haulstop_total, total_nohaul, overlap_nohaul),\n", + " set_labels=(\"Haul Stops\", \"Haul Prediction Gaps\"),\n", + ")\n", "\n", - "v.get_label_by_id(haulstop_id).set_text('')\n", - "v.get_label_by_id(overlap_id).set_text('')\n", - "v.get_label_by_id(nohaul_id).set_text('')\n", - "v.get_label_by_id('A').set_fontsize(10)\n", - "v.get_label_by_id('B').set_fontsize(10)\n", + "v.get_label_by_id(haulstop_id).set_text(\"\")\n", + "v.get_label_by_id(overlap_id).set_text(\"\")\n", + "v.get_label_by_id(nohaul_id).set_text(\"\")\n", + "v.get_label_by_id(\"A\").set_fontsize(10)\n", + "v.get_label_by_id(\"B\").set_fontsize(10)\n", "\n", "\n", - "v.get_patch_by_id(overlap_id).set_color(overlap_color) # overlap\n", + "v.get_patch_by_id(overlap_id).set_color(overlap_color) # overlap\n", "v.get_patch_by_id(overlap_id).set_alpha(1)\n", "\n", - "v.get_patch_by_id(nohaul_id).set_color(nohaul_color) # no haul prediction\n", + "v.get_patch_by_id(nohaul_id).set_color(nohaul_color) # no haul prediction\n", "v.get_patch_by_id(nohaul_id).set_alpha(1)\n", "\n", - "v.get_patch_by_id(haulstop_id).set_color(haulstop_color) # haul stops\n", + "v.get_patch_by_id(haulstop_id).set_color(haulstop_color) # haul stops\n", "v.get_patch_by_id(haulstop_id).set_alpha(1)\n", "\n", "\n", - "\n", - "plt.annotate(format_hours_minutes(total_nohaul-overlap_nohaul), xy=v.get_label_by_id(nohaul_id).get_position(), xytext=(0,0), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = 'white', weight = 'bold',\n", - " path_effects=[pe.withStroke(linewidth=4, foreground=colors[17], alpha = .6)]\n", - " )\n", - "\n", - "plt.annotate(format_hours_minutes(overlap_nohaul), xy=v.get_label_by_id(overlap_id).get_position(), xytext=(-20,50), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = overlap_color, weight = 'bold',\n", - " # path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = 1)],\n", - " arrowprops=dict(arrowstyle='->',lw = 1.5, connectionstyle='arc3,rad=-0.3',color='white', edgecolor=overlap_color, \n", - " path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = .6)]),\n", - " # bbox=dict(boxstyle='round,pad=0.2', fc=overlap_color, 
edgecolor = overlap_color, alpha = 1 )\n", - " )\n", - "\n", - "plt.annotate(format_hours_minutes(haulstop_total-overlap_nohaul), xy=v.get_label_by_id(haulstop_id).get_position(), xytext=(0,0), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = 'white', weight = 'bold', \n", - " path_effects=[pe.withStroke(linewidth=4, foreground=colors[12], alpha = .6)]\n", - " # bbox=dict(boxstyle='round,pad=0.5', fc=, edgecolor = overlap_color, alpha = 1 )\n", - " )\n", - "\n", - "plt.savefig('haul_stop_venn.png', bbox_inches='tight')" + "plt.annotate(\n", + " format_hours_minutes(total_nohaul - overlap_nohaul),\n", + " xy=v.get_label_by_id(nohaul_id).get_position(),\n", + " xytext=(0, 0),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=\"white\",\n", + " weight=\"bold\",\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=colors[17], alpha=0.6)],\n", + ")\n", + "\n", + "plt.annotate(\n", + " format_hours_minutes(overlap_nohaul),\n", + " xy=v.get_label_by_id(overlap_id).get_position(),\n", + " xytext=(-20, 50),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=overlap_color,\n", + " weight=\"bold\",\n", + " # path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = 1)],\n", + " arrowprops=dict(\n", + " arrowstyle=\"->\",\n", + " lw=1.5,\n", + " connectionstyle=\"arc3,rad=-0.3\",\n", + " color=\"white\",\n", + " edgecolor=overlap_color,\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha=0.6)],\n", + " ),\n", + " # bbox=dict(boxstyle='round,pad=0.2', fc=overlap_color, edgecolor = overlap_color, alpha = 1 )\n", + ")\n", + "\n", + "plt.annotate(\n", + " format_hours_minutes(haulstop_total - overlap_nohaul),\n", + " xy=v.get_label_by_id(haulstop_id).get_position(),\n", + " xytext=(0, 0),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=\"white\",\n", + " weight=\"bold\",\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=colors[12], alpha=0.6)],\n", + " # bbox=dict(boxstyle='round,pad=0.5', fc=, edgecolor = overlap_color, alpha = 1 )\n", + ")\n", + "\n", + "plt.savefig(\"haul_stop_venn.png\", bbox_inches=\"tight\")" ] }, { @@ -4374,7 +4711,7 @@ } ], "source": [ - "overlap_nohaul/haulstop_total" + "overlap_nohaul / haulstop_total" ] }, { @@ -4397,57 +4734,87 @@ "source": [ "# venn diagram for trip 3\n", "from matplotlib_venn import venn2, venn2_circles\n", - "plt.figure(figsize=(3,3))\n", + "\n", + "plt.figure(figsize=(3, 3))\n", "# Use the venn2 function\n", "\n", "nohaul_color = colors[18]\n", "overlap_color = colors[17]\n", "haulstop_color = colors[13]\n", "\n", - "haulstop_id = '100'\n", - "overlap_id = '110'\n", - "nohaul_id = '010'\n", + "haulstop_id = \"100\"\n", + "overlap_id = \"110\"\n", + "nohaul_id = \"010\"\n", "\n", - "v = venn2(subsets = (overlap_total3, total_nohaul3, overlap_nohaul3), set_labels = ('Haul Stops', 'Haul Prediction Gaps'))\n", + "v = venn2(\n", + " subsets=(overlap_total3, total_nohaul3, overlap_nohaul3),\n", + " set_labels=(\"Haul Stops\", \"Haul Prediction Gaps\"),\n", + ")\n", "\n", - "v.get_label_by_id(haulstop_id).set_text('')\n", - "v.get_label_by_id(overlap_id).set_text('')\n", - "v.get_label_by_id(nohaul_id).set_text('')\n", - "v.get_label_by_id('A').set_fontsize(10)\n", - "v.get_label_by_id('B').set_fontsize(10)\n", + "v.get_label_by_id(haulstop_id).set_text(\"\")\n", + "v.get_label_by_id(overlap_id).set_text(\"\")\n", + 
"v.get_label_by_id(nohaul_id).set_text(\"\")\n", + "v.get_label_by_id(\"A\").set_fontsize(10)\n", + "v.get_label_by_id(\"B\").set_fontsize(10)\n", "\n", "\n", - "v.get_patch_by_id(overlap_id).set_color(overlap_color) # overlap\n", + "v.get_patch_by_id(overlap_id).set_color(overlap_color) # overlap\n", "v.get_patch_by_id(overlap_id).set_alpha(1)\n", "\n", - "v.get_patch_by_id(nohaul_id).set_color(nohaul_color) # no haul prediction\n", + "v.get_patch_by_id(nohaul_id).set_color(nohaul_color) # no haul prediction\n", "v.get_patch_by_id(nohaul_id).set_alpha(1)\n", "\n", - "v.get_patch_by_id(haulstop_id).set_color(haulstop_color) # haul stops\n", + "v.get_patch_by_id(haulstop_id).set_color(haulstop_color) # haul stops\n", "v.get_patch_by_id(haulstop_id).set_alpha(1)\n", "\n", "\n", - "\n", - "plt.annotate(format_hours_minutes(total_nohaul3-overlap_nohaul3), xy=v.get_label_by_id(nohaul_id).get_position(), xytext=(0,0), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = 'white', weight = 'bold',\n", - " path_effects=[pe.withStroke(linewidth=4, foreground=colors[17], alpha = .6)]\n", - " )\n", - "\n", - "plt.annotate(format_hours_minutes(overlap_nohaul3), xy=v.get_label_by_id(overlap_id).get_position(), xytext=(-20,50), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = overlap_color, weight = 'bold',\n", - " # path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = 1)],\n", - " arrowprops=dict(arrowstyle='->',lw = 1.5, connectionstyle='arc3,rad=-0.3',color='white', edgecolor=overlap_color, \n", - " path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = .6)]),\n", - " # bbox=dict(boxstyle='round,pad=0.2', fc=overlap_color, edgecolor = overlap_color, alpha = 1 )\n", - " )\n", - "\n", - "plt.annotate(format_hours_minutes(overlap_total3-overlap_nohaul3), xy=v.get_label_by_id(haulstop_id).get_position(), xytext=(0,0), ha='center',\n", - " textcoords='offset points', fontsize = 10, color = 'white', weight = 'bold', \n", - " path_effects=[pe.withStroke(linewidth=4, foreground=colors[12], alpha = .6)]\n", - " # bbox=dict(boxstyle='round,pad=0.5', fc=, edgecolor = overlap_color, alpha = 1 )\n", - " )\n", - "\n", - "plt.savefig('haul_stop_venn3.png', bbox_inches='tight')" + "plt.annotate(\n", + " format_hours_minutes(total_nohaul3 - overlap_nohaul3),\n", + " xy=v.get_label_by_id(nohaul_id).get_position(),\n", + " xytext=(0, 0),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=\"white\",\n", + " weight=\"bold\",\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=colors[17], alpha=0.6)],\n", + ")\n", + "\n", + "plt.annotate(\n", + " format_hours_minutes(overlap_nohaul3),\n", + " xy=v.get_label_by_id(overlap_id).get_position(),\n", + " xytext=(-20, 50),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=overlap_color,\n", + " weight=\"bold\",\n", + " # path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha = 1)],\n", + " arrowprops=dict(\n", + " arrowstyle=\"->\",\n", + " lw=1.5,\n", + " connectionstyle=\"arc3,rad=-0.3\",\n", + " color=\"white\",\n", + " edgecolor=overlap_color,\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=overlap_color, alpha=0.6)],\n", + " ),\n", + " # bbox=dict(boxstyle='round,pad=0.2', fc=overlap_color, edgecolor = overlap_color, alpha = 1 )\n", + ")\n", + "\n", + "plt.annotate(\n", + " format_hours_minutes(overlap_total3 - overlap_nohaul3),\n", + " 
xy=v.get_label_by_id(haulstop_id).get_position(),\n", + " xytext=(0, 0),\n", + " ha=\"center\",\n", + " textcoords=\"offset points\",\n", + " fontsize=10,\n", + " color=\"white\",\n", + " weight=\"bold\",\n", + " path_effects=[pe.withStroke(linewidth=4, foreground=colors[12], alpha=0.6)],\n", + " # bbox=dict(boxstyle='round,pad=0.5', fc=, edgecolor = overlap_color, alpha = 1 )\n", + ")\n", + "\n", + "plt.savefig(\"haul_stop_venn3.png\", bbox_inches=\"tight\")" ] }, { @@ -4522,7 +4889,7 @@ } ], "source": [ - "(8*60)+16" + "(8 * 60) + 16" ] }, { @@ -4564,7 +4931,7 @@ } ], "source": [ - "overlap_total%60" + "overlap_total % 60" ] }, { @@ -4598,8 +4965,8 @@ "# Perform Chi-Square Test for each pair of events\n", "chi2_dict = {}\n", "for category in category_cols:\n", - " chi2, p = chi_square_test('predict_haul', category, df_category_only_hauls)\n", - " chi2_dict[category] = {'chi2': chi2, 'p':p}" + " chi2, p = chi_square_test(\"predict_haul\", category, df_category_only_hauls)\n", + " chi2_dict[category] = {\"chi2\": chi2, \"p\": p}" ] }, { @@ -4889,10 +5256,10 @@ } ], "source": [ - "x = 'is_haul_stop'\n", - "y = 'predict_no_haul'\n", - "z = 'predict_haul'\n", - "coocc[x][y]/coocc[x][x]" + "x = \"is_haul_stop\"\n", + "y = \"predict_no_haul\"\n", + "z = \"predict_haul\"\n", + "coocc[x][y] / coocc[x][x]" ] }, { @@ -4913,7 +5280,7 @@ } ], "source": [ - "coocc[y][y]+coocc[z][z]" + "coocc[y][y] + coocc[z][z]" ] }, { @@ -4931,12 +5298,12 @@ "metadata": {}, "outputs": [], "source": [ - "def association_confidence(x,y,coocc):\n", - " confidence = coocc[x][y]/coocc[x][x]\n", + "def association_confidence(x, y, coocc):\n", + " confidence = coocc[x][y] / coocc[x][x]\n", "\n", - " fraction_y = coocc[y][y]/len_cats\n", + " fraction_y = coocc[y][y] / len_cats\n", "\n", - " lift = confidence/fraction_y\n", + " lift = confidence / fraction_y\n", "\n", " return confidence, lift" ] @@ -4973,8 +5340,8 @@ "source": [ "for category in category_cols:\n", " print(category)\n", - " confidence, lift = association_confidence(category,'predict_no_haul', coocc)\n", - " print(f'confidence: {confidence}, lift: {lift}')" + " confidence, lift = association_confidence(category, \"predict_no_haul\", coocc)\n", + " print(f\"confidence: {confidence}, lift: {lift}\")" ] }, { @@ -5009,8 +5376,8 @@ "source": [ "for category in category_cols:\n", " print(category)\n", - " confidence, lift = association_confidence(category,'predict_no_haul', coocc)\n", - " print(f'confidence: {confidence}, lift: {lift}')" + " confidence, lift = association_confidence(category, \"predict_no_haul\", coocc)\n", + " print(f\"confidence: {confidence}, lift: {lift}\")" ] }, { diff --git a/notebooks/timeseries_classifier_model.ipynb b/notebooks/timeseries_classifier_model.ipynb index 29019cb..340cdbc 100644 --- a/notebooks/timeseries_classifier_model.ipynb +++ b/notebooks/timeseries_classifier_model.ipynb @@ -17,7 +17,6 @@ "metadata": {}, "outputs": [], "source": [ - " \n", "%autoreload 2" ] }, @@ -40,17 +39,19 @@ "from sklearn.linear_model import LinearRegression\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "\n", "sns.set_theme()\n", "\n", - "import itertools \n", + "import itertools\n", "import matplotlib.gridspec as gridspec\n", "\n", "from matplotlib.dates import DayLocator, HourLocator, DateFormatter, drange\n", "\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", "import json\n", "from tsai.all import *\n", 
"from IPython.display import display, Markdown\n", @@ -81,7 +82,29 @@ "metadata": {}, "outputs": [], "source": [ - "colors = sns.color_palette(['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99'])" + "colors = sns.color_palette(\n", + " [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", + ")" ] }, { @@ -91,7 +114,27 @@ "metadata": {}, "outputs": [], "source": [ - "color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']" + "color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + "]" ] }, { @@ -102,14 +145,36 @@ "outputs": [], "source": [ "%matplotlib inline\n", + "\n", + "\n", "def show_color_pallete():\n", - "# fig, ax = plt.subplots()\n", - " color_list = ['#184EAD','#648fff','#88ccee','#ae9ef7','#6844d5','#332288','#c52dac','#ef4341','#84164c','#cb6577','#ff6100','#90550f','#c78d1b','#ffb003','#ddcc77','#a2c662','#40a018','#117347','#43aa99']\n", + " # fig, ax = plt.subplots()\n", + " color_list = [\n", + " \"#184EAD\",\n", + " \"#648fff\",\n", + " \"#88ccee\",\n", + " \"#ae9ef7\",\n", + " \"#6844d5\",\n", + " \"#332288\",\n", + " \"#c52dac\",\n", + " \"#ef4341\",\n", + " \"#84164c\",\n", + " \"#cb6577\",\n", + " \"#ff6100\",\n", + " \"#90550f\",\n", + " \"#c78d1b\",\n", + " \"#ffb003\",\n", + " \"#ddcc77\",\n", + " \"#a2c662\",\n", + " \"#40a018\",\n", + " \"#117347\",\n", + " \"#43aa99\",\n", + " ]\n", " sns.palplot(color_list, size=2)\n", " ax = plt.gca()\n", " for i, name in enumerate(color_list):\n", - " label = f'[{i}] {name}'\n", - " ax.text(i, -.57, label,horizontalalignment='center', fontsize = 10) \n", + " label = f\"[{i}] {name}\"\n", + " ax.text(i, -0.57, label, horizontalalignment=\"center\", fontsize=10)\n", " plt.show()" ] }, @@ -136,12 +201,12 @@ "# r2 = model.score(x,y)\n", "# coefficients = model.coef_\n", "# intercept = model.intercept_\n", - " \n", + "\n", "\n", "# beta0 = r'$intercept = \\hat\\beta_0 =$' + str(round(intercept[0],2))\n", - " \n", + "\n", "# beta1 = r'$slope = \\hat\\beta_1 =$' + str(round(coefficients[0][0],2))\n", - " \n", + "\n", "# r_squared = r'$R^2 =$' + str(round(r2,2))\n", "\n", "# textstr = '\\n'.join((\n", @@ -176,17 +241,17 @@ ], "source": [ "# ST Patrick Trips\n", - "StPatrick1data = get_data(boat = 'stpatrick', trip_no = 0)\n", - "StPatrick2data = get_data(boat = 'stpatrick', trip_no = 1)\n", + "StPatrick1data = get_data(boat=\"stpatrick\", trip_no=0)\n", + "StPatrick2data = get_data(boat=\"stpatrick\", trip_no=1)\n", "\n", - "counts_StPatrick1 = StPatrick1data['all_counts']\n", - "counts_StPatrick2 = StPatrick2data['all_counts']\n", + 
"counts_StPatrick1 = StPatrick1data[\"all_counts\"]\n", + "counts_StPatrick2 = StPatrick2data[\"all_counts\"]\n", "\n", - "stpatrick1_elog = StPatrick1data['elogs']\n", - "stpatrick2_elog = StPatrick2data['elogs']\n", + "stpatrick1_elog = StPatrick1data[\"elogs\"]\n", + "stpatrick2_elog = StPatrick2data[\"elogs\"]\n", "\n", - "stpatrick1_bv_set_counts = StPatrick1data['bv_set_counts']\n", - "stpatrick1_bv_set_counts = StPatrick2data['bv_set_counts']" + "stpatrick1_bv_set_counts = StPatrick1data[\"bv_set_counts\"]\n", + "stpatrick1_bv_set_counts = StPatrick2data[\"bv_set_counts\"]" ] }, { @@ -196,8 +261,8 @@ "metadata": {}, "outputs": [], "source": [ - "stpatrick1_bv_sets = StPatrick1data['bv_sets']\n", - "stpatrick2_bv_sets = StPatrick2data['bv_sets']" + "stpatrick1_bv_sets = StPatrick1data[\"bv_sets\"]\n", + "stpatrick2_bv_sets = StPatrick2data[\"bv_sets\"]" ] }, { @@ -366,24 +431,24 @@ ], "source": [ "# Brancol Trips\n", - "Brancol1data = get_data(boat = 'brancol', trip_no = 0)\n", - "Brancol2data = get_data(boat = 'brancol', trip_no = 1)\n", + "Brancol1data = get_data(boat=\"brancol\", trip_no=0)\n", + "Brancol2data = get_data(boat=\"brancol\", trip_no=1)\n", "\n", "\n", - "counts_Brancol1 = Brancol1data['all_counts']\n", - "counts_Brancol2 = Brancol2data['all_counts']\n", + "counts_Brancol1 = Brancol1data[\"all_counts\"]\n", + "counts_Brancol2 = Brancol2data[\"all_counts\"]\n", "\n", - "brancol1_elog = Brancol1data['elogs']\n", - "brancol2_elog = Brancol2data['elogs']\n", + "brancol1_elog = Brancol1data[\"elogs\"]\n", + "brancol2_elog = Brancol2data[\"elogs\"]\n", "\n", - "brancol1_bv_sets = Brancol1data['bv_sets']\n", - "brancol2_bv_sets = Brancol2data['bv_sets']\n", + "brancol1_bv_sets = Brancol1data[\"bv_sets\"]\n", + "brancol2_bv_sets = Brancol2data[\"bv_sets\"]\n", "\n", - "brancol1_bv_set_counts = Brancol1data['bv_set_counts']\n", - "brancol2_bv_set_counts = Brancol2data['bv_set_counts']\n", + "brancol1_bv_set_counts = Brancol1data[\"bv_set_counts\"]\n", + "brancol2_bv_set_counts = Brancol2data[\"bv_set_counts\"]\n", "\n", - "brancol2trip = Brancol2data['trip_info']\n", - "brancol1trip = Brancol1data['trip_info']" + "brancol2trip = Brancol2data[\"trip_info\"]\n", + "brancol1trip = Brancol1data[\"trip_info\"]" ] }, { @@ -420,37 +485,35 @@ } ], "source": [ + "fig, ax = plt.subplots(2, 1, figsize=(40, 10))\n", "\n", - "\n", - "fig, ax = plt.subplots(2, 1, figsize=(40,10))\n", - "\n", - "dfBrancol1 = Brancol1data['ai_sets']\n", + "dfBrancol1 = Brancol1data[\"ai_sets\"]\n", "data = dfBrancol1\n", - "sets = Brancol1data['bv_sets']\n", + "sets = Brancol1data[\"bv_sets\"]\n", "haul_starts = sets.haul_start_datetime.unique().dropna()\n", "haul_ends = sets.haul_end_datetime.unique().dropna()\n", - "ymax = data['count'].max()\n", + "ymax = data[\"count\"].max()\n", "\n", - "ax[0].title.set_text('Brancol Trip 1')\n", - "sns.lineplot(x = 'utc_start_datetime', y = 'count', data = data, ax = ax[0], alpha = .5)\n", - "#draw verticle lines (I can do this in one line)\n", - "ax[0].vlines(haul_starts, 0, ymax, colors='green')\n", - "ax[0].vlines(haul_ends, 0, ymax, colors='r')\n", - "ax[0].set_xlabel('')\n", + "ax[0].title.set_text(\"Brancol Trip 1\")\n", + "sns.lineplot(x=\"utc_start_datetime\", y=\"count\", data=data, ax=ax[0], alpha=0.5)\n", + "# draw verticle lines (I can do this in one line)\n", + "ax[0].vlines(haul_starts, 0, ymax, colors=\"green\")\n", + "ax[0].vlines(haul_ends, 0, ymax, colors=\"r\")\n", + "ax[0].set_xlabel(\"\")\n", "\n", - "dfBrancol2 = Brancol2data['ai_sets']\n", + 
"dfBrancol2 = Brancol2data[\"ai_sets\"]\n", "data = dfBrancol2\n", - "sets = Brancol2data['bv_sets']\n", + "sets = Brancol2data[\"bv_sets\"]\n", "haul_starts = sets.haul_start_datetime.unique().dropna()\n", "haul_ends = sets.haul_end_datetime.unique().dropna()\n", - "ymax = data['count'].max()\n", + "ymax = data[\"count\"].max()\n", "\n", - "ax[1].title.set_text('Brancol Trip 2')\n", - "sns.lineplot(x = 'utc_start_datetime', y = 'count', data = data, ax = ax[1], alpha = .5)\n", - "#draw verticle lines (I can do this in one line)\n", - "ax[1].vlines(haul_starts, 0, ymax, colors='green')\n", - "ax[1].vlines(haul_ends, 0, ymax, colors='r')\n", - "ax[1].set_xlabel('')\n", + "ax[1].title.set_text(\"Brancol Trip 2\")\n", + "sns.lineplot(x=\"utc_start_datetime\", y=\"count\", data=data, ax=ax[1], alpha=0.5)\n", + "# draw verticle lines (I can do this in one line)\n", + "ax[1].vlines(haul_starts, 0, ymax, colors=\"green\")\n", + "ax[1].vlines(haul_ends, 0, ymax, colors=\"r\")\n", + "ax[1].set_xlabel(\"\")\n", "\n", "plt.show()" ] @@ -482,50 +545,50 @@ "source": [ "def plot_hlines(ax, df, y_val, start_col, end_col, width, color, label):\n", " ax.hlines(\n", - " y = np.full(len(df), y_val),\n", - " xmin = df[start_col].values.reshape((-1,1)),\n", - " xmax =df[end_col].values.reshape((-1,1)),\n", - " linewidth = width,colors= color, label = label\n", + " y=np.full(len(df), y_val),\n", + " xmin=df[start_col].values.reshape((-1, 1)),\n", + " xmax=df[end_col].values.reshape((-1, 1)),\n", + " linewidth=width,\n", + " colors=color,\n", + " label=label,\n", " )\n", "\n", + "\n", "def plot_set_hlines(ax, df, source, color_dict):\n", - " if source == 'elog':\n", - " set_start_col = 'systemstartsetdatetime'\n", - " set_end_col = 'systemendsetdatetime'\n", - " haul_start_col = 'systemstarthauldatetime'\n", - " haul_end_col = 'systemendhauldatetime'\n", + " if source == \"elog\":\n", + " set_start_col = \"systemstartsetdatetime\"\n", + " set_end_col = \"systemendsetdatetime\"\n", + " haul_start_col = \"systemstarthauldatetime\"\n", + " haul_end_col = \"systemendhauldatetime\"\n", " y_val = 1\n", "\n", - "\n", - " elif source == 'bv':\n", - " set_start_col = 'set_start_datetime'\n", - " set_end_col = 'set_end_datetime'\n", - " haul_start_col = 'haul_start_datetime'\n", - " haul_end_col = 'haul_end_datetime'\n", + " elif source == \"bv\":\n", + " set_start_col = \"set_start_datetime\"\n", + " set_end_col = \"set_end_datetime\"\n", + " haul_start_col = \"haul_start_datetime\"\n", + " haul_end_col = \"haul_end_datetime\"\n", " y_val = 0\n", - " \n", - " colors = color_dict[source] \n", - " \n", - " \n", - " #plot_hauling\n", - " plot_hlines(ax, df,y_val, haul_start_col, haul_end_col, 12, colors['haul'], 'haul')\n", "\n", - " #plot tweener time\n", - " plot_hlines(ax, df,y_val, set_end_col, haul_start_col, 12, colors['between'], 'mid')\n", + " colors = color_dict[source]\n", + "\n", + " # plot_hauling\n", + " plot_hlines(ax, df, y_val, haul_start_col, haul_end_col, 12, colors[\"haul\"], \"haul\")\n", + "\n", + " # plot tweener time\n", + " plot_hlines(ax, df, y_val, set_end_col, haul_start_col, 12, colors[\"between\"], \"mid\")\n", "\n", - " #plot setting\n", - " plot_hlines(ax, df, y_val, set_start_col, set_end_col, 12, colors['set'], 'set')\n", + " # plot setting\n", + " plot_hlines(ax, df, y_val, set_start_col, set_end_col, 12, colors[\"set\"], \"set\")\n", "\n", "\n", "def annotate_counts(ax, df, count_col, x_col, y_value):\n", - " props = dict(boxstyle='round', facecolor='white', alpha=0.35)\n", + " props = 
dict(boxstyle=\"round\", facecolor=\"white\", alpha=0.35)\n", " for idx, row in df.iterrows():\n", - " text = f'count: {row[count_col]}'\n", + " text = f\"count: {row[count_col]}\"\n", " x_value = row[x_col]\n", - " ax.text( x_value,y_value , text, fontsize=10, horizontalalignment='right', bbox=props)\n", - " \n", - " \n", - " # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes) " + " ax.text(x_value, y_value, text, fontsize=10, horizontalalignment=\"right\", bbox=props)\n", + "\n", + " # ax.text(.02, .9, f'r2={rvalue ** 2:.2f}, p={pvalue:.2g}, rmse={rmse:.2f}', transform=ax.transAxes)" ] }, { @@ -536,35 +599,34 @@ "outputs": [], "source": [ "def plot_set_bars(ax, df, source, color_dict):\n", - " if source == 'elog':\n", - " set_start_col = 'systemstartsetdatetime'\n", - " set_end_col = 'systemendsetdatetime'\n", - " haul_start_col = 'systemstarthauldatetime'\n", - " haul_end_col = 'systemendhauldatetime'\n", + " if source == \"elog\":\n", + " set_start_col = \"systemstartsetdatetime\"\n", + " set_end_col = \"systemendsetdatetime\"\n", + " haul_start_col = \"systemstarthauldatetime\"\n", + " haul_end_col = \"systemendhauldatetime\"\n", " y_val = 1.7\n", - " \n", - " elif source == 'bv':\n", - " set_start_col = 'set_start_datetime'\n", - " set_end_col = 'set_end_datetime'\n", - " haul_start_col = 'haul_start_datetime'\n", - " haul_end_col = 'haul_end_datetime'\n", - " y_val = .7\n", - "\n", - " df['set_duration'] = df[set_end_col] - df[set_start_col]\n", - " df['haul_duration'] = df[haul_end_col] - df[haul_start_col]\n", - " df['mid_duration'] = df[haul_start_col] - df[set_end_col]\n", - " \n", - " set_x = list(zip(df[set_start_col], df['set_duration']))\n", - " haul_x = list(zip(df[haul_start_col], df['haul_duration']))\n", - " mid_x = list(zip(df[set_end_col], df['mid_duration']))\n", - " \n", - " y = (y_val, .6)\n", + "\n", + " elif source == \"bv\":\n", + " set_start_col = \"set_start_datetime\"\n", + " set_end_col = \"set_end_datetime\"\n", + " haul_start_col = \"haul_start_datetime\"\n", + " haul_end_col = \"haul_end_datetime\"\n", + " y_val = 0.7\n", + "\n", + " df[\"set_duration\"] = df[set_end_col] - df[set_start_col]\n", + " df[\"haul_duration\"] = df[haul_end_col] - df[haul_start_col]\n", + " df[\"mid_duration\"] = df[haul_start_col] - df[set_end_col]\n", + "\n", + " set_x = list(zip(df[set_start_col], df[\"set_duration\"]))\n", + " haul_x = list(zip(df[haul_start_col], df[\"haul_duration\"]))\n", + " mid_x = list(zip(df[set_end_col], df[\"mid_duration\"]))\n", + "\n", + " y = (y_val, 0.6)\n", "\n", " colors = color_dict[source]\n", - " ax.broken_barh(mid_x, y, facecolors = colors['mid'], edgecolor = 'face')\n", - " ax.broken_barh(haul_x, y, facecolors = colors['haul'], edgecolor = 'face')\n", - " ax.broken_barh(set_x, y, facecolors = colors['set'], edgecolor = 'face')\n", - " " + " ax.broken_barh(mid_x, y, facecolors=colors[\"mid\"], edgecolor=\"face\")\n", + " ax.broken_barh(haul_x, y, facecolors=colors[\"haul\"], edgecolor=\"face\")\n", + " ax.broken_barh(set_x, y, facecolors=colors[\"set\"], edgecolor=\"face\")\n" ] }, { @@ -574,48 +636,42 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_elog_comparisons(ax, dfElog, dfBV, title, legend = True, annotate_counts = False, display_axis= True):\n", - "\n", + "def plot_elog_comparisons(\n", + " ax, dfElog, dfBV, title, legend=True, annotate_counts=False, display_axis=True\n", + "):\n", " # ax[0].autofmt_xdate()\n", "\n", - " \n", - " ax.set_yticks([1,2],('bv','elogs'))\n", + " 
ax.set_yticks([1, 2], (\"bv\", \"elogs\"))\n", " # ax.set_yticks([0,1,2],('bv','elogs',' '))\n", - " fig.suptitle(titles['main'], fontsize = 20)\n", - " \n", + " fig.suptitle(titles[\"main\"], fontsize=20)\n", + "\n", " # df1 = brancol1_elog\n", " # df1sets =brancol1_bv_sets\n", "\n", - " \n", - "\n", " if annotate_counts:\n", - " dfElog['totalcount'] = dfElog['bycatchcount'].astype(int) + dfElog['catchcount'].astype(int)\n", - " dfBV['retained_count'] = dfBV['retained_count'].astype('Int64')\n", - " annotate_counts(ax, dfElog, 'totalcount', 'systemstarthauldatetime', 1.2)\n", - " annotate_counts(ax, dfBV, 'retained_count', 'haul_start_datetime', 0.2)\n", - " \n", + " dfElog[\"totalcount\"] = dfElog[\"bycatchcount\"].astype(int) + dfElog[\"catchcount\"].astype(int)\n", + " dfBV[\"retained_count\"] = dfBV[\"retained_count\"].astype(\"Int64\")\n", + " annotate_counts(ax, dfElog, \"totalcount\", \"systemstarthauldatetime\", 1.2)\n", + " annotate_counts(ax, dfBV, \"retained_count\", \"haul_start_datetime\", 0.2)\n", "\n", - " plot_set_bars(ax, dfElog, 'elog', color_dict)\n", - " plot_set_bars(ax, dfBV, 'bv', color_dict)\n", + " plot_set_bars(ax, dfElog, \"elog\", color_dict)\n", + " plot_set_bars(ax, dfBV, \"bv\", color_dict)\n", "\n", - " \n", + " ax.set_title(title, x=0.1, y=1, fontsize=9)\n", "\n", - " ax.set_title(title,x = .1, y = 1, fontsize = 9)\n", - " \n", " # ax.autoscale()\n", " # ax[0].set_ylim(-.5,1.5)\n", " # ax[0].tick_params(axis='x', labelrotation=45)\n", "\n", " if legend:\n", " legend_elements = []\n", - " for label, color in color_dict['elog'].items():\n", - " \n", - " legend_elements.append(Patch(facecolor=color, edgecolor=color,\n", - " label=label))\n", - " ax.legend(handles = legend_elements, loc='center', bbox_to_anchor=(.5, -1), ncol = 3, fontsize = 8)\n", + " for label, color in color_dict[\"elog\"].items():\n", + " legend_elements.append(Patch(facecolor=color, edgecolor=color, label=label))\n", + " ax.legend(\n", + " handles=legend_elements, loc=\"center\", bbox_to_anchor=(0.5, -1), ncol=3, fontsize=8\n", + " )\n", "\n", - " \n", - " #use consise date formater\n", + " # use consise date formater\n", "\n", " if display_axis:\n", " locator = DayLocator()\n", @@ -642,8 +698,8 @@ "outputs": [], "source": [ "color_dict = {\n", - " 'bv': {'set':'#40a018', 'haul':'#117347', 'mid':'#a2c662'},\n", - " 'elog': {'set':'#40a018', 'haul':'#117347', 'mid':'#a2c662'},\n", + " \"bv\": {\"set\": \"#40a018\", \"haul\": \"#117347\", \"mid\": \"#a2c662\"},\n", + " \"elog\": {\"set\": \"#40a018\", \"haul\": \"#117347\", \"mid\": \"#a2c662\"},\n", " # 'elog':{'set':'#648fff', 'haul':'#184EAD', 'mid':'#88ccee'}\n", "}" ] @@ -681,21 +737,26 @@ "source": [ "# metrics.ConfusionMatrixDisplay(cm).plot(cmap = 'Blues',ax = ax)\n", "def plot_confusion_matrix(cm, ax, interp, title):\n", - "\n", - " ax.imshow(cm, interpolation='nearest', cmap = 'Blues')\n", + " ax.imshow(cm, interpolation=\"nearest\", cmap=\"Blues\")\n", " tick_marks = np.arange(len(interp.vocab))\n", " ax.set_xticks(tick_marks, interp.vocab, rotation=0)\n", " ax.set_yticks(tick_marks, interp.vocab, rotation=0)\n", - " ax.set_xlabel('Predicted')\n", - " ax.set_ylabel('Actual')\n", - " ax.set_ylim(len(interp.vocab)-.5,-.5)\n", + " ax.set_xlabel(\"Predicted\")\n", + " ax.set_ylabel(\"Actual\")\n", + " ax.set_ylim(len(interp.vocab) - 0.5, -0.5)\n", " ax.grid(False)\n", - " \n", - " thresh = cm.max() / 2.\n", + "\n", + " thresh = cm.max() / 2.0\n", " for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n", - " coeff = 
f'{cm[i, j]}'\n", - " plt.text(j, i, coeff, horizontalalignment=\"center\", verticalalignment=\"center\", color=\"white\"\n", - " if cm[i, j] > thresh else \"black\")" + " coeff = f\"{cm[i, j]}\"\n", + " plt.text(\n", + " j,\n", + " i,\n", + " coeff,\n", + " horizontalalignment=\"center\",\n", + " verticalalignment=\"center\",\n", + " color=\"white\" if cm[i, j] > thresh else \"black\",\n", + " )" ] }, { @@ -706,14 +767,14 @@ "outputs": [], "source": [ "def prep_data(df):\n", - " df.sort_values(by = 'utc_start_datetime', inplace = True)\n", - " X = df.drop(columns = 'is_haul')\n", - " y = df['is_haul'].astype('int').to_numpy()\n", + " df.sort_values(by=\"utc_start_datetime\", inplace=True)\n", + " X = df.drop(columns=\"is_haul\")\n", + " y = df[\"is_haul\"].astype(\"int\").to_numpy()\n", "\n", - " X.loc[:,'utc_start_datetime'] = X.loc[:,'utc_start_datetime'].astype('int64')\n", - " X = np.atleast_3d(X).transpose(0,2,1)\n", + " X.loc[:, \"utc_start_datetime\"] = X.loc[:, \"utc_start_datetime\"].astype(\"int64\")\n", + " X = np.atleast_3d(X).transpose(0, 2, 1)\n", "\n", - " haul_map = {1:'haul', 0:'no_haul'}\n", + " haul_map = {1: \"haul\", 0: \"no_haul\"}\n", " labeler = ReLabeler(haul_map)\n", " y = labeler(y)\n", " return X, y" @@ -735,8 +796,8 @@ "outputs": [], "source": [ "# training data\n", - "dfAiSets_Brancol1 = Brancol1data['ai_sets'].copy()\n", - "dfAiSets_Brancol1.set_index('utc_start_datetime', inplace = True)" + "dfAiSets_Brancol1 = Brancol1data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol1.set_index(\"utc_start_datetime\", inplace=True)" ] }, { @@ -747,8 +808,8 @@ "outputs": [], "source": [ "# testing data\n", - "dfAiSets_Brancol2 = Brancol2data['ai_sets'].copy()\n", - "dfAiSets_Brancol2.set_index('utc_start_datetime', inplace = True)" + "dfAiSets_Brancol2 = Brancol2data[\"ai_sets\"].copy()\n", + "dfAiSets_Brancol2.set_index(\"utc_start_datetime\", inplace=True)" ] }, { @@ -758,12 +819,12 @@ "metadata": {}, "outputs": [], "source": [ - "win = '2h'\n", - "agg_dict = {'weighted_count':'sum','count':'sum'}\n", - "keep_cols = ['weighted_count','detection_confidence','count','is_haul', 'id']\n", + "win = \"2h\"\n", + "agg_dict = {\"weighted_count\": \"sum\", \"count\": \"sum\"}\n", + "keep_cols = [\"weighted_count\", \"detection_confidence\", \"count\", \"is_haul\", \"id\"]\n", "\n", - "df_train = add_rolling_aggregates(dfAiSets_Brancol1, '2h', agg_dict, keep_cols)\n", - "df_test = add_rolling_aggregates(dfAiSets_Brancol2, '2h', agg_dict, keep_cols)" + "df_train = add_rolling_aggregates(dfAiSets_Brancol1, \"2h\", agg_dict, keep_cols)\n", + "df_test = add_rolling_aggregates(dfAiSets_Brancol2, \"2h\", agg_dict, keep_cols)" ] }, { @@ -774,9 +835,9 @@ "outputs": [], "source": [ "# df_train['id'].fillna(0, inplace = True)\n", - "df_train['id'] = df_train['id'].astype(int)\n", + "df_train[\"id\"] = df_train[\"id\"].astype(int)\n", "# df_test['id'].fillna(0, inplace = True)\n", - "df_test['id'] = df_test['id'].astype(int)" + "df_test[\"id\"] = df_test[\"id\"].astype(int)" ] }, { @@ -786,8 +847,8 @@ "metadata": {}, "outputs": [], "source": [ - "df_train.dropna(inplace = True)\n", - "df_test.dropna(inplace = True)" + "df_train.dropna(inplace=True)\n", + "df_test.dropna(inplace=True)" ] }, { @@ -826,7 +887,7 @@ "metadata": {}, "outputs": [], "source": [ - "X,y = prep_data(df_train)\n", + "X, y = prep_data(df_train)\n", "X_test, y_test = prep_data(df_test)" ] }, @@ -869,16 +930,18 @@ ], "source": [ "## train, test, validation splits\n", - "splits = get_splits(y, \n", - " n_splits=1, \n", - " 
valid_size=0.3, \n", - " test_size=0.1, \n", - " shuffle=True, \n", - " balance=True, \n", - " stratify=True,\n", - " random_state=42, \n", - " show_plot=True, \n", - " verbose=True)\n", + "splits = get_splits(\n", + " y,\n", + " n_splits=1,\n", + " valid_size=0.3,\n", + " test_size=0.1,\n", + " shuffle=True,\n", + " balance=True,\n", + " stratify=True,\n", + " random_state=42,\n", + " show_plot=True,\n", + " verbose=True,\n", + ")\n", "splits" ] }, @@ -891,11 +954,11 @@ "source": [ "## dataset and loaders\n", "\n", - "tfms = [None, [Categorize()]]\n", + "tfms = [None, [Categorize()]]\n", "dsets = TSDatasets(X, y, tfms=tfms, splits=splits)\n", - " \n", + "\n", "bs = 10\n", - "dls = TSDataLoaders.from_dsets(dsets.train, dsets.valid, bs=[bs, bs*2])" + "dls = TSDataLoaders.from_dsets(dsets.train, dsets.valid, bs=[bs, bs * 2])" ] }, { @@ -907,18 +970,18 @@ "source": [ "# all the different models to test\n", "archs = [\n", - " (RNNPlus, {'n_layers':3, 'bidirectional': True} ),\n", - " (LSTMPlus,{'n_layers':3, 'bidirectional': True} ),\n", - " (LSTMPlus,{'n_layers':4, 'bidirectional': True} ),\n", - " (GRUPlus, {'n_layers':3, 'bidirectional': True} ), \n", - " (RNNPlus, {'n_layers':4, 'bidirectional': True} ),\n", - " (RNNPlus, {'n_layers':4, 'bidirectional': True}), \n", - " (LSTM, {'n_layers':3, 'bidirectional': False}), \n", - " (RNN, {'n_layers':3, 'bidirectional': True} ), \n", - " (LSTM, {'n_layers':3, 'bidirectional': True} ),\n", - " (LSTM, {'n_layers':4, 'bidirectional': True} ),\n", - " (GRU, {'n_layers':3, 'bidirectional': True} ), \n", - " ]" + " (RNNPlus, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (LSTMPlus, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (LSTMPlus, {\"n_layers\": 4, \"bidirectional\": True}),\n", + " (GRUPlus, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (RNNPlus, {\"n_layers\": 4, \"bidirectional\": True}),\n", + " (RNNPlus, {\"n_layers\": 4, \"bidirectional\": True}),\n", + " (LSTM, {\"n_layers\": 3, \"bidirectional\": False}),\n", + " (RNN, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (LSTM, {\"n_layers\": 3, \"bidirectional\": True}),\n", + " (LSTM, {\"n_layers\": 4, \"bidirectional\": True}),\n", + " (GRU, {\"n_layers\": 3, \"bidirectional\": True}),\n", + "]" ] }, { @@ -935,11 +998,11 @@ "# results = pd.DataFrame(columns=['arch', 'hyperparams', 'total params', 'train loss', 'valid loss', 'accuracy', 'time'])\n", "# models = {}\n", "# for i, (arch, k) in enumerate(archs):\n", - " \n", + "\n", "# model = create_model(arch, dls=dls, **k)\n", - " \n", + "\n", "# print(model.__class__.__name__)\n", - " \n", + "\n", "# learn = Learner(dls, model, metrics=accuracy)\n", "# start = time.time()\n", "# learn.fit_one_cycle(20, 1e-3)\n", @@ -947,9 +1010,9 @@ "# vals = learn.recorder.values[-1]\n", "# results.loc[i] = [arch.__name__, k, count_parameters(model), vals[0], vals[1], vals[2], int(elapsed)]\n", "# results.sort_values(by='accuracy', ascending=False, ignore_index=True, inplace=True)\n", - " \n", + "\n", "# models[f'{arch.__name__} {k}'] = learn\n", - " \n", + "\n", "# clear_output()\n", "# display(results)\n", "# return models" @@ -971,7 +1034,7 @@ ], "source": [ "if \"RNNPlus {'n_layers': 3, 'bidirectional': True}\" in models.keys():\n", - " print('yup!')" + " print(\"yup!\")" ] }, { @@ -1173,26 +1236,32 @@ "from IPython.display import clear_output\n", "\n", "\n", - "\n", - "results = pd.DataFrame(columns=['arch', 'hyperparams', 'total params', 'train loss', 'valid loss', 'accuracy', 'time'])\n", + "results = pd.DataFrame(\n", + " 
columns=[\"arch\", \"hyperparams\", \"total params\", \"train loss\", \"valid loss\", \"accuracy\", \"time\"]\n", + ")\n", "models = {}\n", "for i, (arch, k) in enumerate(archs):\n", - "\n", - "\n", - "\n", " model = create_model(arch, dls=dls, **k)\n", - " \n", + "\n", " print(model.__class__.__name__)\n", - " \n", - " learn = Learner(dls, model, metrics=accuracy)\n", + "\n", + " learn = Learner(dls, model, metrics=accuracy)\n", " start = time.time()\n", " learn.fit_one_cycle(20, 1e-3)\n", " elapsed = time.time() - start\n", " vals = learn.recorder.values[-1]\n", - " results.loc[i] = [arch.__name__, k, count_parameters(model), vals[0], vals[1], vals[2], int(elapsed)]\n", - " results.sort_values(by='accuracy', ascending=False, ignore_index=True, inplace=True)\n", - "\n", - " models[f'{arch.__name__} {k}'] = learn\n", + " results.loc[i] = [\n", + " arch.__name__,\n", + " k,\n", + " count_parameters(model),\n", + " vals[0],\n", + " vals[1],\n", + " vals[2],\n", + " int(elapsed),\n", + " ]\n", + " results.sort_values(by=\"accuracy\", ascending=False, ignore_index=True, inplace=True)\n", + "\n", + " models[f\"{arch.__name__} {k}\"] = learn\n", "\n", " clear_output()\n", " display(results)" @@ -2062,7 +2131,7 @@ " # add a new subplot iteratively using nrows and cols\n", " ax = plt.subplot(nrows, ncols, n + 1)\n", " ax.set_title(arch)\n", - " # plt.sca(ax) \n", + " # plt.sca(ax)\n", " interp = ClassificationInterpretation.from_learner(model)\n", " plot_confusion_matrix(interp.confusion_matrix(), ax, interp, arch)\n", " # print(type(fig))\n", @@ -2103,13 +2172,13 @@ "metadata": {}, "outputs": [], "source": [ - "# choosing the RNN Plus model and saving it \n", - "arch, k = (RNNPlus, {'n_layers':4, 'bidirectional': True})\n", + "# choosing the RNN Plus model and saving it\n", + "arch, k = (RNNPlus, {\"n_layers\": 4, \"bidirectional\": True})\n", "model = create_model(arch, dls=dls, **k)\n", "\n", - "learner = load_model('models/rnn_plus_haul_classifier.pth', model, opt = None, with_opt = False)\n", + "learner = load_model(\"models/rnn_plus_haul_classifier.pth\", model, opt=None, with_opt=False)\n", "\n", - "learner = Learner(dls, model, metrics=accuracy)" + "learner = Learner(dls, model, metrics=accuracy)" ] }, { @@ -2135,22 +2204,30 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' ):\n", - " \n", + "def plot_event_bars(\n", + " df,\n", + " ax,\n", + " label,\n", + " datetime_col,\n", + " duration=None,\n", + " end_col=None,\n", + " duration_col=None,\n", + " y_val=0.7,\n", + " y_height=0.6,\n", + " color=\"#43aa99\",\n", + "):\n", " if duration:\n", " x_duration = np.full(len(df), pd.Timedelta(duration))\n", " elif end_col:\n", - " df['duration'] = df[end_col]- df[datetime_col]\n", - " x_duration = df['duration']\n", + " df[\"duration\"] = df[end_col] - df[datetime_col]\n", + " x_duration = df[\"duration\"]\n", " elif duration_col:\n", " x_duration = df[duration_col]\n", - " \n", + "\n", " x = list(zip(df[datetime_col], x_duration))\n", " y = (y_val, y_height)\n", "\n", - " ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n", - "\n", - " " + " ax.broken_barh(x, y, facecolors=color, edgecolor=\"face\", label=label, clip_on=False)\n" ] }, { @@ -2162,10 +2239,15 @@ "source": [ "def plot_event_vspan(df_events, ax, color_dict):\n", " for category, color in color_dict.items():\n", - " df_category = 
df_events.loc[df_events['category']==category]\n", - " \n", + " df_category = df_events.loc[df_events[\"category\"] == category]\n", + "\n", " for idx, row in df_category.iterrows():\n", - " ax.axvspan(*mdates.date2num([row['start_time'], row['end_time']]), color=color, edgecolor = 'face',alpha=0.5)" + " ax.axvspan(\n", + " *mdates.date2num([row[\"start_time\"], row[\"end_time\"]]),\n", + " color=color,\n", + " edgecolor=\"face\",\n", + " alpha=0.5,\n", + " )" ] }, { @@ -2175,25 +2257,31 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_set_vspan(df_sets, ax, color = '#a2c662'):\n", + "def plot_set_vspan(df_sets, ax, color=\"#a2c662\"):\n", " for idx, row in df_sets.iterrows():\n", - " ax.axvspan(*mdates.date2num([row['haul_start_datetime'], row['haul_end_datetime']]), color=color, edgecolor = '#40a018',alpha=0.5)\n", + " ax.axvspan(\n", + " *mdates.date2num([row[\"haul_start_datetime\"], row[\"haul_end_datetime\"]]),\n", + " color=color,\n", + " edgecolor=\"#40a018\",\n", + " alpha=0.5,\n", + " )\n", + "\n", "\n", "def get_video_times(vessel, trip_info):\n", - " trip_start_date = trip_info['trip_start_date']\n", - " trip_end_date = trip_info['trip_end_date']\n", + " trip_start_date = trip_info[\"trip_start_date\"]\n", + " trip_end_date = trip_info[\"trip_end_date\"]\n", "\n", " sql = f\"\"\"\n", - " SELECT \n", - " v.start_datetime, \n", + " SELECT\n", + " v.start_datetime,\n", " v.cam_name\n", - " \n", - " from {vessel}_v1_video_files v \n", + "\n", + " from {vessel}_v1_video_files v\n", " where start_datetime > '{trip_start_date}' and start_datetime < '{trip_end_date}'\n", " \"\"\"\n", " video_df = wr.athena.read_sql_query(sql, database=\"tnc_edge\")\n", " video_df.start_datetime = pd.to_datetime(video_df.start_datetime)\n", - " video_df['utc_start_datetime'] = video_df['start_datetime'].dt.tz_convert(None)\n", + " video_df[\"utc_start_datetime\"] = video_df[\"start_datetime\"].dt.tz_convert(None)\n", " # video_df['utc_end_datetime'] = video_df['utc_start_datetime'] + pd.Timedelta(minutes = 5)\n", " return video_df" ] @@ -2205,33 +2293,35 @@ "metadata": {}, "outputs": [], "source": [ - "def annotate_notes(ax, df, text_col, text_xy = (-60, 30)):\n", - " arrowprops=dict(arrowstyle=\"->\",connectionstyle=\"arc3,rad=.2\", color = 'black')\n", + "def annotate_notes(ax, df, text_col, text_xy=(-60, 30)):\n", + " arrowprops = dict(arrowstyle=\"->\", connectionstyle=\"arc3,rad=.2\", color=\"black\")\n", " y_var = 20\n", " annots = []\n", - " bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + " bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", " for idx, row in df.iterrows():\n", " # y_var = x_vars[n]\n", " text = row[text_col]\n", - " data_xy = (row['start_time'], 1.7)\n", - " \n", + " data_xy = (row[\"start_time\"], 1.7)\n", + "\n", " an = ax.annotate(\n", " text,\n", - " xy=data_xy, xycoords='data',\n", - " xytext=text_xy, textcoords='offset points',\n", + " xy=data_xy,\n", + " xycoords=\"data\",\n", + " xytext=text_xy,\n", + " textcoords=\"offset points\",\n", " arrowprops=arrowprops,\n", - " bbox = bbox_args,\n", - " color = 'white'\n", + " bbox=bbox_args,\n", + " color=\"white\",\n", " )\n", - " \n", + "\n", " annots.append(an)\n", - " \n", + "\n", " x, y = text_xy\n", - " \n", - " y = y+y_var\n", + "\n", + " y = y + y_var\n", " y_var = y_var * -1\n", - " \n", - " text_xy = (x,y)\n", + "\n", + " text_xy = (x, y)\n", "\n", " return annots" ] @@ -2251,7 +2341,7 @@ "metadata": {}, "outputs": [], "source": [ - "dfVector5 = 
get_vector_data('brancol',5,brancol2trip)" + "dfVector5 = get_vector_data(\"brancol\", 5, brancol2trip)" ] }, { @@ -2362,7 +2452,7 @@ } ], "source": [ - "dfVector4 = get_vector_data('brancol',4,brancol2trip)\n", + "dfVector4 = get_vector_data(\"brancol\", 4, brancol2trip)\n", "dfVector4.head()" ] }, @@ -2373,8 +2463,7 @@ "metadata": {}, "outputs": [], "source": [ - "\n", - "video_Brancol2 = get_video_times('brancol', brancol2trip)" + "video_Brancol2 = get_video_times(\"brancol\", brancol2trip)" ] }, { @@ -2394,14 +2483,18 @@ "metadata": {}, "outputs": [], "source": [ - "df_notes = pd.read_csv('../data/reviewer_notes.csv')\n", - "df_notes['start_time'] = pd.to_datetime(df_notes['start_time'], format = 'mixed')\n", + "df_notes = pd.read_csv(\"../data/reviewer_notes.csv\")\n", + "df_notes[\"start_time\"] = pd.to_datetime(df_notes[\"start_time\"], format=\"mixed\")\n", "\n", - "video_events = df_notes.loc[(df_notes['category'] == 'No Video') |(df_notes['category'] == 'Camera Covered')]\n", + "video_events = df_notes.loc[\n", + " (df_notes[\"category\"] == \"No Video\") | (df_notes[\"category\"] == \"Camera Covered\")\n", + "]\n", "\n", "video_events.dtypes\n", "\n", - "video_eventsBrancol2 = video_events.loc[(video_events['vessel'] == 'Brancol') & (video_events['trip_number']==2)]" + "video_eventsBrancol2 = video_events.loc[\n", + " (video_events[\"vessel\"] == \"Brancol\") & (video_events[\"trip_number\"] == 2)\n", + "]" ] }, { @@ -2411,7 +2504,7 @@ "metadata": {}, "outputs": [], "source": [ - "sns.set_style(\"whitegrid\", {'axes.grid' : False})" + "sns.set_style(\"whitegrid\", {\"axes.grid\": False})" ] }, { @@ -2421,7 +2514,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "import matplotlib.ticker as ticker" ] }, @@ -2459,34 +2551,55 @@ ], "source": [ "# df_test.reset_index(inplace = True)\n", - "bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + "bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "# bbox_args = dict(boxstyle=\"round\", fc=\"0.8\")\n", "arrow_args = dict(arrowstyle=\"->\")\n", "plt.tight_layout()\n", - "fig, ax = plt.subplots(3,1,figsize=(16,4), sharex = True, gridspec_kw={'height_ratios': [3, 1, 1]}, )\n", + "fig, ax = plt.subplots(\n", + " 3,\n", + " 1,\n", + " figsize=(16, 4),\n", + " sharex=True,\n", + " gridspec_kw={\"height_ratios\": [3, 1, 1]},\n", + ")\n", "\n", "plot_set_vspan(brancol2_bv_sets, ax[0])\n", "plot_set_vspan(brancol2_bv_sets, ax[1])\n", - "plot_event_bars(df_results.loc[df_results.predict_haul ==1], ax[0], 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = .7, )\n", - "plot_event_bars(video_Brancol2.loc[video_Brancol2['cam_name']=='cam1'], ax[0], 'video coverage', 'utc_start_datetime', duration = '5m', y_val = 1.4, color = '#117347')\n", - "\n", - "\n", - "sns.lineplot(x = 'datetime', y = 'score', data = dfVector4, ax = ax[1], label = 'vector 4')\n", - "sns.lineplot(x = 'datetime', y = 'score', data = dfVector5, ax = ax[2], label = 'vector 5')\n", - "\n", - "ax[0].set_yticks([1,1.7],('predicted_hauls','video coverage'))\n", - "ax[0].set_ylim([.5, 3])\n", + "plot_event_bars(\n", + " df_results.loc[df_results.predict_haul == 1],\n", + " ax[0],\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=0.7,\n", + ")\n", + "plot_event_bars(\n", + " video_Brancol2.loc[video_Brancol2[\"cam_name\"] == \"cam1\"],\n", + " ax[0],\n", + " \"video coverage\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=1.4,\n", + " 
color=\"#117347\",\n", + ")\n", + "\n", + "\n", + "sns.lineplot(x=\"datetime\", y=\"score\", data=dfVector4, ax=ax[1], label=\"vector 4\")\n", + "sns.lineplot(x=\"datetime\", y=\"score\", data=dfVector5, ax=ax[2], label=\"vector 5\")\n", + "\n", + "ax[0].set_yticks([1, 1.7], (\"predicted_hauls\", \"video coverage\"))\n", + "ax[0].set_ylim([0.5, 3])\n", "locator = DayLocator()\n", "formatter = mdates.ConciseDateFormatter(locator)\n", - "ax[1].tick_params(axis = 'y', labelsize = 7)\n", - "ax[2].tick_params(axis = 'y', labelsize = 7)\n", + "ax[1].tick_params(axis=\"y\", labelsize=7)\n", + "ax[2].tick_params(axis=\"y\", labelsize=7)\n", "ax[2].xaxis.set_major_locator(locator)\n", "ax[2].xaxis.set_major_formatter(formatter)\n", "\n", - "ax[0].spines['bottom'].set_visible(False)\n", - "ax[1].spines['top'].set_visible(False)\n", - "ax[2].spines['top'].set_visible(False)\n", - "annots = annotate_notes(ax[0],video_eventsBrancol2, 'category')\n", + "ax[0].spines[\"bottom\"].set_visible(False)\n", + "ax[1].spines[\"top\"].set_visible(False)\n", + "ax[2].spines[\"top\"].set_visible(False)\n", + "annots = annotate_notes(ax[0], video_eventsBrancol2, \"category\")\n", "\n", "# setup(axs[5], title=\"AutoLocator()\")\n", "locator1 = ticker.MultipleLocator(0.03, offset=0.02)\n", @@ -2513,7 +2626,7 @@ "label_coords = []\n", "for ann in annots:\n", " box = matplotlib.text.Text.get_window_extent(ann)\n", - " coords = ax[0].transAxes.inverted().transform(box)\n", + " coords = ax[0].transAxes.inverted().transform(box)\n", "\n", " label_coords.append(coords)" ] @@ -2525,7 +2638,7 @@ "metadata": {}, "outputs": [], "source": [ - "ai_countsBrancol2 = Brancol2data['ai_sets']" + "ai_countsBrancol2 = Brancol2data[\"ai_sets\"]" ] }, { @@ -3139,7 +3252,7 @@ "metadata": {}, "outputs": [], "source": [ - "bvCounts_Brancol2 = Brancol2data['all_counts']" + "bvCounts_Brancol2 = Brancol2data[\"all_counts\"]" ] }, { @@ -3414,7 +3527,7 @@ } ], "source": [ - "df_results.loc[df_results['count']>0].head()" + "df_results.loc[df_results[\"count\"] > 0].head()" ] }, { @@ -3458,79 +3571,124 @@ } ], "source": [ - "\n", - "\n", "# df_test.reset_index(inplace = True)\n", - "bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + "bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "# plt.subplots_adjust(wspace=0, hspace=-.2)\n", "# bbox_args = dict(boxstyle=\"round\", fc=\"0.8\")\n", "arrow_args = dict(arrowstyle=\"->\")\n", "# plt.tight_layout()\n", - "fig, ax = plt.subplots(2,1,figsize=(11,2), sharex = True\n", - " # , gridspec_kw={'height_ratios': [2, 1]}, \n", - " )\n", + "fig, ax = plt.subplots(\n", + " 2,\n", + " 1,\n", + " figsize=(11, 2),\n", + " sharex=True,\n", + " # , gridspec_kw={'height_ratios': [2, 1]},\n", + ")\n", "# plt.tight_layout()\n", "# trip2 = Brancol2data['trip_info']\n", "# plot_set_vspan(brancol2_bv_sets, ax[0])\n", "# plot_set_vspan(brancol2_bv_sets, ax[1])\n", - "ax[0].hlines([.175, .5, .825],.01,.99, transform=ax[0].transAxes, colors = 'grey', lw = .3, zorder = 0)\n", - "\n", - "\n", - "\n", - "\n", - " # matplotlib.pyplot.hlines(y, xmin, xmax, colors=None, linestyles='solid', label='', *, data=None, **kwargs)\n", - "\n", - "ax2 = ax[1].twinx() \n", - "ln1 = sns.lineplot(x = 'utc_start_datetime', y = 'count', data = df_results, ax = ax[1], label = 'AI Counts', color ='#184EAD', clip_on=False, lw = .4)\n", - "ln2 = sns.lineplot(x = 'start_datetime', y = 'bv_count', data = bvCounts_Brancol2, ax = ax2, label = 'Reviewer Counts', color ='#a2c662', clip_on=False, lw = .4)\n", - 
"\n", - "ax[0].set_yticks([.175,.5,.825],('Predicted Hauls','Elog Hauls','Reviewer Hauls'))\n", + "ax[0].hlines(\n", + " [0.175, 0.5, 0.825], 0.01, 0.99, transform=ax[0].transAxes, colors=\"grey\", lw=0.3, zorder=0\n", + ")\n", + "\n", + "\n", + "# matplotlib.pyplot.hlines(y, xmin, xmax, colors=None, linestyles='solid', label='', *, data=None, **kwargs)\n", + "\n", + "ax2 = ax[1].twinx()\n", + "ln1 = sns.lineplot(\n", + " x=\"utc_start_datetime\",\n", + " y=\"count\",\n", + " data=df_results,\n", + " ax=ax[1],\n", + " label=\"AI Counts\",\n", + " color=\"#184EAD\",\n", + " clip_on=False,\n", + " lw=0.4,\n", + ")\n", + "ln2 = sns.lineplot(\n", + " x=\"start_datetime\",\n", + " y=\"bv_count\",\n", + " data=bvCounts_Brancol2,\n", + " ax=ax2,\n", + " label=\"Reviewer Counts\",\n", + " color=\"#a2c662\",\n", + " clip_on=False,\n", + " lw=0.4,\n", + ")\n", + "\n", + "ax[0].set_yticks([0.175, 0.5, 0.825], (\"Predicted Hauls\", \"Elog Hauls\", \"Reviewer Hauls\"))\n", "ax[0].set_ylim([0, 1])\n", "# ax[1].set_ylim([0, 50])\n", "locator = DayLocator()\n", "formatter = mdates.ConciseDateFormatter(locator)\n", - "ax[1].tick_params(axis = 'both', labelsize = 9)\n", + "ax[1].tick_params(axis=\"both\", labelsize=9)\n", "ax[1].xaxis.set_major_locator(locator)\n", "ax[1].xaxis.set_major_formatter(formatter)\n", - "ax[1].set_xlabel('Datetime (UTC)')\n", - "ax[1].set_ylabel('AI Fish Count')\n", - "ax2.set_ylabel('Reviewer Fish Count')\n", + "ax[1].set_xlabel(\"Datetime (UTC)\")\n", + "ax[1].set_ylabel(\"AI Fish Count\")\n", + "ax2.set_ylabel(\"Reviewer Fish Count\")\n", "\n", "# ax[0].spines['bottom'].set_visible(False)\n", "# ax[1].spines['top'].set_visible(False)\n", "\n", - "plot_event_bars(brancol2_bv_sets, ax[0], 'Reviewer Hauls', 'haul_start_datetime', end_col = 'haul_end_datetime', y_val = .675, y_height = .3, color = '#a2c662')\n", - "plot_event_bars(brancol2_elog, ax[0], 'elog_hauls','systemstarthauldatetime', end_col = 'systemendhauldatetime', y_val = .35, color = '#117347', y_height = .3)\n", - "plot_event_bars(df_results.loc[df_results.predict_haul ==1], ax[0], 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = .025, y_height = .3)\n", + "plot_event_bars(\n", + " brancol2_bv_sets,\n", + " ax[0],\n", + " \"Reviewer Hauls\",\n", + " \"haul_start_datetime\",\n", + " end_col=\"haul_end_datetime\",\n", + " y_val=0.675,\n", + " y_height=0.3,\n", + " color=\"#a2c662\",\n", + ")\n", + "plot_event_bars(\n", + " brancol2_elog,\n", + " ax[0],\n", + " \"elog_hauls\",\n", + " \"systemstarthauldatetime\",\n", + " end_col=\"systemendhauldatetime\",\n", + " y_val=0.35,\n", + " color=\"#117347\",\n", + " y_height=0.3,\n", + ")\n", + "plot_event_bars(\n", + " df_results.loc[df_results.predict_haul == 1],\n", + " ax[0],\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=0.025,\n", + " y_height=0.3,\n", + ")\n", "\n", "\n", "from matplotlib.lines import Line2D\n", "from matplotlib.patches import Patch\n", "\n", - "legend_elements = [Patch(facecolor='#a2c662', edgecolor='#a2c662',\n", - " label='BV Hauls'),\n", - " Patch(facecolor='#117347', edgecolor='#117347',\n", - " label='Elog Hauls'),\n", - " Patch(facecolor='#43aa99', edgecolor='#43aa99',\n", - " label='Predicted Hauls'),\n", - " Line2D([0], [0], color='#184EAD', lw=2, label='AI Counts')]\n", + "legend_elements = [\n", + " Patch(facecolor=\"#a2c662\", edgecolor=\"#a2c662\", label=\"BV Hauls\"),\n", + " Patch(facecolor=\"#117347\", edgecolor=\"#117347\", label=\"Elog Hauls\"),\n", + " 
Patch(facecolor=\"#43aa99\", edgecolor=\"#43aa99\", label=\"Predicted Hauls\"),\n", + " Line2D([0], [0], color=\"#184EAD\", lw=2, label=\"AI Counts\"),\n", + "]\n", "\n", "# ax[0].legend(handles=legend_elements, loc='upper left', fontsize = 9)\n", "\n", "\n", - "x0,x1 = ax[0].get_xlim()\n", + "x0, x1 = ax[0].get_xlim()\n", "\n", "h1, l1 = ax[1].get_legend_handles_labels()\n", "h2, l2 = ax2.get_legend_handles_labels()\n", - "ax[1].legend(h1+h2, l1+l2, loc=2)\n", + "ax[1].legend(h1 + h2, l1 + l2, loc=2)\n", "\n", - "ax[0].set_xlim(x0, x1) \n", + "ax[0].set_xlim(x0, x1)\n", "ax2.get_legend().remove()\n", "# plt.legend(fontsize=20)\n", "\n", "plt.subplots_adjust(wspace=0, hspace=0)\n", "\n", - "plt.savefig('haul_detection.png')\n", + "plt.savefig(\"haul_detection.png\")\n", "\n", "plt.show()" ] @@ -3564,7 +3722,7 @@ "metadata": {}, "outputs": [], "source": [ - "x0,x1 = ax[1].get_xlim()" + "x0, x1 = ax[1].get_xlim()" ] }, { @@ -3585,7 +3743,7 @@ } ], "source": [ - "x1+300" + "x1 + 300" ] }, { @@ -3647,8 +3805,8 @@ "metadata": {}, "outputs": [], "source": [ - "df_notes = pd.read_csv('../data/reviewer_notes.csv')\n", - "df_notes['start_time'] = pd.to_datetime(df_notes['start_time'], format = 'mixed')" + "df_notes = pd.read_csv(\"../data/reviewer_notes.csv\")\n", + "df_notes[\"start_time\"] = pd.to_datetime(df_notes[\"start_time\"], format=\"mixed\")" ] }, { @@ -3658,7 +3816,7 @@ "metadata": {}, "outputs": [], "source": [ - "df_notes['end_time'] = pd.to_datetime(df_notes['end_time'], format = 'mixed')" + "df_notes[\"end_time\"] = pd.to_datetime(df_notes[\"end_time\"], format=\"mixed\")" ] }, { @@ -3668,11 +3826,15 @@ "metadata": {}, "outputs": [], "source": [ - "video_events = df_notes.loc[(df_notes['category'] == 'No Video') |(df_notes['category'] == 'Camera Covered')]\n", + "video_events = df_notes.loc[\n", + " (df_notes[\"category\"] == \"No Video\") | (df_notes[\"category\"] == \"Camera Covered\")\n", + "]\n", "\n", "video_events.dtypes\n", "\n", - "video_eventsBrancol2 = video_events.loc[(video_events['vessel'] == 'Brancol') & (video_events['trip_number']==2)]" + "video_eventsBrancol2 = video_events.loc[\n", + " (video_events[\"vessel\"] == \"Brancol\") & (video_events[\"trip_number\"] == 2)\n", + "]" ] }, { @@ -3706,7 +3868,7 @@ } ], "source": [ - "df_notes['category'].value_counts()" + "df_notes[\"category\"].value_counts()" ] }, { @@ -3716,7 +3878,7 @@ "metadata": {}, "outputs": [], "source": [ - "eventsBrancol2 = df_notes.loc[(df_notes['vessel'] == 'Brancol') & (df_notes['trip_number']==2)]" + "eventsBrancol2 = df_notes.loc[(df_notes[\"vessel\"] == \"Brancol\") & (df_notes[\"trip_number\"] == 2)]" ] }, { @@ -3753,7 +3915,7 @@ } ], "source": [ - "eventsBrancol2['category'].value_counts()" + "eventsBrancol2[\"category\"].value_counts()" ] }, { @@ -3784,7 +3946,13 @@ "metadata": {}, "outputs": [], "source": [ - "category_dict = {'Haul Stop':colors[13],'Other Gear':colors[12],'Camera Covered':colors[10],'No Video': colors[6], 'Abnormal Catch':colors[8]}" + "category_dict = {\n", + " \"Haul Stop\": colors[13],\n", + " \"Other Gear\": colors[12],\n", + " \"Camera Covered\": colors[10],\n", + " \"No Video\": colors[6],\n", + " \"Abnormal Catch\": colors[8],\n", + "}" ] }, { @@ -3835,15 +4003,23 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_event_category_bars(df_events, ax, category_color_dict, y_val_start, y_height ):\n", + "def plot_event_category_bars(df_events, ax, category_color_dict, y_val_start, y_height):\n", " n = len(category_color_dict)\n", " y_vals = create_array(n, 
y_val_start, y_height)\n", " for idx, (category, color) in enumerate(category_color_dict.items()):\n", - " df_category = df_events.loc[df_events['category']==category].copy()\n", + " df_category = df_events.loc[df_events[\"category\"] == category].copy()\n", " y_val = y_vals[idx]\n", - " \n", - " \n", - " plot_event_bars(df_category, ax,category, 'start_time',end_col = 'end_time', y_val=y_val, y_height = y_height, color= color )" + "\n", + " plot_event_bars(\n", + " df_category,\n", + " ax,\n", + " category,\n", + " \"start_time\",\n", + " end_col=\"end_time\",\n", + " y_val=y_val,\n", + " y_height=y_height,\n", + " color=color,\n", + " )" ] }, { @@ -3853,24 +4029,32 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_event_bars(df, ax,label, datetime_col,duration=None,end_col = None, duration_col = None, y_val=.7, y_height = .6, color= '#43aa99' ):\n", - " \n", + "def plot_event_bars(\n", + " df,\n", + " ax,\n", + " label,\n", + " datetime_col,\n", + " duration=None,\n", + " end_col=None,\n", + " duration_col=None,\n", + " y_val=0.7,\n", + " y_height=0.6,\n", + " color=\"#43aa99\",\n", + "):\n", " if duration:\n", " x_duration = np.full(len(df), pd.Timedelta(duration))\n", " elif end_col:\n", - " df['duration'] = df[end_col]- df[datetime_col]\n", - " x_duration = df['duration']\n", + " df[\"duration\"] = df[end_col] - df[datetime_col]\n", + " x_duration = df[\"duration\"]\n", " elif duration_col:\n", " x_duration = df[duration_col]\n", " else:\n", " x_duration = np.full(len(df), 2)\n", - " \n", + "\n", " x = list(zip(df[datetime_col], x_duration))\n", " y = (y_val, y_height)\n", "\n", - " ax.broken_barh(x, y, facecolors = color, edgecolor = 'face', label = label, clip_on=False)\n", - "\n", - " " + " ax.broken_barh(x, y, facecolors=color, edgecolor=\"face\", label=label, clip_on=False)\n" ] }, { @@ -3937,7 +4121,7 @@ "metadata": {}, "outputs": [], "source": [ - "y_vals = [1.2, " + "y_vals = [1.2," ] }, { @@ -3968,7 +4152,7 @@ "metadata": {}, "outputs": [], "source": [ - "y_labels = ['','predicted_hauls'] + list(category_dict.keys())" + "y_labels = [\"\", \"predicted_hauls\"] + list(category_dict.keys())" ] }, { @@ -4016,7 +4200,7 @@ } ], "source": [ - "len(create_array(n, 1.2, .2))" + "len(create_array(n, 1.2, 0.2))" ] }, { @@ -4040,7 +4224,7 @@ } ], "source": [ - "eventsBrancol2['category'].value_counts()" + "eventsBrancol2[\"category\"].value_counts()" ] }, { @@ -4061,7 +4245,7 @@ } ], "source": [ - "category_dict.pop('Other Gear')" + "category_dict.pop(\"Other Gear\")" ] }, { @@ -4131,65 +4315,73 @@ } ], "source": [ - "\n", - "\n", - "\n", "# df_test.reset_index(inplace = True)\n", - "bbox_args = dict(boxstyle='round', facecolor='black', alpha=0.35)\n", + "bbox_args = dict(boxstyle=\"round\", facecolor=\"black\", alpha=0.35)\n", "# plt.subplots_adjust(wspace=0, hspace=-.2)\n", "# bbox_args = dict(boxstyle=\"round\", fc=\"0.8\")\n", "arrow_args = dict(arrowstyle=\"->\")\n", "# plt.tight_layout()\n", - "fig, ax = plt.subplots(1,1,figsize=(11,2), sharex = True\n", - " # , gridspec_kw={'height_ratios': [2, 1]}, \n", - " )\n", + "fig, ax = plt.subplots(\n", + " 1,\n", + " 1,\n", + " figsize=(11, 2),\n", + " sharex=True,\n", + " # , gridspec_kw={'height_ratios': [2, 1]},\n", + ")\n", "\n", "# trip2 = Brancol2data['trip_info']\n", "plot_set_vspan(brancol2_bv_sets, ax)\n", "# plot_set_vspan(brancol2_bv_sets, ax[1])\n", - "plot_event_bars(df_results.loc[df_results.predict_haul ==1], ax, 'predicted_hauls', 'utc_start_datetime', duration = '5m', y_val = .6, y_height = .5)\n", - 
"plot_event_category_bars(eventsBrancol2, ax, category_dict, 1.2, .1)\n", + "plot_event_bars(\n", + " df_results.loc[df_results.predict_haul == 1],\n", + " ax,\n", + " \"predicted_hauls\",\n", + " \"utc_start_datetime\",\n", + " duration=\"5m\",\n", + " y_val=0.6,\n", + " y_height=0.5,\n", + ")\n", + "plot_event_category_bars(eventsBrancol2, ax, category_dict, 1.2, 0.1)\n", "\n", "\n", "# plot_event_bars(brancol2_elog, ax[0], 'elog_hauls','systemstarthauldatetime', end_col = 'systemendhauldatetime', y_val = 1.2, color = '#117347', y_height = .4)\n", "# sns.lineplot(x = 'utc_start_datetime', y = 'count', data = df_results, ax = ax[1], label = 'AI Counts', color ='#184EAD', clip_on=False, lw = .4)\n", "\n", "n = len(category_dict)\n", - "y_vals = [0,.9] + list(create_array(n, 1.2, .2))\n", - "y_labels = ['','predicted_hauls'] + list(category_dict.keys())\n", + "y_vals = [0, 0.9] + list(create_array(n, 1.2, 0.2))\n", + "y_labels = [\"\", \"predicted_hauls\"] + list(category_dict.keys())\n", "\n", - "ax.set_yticks(y_vals,y_labels)\n", - "ax.set_ylim([.5, max(y_vals)+.2])\n", + "ax.set_yticks(y_vals, y_labels)\n", + "ax.set_ylim([0.5, max(y_vals) + 0.2])\n", "# ax[1].set_ylim([0, 50])\n", "locator = DayLocator()\n", "formatter = mdates.ConciseDateFormatter(locator)\n", - "ax.tick_params(axis = 'both', labelsize = 9)\n", + "ax.tick_params(axis=\"both\", labelsize=9)\n", "ax.xaxis.set_major_locator(locator)\n", "ax.xaxis.set_major_formatter(formatter)\n", "\n", - "ax.spines['bottom'].set_visible(False)\n", + "ax.spines[\"bottom\"].set_visible(False)\n", "# ax[1].spines['top'].set_visible(False)\n", "ax.legend()\n", "\n", "from matplotlib.lines import Line2D\n", "from matplotlib.patches import Patch\n", "\n", - "legend_elements = [Patch(facecolor='#a2c662', edgecolor='#40a018', alpha = .4,\n", - " label='BV Hauls'),\n", - " Patch(facecolor='#117347', edgecolor='#117347',\n", - " label='Elog Hauls'),\n", - " Patch(facecolor='#43aa99', edgecolor='#43aa99',\n", - " label='Predicted Hauls'),\n", - " Line2D([0], [0], color='#184EAD', lw=2, label='AI Counts')]\n", + "legend_elements = [\n", + " Patch(facecolor=\"#a2c662\", edgecolor=\"#40a018\", alpha=0.4, label=\"BV Hauls\"),\n", + " Patch(facecolor=\"#117347\", edgecolor=\"#117347\", label=\"Elog Hauls\"),\n", + " Patch(facecolor=\"#43aa99\", edgecolor=\"#43aa99\", label=\"Predicted Hauls\"),\n", + " Line2D([0], [0], color=\"#184EAD\", lw=2, label=\"AI Counts\"),\n", + "]\n", "\n", "# ax[0].legend(handles=legend_elements, loc='upper left', fontsize = 9)\n", "# ax[1].get_legend().remove()\n", "\n", "# plt.legend(fontsize=20)\n", "\n", - "plt.subplots_adjust(wspace=0, hspace=-.2)\n", + "plt.subplots_adjust(wspace=0, hspace=-0.2)\n", "\n", - "plt.savefig('haul_detection.png')\n", + "plt.savefig(\"haul_detection.png\")\n", "plt.show()" ] }, @@ -4200,7 +4392,7 @@ "metadata": {}, "outputs": [], "source": [ - "df = pd.read_csv('your.csv', " + "df = pd.read_csv('your.csv'," ] }, { @@ -4220,7 +4412,9 @@ "metadata": {}, "outputs": [], "source": [ - "dfSystem_Brancol = pd.read_csv('../data/sessions_brancol.csv',on_bad_lines=lambda x: bad_lines.append(str(x)), engine='python')" + "dfSystem_Brancol = pd.read_csv(\n", + " \"../data/sessions_brancol.csv\", on_bad_lines=lambda x: bad_lines.append(str(x)), engine=\"python\"\n", + ")" ] }, { diff --git a/notebooks/tnc-edge-catch-plots.ipynb b/notebooks/tnc-edge-catch-plots.ipynb index 657af92..8082d00 100644 --- a/notebooks/tnc-edge-catch-plots.ipynb +++ b/notebooks/tnc-edge-catch-plots.ipynb @@ -34,14 +34,14 @@ "\n", 
"aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')" ] @@ -58,19 +58,21 @@ "import json\n", "import re\n", "from datetime import datetime, timezone\n", + "\n", + "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -80,12 +82,15 @@ "metadata": {}, "outputs": [], "source": [ - "elog_df = awswrangler.athena.read_sql_query(f\"SELECT id,jsonblob,datetime from stpatrick_v1_deckhandevents where jsonblob like '%\\\"eventType\\\": \\\"tripDetailsEvent\\\"%' and datetime < '2024-02-01';\", database='tnc_edge')\n", - "elog_df['jsonblob'] = elog_df['jsonblob'].apply(lambda x: re.sub('\"gearPhoto\": \"[^\"]*\"', '', x))\n", + "elog_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT id,jsonblob,datetime from stpatrick_v1_deckhandevents where jsonblob like '%\\\"eventType\\\": \\\"tripDetailsEvent\\\"%' and datetime < '2024-02-01';\",\n", + " database=\"tnc_edge\",\n", + ")\n", + "elog_df[\"jsonblob\"] = elog_df[\"jsonblob\"].apply(lambda x: re.sub('\"gearPhoto\": \"[^\"]*\"', \"\", x))\n", "# elog_df['euuid'] = elog_df['json'].apply(lambda x: x['eventId'])\n", "# elog_df['tuuid'] = elog_df['json'].apply(lambda x: x['tripId'])\n", "\n", - "display_full(elog_df.sort_values('datetime'))" + "display_full(elog_df.sort_values(\"datetime\"))" ] }, { @@ -95,7 +100,10 @@ "metadata": {}, "outputs": [], "source": [ - "s = awswrangler.athena.read_sql_query(f\"SELECT stpatrick_v1_video_files.*, stpatrick_v1_ondeckdata.video_uri, stpatrick_v1_ondeckdata.cocoannotations_uri, stpatrick_v1_ondeckdata.datetime, stpatrick_v1_ondeckdata.overallcount, stpatrick_v1_ondeckdata.overallruntimems, stpatrick_v1_ondeckdata.tracked_confidence, stpatrick_v1_ondeckdata.status, stpatrick_v1_ondeckdata.overallcatches, stpatrick_v1_ondeckdata.overalldiscards, stpatrick_v1_ondeckdata.detection_confidence FROM stpatrick_v1_video_files left join stpatrick_v1_ondeckdata on decrypted_path = video_uri where cam_name = 'cam1' and start_datetime > '2024-03-01' order by start_datetime asc 
limit 10000\", database='tnc_edge')\n", + "s = awswrangler.athena.read_sql_query(\n", + " f\"SELECT stpatrick_v1_video_files.*, stpatrick_v1_ondeckdata.video_uri, stpatrick_v1_ondeckdata.cocoannotations_uri, stpatrick_v1_ondeckdata.datetime, stpatrick_v1_ondeckdata.overallcount, stpatrick_v1_ondeckdata.overallruntimems, stpatrick_v1_ondeckdata.tracked_confidence, stpatrick_v1_ondeckdata.status, stpatrick_v1_ondeckdata.overallcatches, stpatrick_v1_ondeckdata.overalldiscards, stpatrick_v1_ondeckdata.detection_confidence FROM stpatrick_v1_video_files left join stpatrick_v1_ondeckdata on decrypted_path = video_uri where cam_name = 'cam1' and start_datetime > '2024-03-01' order by start_datetime asc limit 10000\",\n", + " database=\"tnc_edge\",\n", + ")\n", "display(s)" ] }, @@ -106,10 +114,10 @@ "metadata": {}, "outputs": [], "source": [ - "pandas.set_option('display.max_rows', 500)\n", - "pandas.set_option('display.min_rows', 500)\n", + "pandas.set_option(\"display.max_rows\", 500)\n", + "pandas.set_option(\"display.min_rows\", 500)\n", "\n", - "display(s[['start_datetime','status']])" + "display(s[[\"start_datetime\", \"status\"]])" ] }, { @@ -578,7 +586,10 @@ } ], "source": [ - "s = awswrangler.athena.read_sql_query(f\"SELECT * FROM stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where datetime > '2024-03-01'\", database='tnc_edge')\n", + "s = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * FROM stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where datetime > '2024-03-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "s" ] }, @@ -601,11 +612,31 @@ ], "source": [ "d = s.copy()\n", - "d = d.sort_values('systemstartsetdatetime')\n", - "for i in [\"systemstartsetdatetime\",\"systemendsetdatetime\",\"systemstarthauldatetime\",\"systemendhauldatetime\"]:\n", + "d = d.sort_values(\"systemstartsetdatetime\")\n", + "for i in [\n", + " \"systemstartsetdatetime\",\n", + " \"systemendsetdatetime\",\n", + " \"systemstarthauldatetime\",\n", + " \"systemendhauldatetime\",\n", + "]:\n", " d[i] = pandas.to_datetime(s[i])\n", " d[i] = d[i].transform(lambda x: x.astimezone(timezone.utc))\n", - "d[[\"systemstartsetdatetime\",\"systemstartsetlatitude\",\"systemstartsetlongitude\",\"systemendsetdatetime\",\"systemendsetlatitude\",\"systemendsetlongitude\",\"systemstarthauldatetime\",\"systemstarthaullatitude\",\"systemstarthaullongitude\",\"systemendhauldatetime\",\"systemendhaullatitude\",\"systemendhaullongitude\"]].to_csv()" + "d[\n", + " [\n", + " \"systemstartsetdatetime\",\n", + " \"systemstartsetlatitude\",\n", + " \"systemstartsetlongitude\",\n", + " \"systemendsetdatetime\",\n", + " \"systemendsetlatitude\",\n", + " \"systemendsetlongitude\",\n", + " \"systemstarthauldatetime\",\n", + " \"systemstarthaullatitude\",\n", + " \"systemstarthaullongitude\",\n", + " \"systemendhauldatetime\",\n", + " \"systemendhaullatitude\",\n", + " \"systemendhaullongitude\",\n", + " ]\n", + "].to_csv()" ] }, { @@ -696,7 +727,9 @@ } ], "source": [ - "trip_df = awswrangler.athena.read_sql_query(f\"SELECT * FROM brancol_v1_bv_trips\", database=\"tnc_edge\")\n", + "trip_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * FROM brancol_v1_bv_trips\", database=\"tnc_edge\"\n", + ")\n", "trip_df" ] }, @@ -788,7 +821,9 @@ } ], "source": [ - "trip_df = awswrangler.athena.read_sql_query(f\"SELECT * FROM stpatrick_v1_bv_trips\", database=\"tnc_edge\")\n", + "trip_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * FROM stpatrick_v1_bv_trips\", database=\"tnc_edge\"\n", + 
")\n", "trip_df" ] }, @@ -1163,7 +1198,10 @@ ], "source": [ "# sets_df = awswrangler.athena.read_sql_query(f\"SELECT * FROM brancol_v1_bv_sets\", database=\"tnc_edge\")\n", - "sets_df = awswrangler.athena.read_sql_query(f\"SELECT * FROM stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where datetime > '2024-03-01'\", database=\"tnc_edge\")\n", + "sets_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * FROM stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where datetime > '2024-03-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "sets_df" ] }, @@ -1541,15 +1579,18 @@ ], "source": [ "# boat='brancol'\n", - "boat = 'stpatrick'\n", + "boat = \"stpatrick\"\n", "\n", - "trip_id = trip_df['trip_id'].values[0]\n", - "trip_start_date = trip_df['trip_start_date'].values[0]\n", - "trip_end_date = trip_df['trip_end_date'].values[0]\n", + "trip_id = trip_df[\"trip_id\"].values[0]\n", + "trip_start_date = trip_df[\"trip_start_date\"].values[0]\n", + "trip_end_date = trip_df[\"trip_end_date\"].values[0]\n", "\n", - "bv_df = awswrangler.athena.read_sql_query(f\"SELECT bv_f.* FROM {boat}_v1_bv_fish bv_f \\\n", + "bv_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT bv_f.* FROM {boat}_v1_bv_fish bv_f \\\n", "join {boat}_v1_bv_sets bv_s on bv_f.set_id = bv_s.set_id \\\n", - "where trip_id = '{trip_id}'\", database=\"tnc_edge\")\n", + "where trip_id = '{trip_id}'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", "bv_df" ] @@ -1915,14 +1956,14 @@ } ], "source": [ - "\n", - "if boat != 'brancol':\n", - " raise Error('wrong boat')\n", + "if boat != \"brancol\":\n", + " raise Error(\"wrong boat\")\n", "aif_df = awswrangler.athena.read_sql_query(\n", " f\"SELECT aifd.*, v.start_datetime FROM {boat}_v1_aifishdata aifd \\\n", " join {boat}_v1_video_files v on aifd.video_uri = v.decrypted_path \\\n", " where start_datetime >= '{trip_start_date}' and start_datetime <= '{trip_end_date}'\",\n", - " database=\"tnc_edge\")\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", "aif_df" ] @@ -2312,14 +2353,14 @@ } ], "source": [ - "\n", - "if boat != 'stpatrick':\n", - " raise Error('wrong boat')\n", + "if boat != \"stpatrick\":\n", + " raise Error(\"wrong boat\")\n", "ond_df = awswrangler.athena.read_sql_query(\n", " f\"SELECT ond.*, v.start_datetime FROM {boat}_v1_ondeckdata ond \\\n", " join {boat}_v1_video_files v on ond.video_uri = v.decrypted_path \\\n", " where start_datetime >= '{trip_start_date}' and start_datetime <= '{trip_end_date}'\",\n", - " database=\"tnc_edge\")\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", "ond_df" ] @@ -2354,6 +2395,7 @@ "source": [ "from dateutil.parser import parse as parse_dt\n", "from datetime import datetime, timedelta, timezone, date, time\n", + "\n", "# import datetime\n", "import pandas as pd\n", "\n", @@ -2364,28 +2406,38 @@ "\n", "\n", "# maxmin = pd.DataFrame({\n", - "# \"max\": ping_series.resample('1d').max(), \n", + "# \"max\": ping_series.resample('1d').max(),\n", "# \"min\": ping_series.resample('1d').min()\n", "# })\n", "# print(maxmin.sort_index().to_string())\n", "\n", "bv_df.catch_datetime = pd.to_datetime(bv_df.catch_datetime)\n", "\n", - "bv_df = bv_df.append(pd.DataFrame([\n", - " [(pd.Timestamp(trip_start_date) + timedelta(0)).replace(tzinfo=timezone.utc), ''],\n", - " [(pd.Timestamp(trip_start_date) + timedelta(hours=23,minutes=59)).replace(tzinfo=timezone.utc), ''],\n", - "], columns=['catch_datetime', 'fish_id']))\n", + "bv_df = bv_df.append(\n", + " pd.DataFrame(\n", + " [\n", + " 
[(pd.Timestamp(trip_start_date) + timedelta(0)).replace(tzinfo=timezone.utc), \"\"],\n", + " [\n", + " (pd.Timestamp(trip_start_date) + timedelta(hours=23, minutes=59)).replace(\n", + " tzinfo=timezone.utc\n", + " ),\n", + " \"\",\n", + " ],\n", + " ],\n", + " columns=[\"catch_datetime\", \"fish_id\"],\n", + " )\n", + ")\n", "\n", "# bv_df\n", "\n", - "cnt = bv_df.groupby('catch_datetime').count()[['fish_id']]\n", + "cnt = bv_df.groupby(\"catch_datetime\").count()[[\"fish_id\"]]\n", "\n", "# cnt\n", "bv_cnt_ts = cnt.resample(timedelta(minutes=30)).sum(min_count=1)\n", - "bv_cnt_ts['bv_count'] = bv_cnt_ts.pop('fish_id')\n", + "bv_cnt_ts[\"bv_count\"] = bv_cnt_ts.pop(\"fish_id\")\n", "\n", "# bv_cnt_ts.plot()\n", - "bv_cnt_ts.plot(figsize=(100,5))\n" + "bv_cnt_ts.plot(figsize=(100, 5))\n" ] }, { @@ -2523,16 +2575,15 @@ } ], "source": [ - "\n", - "if boat != 'brancol':\n", - " raise Error('wrong boat')\n", + "if boat != \"brancol\":\n", + " raise Error(\"wrong boat\")\n", "aif_df\n", "aif_df.start_datetime = pd.to_datetime(aif_df.start_datetime)\n", "\n", - "cnt = aif_df.groupby('start_datetime').sum()[['count']]\n", + "cnt = aif_df.groupby(\"start_datetime\").sum()[[\"count\"]]\n", "\n", "# cnt.count()\n", - "cnt['count'] = pd.to_numeric(cnt['count'])\n", + "cnt[\"count\"] = pd.to_numeric(cnt[\"count\"])\n", "# cnt.dtypes\n", "# cnt = cnt[cnt != '']\n", "# # cnt[cnt.index[0]]\n", @@ -2541,11 +2592,11 @@ "aif_cnt_ts = cnt.resample(timedelta(minutes=30)).sum(min_count=1)\n", "# aif_cnt_ts\n", "# aif_cnt_ts.loc['2024-01-16']\n", - "aif_cnt_ts['aifish_count'] = aif_cnt_ts.pop('count')\n", + "aif_cnt_ts[\"aifish_count\"] = aif_cnt_ts.pop(\"count\")\n", "\n", "display(aif_cnt_ts)\n", "\n", - "aif_cnt_ts.plot(figsize=(100,5))\n" + "aif_cnt_ts.plot(figsize=(100, 5))\n" ] }, { @@ -2683,16 +2734,15 @@ } ], "source": [ - "\n", - "if boat != 'stpatrick':\n", - " raise Error('wrong boat')\n", + "if boat != \"stpatrick\":\n", + " raise Error(\"wrong boat\")\n", "ond_df\n", "ond_df.start_datetime = pd.to_datetime(ond_df.start_datetime)\n", "\n", - "cnt = ond_df.groupby('start_datetime').sum()[['overallcount']]\n", + "cnt = ond_df.groupby(\"start_datetime\").sum()[[\"overallcount\"]]\n", "\n", "# cnt.count()\n", - "cnt['count'] = pd.to_numeric(cnt['overallcount'])\n", + "cnt[\"count\"] = pd.to_numeric(cnt[\"overallcount\"])\n", "# cnt.dtypes\n", "# cnt = cnt[cnt != '']\n", "# # cnt[cnt.index[0]]\n", @@ -2701,11 +2751,11 @@ "ond_cnt_ts = cnt.resample(timedelta(minutes=30)).sum(min_count=1)\n", "# ond_cnt_ts\n", "# ond_cnt_ts.loc['2024-01-16']\n", - "ond_cnt_ts['ondeck_count'] = ond_cnt_ts.pop('count')\n", + "ond_cnt_ts[\"ondeck_count\"] = ond_cnt_ts.pop(\"count\")\n", "\n", "# display(aif_cnt_ts)\n", "\n", - "ond_cnt_ts.plot(figsize=(100,5))\n" + "ond_cnt_ts.plot(figsize=(100, 5))\n" ] }, { @@ -2751,13 +2801,12 @@ } ], "source": [ - "\n", - "if boat == 'brancol':\n", + "if boat == \"brancol\":\n", " both_ts = aif_cnt_ts.merge(bv_cnt_ts, left_index=True, right_index=True)\n", - "if boat == 'stpatrick':\n", + "if boat == \"stpatrick\":\n", " both_ts = ond_cnt_ts.merge(bv_cnt_ts, left_index=True, right_index=True)\n", "\n", - "both_ts.plot(figsize=(100,10))\n" + "both_ts.plot(figsize=(100, 10))\n" ] }, { @@ -3258,11 +3307,13 @@ } ], "source": [ + "if boat != \"brancol\":\n", + " raise Error(\"wrong boat\")\n", "\n", - "if boat != 'brancol':\n", - " raise Error('wrong boat')\n", - "\n", - "elog_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_deckhandevents_mostrecentlonglineevent_jsonextracted 
where systemstartsetdatetime >= '{trip_start_date}' and systemendhauldatetime <= '{trip_end_date}'\", database=\"tnc_edge\")\n", + "elog_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where systemstartsetdatetime >= '{trip_start_date}' and systemendhauldatetime <= '{trip_end_date}'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", "# '{trip_start_date}' and start_datetime < '{trip_end_date}'\n", "elog_df" @@ -3285,13 +3336,15 @@ } ], "source": [ + "if boat != \"stpatrick\":\n", + " raise Error(\"wrong boat\")\n", "\n", - "if boat != 'stpatrick':\n", - " raise Error('wrong boat')\n", + "elog_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where systemstartsetdatetime >= '{trip_start_date}' and systemendhauldatetime <= '{trip_end_date}'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", - "elog_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_deckhandevents_mostrecentlonglineevent_jsonextracted where systemstartsetdatetime >= '{trip_start_date}' and systemendhauldatetime <= '{trip_end_date}'\", database=\"tnc_edge\")\n", - "\n", - "display(elog_df[elog_df.columns.difference(['jsonblob'])].to_string())\n", + "display(elog_df[elog_df.columns.difference([\"jsonblob\"])].to_string())\n", "\n", "# elog_df" ] @@ -3401,22 +3454,22 @@ "source": [ "elog_df_ts = None\n", "\n", - "for (k, row) in elog_df.iterrows():\n", - " start_haul = parse_dt(row['systemstarthauldatetime'])\n", - " end_haul = parse_dt(row['systemendhauldatetime'])\n", - "# print(end_haul - start_haul)\n", + "for k, row in elog_df.iterrows():\n", + " start_haul = parse_dt(row[\"systemstarthauldatetime\"])\n", + " end_haul = parse_dt(row[\"systemendhauldatetime\"])\n", + " # print(end_haul - start_haul)\n", " i = pd.DatetimeIndex([start_haul, end_haul])\n", " df = pd.DataFrame(index=i)\n", - "# df['mycol'] = [0, 0]\n", + " # df['mycol'] = [0, 0]\n", " df = df.resample(timedelta(minutes=5)).sum()\n", - " df['elog_count'] = (float(row['catchcount']) + float(row['bycatchcount'])) / len(df.index)\n", - "# print(df)\n", - "# break\n", + " df[\"elog_count\"] = (float(row[\"catchcount\"]) + float(row[\"bycatchcount\"])) / len(df.index)\n", + " # print(df)\n", + " # break\n", " if elog_df_ts is None:\n", " elog_df_ts = df\n", " else:\n", " elog_df_ts = elog_df_ts.append(df)\n", - " \n", + "\n", "elog_df_ts\n", "\n", "\n", @@ -3457,7 +3510,7 @@ "source": [ "triple_df = elog_df_ts.merge(both_ts, left_index=True, right_index=True)\n", "\n", - "triple_df.plot(figsize=(200,30))" + "triple_df.plot(figsize=(200, 30))" ] }, { @@ -4019,10 +4072,12 @@ } ], "source": [ - "aif_df['tmp'] = pd.to_datetime(aif_df['start_datetime'])\n", + "aif_df[\"tmp\"] = pd.to_datetime(aif_df[\"start_datetime\"])\n", "\n", - "aif_df.loc[aif_df['tmp'] >= parse_dt('2024-01-13 22:25:00Z')].loc[aif_df['tmp'] <= parse_dt('2024-01-13 23:55:00Z')].sort_values('tmp')\n", - "# \n", + "aif_df.loc[aif_df[\"tmp\"] >= parse_dt(\"2024-01-13 22:25:00Z\")].loc[\n", + " aif_df[\"tmp\"] <= parse_dt(\"2024-01-13 23:55:00Z\")\n", + "].sort_values(\"tmp\")\n", + "#\n", "\n", "# aif_df" ] diff --git a/notebooks/tnc-edge-data-integration.ipynb b/notebooks/tnc-edge-data-integration.ipynb index 26f8ef6..8ffa3e3 100644 --- a/notebooks/tnc-edge-data-integration.ipynb +++ b/notebooks/tnc-edge-data-integration.ipynb @@ -41,14 +41,14 @@ "\n", "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXXXX'\n", 
- "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')\n", "\n", @@ -61,24 +61,26 @@ "import pytz\n", "import io\n", "\n", + "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 5000)\n", - " pandas.set_option('display.min_rows', 1000)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 5000)\n", + " pandas.set_option(\"display.min_rows\", 1000)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n", + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n", + "\n", "\n", "try:\n", " import pyperclip\n", "except ModuleNotFoundError:\n", - " print('no copypaste functionality today... please `pip install pyperclip`')" + " print(\"no copypaste functionality today... 
please `pip install pyperclip`\")" ] }, { @@ -104,7 +106,7 @@ " buf = io.StringIO(pyperclip.paste())\n", " for l in buf.readlines():\n", " l = l.strip()\n", - "# print(l)\n", + " # print(l)\n", " try:\n", " pasted_array.append(parser.isoparse(l))\n", " except ValueError as e:\n", @@ -1212,12 +1214,16 @@ } ], "source": [ - "branc_gps = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_gpsdata where datetime > '2024-01-01'\", database='tnc_edge')\n", - "branc_gps['datetime'] = pandas.to_datetime(branc_gps['datetime'], utc=True)\n", - "branc_gps['gps_datetime'] = pandas.to_datetime(branc_gps['gps_datetime'], utc=True)\n", - "stpat_gps = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_gpsdata where datetime > '2024-01-01'\", database='tnc_edge')\n", - "stpat_gps['datetime'] = pandas.to_datetime(stpat_gps['datetime'], utc=True)\n", - "stpat_gps['gps_datetime'] = pandas.to_datetime(stpat_gps['gps_datetime'], utc=True)\n", + "branc_gps = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_gpsdata where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "branc_gps[\"datetime\"] = pandas.to_datetime(branc_gps[\"datetime\"], utc=True)\n", + "branc_gps[\"gps_datetime\"] = pandas.to_datetime(branc_gps[\"gps_datetime\"], utc=True)\n", + "stpat_gps = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_gpsdata where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_gps[\"datetime\"] = pandas.to_datetime(stpat_gps[\"datetime\"], utc=True)\n", + "stpat_gps[\"gps_datetime\"] = pandas.to_datetime(stpat_gps[\"gps_datetime\"], utc=True)\n", "\n", "branc_gps" ] @@ -1244,14 +1250,14 @@ "source": [ "gps_rows = len(branc_gps) + len(stpat_gps)\n", "\n", - "print(branc_gps[['datetime', \t'gps_datetime', \t'lat', \t'lon']].to_csv()[:300])\n", + "print(branc_gps[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]].to_csv()[:300])\n", "\n", - "gps_bytes = \\\n", - " len(branc_gps[['datetime', \t'gps_datetime', \t'lat', \t'lon']].to_csv()) + \\\n", - " len(stpat_gps[['datetime', \t'gps_datetime', \t'lat', \t'lon']].to_csv())\n", + "gps_bytes = len(branc_gps[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]].to_csv()) + len(\n", + " stpat_gps[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]].to_csv()\n", + ")\n", "\n", "\n", - "print(\"gps rows\", gps_rows, 'gps MiB', gps_bytes/1024/1024)" + "print(\"gps rows\", gps_rows, \"gps MiB\", gps_bytes / 1024 / 1024)" ] }, { @@ -3016,19 +3022,45 @@ } ], "source": [ - "branc_vids = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_video_files where last_modified <> '' and start_datetime = ''\", database='tnc_edge')\n", + "branc_vids = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_video_files where last_modified <> '' and start_datetime = ''\",\n", + " database=\"tnc_edge\",\n", + ")\n", "print(\"could not process \", len(branc_vids))\n", "\n", - "branc_vids = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_video_files where start_datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['last_modified','decrypted_datetime','start_datetime','reencoded_datetime']:\n", + "branc_vids = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_video_files where start_datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "for col in [\"last_modified\", \"decrypted_datetime\", \"start_datetime\", \"reencoded_datetime\"]:\n", " branc_vids[col] = pandas.to_datetime(branc_vids[col], utc=True)\n", - 
"branc_vids = branc_vids.drop(columns=['md_timestamp_added', 'md_file_name', 'md_ingest_uuid', 'partition_0', 'partition_1', 'partition_2'])\n", - " \n", - "stpat_vids = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_video_files where start_datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['last_modified','decrypted_datetime','start_datetime','reencoded_datetime']:\n", + "branc_vids = branc_vids.drop(\n", + " columns=[\n", + " \"md_timestamp_added\",\n", + " \"md_file_name\",\n", + " \"md_ingest_uuid\",\n", + " \"partition_0\",\n", + " \"partition_1\",\n", + " \"partition_2\",\n", + " ]\n", + ")\n", + "\n", + "stpat_vids = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_video_files where start_datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", + "for col in [\"last_modified\", \"decrypted_datetime\", \"start_datetime\", \"reencoded_datetime\"]:\n", " stpat_vids[col] = pandas.to_datetime(stpat_vids[col], utc=True)\n", - "stpat_vids = stpat_vids.drop(columns=['md_timestamp_added', 'md_file_name', 'md_ingest_uuid', 'partition_0', 'partition_1', 'partition_2'])\n", - " \n", + "stpat_vids = stpat_vids.drop(\n", + " columns=[\n", + " \"md_timestamp_added\",\n", + " \"md_file_name\",\n", + " \"md_ingest_uuid\",\n", + " \"partition_0\",\n", + " \"partition_1\",\n", + " \"partition_2\",\n", + " ]\n", + ")\n", + "\n", "\n", "stpat_vids" ] @@ -3065,15 +3097,26 @@ "source": [ "vids_rows = len(branc_vids) + len(stpat_vids)\n", "\n", - "collist = ['original_path', 'last_modified', 'decrypted_path', 'decrypted_datetime', 'stdout', 'stderr', 'start_datetime', 'cam_name', 'reencoded_path', 'reencoded_datetime', 'reencoded_stdout', 'reencoded_stderr']\n", + "collist = [\n", + " \"original_path\",\n", + " \"last_modified\",\n", + " \"decrypted_path\",\n", + " \"decrypted_datetime\",\n", + " \"stdout\",\n", + " \"stderr\",\n", + " \"start_datetime\",\n", + " \"cam_name\",\n", + " \"reencoded_path\",\n", + " \"reencoded_datetime\",\n", + " \"reencoded_stdout\",\n", + " \"reencoded_stderr\",\n", + "]\n", "print(branc_vids[collist].loc[0].to_csv())\n", "\n", - "vids_bytes = \\\n", - " len(branc_vids[collist].to_csv()) + \\\n", - " len(stpat_vids[collist].to_csv())\n", + "vids_bytes = len(branc_vids[collist].to_csv()) + len(stpat_vids[collist].to_csv())\n", "\n", "\n", - "print(\"vids rows\", vids_rows, 'vids MiB', vids_bytes/1024/1024)" + "print(\"vids rows\", vids_rows, \"vids MiB\", vids_bytes / 1024 / 1024)" ] }, { @@ -3091,14 +3134,22 @@ } ], "source": [ - "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x['original_path']) and pandas.isna(x['decrypted_path']), axis=1)]\n", + "a = branc_vids.loc[\n", + " branc_vids.apply(\n", + " lambda x: pandas.notna(x[\"original_path\"]) and pandas.isna(x[\"decrypted_path\"]), axis=1\n", + " )\n", + "]\n", "# print(\"failed to look at videos\",len(a))\n", "# display_full(a.sort_values('start_datetime'))\n", "\n", - "b = stpat_vids.loc[stpat_vids.apply(lambda x: pandas.notna(x['original_path']) and pandas.isna(x['decrypted_path']), axis=1)]\n", + "b = stpat_vids.loc[\n", + " stpat_vids.apply(\n", + " lambda x: pandas.notna(x[\"original_path\"]) and pandas.isna(x[\"decrypted_path\"]), axis=1\n", + " )\n", + "]\n", "\n", "\n", - "print(\"failed to look at # videos:\",len(a) + len(b))" + "print(\"failed to look at # videos:\", len(a) + len(b))" ] }, { @@ -3119,29 +3170,46 @@ } ], "source": [ - "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x['reencoded_path']), axis=1)]\n", 
+ "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x[\"reencoded_path\"]), axis=1)]\n", "\n", "# display_full(a.loc[409])\n", "\n", - "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x['reencoded_stdout']) and 'Execution ended after' not in x['reencoded_stdout'], axis=1)]\n", + "a = branc_vids.loc[\n", + " branc_vids.apply(\n", + " lambda x: pandas.notna(x[\"reencoded_stdout\"])\n", + " and \"Execution ended after\" not in x[\"reencoded_stdout\"],\n", + " axis=1,\n", + " )\n", + "]\n", "# display_full(a.loc[681])\n", "# display_full(a.loc[1387])\n", "# display_full(a)\n", "print(\"branc errored transcodes\", len(a))\n", "\n", "\n", - "b = stpat_vids.loc[stpat_vids.apply(lambda x: pandas.notna(x['reencoded_stdout']) and 'Execution ended after' not in x['reencoded_stdout'], axis=1)]\n", + "b = stpat_vids.loc[\n", + " stpat_vids.apply(\n", + " lambda x: pandas.notna(x[\"reencoded_stdout\"])\n", + " and \"Execution ended after\" not in x[\"reencoded_stdout\"],\n", + " axis=1,\n", + " )\n", + "]\n", "# display_full(b.loc[18376])\n", "print(\"stpat errored transcodes\", len(b))\n", "\n", "# taken from operational logs when copying avi files onto usb sticks\n", - "average_size_per_avi = 314460123.9/1024/1024\n", + "average_size_per_avi = 314460123.9 / 1024 / 1024\n", "# a\n", - "print('average_size_per_avi ', average_size_per_avi)\n", + "print(\"average_size_per_avi \", average_size_per_avi)\n", "\n", - "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x['decrypted_path']), axis=1)]\n", - "b = stpat_vids.loc[stpat_vids.apply(lambda x: pandas.notna(x['decrypted_path']), axis=1)]\n", - "print(\"video copied\", len(b) + len(a), \"MiB of video copied (estimate)\", (len(b) + len(a))*average_size_per_avi)" + "a = branc_vids.loc[branc_vids.apply(lambda x: pandas.notna(x[\"decrypted_path\"]), axis=1)]\n", + "b = stpat_vids.loc[stpat_vids.apply(lambda x: pandas.notna(x[\"decrypted_path\"]), axis=1)]\n", + "print(\n", + " \"video copied\",\n", + " len(b) + len(a),\n", + " \"MiB of video copied (estimate)\",\n", + " (len(b) + len(a)) * average_size_per_avi,\n", + ")" ] }, { @@ -4249,12 +4317,23 @@ "# print(\"could not process \", len(branc_aiout))\n", "\n", "\n", - "branc_aiout = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_aifishdata where datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['datetime']:\n", + "branc_aiout = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_aifishdata where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "for col in [\"datetime\"]:\n", " branc_aiout[col] = pandas.to_datetime(branc_aiout[col], utc=True)\n", - "for col in ['count','runtimems','detection_confidence']:\n", + "for col in [\"count\", \"runtimems\", \"detection_confidence\"]:\n", " branc_aiout[col] = pandas.to_numeric(branc_aiout[col])\n", - "branc_aiout = branc_aiout.drop(columns=['md_timestamp_added', 'md_file_name', 'md_ingest_uuid', 'partition_0', 'partition_1', 'partition_2'])\n", + "branc_aiout = branc_aiout.drop(\n", + " columns=[\n", + " \"md_timestamp_added\",\n", + " \"md_file_name\",\n", + " \"md_ingest_uuid\",\n", + " \"partition_0\",\n", + " \"partition_1\",\n", + " \"partition_2\",\n", + " ]\n", + ")\n", "branc_aiout" ] }, @@ -5359,8 +5438,8 @@ } ], "source": [ - "a = branc_aiout.loc[branc_aiout['runtimems'] < 100]\n", - "a.sort_values('runtimems')\n" + "a = branc_aiout.loc[branc_aiout[\"runtimems\"] < 100]\n", + "a.sort_values(\"runtimems\")\n" ] }, { @@ -5399,39 +5478,74 @@ } ], "source": [ - "\n", 
- "branc_vidsaiout = branc_vids.loc[branc_vids['cam_name'] == 'cam1'].join(branc_aiout.set_index('video_uri'), on='decrypted_path', how='left')\n", - "branc_vidsaiout['videocopy_found_unable_to_copy'] = pandas.notna(branc_vidsaiout['original_path']) & pandas.isna(branc_vidsaiout['decrypted_path'])\n", + "branc_vidsaiout = branc_vids.loc[branc_vids[\"cam_name\"] == \"cam1\"].join(\n", + " branc_aiout.set_index(\"video_uri\"), on=\"decrypted_path\", how=\"left\"\n", + ")\n", + "branc_vidsaiout[\"videocopy_found_unable_to_copy\"] = pandas.notna(\n", + " branc_vidsaiout[\"original_path\"]\n", + ") & pandas.isna(branc_vidsaiout[\"decrypted_path\"])\n", "\n", "\n", - "branc_vidsaiout['videocopy_lateness'] = branc_vidsaiout['decrypted_datetime'] - branc_vidsaiout['start_datetime']\n", + "branc_vidsaiout[\"videocopy_lateness\"] = (\n", + " branc_vidsaiout[\"decrypted_datetime\"] - branc_vidsaiout[\"start_datetime\"]\n", + ")\n", "\n", - "branc_vidsaiout['videocopy_ok'] = pandas.notna(branc_vidsaiout['decrypted_path'])\n", + "branc_vidsaiout[\"videocopy_ok\"] = pandas.notna(branc_vidsaiout[\"decrypted_path\"])\n", "# branc_vidsaiout.loc[pandas.isna(branc_vidsaiout['processing_uri'])]\n", - "branc_vidsaiout['ai_vidok_but_did_not_try'] = branc_vidsaiout['videocopy_ok'] & pandas.isna(branc_vidsaiout['processing_uri'])\n", - "branc_vidsaiout['ai_crash_no_output'] = branc_vidsaiout['videocopy_ok'] & ~ branc_vidsaiout['ai_vidok_but_did_not_try'] & (branc_vidsaiout['status'] == 'queued')\n", - "branc_vidsaiout['ai_outputed_but_cant_parse'] = branc_vidsaiout['videocopy_ok'] & ~ branc_vidsaiout['ai_vidok_but_did_not_try'] & (branc_vidsaiout['status'] == 'parsing')\n", + "branc_vidsaiout[\"ai_vidok_but_did_not_try\"] = branc_vidsaiout[\"videocopy_ok\"] & pandas.isna(\n", + " branc_vidsaiout[\"processing_uri\"]\n", + ")\n", + "branc_vidsaiout[\"ai_crash_no_output\"] = (\n", + " branc_vidsaiout[\"videocopy_ok\"]\n", + " & ~branc_vidsaiout[\"ai_vidok_but_did_not_try\"]\n", + " & (branc_vidsaiout[\"status\"] == \"queued\")\n", + ")\n", + "branc_vidsaiout[\"ai_outputed_but_cant_parse\"] = (\n", + " branc_vidsaiout[\"videocopy_ok\"]\n", + " & ~branc_vidsaiout[\"ai_vidok_but_did_not_try\"]\n", + " & (branc_vidsaiout[\"status\"] == \"parsing\")\n", + ")\n", "\n", "\n", - "i = branc_vidsaiout.loc[(branc_vidsaiout['count'] > 0) | (branc_vidsaiout['detection_confidence'] > 0)].sort_values('runtimems')\n", - "smallest_runtimems_with_nonzero_detections = i.loc[i.index[0]]['runtimems']\n", - "print('smallest_runtimems_with_nonzero_detections',smallest_runtimems_with_nonzero_detections)\n", + "i = branc_vidsaiout.loc[\n", + " (branc_vidsaiout[\"count\"] > 0) | (branc_vidsaiout[\"detection_confidence\"] > 0)\n", + "].sort_values(\"runtimems\")\n", + "smallest_runtimems_with_nonzero_detections = i.loc[i.index[0]][\"runtimems\"]\n", + "print(\"smallest_runtimems_with_nonzero_detections\", smallest_runtimems_with_nonzero_detections)\n", "\n", - "branc_vidsaiout['ai_crash_output_too_fast'] = (branc_vidsaiout['runtimems'] < 17464).fillna(False)\n", - "branc_vidsaiout['ai_ok'] = ((branc_vidsaiout['runtimems'] > 17464) & (branc_vidsaiout['status'] == 'done')).fillna(False)\n", + "branc_vidsaiout[\"ai_crash_output_too_fast\"] = (branc_vidsaiout[\"runtimems\"] < 17464).fillna(False)\n", + "branc_vidsaiout[\"ai_ok\"] = (\n", + " (branc_vidsaiout[\"runtimems\"] > 17464) & (branc_vidsaiout[\"status\"] == \"done\")\n", + ").fillna(False)\n", "\n", "# bad_boolean_rows_check = 
branc_vidsaiout.loc[branc_vidsaiout[['ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']].applymap(int).sum(axis=1) > 1]\n", "# bad_boolean_rows_check\n", - "branc_vidsaiout.loc[branc_vidsaiout['videocopy_lateness'] > timedelta(minutes=12)].sort_values('videocopy_lateness')\n", + "branc_vidsaiout.loc[branc_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=12)].sort_values(\n", + " \"videocopy_lateness\"\n", + ")\n", "\n", - "print('max lateness', branc_vidsaiout['videocopy_lateness'].map(lambda x: x.total_seconds()/60).max())\n", + "print(\n", + " \"max lateness\",\n", + " branc_vidsaiout[\"videocopy_lateness\"].map(lambda x: x.total_seconds() / 60).max(),\n", + ")\n", "\n", "min_lateness = 1\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+branc_vidsaiout['videocopy_lateness'].map(lambda x: x.total_seconds()/60).max(),10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log(min_lateness, 10),\n", + " math.log(\n", + " 1 + branc_vidsaiout[\"videocopy_lateness\"].map(lambda x: x.total_seconds() / 60).max(), 10\n", + " ),\n", + " num=50,\n", + ")\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "branc_vidsaiout['videocopy_latenessbucket'] = pandas.cut(branc_vidsaiout.loc[branc_vidsaiout['videocopy_lateness'] > timedelta(minutes=min_lateness)]['videocopy_lateness'], bins=bins)\n", - "branc_vidsaiout.groupby('videocopy_latenessbucket')['videocopy_lateness'].count().plot.bar()" + "branc_vidsaiout[\"videocopy_latenessbucket\"] = pandas.cut(\n", + " branc_vidsaiout.loc[branc_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=min_lateness)][\n", + " \"videocopy_lateness\"\n", + " ],\n", + " bins=bins,\n", + ")\n", + "branc_vidsaiout.groupby(\"videocopy_latenessbucket\")[\"videocopy_lateness\"].count().plot.bar()" ] }, { @@ -5457,12 +5571,16 @@ } ], "source": [ - "branc_vid_late_threshold=11\n", - "print('old ok count', branc_vidsaiout['videocopy_ok'].value_counts())\n", - "branc_vidsaiout['videocopy_late'] = (branc_vidsaiout['videocopy_ok'] & (branc_vidsaiout['videocopy_lateness'] > timedelta(minutes=branc_vid_late_threshold)))\n", - "print('late count', branc_vidsaiout['videocopy_late'].value_counts())\n", - "branc_vidsaiout['videocopy_ok'] = branc_vidsaiout.apply(lambda x: x['videocopy_ok'] and not x['videocopy_late'], axis=1)\n", - "print('new ok count', branc_vidsaiout['videocopy_ok'].value_counts())" + "branc_vid_late_threshold = 11\n", + "print(\"old ok count\", branc_vidsaiout[\"videocopy_ok\"].value_counts())\n", + "branc_vidsaiout[\"videocopy_late\"] = branc_vidsaiout[\"videocopy_ok\"] & (\n", + " branc_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=branc_vid_late_threshold)\n", + ")\n", + "print(\"late count\", branc_vidsaiout[\"videocopy_late\"].value_counts())\n", + "branc_vidsaiout[\"videocopy_ok\"] = branc_vidsaiout.apply(\n", + " lambda x: x[\"videocopy_ok\"] and not x[\"videocopy_late\"], axis=1\n", + ")\n", + "print(\"new ok count\", branc_vidsaiout[\"videocopy_ok\"].value_counts())" ] }, { @@ -5485,31 +5603,78 @@ } ], "source": [ - "thaloslogs_brancol_uptime_ts_df = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-02']\n", - "thaloslogs_brancol_isup['up']\n", - "branc_vidsaiout2 = branc_vidsaiout.join(thaloslogs_brancol_isup['up'], 
on='start_datetime', how='outer')\n", - "print(\"outer_join_disjointed count, thaloslogs thought it was down\", len(branc_vidsaiout2.loc[branc_vidsaiout2['up'].isna()]))\n", - "print(\"outer_join_disjointed count, thaloslogs thought it was up\", len(branc_vidsaiout2.loc[branc_vidsaiout2['original_path'].isna()]))\n", - "branc_vidsaiout2['videocopy_no_video'] = branc_vidsaiout2['original_path'].isna()\n", - "for col in ['videocopy_found_unable_to_copy','videocopy_ok','videocopy_late','ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']:\n", + "thaloslogs_brancol_uptime_ts_df = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[\n", + " thaloslogs_brancol_uptime_ts_df[\"up\"] == 1\n", + "]\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-02\"]\n", + "thaloslogs_brancol_isup[\"up\"]\n", + "branc_vidsaiout2 = branc_vidsaiout.join(\n", + " thaloslogs_brancol_isup[\"up\"], on=\"start_datetime\", how=\"outer\"\n", + ")\n", + "print(\n", + " \"outer_join_disjointed count, thaloslogs thought it was down\",\n", + " len(branc_vidsaiout2.loc[branc_vidsaiout2[\"up\"].isna()]),\n", + ")\n", + "print(\n", + " \"outer_join_disjointed count, thaloslogs thought it was up\",\n", + " len(branc_vidsaiout2.loc[branc_vidsaiout2[\"original_path\"].isna()]),\n", + ")\n", + "branc_vidsaiout2[\"videocopy_no_video\"] = branc_vidsaiout2[\"original_path\"].isna()\n", + "for col in [\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_ok\",\n", + " \"videocopy_late\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + "]:\n", " branc_vidsaiout2[col] = branc_vidsaiout2[col].fillna(False)\n", - " \n", + "\n", "# display_full(branc_vidsaiout2.loc[(branc_vidsaiout2['ai_crash_output_too_fast'] == False) & (branc_vidsaiout2['count'] == 0.0)])\n", "\n", - "tmp = pandas.DataFrame({'zerosize_datetime': pandas.to_datetime(pasted_array), 'is_zerosize': True})\n", + "tmp = pandas.DataFrame({\"zerosize_datetime\": pandas.to_datetime(pasted_array), \"is_zerosize\": True})\n", "tmp\n", - "tmp = tmp.set_index('zerosize_datetime')\n", + "tmp = tmp.set_index(\"zerosize_datetime\")\n", "\n", - "branc_vidsaiout2 = branc_vidsaiout2.join(tmp, on='start_datetime', how='left')\n", + "branc_vidsaiout2 = branc_vidsaiout2.join(tmp, on=\"start_datetime\", how=\"left\")\n", "\n", - "branc_vidsaiout2['is_zerosize'] = branc_vidsaiout2['is_zerosize'].fillna(False)\n", + "branc_vidsaiout2[\"is_zerosize\"] = branc_vidsaiout2[\"is_zerosize\"].fillna(False)\n", "branc_vidsaiout2\n", - "print('a', ((branc_vidsaiout2['ai_crash_output_too_fast'] == False) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == True)).sum() )\n", - "print('b', ((branc_vidsaiout2['ai_crash_output_too_fast'] == False) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == False)).sum() )\n", - "print('a', ((branc_vidsaiout2['ai_crash_output_too_fast'] == True) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == True)).sum() )\n", - "print('b', ((branc_vidsaiout2['ai_crash_output_too_fast'] == True) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == False)).sum() )\n", + "print(\n", + " \"a\",\n", + " (\n", + " (branc_vidsaiout2[\"ai_crash_output_too_fast\"] == False)\n", 
+ " & (branc_vidsaiout2[\"count\"] == 0.0)\n", + " & (branc_vidsaiout2[\"is_zerosize\"] == True)\n", + " ).sum(),\n", + ")\n", + "print(\n", + " \"b\",\n", + " (\n", + " (branc_vidsaiout2[\"ai_crash_output_too_fast\"] == False)\n", + " & (branc_vidsaiout2[\"count\"] == 0.0)\n", + " & (branc_vidsaiout2[\"is_zerosize\"] == False)\n", + " ).sum(),\n", + ")\n", + "print(\n", + " \"a\",\n", + " (\n", + " (branc_vidsaiout2[\"ai_crash_output_too_fast\"] == True)\n", + " & (branc_vidsaiout2[\"count\"] == 0.0)\n", + " & (branc_vidsaiout2[\"is_zerosize\"] == True)\n", + " ).sum(),\n", + ")\n", + "print(\n", + " \"b\",\n", + " (\n", + " (branc_vidsaiout2[\"ai_crash_output_too_fast\"] == True)\n", + " & (branc_vidsaiout2[\"count\"] == 0.0)\n", + " & (branc_vidsaiout2[\"is_zerosize\"] == False)\n", + " ).sum(),\n", + ")\n", "# display_full(branc_vidsaiout2.loc[(branc_vidsaiout2['ai_crash_output_too_fast'] == True) & (branc_vidsaiout2['count'] == 0.0) & (branc_vidsaiout2['is_zerosize'] == False)])" ] }, @@ -5520,20 +5685,37 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "# branc_aiout = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_aifishdata where datetime > '2024-01-01' limit 10;\", database='tnc_edge')\n", "# print(\"could not process \", len(branc_aiout))\n", "\n", "\n", - "\n", - "\n", - "stpat_aiout = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_ondeckdata where datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['datetime']:\n", + "stpat_aiout = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_ondeckdata where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", + "for col in [\"datetime\"]:\n", " stpat_aiout[col] = pandas.to_datetime(stpat_aiout[col], utc=True)\n", - "for col in ['overallcount','overallruntimems','tracked_confidence','overallcatches','overalldiscards','detection_confidence']:\n", + "for col in [\n", + " \"overallcount\",\n", + " \"overallruntimems\",\n", + " \"tracked_confidence\",\n", + " \"overallcatches\",\n", + " \"overalldiscards\",\n", + " \"detection_confidence\",\n", + "]:\n", " stpat_aiout[col] = pandas.to_numeric(stpat_aiout[col])\n", - "stpat_aiout = stpat_aiout.drop(columns=['md_timestamp_added', 'md_file_name', 'md_ingest_uuid', 'partition_0', 'partition_1', 'partition_2'])\n", - "stpat_aiout = stpat_aiout.loc[stpat_aiout['status'].notna()] # rows with status NA were actidentally run by the old ondeck model, remove them" + "stpat_aiout = stpat_aiout.drop(\n", + " columns=[\n", + " \"md_timestamp_added\",\n", + " \"md_file_name\",\n", + " \"md_ingest_uuid\",\n", + " \"partition_0\",\n", + " \"partition_1\",\n", + " \"partition_2\",\n", + " ]\n", + ")\n", + "stpat_aiout = stpat_aiout.loc[\n", + " stpat_aiout[\"status\"].notna()\n", + "] # rows with status NA were actidentally run by the old ondeck model, remove them" ] }, { @@ -5572,27 +5754,49 @@ } ], "source": [ - "\n", - "stpat_vidsaiout =stpat_vids.loc[stpat_vids['cam_name'] == 'cam1'].join(stpat_aiout.set_index('video_uri'), on='decrypted_path', how='left')\n", - "stpat_vidsaiout['videocopy_found_unable_to_copy'] = pandas.notna(stpat_vidsaiout['original_path']) & pandas.isna(stpat_vidsaiout['decrypted_path'])\n", + "stpat_vidsaiout = stpat_vids.loc[stpat_vids[\"cam_name\"] == \"cam1\"].join(\n", + " stpat_aiout.set_index(\"video_uri\"), on=\"decrypted_path\", how=\"left\"\n", + ")\n", + "stpat_vidsaiout[\"videocopy_found_unable_to_copy\"] = pandas.notna(\n", + " stpat_vidsaiout[\"original_path\"]\n", + ") & 
pandas.isna(stpat_vidsaiout[\"decrypted_path\"])\n", "\n", "\n", - "stpat_vidsaiout['videocopy_lateness'] = stpat_vidsaiout['decrypted_datetime'] - stpat_vidsaiout['start_datetime']\n", + "stpat_vidsaiout[\"videocopy_lateness\"] = (\n", + " stpat_vidsaiout[\"decrypted_datetime\"] - stpat_vidsaiout[\"start_datetime\"]\n", + ")\n", "\n", - "stpat_vidsaiout['videocopy_ok'] = pandas.notna(stpat_vidsaiout['decrypted_path'])\n", + "stpat_vidsaiout[\"videocopy_ok\"] = pandas.notna(stpat_vidsaiout[\"decrypted_path\"])\n", "# stpat_vidsaiout.loc[pandas.isna(stpat_vidsaiout['processing_uri'])]\n", - "stpat_vidsaiout['ai_vidok_but_did_not_try'] = stpat_vidsaiout['videocopy_ok'] & pandas.isna(stpat_vidsaiout['cocoannotations_uri'])\n", - "stpat_vidsaiout['ai_crash_no_output'] = stpat_vidsaiout['videocopy_ok'] & ~ stpat_vidsaiout['ai_vidok_but_did_not_try'] & (stpat_vidsaiout['status'] == 'queued')\n", - "stpat_vidsaiout['ai_outputed_but_cant_parse'] = stpat_vidsaiout['videocopy_ok'] & ~ stpat_vidsaiout['ai_vidok_but_did_not_try'] & (stpat_vidsaiout['status'] == 'parsing')\n", + "stpat_vidsaiout[\"ai_vidok_but_did_not_try\"] = stpat_vidsaiout[\"videocopy_ok\"] & pandas.isna(\n", + " stpat_vidsaiout[\"cocoannotations_uri\"]\n", + ")\n", + "stpat_vidsaiout[\"ai_crash_no_output\"] = (\n", + " stpat_vidsaiout[\"videocopy_ok\"]\n", + " & ~stpat_vidsaiout[\"ai_vidok_but_did_not_try\"]\n", + " & (stpat_vidsaiout[\"status\"] == \"queued\")\n", + ")\n", + "stpat_vidsaiout[\"ai_outputed_but_cant_parse\"] = (\n", + " stpat_vidsaiout[\"videocopy_ok\"]\n", + " & ~stpat_vidsaiout[\"ai_vidok_but_did_not_try\"]\n", + " & (stpat_vidsaiout[\"status\"] == \"parsing\")\n", + ")\n", "\n", "\n", - "i = stpat_vidsaiout.loc[(stpat_vidsaiout['overallcount'] > 0) | (stpat_vidsaiout['detection_confidence'] > 0)].sort_values('overallruntimems')\n", + "i = stpat_vidsaiout.loc[\n", + " (stpat_vidsaiout[\"overallcount\"] > 0) | (stpat_vidsaiout[\"detection_confidence\"] > 0)\n", + "].sort_values(\"overallruntimems\")\n", "# display_full(i)\n", - "smallest_runtimems_with_nonzero_detections = i.loc[i.index[0]]['overallruntimems']\n", - "print('smallest_runtimems_with_nonzero_detections',smallest_runtimems_with_nonzero_detections)\n", + "smallest_runtimems_with_nonzero_detections = i.loc[i.index[0]][\"overallruntimems\"]\n", + "print(\"smallest_runtimems_with_nonzero_detections\", smallest_runtimems_with_nonzero_detections)\n", "\n", - "stpat_vidsaiout['ai_crash_output_too_fast'] = (stpat_vidsaiout['overallruntimems'] < 1).fillna(False)\n", - "stpat_vidsaiout['ai_ok'] = ((stpat_vidsaiout['overallruntimems'] > 1) & ((stpat_vidsaiout['status'] == 'done') | (stpat_vidsaiout['status'] == 'doneskiphalf'))).fillna(False)\n", + "stpat_vidsaiout[\"ai_crash_output_too_fast\"] = (stpat_vidsaiout[\"overallruntimems\"] < 1).fillna(\n", + " False\n", + ")\n", + "stpat_vidsaiout[\"ai_ok\"] = (\n", + " (stpat_vidsaiout[\"overallruntimems\"] > 1)\n", + " & ((stpat_vidsaiout[\"status\"] == \"done\") | (stpat_vidsaiout[\"status\"] == \"doneskiphalf\"))\n", + ").fillna(False)\n", "\n", "\n", "# bad_boolean_rows_check = stpat_vidsaiout.loc[stpat_vidsaiout[['ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']].applymap(int).sum(axis=1) > 1]\n", @@ -5603,16 +5807,32 @@ "# display_full(stpat_vidsaiout.loc[stpat_vidsaiout['ai_crash_output_too_fast']])\n", "# display_full(stpat_vidsaiout.loc[stpat_vidsaiout['ai_ok']])\n", "\n", - "stpat_vidsaiout.loc[stpat_vidsaiout['videocopy_lateness'] > 
timedelta(minutes=17)].sort_values('videocopy_lateness')\n", + "stpat_vidsaiout.loc[stpat_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=17)].sort_values(\n", + " \"videocopy_lateness\"\n", + ")\n", "\n", - "print('max lateness', stpat_vidsaiout['videocopy_lateness'].map(lambda x: x.total_seconds()/60).max())\n", + "print(\n", + " \"max lateness\",\n", + " stpat_vidsaiout[\"videocopy_lateness\"].map(lambda x: x.total_seconds() / 60).max(),\n", + ")\n", "\n", "min_lateness = 4\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_vidsaiout['videocopy_lateness'].map(lambda x: x.total_seconds()/60).max(),10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log(min_lateness, 10),\n", + " math.log(\n", + " 1 + stpat_vidsaiout[\"videocopy_lateness\"].map(lambda x: x.total_seconds() / 60).max(), 10\n", + " ),\n", + " num=50,\n", + ")\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_vidsaiout['videocopy_latenessbucket'] = pandas.cut(stpat_vidsaiout.loc[stpat_vidsaiout['videocopy_lateness'] > timedelta(minutes=min_lateness)]['videocopy_lateness'], bins=bins)\n", - "stpat_vidsaiout.groupby('videocopy_latenessbucket')['videocopy_lateness'].count().plot.bar()" + "stpat_vidsaiout[\"videocopy_latenessbucket\"] = pandas.cut(\n", + " stpat_vidsaiout.loc[stpat_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=min_lateness)][\n", + " \"videocopy_lateness\"\n", + " ],\n", + " bins=bins,\n", + ")\n", + "stpat_vidsaiout.groupby(\"videocopy_latenessbucket\")[\"videocopy_lateness\"].count().plot.bar()" ] }, { @@ -5638,12 +5858,16 @@ } ], "source": [ - "stpat_vid_late_threshold=17\n", - "print('old ok count', stpat_vidsaiout['videocopy_ok'].value_counts())\n", - "stpat_vidsaiout['videocopy_late'] = (stpat_vidsaiout['videocopy_ok'] & (stpat_vidsaiout['videocopy_lateness'] > timedelta(minutes=stpat_vid_late_threshold)))\n", - "print('late count', stpat_vidsaiout['videocopy_late'].value_counts())\n", - "stpat_vidsaiout['videocopy_ok'] = stpat_vidsaiout.apply(lambda x: x['videocopy_ok'] and not x['videocopy_late'], axis=1)\n", - "print('new ok count', stpat_vidsaiout['videocopy_ok'].value_counts())" + "stpat_vid_late_threshold = 17\n", + "print(\"old ok count\", stpat_vidsaiout[\"videocopy_ok\"].value_counts())\n", + "stpat_vidsaiout[\"videocopy_late\"] = stpat_vidsaiout[\"videocopy_ok\"] & (\n", + " stpat_vidsaiout[\"videocopy_lateness\"] > timedelta(minutes=stpat_vid_late_threshold)\n", + ")\n", + "print(\"late count\", stpat_vidsaiout[\"videocopy_late\"].value_counts())\n", + "stpat_vidsaiout[\"videocopy_ok\"] = stpat_vidsaiout.apply(\n", + " lambda x: x[\"videocopy_ok\"] and not x[\"videocopy_late\"], axis=1\n", + ")\n", + "print(\"new ok count\", stpat_vidsaiout[\"videocopy_ok\"].value_counts())" ] }, { @@ -5662,15 +5886,36 @@ } ], "source": [ - "thaloslogs_stpatrick_uptime_ts_df = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", - "thaloslogs_stpatrick_isup['up']\n", - "stpat_vidsaiout2 = stpat_vidsaiout.join(thaloslogs_stpatrick_isup['up'], on='start_datetime', how='outer')\n", - "print(\"outer_join_disjointed count, thaloslogs thought it was down\", len(stpat_vidsaiout2.loc[stpat_vidsaiout2['up'].isna()]))\n", - "print(\"outer_join_disjointed count, thaloslogs thought it was up\", 
len(stpat_vidsaiout2.loc[stpat_vidsaiout2['original_path'].isna()]))\n", - "stpat_vidsaiout2['videocopy_no_video'] = stpat_vidsaiout2['original_path'].isna()\n", - "for col in ['videocopy_found_unable_to_copy','videocopy_ok','videocopy_late','ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']:\n", + "thaloslogs_stpatrick_uptime_ts_df = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[\n", + " thaloslogs_stpatrick_uptime_ts_df[\"up\"] == 1\n", + "]\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", + "thaloslogs_stpatrick_isup[\"up\"]\n", + "stpat_vidsaiout2 = stpat_vidsaiout.join(\n", + " thaloslogs_stpatrick_isup[\"up\"], on=\"start_datetime\", how=\"outer\"\n", + ")\n", + "print(\n", + " \"outer_join_disjointed count, thaloslogs thought it was down\",\n", + " len(stpat_vidsaiout2.loc[stpat_vidsaiout2[\"up\"].isna()]),\n", + ")\n", + "print(\n", + " \"outer_join_disjointed count, thaloslogs thought it was up\",\n", + " len(stpat_vidsaiout2.loc[stpat_vidsaiout2[\"original_path\"].isna()]),\n", + ")\n", + "stpat_vidsaiout2[\"videocopy_no_video\"] = stpat_vidsaiout2[\"original_path\"].isna()\n", + "for col in [\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_ok\",\n", + " \"videocopy_late\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + "]:\n", " stpat_vidsaiout2[col] = stpat_vidsaiout2[col].fillna(False)\n", "# display_full(stpat_vidsaiout2.loc[stpat_vidsaiout2['videocopy_no_video']])\n", "# display_full(stpat_vidsaiout2)" @@ -5691,29 +5936,68 @@ } ], "source": [ - "a = branc_vidsaiout2[['original_path','decrypted_datetime','start_datetime','status','videocopy_no_video','videocopy_found_unable_to_copy','videocopy_late','videocopy_ok','ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']]\n", - "b = stpat_vidsaiout2[['original_path','decrypted_datetime','start_datetime','status','videocopy_no_video','videocopy_found_unable_to_copy','videocopy_late','videocopy_ok','ai_vidok_but_did_not_try','ai_crash_no_output','ai_outputed_but_cant_parse','ai_crash_output_too_fast','ai_ok']]\n", + "a = branc_vidsaiout2[\n", + " [\n", + " \"original_path\",\n", + " \"decrypted_datetime\",\n", + " \"start_datetime\",\n", + " \"status\",\n", + " \"videocopy_no_video\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_late\",\n", + " \"videocopy_ok\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + " ]\n", + "]\n", + "b = stpat_vidsaiout2[\n", + " [\n", + " \"original_path\",\n", + " \"decrypted_datetime\",\n", + " \"start_datetime\",\n", + " \"status\",\n", + " \"videocopy_no_video\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_late\",\n", + " \"videocopy_ok\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + " ]\n", + "]\n", "# display_full(a)\n", "\n", "c = a.append(b, ignore_index=True)\n", "\n", - "c.to_pickle('integration_state_evaluations_videocopy_ai.pickle')\n", + 
"c.to_pickle(\"integration_state_evaluations_videocopy_ai.pickle\")\n", "\n", "# show rows with at least one NA\n", "# display_full(c.loc[c.applymap(pandas.isna).any(axis=1)])\n", "# display_full(c.loc[(c['videocopy_no_video']) & (c['start_datetime'] < pandas.Timestamp('2024-01-13 00:00:00+00:00'))].sort_values('start_datetime') )\n", - "print(\"disk outage accounts for max\",\n", - " len(c.loc[(c['videocopy_no_video']) & (c['start_datetime'] < pandas.Timestamp('2024-01-13 00:00:00+00:00'))]),\n", - " \" out of \", \n", - " len(c.loc[(c['videocopy_no_video'])]))\n", + "print(\n", + " \"disk outage accounts for max\",\n", + " len(\n", + " c.loc[\n", + " (c[\"videocopy_no_video\"])\n", + " & (c[\"start_datetime\"] < pandas.Timestamp(\"2024-01-13 00:00:00+00:00\"))\n", + " ]\n", + " ),\n", + " \" out of \",\n", + " len(c.loc[(c[\"videocopy_no_video\"])]),\n", + ")\n", "# print(len(b.loc[(b['videocopy_no_video']) & (b['start_datetime'] < pandas.Timestamp('2024-01-13 00:00:00+00:00'))]))\n", "\n", "# print(len(a.loc[(a['videocopy_no_video']) & (a['start_datetime'] > pandas.Timestamp('2024-01-13 00:00:00+00:00')) & (a['start_datetime'] < pandas.Timestamp('2024-02-01 00:00:00+00:00'))]))\n", "# print(len(b.loc[(b['videocopy_no_video']) & (b['start_datetime'] > pandas.Timestamp('2024-01-13 00:00:00+00:00')) & (b['start_datetime'] < pandas.Timestamp('2024-02-01 00:00:00+00:00'))]))\n", - " \n", + "\n", "# print(len(a.loc[(a['videocopy_no_video']) & (a['start_datetime'] > pandas.Timestamp('2024-02-01 00:00:00+00:00')) & (a['start_datetime'] < pandas.Timestamp('2024-03-01 00:00:00+00:00'))]))\n", "# print(len(b.loc[(b['videocopy_no_video']) & (b['start_datetime'] > pandas.Timestamp('2024-02-01 00:00:00+00:00')) & (b['start_datetime'] < pandas.Timestamp('2024-03-01 00:00:00+00:00'))]))\n", - " \n", + "\n", "# print(len(a.loc[(a['videocopy_no_video']) & (a['start_datetime'] > pandas.Timestamp('2024-03-01 00:00:00+00:00')) & (a['start_datetime'] < pandas.Timestamp('2024-04-07 00:00:00+00:00'))]))\n", "# print(len(b.loc[(b['videocopy_no_video']) & (b['start_datetime'] > pandas.Timestamp('2024-03-01 00:00:00+00:00')) & (b['start_datetime'] < pandas.Timestamp('2024-04-07 00:00:00+00:00'))]))\n", "\n", @@ -6322,12 +6606,15 @@ } ], "source": [ - "stpat_gpsdata = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_gpsdata where datetime > '2024-01-01' and gps_datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['datetime', 'gps_datetime']:\n", + "stpat_gpsdata = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_gpsdata where datetime > '2024-01-01' and gps_datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", + "for col in [\"datetime\", \"gps_datetime\"]:\n", " stpat_gpsdata[col] = pandas.to_datetime(stpat_gpsdata[col], utc=True)\n", - "for col in ['lat', 'lon']:\n", + "for col in [\"lat\", \"lon\"]:\n", " stpat_gpsdata[col] = pandas.to_numeric(stpat_gpsdata[col])\n", - "stpat_gpsdata = stpat_gpsdata[['datetime', 'gps_datetime','lat', 'lon']]\n", + "stpat_gpsdata = stpat_gpsdata[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]]\n", "stpat_gpsdata" ] }, @@ -7396,49 +7683,64 @@ } ], "source": [ - "stpat_gpsdata['lateness'] = stpat_gpsdata['datetime'] - stpat_gpsdata['gps_datetime']\n", + "stpat_gpsdata[\"lateness\"] = stpat_gpsdata[\"datetime\"] - stpat_gpsdata[\"gps_datetime\"]\n", "stpat_gpsdata\n", "\n", - "stpat_gpsdata.loc[stpat_gpsdata['lateness'] > timedelta(minutes=30)].sort_values('lateness')\n", + 
"stpat_gpsdata.loc[stpat_gpsdata[\"lateness\"] > timedelta(minutes=30)].sort_values(\"lateness\")\n", "\n", - "print('max lateness', stpat_gpsdata['lateness'].map(lambda x: x.total_seconds()/60).max())\n", + "print(\"max lateness\", stpat_gpsdata[\"lateness\"].map(lambda x: x.total_seconds() / 60).max())\n", "\n", "min_lateness = 21.2\n", - "max_lateness = 1440 # 1 day\n", + "max_lateness = 1440 # 1 day\n", "# bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_gpsdata['lateness'].map(lambda x: x.total_seconds()/60).max(),10), num=100)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(max_lateness,10), num=100)\n", + "bins = numpy.logspace(math.log(min_lateness, 10), math.log(max_lateness, 10), num=100)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_gpsdata['latenessbucket'] = pandas.cut(stpat_gpsdata.loc[stpat_gpsdata['lateness'] > timedelta(minutes=min_lateness)]['lateness'], bins=bins)\n", - "stpat_gpsdata.groupby('latenessbucket')['lateness'].count().plot.bar(figsize=(10,3))\n", + "stpat_gpsdata[\"latenessbucket\"] = pandas.cut(\n", + " stpat_gpsdata.loc[stpat_gpsdata[\"lateness\"] > timedelta(minutes=min_lateness)][\"lateness\"],\n", + " bins=bins,\n", + ")\n", + "stpat_gpsdata.groupby(\"latenessbucket\")[\"lateness\"].count().plot.bar(figsize=(10, 3))\n", "\n", "print(\"using lateness cutoff of 21minutes\")\n", "\n", - "thaloslogs_stpatrick_uptime_ts_df = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", - "thaloslogs_stpatrick_downevent = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['status'] == ' down']\n", + "thaloslogs_stpatrick_uptime_ts_df = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", + "thaloslogs_stpatrick_downevent = thaloslogs_stpatrick_uptime_ts_df.loc[\n", + " thaloslogs_stpatrick_uptime_ts_df[\"status\"] == \" down\"\n", + "]\n", "\n", - "thaloslogs_stpatrick_downevent = thaloslogs_stpatrick_downevent.loc[thaloslogs_stpatrick_downevent.index < '2024-04-08']\n", - "downevent_dts = thaloslogs_stpatrick_downevent.loc[thaloslogs_stpatrick_downevent['datetime'].notna()]['datetime']\n", + "thaloslogs_stpatrick_downevent = thaloslogs_stpatrick_downevent.loc[\n", + " thaloslogs_stpatrick_downevent.index < \"2024-04-08\"\n", + "]\n", + "downevent_dts = thaloslogs_stpatrick_downevent.loc[\n", + " thaloslogs_stpatrick_downevent[\"datetime\"].notna()\n", + "][\"datetime\"]\n", "downevent_dts = pandas.to_datetime(downevent_dts, utc=True)\n", "\n", - "testtime = datetime.fromisoformat('2024-01-03T22:29:41+00:00').astimezone(timezone.utc)\n", + "testtime = datetime.fromisoformat(\"2024-01-03T22:29:41+00:00\").astimezone(timezone.utc)\n", "\n", "# pandas.to_datetime(datetime.now().astimezone(timezone.utc))\n", "# pandas.to_datetime(downevent_dts, utc=True)\n", "# downevent_dts.map(lambda y: abs((testtime - y).total_seconds()) < 21*60).any()\n", "\n", "\n", - "#following works, but is very slow\n", + "# following works, but is very slow\n", "# stpat_gpsdata['near_down_event'] = stpat_gpsdata['gps_datetime'].map(lambda x: downevent_dts.map(lambda y: abs((x - y).total_seconds()) < 21*60).any())\n", "\n", "td_21m = timedelta(minutes=21, seconds=12)\n", - "#following works, is slightly less slow\n", - "stpat_gpsdata['just_before_downevent'] = stpat_gpsdata['gps_datetime'].map(lambda x: ((downevent_dts >= x - td_21m) & (downevent_dts <= x )).any() )\n", - "stpat_gpsdata['just_after_downevent'] = stpat_gpsdata['gps_datetime'].map(lambda x: ((downevent_dts >= x ) 
& (downevent_dts <= x + td_21m)).any() )\n", + "# following works, is slightly less slow\n", + "stpat_gpsdata[\"just_before_downevent\"] = stpat_gpsdata[\"gps_datetime\"].map(\n", + " lambda x: ((downevent_dts >= x - td_21m) & (downevent_dts <= x)).any()\n", + ")\n", + "stpat_gpsdata[\"just_after_downevent\"] = stpat_gpsdata[\"gps_datetime\"].map(\n", + " lambda x: ((downevent_dts >= x) & (downevent_dts <= x + td_21m)).any()\n", + ")\n", "\n", "\n", - "stpat_gpsdata['gps_late_integration_err'] = (stpat_gpsdata['lateness'] > td_21m ) & (stpat_gpsdata['just_after_downevent'] == False)\n", - "display(stpat_gpsdata.loc[stpat_gpsdata['gps_late_integration_err']])\n", + "stpat_gpsdata[\"gps_late_integration_err\"] = (stpat_gpsdata[\"lateness\"] > td_21m) & (\n", + " stpat_gpsdata[\"just_after_downevent\"] == False\n", + ")\n", + "display(stpat_gpsdata.loc[stpat_gpsdata[\"gps_late_integration_err\"]])\n", "# stpat_gpsdata.loc[(stpat_gpsdata['lateness'] > td_21m ) & (stpat_gpsdata['just_after_downevent'] == True)].sort_values('gps_datetime')\n", "\n", "# stpat_gpsdata['status'] = stpat_gpsdata.apply(lambda x: thaloslogs_stpatrick_uptime_ts_df.loc[x['gps_datetime'].replace(minute=math.floor(x['gps_datetime'].minute/5)*5,second=0)]['status'],axis=1)\n", @@ -7447,10 +7749,19 @@ "# if I were to declare that on status == 'down' there is no integration error, I would only save like 14 rows. Not worth my time.\n", "\n", "\n", - "print('len', len(stpat_gpsdata['gps_late_integration_err']))\n", - "print('num of int errors', stpat_gpsdata['gps_late_integration_err'].sum())\n", - "print('num of ok ', (stpat_gpsdata['gps_late_integration_err'] == False).sum())\n", - "print('num of errors on boundary', ((stpat_gpsdata['lateness'] > td_21m ) & ((stpat_gpsdata['just_before_downevent'] == True) | (stpat_gpsdata['just_after_downevent'] == True)) ).sum())\n" + "print(\"len\", len(stpat_gpsdata[\"gps_late_integration_err\"]))\n", + "print(\"num of int errors\", stpat_gpsdata[\"gps_late_integration_err\"].sum())\n", + "print(\"num of ok \", (stpat_gpsdata[\"gps_late_integration_err\"] == False).sum())\n", + "print(\n", + " \"num of errors on boundary\",\n", + " (\n", + " (stpat_gpsdata[\"lateness\"] > td_21m)\n", + " & (\n", + " (stpat_gpsdata[\"just_before_downevent\"] == True)\n", + " | (stpat_gpsdata[\"just_after_downevent\"] == True)\n", + " )\n", + " ).sum(),\n", + ")\n" ] }, { @@ -7470,17 +7781,21 @@ ], "source": [ "# display_full(stpat_gpsdata.loc[stpat_gpsdata['lateness'] > td_21m].sort_values('lateness'))\n", - "print(\"bef\" , len(stpat_gpsdata.loc[stpat_gpsdata['just_before_downevent']]))\n", + "print(\"bef\", len(stpat_gpsdata.loc[stpat_gpsdata[\"just_before_downevent\"]]))\n", "# display_full(stpat_gpsdata.loc[stpat_gpsdata['just_before_downevent']].sort_values('gps_datetime'))\n", - "print(\"aft\" ,len(stpat_gpsdata.loc[stpat_gpsdata['just_after_downevent']]))\n", + "print(\"aft\", len(stpat_gpsdata.loc[stpat_gpsdata[\"just_after_downevent\"]]))\n", "# display_full(stpat_gpsdata.loc[stpat_gpsdata['just_after_downevent']].sort_values('gps_datetime'))\n", - "thaloslogs_stpatrick_uptime_ts_df['datetime'] = pandas.to_datetime(thaloslogs_stpatrick_uptime_ts_df['datetime'], utc=True)\n", - "updownevents = thaloslogs_stpatrick_uptime_ts_df[thaloslogs_stpatrick_uptime_ts_df['datetime'].notna()].set_index('datetime')\n", + "thaloslogs_stpatrick_uptime_ts_df[\"datetime\"] = pandas.to_datetime(\n", + " thaloslogs_stpatrick_uptime_ts_df[\"datetime\"], utc=True\n", + ")\n", + "updownevents = 
thaloslogs_stpatrick_uptime_ts_df[\n", + " thaloslogs_stpatrick_uptime_ts_df[\"datetime\"].notna()\n", + "].set_index(\"datetime\")\n", "\n", - "stpat_gpsdata2 = stpat_gpsdata.set_index('gps_datetime')\n", - "stpat_gpsdata2 = stpat_gpsdata2.join(updownevents, how='outer')\n", + "stpat_gpsdata2 = stpat_gpsdata.set_index(\"gps_datetime\")\n", + "stpat_gpsdata2 = stpat_gpsdata2.join(updownevents, how=\"outer\")\n", "stpat_gpsdata2 = stpat_gpsdata2.sort_index()\n", - "stpat_gpsdata2['status'] = stpat_gpsdata2['status'].ffill()\n", + "stpat_gpsdata2[\"status\"] = stpat_gpsdata2[\"status\"].ffill()\n", "# display_full(stpat_gpsdata2.loc[(stpat_gpsdata2['status'] == ' down') | (stpat_gpsdata2['status'].shift(1) == ' down') | (stpat_gpsdata2['status'].shift(-1) == ' down')])" ] }, @@ -8086,12 +8401,15 @@ } ], "source": [ - "branc_gpsdata = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_gpsdata where datetime > '2024-01-01' and gps_datetime > '2024-01-01'\", database='tnc_edge')\n", - "for col in ['datetime', 'gps_datetime']:\n", + "branc_gpsdata = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_gpsdata where datetime > '2024-01-01' and gps_datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", + "for col in [\"datetime\", \"gps_datetime\"]:\n", " branc_gpsdata[col] = pandas.to_datetime(branc_gpsdata[col], utc=True)\n", - "for col in ['lat', 'lon']:\n", + "for col in [\"lat\", \"lon\"]:\n", " branc_gpsdata[col] = pandas.to_numeric(branc_gpsdata[col])\n", - "branc_gpsdata = branc_gpsdata[['datetime', 'gps_datetime','lat', 'lon']]\n", + "branc_gpsdata = branc_gpsdata[[\"datetime\", \"gps_datetime\", \"lat\", \"lon\"]]\n", "branc_gpsdata" ] }, @@ -8129,63 +8447,109 @@ } ], "source": [ - "branc_gpsdata['lateness'] = branc_gpsdata['datetime'] - branc_gpsdata['gps_datetime']\n", + "branc_gpsdata[\"lateness\"] = branc_gpsdata[\"datetime\"] - branc_gpsdata[\"gps_datetime\"]\n", "branc_gpsdata\n", "\n", - "branc_gpsdata.loc[branc_gpsdata['lateness'] > timedelta(minutes=30)].sort_values('lateness')\n", + "branc_gpsdata.loc[branc_gpsdata[\"lateness\"] > timedelta(minutes=30)].sort_values(\"lateness\")\n", "\n", - "print('max lateness', branc_gpsdata['lateness'].map(lambda x: x.total_seconds()/60).max())\n", + "print(\"max lateness\", branc_gpsdata[\"lateness\"].map(lambda x: x.total_seconds() / 60).max())\n", "\n", "min_lateness = 21.2\n", - "max_lateness = 1440 # 1 day\n", + "max_lateness = 1440 # 1 day\n", "# bins = numpy.logspace(math.log(min_lateness,10), math.log(1+branc_gpsdata['lateness'].map(lambda x: x.total_seconds()/60).max(),10), num=100)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(max_lateness,10), num=100)\n", + "bins = numpy.logspace(math.log(min_lateness, 10), math.log(max_lateness, 10), num=100)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "branc_gpsdata['latenessbucket'] = pandas.cut(branc_gpsdata.loc[branc_gpsdata['lateness'] > timedelta(minutes=min_lateness)]['lateness'], bins=bins)\n", - "branc_gpsdata.groupby('latenessbucket')['lateness'].count().plot.bar(figsize=(10,3))\n", + "branc_gpsdata[\"latenessbucket\"] = pandas.cut(\n", + " branc_gpsdata.loc[branc_gpsdata[\"lateness\"] > timedelta(minutes=min_lateness)][\"lateness\"],\n", + " bins=bins,\n", + ")\n", + "branc_gpsdata.groupby(\"latenessbucket\")[\"lateness\"].count().plot.bar(figsize=(10, 3))\n", "\n", "print(\"using lateness cutoff of 21minutes\")\n", "\n", - "thaloslogs_brancol_uptime_ts_df = 
pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", - "thaloslogs_brancol_downevent = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['status'] == ' down']\n", + "thaloslogs_brancol_uptime_ts_df = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", + "thaloslogs_brancol_downevent = thaloslogs_brancol_uptime_ts_df.loc[\n", + " thaloslogs_brancol_uptime_ts_df[\"status\"] == \" down\"\n", + "]\n", "\n", - "thaloslogs_brancol_downevent = thaloslogs_brancol_downevent.loc[thaloslogs_brancol_downevent.index < '2024-04-08']\n", - "downevent_dts = thaloslogs_brancol_downevent.loc[thaloslogs_brancol_downevent['datetime'].notna()]['datetime']\n", + "thaloslogs_brancol_downevent = thaloslogs_brancol_downevent.loc[\n", + " thaloslogs_brancol_downevent.index < \"2024-04-08\"\n", + "]\n", + "downevent_dts = thaloslogs_brancol_downevent.loc[thaloslogs_brancol_downevent[\"datetime\"].notna()][\n", + " \"datetime\"\n", + "]\n", "downevent_dts = pandas.to_datetime(downevent_dts, utc=True)\n", "\n", - "testtime = datetime.fromisoformat('2024-01-03T22:29:41+00:00').astimezone(timezone.utc)\n", + "testtime = datetime.fromisoformat(\"2024-01-03T22:29:41+00:00\").astimezone(timezone.utc)\n", "\n", "# pandas.to_datetime(datetime.now().astimezone(timezone.utc))\n", "# pandas.to_datetime(downevent_dts, utc=True)\n", "# downevent_dts.map(lambda y: abs((testtime - y).total_seconds()) < 21*60).any()\n", "\n", "\n", - "#following works, but is very slow\n", + "# following works, but is very slow\n", "# branc_gpsdata['near_down_event'] = branc_gpsdata['gps_datetime'].map(lambda x: downevent_dts.map(lambda y: abs((x - y).total_seconds()) < 21*60).any())\n", "\n", "td_21m = timedelta(minutes=21, seconds=12)\n", "\n", - "#following works, is slightly less slow\n", - "branc_gpsdata['just_before_downevent'] = branc_gpsdata['gps_datetime'].map(lambda x: ((downevent_dts >= x - td_21m) & (downevent_dts <= x )).any() )\n", - "branc_gpsdata['just_after_downevent'] = branc_gpsdata['gps_datetime'].map(lambda x: ((downevent_dts >= x ) & (downevent_dts <= x + td_21m)).any() )\n", + "# following works, is slightly less slow\n", + "branc_gpsdata[\"just_before_downevent\"] = branc_gpsdata[\"gps_datetime\"].map(\n", + " lambda x: ((downevent_dts >= x - td_21m) & (downevent_dts <= x)).any()\n", + ")\n", + "branc_gpsdata[\"just_after_downevent\"] = branc_gpsdata[\"gps_datetime\"].map(\n", + " lambda x: ((downevent_dts >= x) & (downevent_dts <= x + td_21m)).any()\n", + ")\n", "\n", "\n", - "branc_gpsdata['status'] = branc_gpsdata.apply(lambda x: thaloslogs_brancol_uptime_ts_df.loc[x['gps_datetime'].replace(minute=math.floor(x['gps_datetime'].minute/5)*5,second=0)]['status'],axis=1)\n", + "branc_gpsdata[\"status\"] = branc_gpsdata.apply(\n", + " lambda x: thaloslogs_brancol_uptime_ts_df.loc[\n", + " x[\"gps_datetime\"].replace(minute=math.floor(x[\"gps_datetime\"].minute / 5) * 5, second=0)\n", + " ][\"status\"],\n", + " axis=1,\n", + ")\n", "\n", - "print('branc num of lates where because down', len(branc_gpsdata.loc[(branc_gpsdata['lateness'] > td_21m ) & (branc_gpsdata['status'] == ' down')]))\n", - "print('branc num of lates where because before_down', len(branc_gpsdata.loc[(branc_gpsdata['lateness'] > td_21m ) & (branc_gpsdata['just_before_downevent'])]))\n", + "print(\n", + " \"branc num of lates where because down\",\n", + " len(\n", + " branc_gpsdata.loc[\n", + " (branc_gpsdata[\"lateness\"] > td_21m) & (branc_gpsdata[\"status\"] == \" down\")\n", + " ]\n", + " ),\n", + ")\n", + 
"print(\n", + " \"branc num of lates where because before_down\",\n", + " len(\n", + " branc_gpsdata.loc[\n", + " (branc_gpsdata[\"lateness\"] > td_21m) & (branc_gpsdata[\"just_before_downevent\"])\n", + " ]\n", + " ),\n", + ")\n", "# if I were to declare that on status == 'down' there is no integration error, I would only save like 14 rows. Not worth my time.\n", "\n", - "branc_gpsdata['gps_late_integration_err'] = (branc_gpsdata['lateness'] > td_21m ) & (branc_gpsdata['just_after_downevent'] == False) & (branc_gpsdata['just_before_downevent'] == False) & (branc_gpsdata['status'] == ' up') \n", + "branc_gpsdata[\"gps_late_integration_err\"] = (\n", + " (branc_gpsdata[\"lateness\"] > td_21m)\n", + " & (branc_gpsdata[\"just_after_downevent\"] == False)\n", + " & (branc_gpsdata[\"just_before_downevent\"] == False)\n", + " & (branc_gpsdata[\"status\"] == \" up\")\n", + ")\n", "# display_full(branc_gpsdata.loc[branc_gpsdata['gps_late_integration_err']].sort_values('gps_datetime'))\n", "# branc_gpsdata.loc[(branc_gpsdata['lateness'] > td_21m ) & (branc_gpsdata['just_after_downevent'] == True)].sort_values('gps_datetime')\n", "\n", "\n", - "print('len', len(branc_gpsdata['gps_late_integration_err']))\n", - "print('num of int errors', branc_gpsdata['gps_late_integration_err'].sum())\n", - "print('num of ok ', (branc_gpsdata['gps_late_integration_err'] == False).sum())\n", - "print('num of errors on boundary', ((branc_gpsdata['lateness'] > td_21m ) & ((branc_gpsdata['just_before_downevent'] == True) | (branc_gpsdata['just_after_downevent'] == True)) ).sum())\n" + "print(\"len\", len(branc_gpsdata[\"gps_late_integration_err\"]))\n", + "print(\"num of int errors\", branc_gpsdata[\"gps_late_integration_err\"].sum())\n", + "print(\"num of ok \", (branc_gpsdata[\"gps_late_integration_err\"] == False).sum())\n", + "print(\n", + " \"num of errors on boundary\",\n", + " (\n", + " (branc_gpsdata[\"lateness\"] > td_21m)\n", + " & (\n", + " (branc_gpsdata[\"just_before_downevent\"] == True)\n", + " | (branc_gpsdata[\"just_after_downevent\"] == True)\n", + " )\n", + " ).sum(),\n", + ")\n" ] }, { @@ -8199,17 +8563,18 @@ "from collections import defaultdict\n", "import json\n", "\n", - "a='brancol_jan_aifishoutput'\n", - "b='brancol_feb_aifish_output'\n", - "c='brancol_mar_aifish_output'\n", + "a = \"brancol_jan_aifishoutput\"\n", + "b = \"brancol_feb_aifish_output\"\n", + "c = \"brancol_mar_aifish_output\"\n", "\n", - "aiff = [ x for i in [a, b, c] for x in Path(i).iterdir() ]\n", + "aiff = [x for i in [a, b, c] for x in Path(i).iterdir()]\n", "\n", - "aiff = pandas.DataFrame({'f': aiff})\n", + "aiff = pandas.DataFrame({\"f\": aiff})\n", "# len(aiff)\n", - "aiff['name'] = aiff['f'].apply(lambda x: x.name)\n", - "aiff = aiff.loc[aiff['name'].apply(lambda x: x.endswith('.json'))]\n", - "aiff['forjoin'] = aiff['name'].apply(lambda x: '/videos/output/' + x)\n", + "aiff[\"name\"] = aiff[\"f\"].apply(lambda x: x.name)\n", + "aiff = aiff.loc[aiff[\"name\"].apply(lambda x: x.endswith(\".json\"))]\n", + "aiff[\"forjoin\"] = aiff[\"name\"].apply(lambda x: \"/videos/output/\" + x)\n", + "\n", "\n", "def do_fcnt_allcnt_mdc(f: Path):\n", " with f.open() as d:\n", @@ -8220,34 +8585,41 @@ " # error handling here\n", " return (0, 0, 0)\n", "\n", - " fish_detections = list(filter(lambda d: d.get('class_name') == 'fish', detections))\n", + " fish_detections = list(filter(lambda d: d.get(\"class_name\") == \"fish\", detections))\n", "\n", " if len(fish_detections) == 0:\n", " # error handling here\n", " return (0, 
len(detections), 0)\n", "\n", - " detectionconfidences = list(filter(lambda x: x is not None, map(lambda d: d.get('object_confidence'), fish_detections)))\n", + " detectionconfidences = list(\n", + " filter(\n", + " lambda x: x is not None,\n", + " map(lambda d: d.get(\"object_confidence\"), fish_detections),\n", + " )\n", + " )\n", " # = max(map(lambda detection: detection.get('object_confidence'), detections))\n", " # trackedconfidences = []\n", "\n", " tracks = defaultdict(list)\n", " for d in fish_detections:\n", - " tracks[d.get('track')].append(d)\n", + " tracks[d.get(\"track\")].append(d)\n", "\n", " cnt = len(tracks.keys())\n", "\n", " if len(detectionconfidences) > 0:\n", - " meandetectionconfidence = float(sum(detectionconfidences)) / float(len(detectionconfidences))\n", + " meandetectionconfidence = float(sum(detectionconfidences)) / float(\n", + " len(detectionconfidences)\n", + " )\n", " else:\n", " meandetectionconfidence = 0\n", - " \n", + "\n", " return (cnt, len(detections), meandetectionconfidence)\n", - " \n", + "\n", " except json.JSONDecodeError:\n", - " print('json error in ' + f.name)\n", - " \n", + " print(\"json error in \" + f.name)\n", "\n", - "aiff['fcnt_allcnt_mdc'] = aiff['f'].apply(do_fcnt_allcnt_mdc)\n", + "\n", + "aiff[\"fcnt_allcnt_mdc\"] = aiff[\"f\"].apply(do_fcnt_allcnt_mdc)\n", "aiff\n", "# branc_vidsaiout[output_uri]" ] @@ -10256,31 +10628,65 @@ } ], "source": [ - "aiff['fcnt'] = aiff['fcnt_allcnt_mdc'].apply(lambda x: pandas.NA if x is None else x[0])\n", - "aiff['allcnt'] = aiff['fcnt_allcnt_mdc'].apply(lambda x: pandas.NA if x is None else x[1])\n", - "aiff['mdc'] = aiff['fcnt_allcnt_mdc'].apply(lambda x: pandas.NA if x is None else x[2])\n", + "aiff[\"fcnt\"] = aiff[\"fcnt_allcnt_mdc\"].apply(lambda x: pandas.NA if x is None else x[0])\n", + "aiff[\"allcnt\"] = aiff[\"fcnt_allcnt_mdc\"].apply(lambda x: pandas.NA if x is None else x[1])\n", + "aiff[\"mdc\"] = aiff[\"fcnt_allcnt_mdc\"].apply(lambda x: pandas.NA if x is None else x[2])\n", "\n", "display(branc_vidsaiout.columns)\n", "\n", - "toofast_df = branc_vidsaiout.join(aiff.set_index('forjoin'), on='output_uri')\n", - "\n", - "print('matchingcounts', (toofast_df['fcnt'] == toofast_df['count']).sum())\n", - "print('nonmatchingcounts', (toofast_df['fcnt'] != toofast_df['count']).sum())\n", - "print('nonmatchingcounts higher', (toofast_df['fcnt'] > toofast_df['count']).sum())\n", - "print('nonmatchingcounts lower', (toofast_df['fcnt'] < toofast_df['count']).sum())\n", - "print('0fish,somenonfish', ((toofast_df['fcnt'] == 0 ) & ( toofast_df['allcnt'] > 0)).sum())\n", - "\n", - "\n", - "print('nonmatchingcounts that match toofast', ((toofast_df['fcnt'] != toofast_df['count']) & (toofast_df['ai_crash_output_too_fast'])).sum())\n", - "print('somenonefish that match toofast', (( toofast_df['allcnt'] > 0) & (toofast_df['ai_crash_output_too_fast'])).sum())\n", - "print('legit toofast', ((toofast_df['fcnt'] == toofast_df['count']) & (toofast_df['allcnt'] == toofast_df['count']) & (toofast_df['ai_crash_output_too_fast'])).sum())\n", - "toofast_df.loc[(toofast_df['fcnt'] == toofast_df['count']) & (toofast_df['allcnt'] == toofast_df['count']) & (toofast_df['ai_crash_output_too_fast'])][['start_datetime', 'output_uri', 'datetime',\n", - " 'count', 'runtimems', 'detection_confidence', 'status',\n", - " 'videocopy_found_unable_to_copy', 'videocopy_lateness', 'videocopy_ok',\n", - " 'ai_vidok_but_did_not_try', 'ai_crash_no_output',\n", - " 'ai_outputed_but_cant_parse', 'ai_crash_output_too_fast', 'ai_ok',\n", - 
" 'videocopy_latenessbucket', 'videocopy_late',\n", - " 'fcnt','allcnt', 'mdc']]" + "toofast_df = branc_vidsaiout.join(aiff.set_index(\"forjoin\"), on=\"output_uri\")\n", + "\n", + "print(\"matchingcounts\", (toofast_df[\"fcnt\"] == toofast_df[\"count\"]).sum())\n", + "print(\"nonmatchingcounts\", (toofast_df[\"fcnt\"] != toofast_df[\"count\"]).sum())\n", + "print(\"nonmatchingcounts higher\", (toofast_df[\"fcnt\"] > toofast_df[\"count\"]).sum())\n", + "print(\"nonmatchingcounts lower\", (toofast_df[\"fcnt\"] < toofast_df[\"count\"]).sum())\n", + "print(\"0fish,somenonfish\", ((toofast_df[\"fcnt\"] == 0) & (toofast_df[\"allcnt\"] > 0)).sum())\n", + "\n", + "\n", + "print(\n", + " \"nonmatchingcounts that match toofast\",\n", + " ((toofast_df[\"fcnt\"] != toofast_df[\"count\"]) & (toofast_df[\"ai_crash_output_too_fast\"])).sum(),\n", + ")\n", + "print(\n", + " \"somenonefish that match toofast\",\n", + " ((toofast_df[\"allcnt\"] > 0) & (toofast_df[\"ai_crash_output_too_fast\"])).sum(),\n", + ")\n", + "print(\n", + " \"legit toofast\",\n", + " (\n", + " (toofast_df[\"fcnt\"] == toofast_df[\"count\"])\n", + " & (toofast_df[\"allcnt\"] == toofast_df[\"count\"])\n", + " & (toofast_df[\"ai_crash_output_too_fast\"])\n", + " ).sum(),\n", + ")\n", + "toofast_df.loc[\n", + " (toofast_df[\"fcnt\"] == toofast_df[\"count\"])\n", + " & (toofast_df[\"allcnt\"] == toofast_df[\"count\"])\n", + " & (toofast_df[\"ai_crash_output_too_fast\"])\n", + "][\n", + " [\n", + " \"start_datetime\",\n", + " \"output_uri\",\n", + " \"datetime\",\n", + " \"count\",\n", + " \"runtimems\",\n", + " \"detection_confidence\",\n", + " \"status\",\n", + " \"videocopy_found_unable_to_copy\",\n", + " \"videocopy_lateness\",\n", + " \"videocopy_ok\",\n", + " \"ai_vidok_but_did_not_try\",\n", + " \"ai_crash_no_output\",\n", + " \"ai_outputed_but_cant_parse\",\n", + " \"ai_crash_output_too_fast\",\n", + " \"ai_ok\",\n", + " \"videocopy_latenessbucket\",\n", + " \"videocopy_late\",\n", + " \"fcnt\",\n", + " \"allcnt\",\n", + " \"mdc\",\n", + " ]\n", + "]" ] }, { @@ -10299,9 +10705,9 @@ } ], "source": [ - "rug = pandas.read_pickle('integration_state_evaluations_videocopy_ai.pickle')\n", - "print('aitoofast', rug['ai_crash_output_too_fast'].sum())\n", - "print('aitoofast', (~rug['ai_crash_output_too_fast']).sum())" + "rug = pandas.read_pickle(\"integration_state_evaluations_videocopy_ai.pickle\")\n", + "print(\"aitoofast\", rug[\"ai_crash_output_too_fast\"].sum())\n", + "print(\"aitoofast\", (~rug[\"ai_crash_output_too_fast\"]).sum())" ] } ], diff --git a/notebooks/tnc-edge-gps-speed.ipynb b/notebooks/tnc-edge-gps-speed.ipynb index 828a874..be18091 100644 --- a/notebooks/tnc-edge-gps-speed.ipynb +++ b/notebooks/tnc-edge-gps-speed.ipynb @@ -21,12 +21,14 @@ "import numpy as np\n", "from datetime import datetime, date, time, timezone, timedelta\n", "from dateutil.parser import parse as parse_datetime\n", + "\n", "# help(np.argwhere)\n", "import re\n", "import awswrangler as wr\n", "import boto3\n", "import math\n", - "boto3.setup_default_session(profile_name='XXXXXXXX')\n" + "\n", + "boto3.setup_default_session(profile_name=\"XXXXXXXX\")\n" ] }, { @@ -183,7 +185,10 @@ } ], "source": [ - "gps_df = wr.athena.read_sql_query(\"SELECT datetime ,gps_datetime ,lat ,lon from stpatrick_v1_gpsdata where gps_datetime > '2024-01-01';\", database=\"tnc_edge\")\n", + "gps_df = wr.athena.read_sql_query(\n", + " \"SELECT datetime ,gps_datetime ,lat ,lon from stpatrick_v1_gpsdata where gps_datetime > '2024-01-01';\",\n", + " database=\"tnc_edge\",\n", + 
")\n", "\n", "\n", "gps_df" @@ -414,41 +419,54 @@ } ], "source": [ - "if gps_df['lat'].dtype != 'float64':\n", - " gps_df['lat'] = pandas.to_numeric(gps_df['lat'])\n", - "if gps_df['lon'].dtype != 'float64':\n", - " gps_df['lon'] = pandas.to_numeric(gps_df['lon'])\n", - "if gps_df['gps_datetime'].dtype != 'object':\n", - " gps_df['gps_datetime'] = pandas.to_datetime(gps_df['gps_datetime'])\n", + "if gps_df[\"lat\"].dtype != \"float64\":\n", + " gps_df[\"lat\"] = pandas.to_numeric(gps_df[\"lat\"])\n", + "if gps_df[\"lon\"].dtype != \"float64\":\n", + " gps_df[\"lon\"] = pandas.to_numeric(gps_df[\"lon\"])\n", + "if gps_df[\"gps_datetime\"].dtype != \"object\":\n", + " gps_df[\"gps_datetime\"] = pandas.to_datetime(gps_df[\"gps_datetime\"])\n", "\n", "\n", - "gps_df = gps_df.sort_values('gps_datetime')\n", - " \n", + "gps_df = gps_df.sort_values(\"gps_datetime\")\n", "\n", - "gps_df['prev_lat'] = gps_df['lat'].shift(1)\n", - "gps_df['prev_lon'] = gps_df['lon'].shift(1)\n", - "gps_df['prev_dt'] = gps_df['gps_datetime'].shift(1)\n", "\n", - "if gps_df['prev_dt'].dtype != 'object':\n", - " gps_df['prev_dt'] = pandas.to_datetime(gps_df['prev_dt'])\n", + "gps_df[\"prev_lat\"] = gps_df[\"lat\"].shift(1)\n", + "gps_df[\"prev_lon\"] = gps_df[\"lon\"].shift(1)\n", + "gps_df[\"prev_dt\"] = gps_df[\"gps_datetime\"].shift(1)\n", "\n", + "if gps_df[\"prev_dt\"].dtype != \"object\":\n", + " gps_df[\"prev_dt\"] = pandas.to_datetime(gps_df[\"prev_dt\"])\n", "\n", - "gps_df['kph'] = np.power(\n", - " np.power((gps_df['lat'] - gps_df['prev_lat'])*110.574, 2) + \n", - " np.power(np.cos(gps_df['lat']*3.14159265/180)*(gps_df['lon']-gps_df['prev_lon'])*111.320, 2), 0.5\n", - " )/((gps_df['gps_datetime'] - gps_df['prev_dt']).dt.total_seconds()/3600)\n", "\n", - "gps_df['clockheading'] = (6 - \n", - " np.sign(gps_df['lon']-gps_df['prev_lon'])\n", - " *(np.sign(gps_df['lon']-gps_df['prev_lon'])\n", - " *np.arctan(\n", - " (gps_df['lat'] - gps_df['prev_lat'])\n", - " /(np.cos(gps_df['lat']*3.14159265/180)*(gps_df['lon']-gps_df['prev_lon'])))/3.14159 + 0.5 )\n", - " * 6 )\n", + "gps_df[\"kph\"] = np.power(\n", + " np.power((gps_df[\"lat\"] - gps_df[\"prev_lat\"]) * 110.574, 2)\n", + " + np.power(\n", + " np.cos(gps_df[\"lat\"] * 3.14159265 / 180) * (gps_df[\"lon\"] - gps_df[\"prev_lon\"]) * 111.320, 2\n", + " ),\n", + " 0.5,\n", + ") / ((gps_df[\"gps_datetime\"] - gps_df[\"prev_dt\"]).dt.total_seconds() / 3600)\n", + "\n", + "gps_df[\"clockheading\"] = (\n", + " 6\n", + " - np.sign(gps_df[\"lon\"] - gps_df[\"prev_lon\"])\n", + " * (\n", + " np.sign(gps_df[\"lon\"] - gps_df[\"prev_lon\"])\n", + " * np.arctan(\n", + " (gps_df[\"lat\"] - gps_df[\"prev_lat\"])\n", + " / (np.cos(gps_df[\"lat\"] * 3.14159265 / 180) * (gps_df[\"lon\"] - gps_df[\"prev_lon\"]))\n", + " )\n", + " / 3.14159\n", + " + 0.5\n", + " )\n", + " * 6\n", + ")\n", "\n", "# if the lon difference == 0, then the math above can't tell if it's north or south. 
It defaults to south (6).\n", "# check here and conditionally set to north\n", - "gps_df.loc[(gps_df['lon']-gps_df['prev_lon'] == 0) & ( gps_df['lat'] - gps_df['prev_lat'] > 0), 'clockheading'] = 0\n", + "gps_df.loc[\n", + " (gps_df[\"lon\"] - gps_df[\"prev_lon\"] == 0) & (gps_df[\"lat\"] - gps_df[\"prev_lat\"] > 0),\n", + " \"clockheading\",\n", + "] = 0\n", "\n", "gps_df" ] @@ -686,7 +704,7 @@ "# gps_df.loc[range(30465,30485)]\n", "\n", "# gps_df['gps_datetime']\n", - "gps_df[gps_df['kph'] <= 0.1]\n", + "gps_df[gps_df[\"kph\"] <= 0.1]\n", "\n", "# gps_df[gps_df['gps_datetime'] - gps_df['prev_dt'] <= np.timedelta64(0) ]\n" ] @@ -719,18 +737,17 @@ } ], "source": [ - "\n", - "gps_df['gps_datetime'] = pandas.to_datetime(gps_df['gps_datetime'], utc=True)\n", + "gps_df[\"gps_datetime\"] = pandas.to_datetime(gps_df[\"gps_datetime\"], utc=True)\n", "\n", "\n", - "avg = gps_df.groupby('gps_datetime').mean()[['kph', 'clockheading']]\n", + "avg = gps_df.groupby(\"gps_datetime\").mean()[[\"kph\", \"clockheading\"]]\n", "\n", "# i = pandas.DatetimeIndex([gps_df['gps_datetime'].min(), gps_df['gps_datetime'].max()])\n", "# gps_df_ts = pandas.DataFrame(index=i)\n", "# df['mycol'] = [0, 0]\n", "gps_df_ts = avg.resample(timedelta(minutes=30)).mean()\n", "\n", - "gps_df_ts.plot(figsize=(100,5))" + "gps_df_ts.plot(figsize=(100, 5))" ] } ], diff --git a/notebooks/tnc-edge-network-uptime.ipynb b/notebooks/tnc-edge-network-uptime.ipynb index c53afd9..a1a7af3 100644 --- a/notebooks/tnc-edge-network-uptime.ipynb +++ b/notebooks/tnc-edge-network-uptime.ipynb @@ -36,14 +36,14 @@ "\n", "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')\n", "\n", @@ -53,19 +53,20 @@ "from dateutil import parser\n", "import pytz\n", "\n", + "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -75,7 +76,9 @@ "metadata": {}, "outputs": [], "source": [ - "branc_dhe = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_deckhandevents where datetime > '2024-01-01'\", 
database='tnc_edge')\n", + "branc_dhe = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_deckhandevents where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", "branc_dhe.datetime = pandas.to_datetime(branc_dhe.datetime, utc=True)\n", "display(branc_dhe)" ] @@ -87,7 +90,9 @@ "metadata": {}, "outputs": [], "source": [ - "stp_dhe = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_deckhandevents where datetime > '2024-01-01'\", database='tnc_edge')\n", + "stp_dhe = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_deckhandevents where datetime > '2024-01-01'\", database=\"tnc_edge\"\n", + ")\n", "stp_dhe.datetime = pandas.to_datetime(stp_dhe.datetime, utc=True)\n", "display(stp_dhe)" ] @@ -122,39 +127,44 @@ "source": [ "import json\n", "\n", - "branc_dhe['boat'] = 'brancol'\n", - "stp_dhe['boat'] = 'stpatrick'\n", + "branc_dhe[\"boat\"] = \"brancol\"\n", + "stp_dhe[\"boat\"] = \"stpatrick\"\n", "\n", - "s = pandas.concat([branc_dhe,stp_dhe])\n", - "if 'jsonblob' in s.columns:\n", - " s['json'] = s['jsonblob'].map(json.loads)\n", - " s = s.drop('jsonblob', axis=1)\n", + "s = pandas.concat([branc_dhe, stp_dhe])\n", + "if \"jsonblob\" in s.columns:\n", + " s[\"json\"] = s[\"jsonblob\"].map(json.loads)\n", + " s = s.drop(\"jsonblob\", axis=1)\n", "# display_full(s['json'].loc[s['json'].map(lambda x: x['eventType'] == 'tripDetailsEvent')])\n", "\n", "# return\n", - "s['jsondatetime'] = s['json'].map(lambda x: x['lastCompletedTimestamp'] if x['eventType'] == 'longlineEvent' \\\n", - " else x['lastCompletedTimestamp'] if x['eventType'] == 'tripDetailsEvent' else x['eventType'])\n", - "\n", - "s['jsondatetime'] = pandas.to_datetime(s['jsondatetime'].map(lambda x: x*1000000000), utc=True)\n", - "s['diff'] = s['datetime'] - s['jsondatetime']\n", + "s[\"jsondatetime\"] = s[\"json\"].map(\n", + " lambda x: x[\"lastCompletedTimestamp\"]\n", + " if x[\"eventType\"] == \"longlineEvent\"\n", + " else x[\"lastCompletedTimestamp\"]\n", + " if x[\"eventType\"] == \"tripDetailsEvent\"\n", + " else x[\"eventType\"]\n", + ")\n", "\n", + "s[\"jsondatetime\"] = pandas.to_datetime(s[\"jsondatetime\"].map(lambda x: x * 1000000000), utc=True)\n", + "s[\"diff\"] = s[\"datetime\"] - s[\"jsondatetime\"]\n", "\n", "\n", "# s['diff'].plot()\n", - "binsandlabels = [[timedelta(seconds=0), ''],\n", - " [timedelta(seconds=1), '1sec'],\n", - " [timedelta(seconds=5), '5secs'],\n", - " [timedelta(seconds=30), '30secs'],\n", - " [timedelta(minutes=1), '1min'],\n", - " [timedelta(minutes=5), '5mins'],\n", - " [timedelta(minutes=30), '30mins'],\n", - " [timedelta(minutes=60), '1hour'],\n", - " [timedelta(days=2), '2days'],\n", - " ]\n", + "binsandlabels = [\n", + " [timedelta(seconds=0), \"\"],\n", + " [timedelta(seconds=1), \"1sec\"],\n", + " [timedelta(seconds=5), \"5secs\"],\n", + " [timedelta(seconds=30), \"30secs\"],\n", + " [timedelta(minutes=1), \"1min\"],\n", + " [timedelta(minutes=5), \"5mins\"],\n", + " [timedelta(minutes=30), \"30mins\"],\n", + " [timedelta(minutes=60), \"1hour\"],\n", + " [timedelta(days=2), \"2days\"],\n", + "]\n", "bins = [x[0] for x in binsandlabels]\n", "labels = [x[1] for x in binsandlabels][1:]\n", - "s['elog submission delay'] = pandas.cut(s['diff'], bins, labels=labels)\n", - "s.groupby('elog submission delay').count()['id'].plot(kind='bar', figsize=(7,3))" + "s[\"elog submission delay\"] = pandas.cut(s[\"diff\"], bins, labels=labels)\n", + "s.groupby(\"elog submission delay\").count()[\"id\"].plot(kind=\"bar\", figsize=(7, 
3))" ] }, { @@ -956,7 +966,7 @@ } ], "source": [ - "display_full(s[['boat','datetime', 'jsondatetime','diff']])" + "display_full(s[[\"boat\", \"datetime\", \"jsondatetime\", \"diff\"]])" ] }, { @@ -1747,10 +1757,13 @@ } ], "source": [ - "stp_inettests = awswrangler.athena.read_sql_query(f\"SELECT id,name,type,vector_id,score,detail,datetime from stpatrick_v1_tests where vector_id = '1' and datetime > '2024-01-01'\", database='tnc_edge')\n", + "stp_inettests = awswrangler.athena.read_sql_query(\n", + " f\"SELECT id,name,type,vector_id,score,detail,datetime from stpatrick_v1_tests where vector_id = '1' and datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "stp_inettests.datetime = pandas.to_datetime(stp_inettests.datetime, utc=True)\n", - "stp_inettests['score'] = pandas.to_numeric(stp_inettests['score'])\n", - "stp_inettests = stp_inettests.sort_values('datetime')\n", + "stp_inettests[\"score\"] = pandas.to_numeric(stp_inettests[\"score\"])\n", + "stp_inettests = stp_inettests.sort_values(\"datetime\")\n", "# display_full(stp_inettests)\n", "stp_inettests" ] @@ -1783,13 +1796,13 @@ } ], "source": [ - "stp_inet_ts = stp_inettests.set_index('datetime')\n", + "stp_inet_ts = stp_inettests.set_index(\"datetime\")\n", "\n", - "stp_inet_ts['score'] = stp_inet_ts['score'].apply(lambda x: 0 if x < 0.7 else 1)\n", + "stp_inet_ts[\"score\"] = stp_inet_ts[\"score\"].apply(lambda x: 0 if x < 0.7 else 1)\n", "\n", "stp_inet_ts = stp_inet_ts.resample(timedelta(minutes=30)).first()\n", "\n", - "stp_inet_ts['score'].plot(figsize=(150,3))" + "stp_inet_ts[\"score\"].plot(figsize=(150, 3))" ] }, { @@ -2580,10 +2593,13 @@ } ], "source": [ - "branc_inettests = awswrangler.athena.read_sql_query(f\"SELECT id,name,type,vector_id,score,detail,datetime from brancol_v1_tests where vector_id = '1' and datetime > '2024-01-01'\", database='tnc_edge')\n", + "branc_inettests = awswrangler.athena.read_sql_query(\n", + " f\"SELECT id,name,type,vector_id,score,detail,datetime from brancol_v1_tests where vector_id = '1' and datetime > '2024-01-01'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "branc_inettests.datetime = pandas.to_datetime(branc_inettests.datetime, utc=True)\n", - "branc_inettests['score'] = pandas.to_numeric(branc_inettests['score'])\n", - "branc_inettests = branc_inettests.sort_values('datetime')\n", + "branc_inettests[\"score\"] = pandas.to_numeric(branc_inettests[\"score\"])\n", + "branc_inettests = branc_inettests.sort_values(\"datetime\")\n", "# display_full(branc_inettests)\n", "branc_inettests" ] @@ -2616,11 +2632,11 @@ } ], "source": [ - "branc_inet_ts = branc_inettests.set_index('datetime')\n", - "branc_inet_ts['score'] = branc_inet_ts['score'].apply(lambda x: 0 if x < 0.7 else 1)\n", + "branc_inet_ts = branc_inettests.set_index(\"datetime\")\n", + "branc_inet_ts[\"score\"] = branc_inet_ts[\"score\"].apply(lambda x: 0 if x < 0.7 else 1)\n", "\n", "branc_inet_ts = branc_inet_ts.resample(timedelta(minutes=30)).first()\n", - "branc_inet_ts['score'].plot(figsize=(150,3))" + "branc_inet_ts[\"score\"].plot(figsize=(150, 3))" ] }, { @@ -2642,10 +2658,10 @@ } ], "source": [ - "b_len= branc_inettests['score'].notna().sum()\n", - "b_sum= branc_inettests['score'].apply(lambda x: 0 if x < 0.7 else 1).sum()\n", - "s_len= stp_inettests['score'].notna().sum()\n", - "s_sum= stp_inettests['score'].apply(lambda x: 0 if x < 0.7 else 1).sum()\n", + "b_len = branc_inettests[\"score\"].notna().sum()\n", + "b_sum = branc_inettests[\"score\"].apply(lambda x: 0 if x < 0.7 else 1).sum()\n", + "s_len = 
stp_inettests[\"score\"].notna().sum()\n", + "s_sum = stp_inettests[\"score\"].apply(lambda x: 0 if x < 0.7 else 1).sum()\n", "\n", "print(\"b len\", b_len)\n", "print(\"b sum\", b_sum)\n", diff --git a/notebooks/tnc-edge-ondeck-ops-df.ipynb b/notebooks/tnc-edge-ondeck-ops-df.ipynb index 3a7b5f0..13c4b1f 100644 --- a/notebooks/tnc-edge-ondeck-ops-df.ipynb +++ b/notebooks/tnc-edge-ondeck-ops-df.ipynb @@ -19,8 +19,8 @@ "source": [ "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXXXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXXXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "import pandas\n", @@ -28,7 +28,6 @@ "from datetime import datetime, timezone\n", "\n", "\n", - "\n", "boto3.setup_default_session(**aws_config)\n" ] }, @@ -40,19 +39,18 @@ "outputs": [], "source": [ "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n", - "\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -62,16 +60,19 @@ "metadata": {}, "outputs": [], "source": [ - "s = awswrangler.athena.read_sql_query(f\"SELECT stpatrick_v1_video_files.*, \\\n", + "s = awswrangler.athena.read_sql_query(\n", + " f\"SELECT stpatrick_v1_video_files.*, \\\n", "stpatrick_v1_ondeckdata.video_uri, stpatrick_v1_ondeckdata.cocoannotations_uri, stpatrick_v1_ondeckdata.datetime, \\\n", "stpatrick_v1_ondeckdata.overallcount, stpatrick_v1_ondeckdata.overallruntimems, \\\n", "stpatrick_v1_ondeckdata.tracked_confidence, stpatrick_v1_ondeckdata.status, \\\n", "stpatrick_v1_ondeckdata.overallcatches, stpatrick_v1_ondeckdata.overalldiscards, \\\n", "stpatrick_v1_ondeckdata.detection_confidence FROM stpatrick_v1_video_files \\\n", "left join stpatrick_v1_ondeckdata on decrypted_path = video_uri \\\n", - "where cam_name = 'cam1' and start_datetime > '2024-01-01' limit 50000\", database='tnc_edge')\n", + "where cam_name = 'cam1' and start_datetime > '2024-01-01' limit 50000\",\n", + " database=\"tnc_edge\",\n", + ")\n", "\n", - "s = s.sort_values('start_datetime')\n", + "s = s.sort_values(\"start_datetime\")\n", "\n", "display(s)" ] @@ -83,7 +84,7 @@ "metadata": {}, "outputs": [], "source": [ - "s.to_csv('tmp.csv')" + "s.to_csv(\"tmp.csv\")" ] }, { @@ -2417,7 +2418,7 @@ } ], "source": [ - "display(s.loc[s['status'] == 'errored'])" + "display(s.loc[s[\"status\"] == \"errored\"])" ] } ], diff --git a/notebooks/tnc-edge-system-uptime.ipynb 
b/notebooks/tnc-edge-system-uptime.ipynb index cdcdae1..0326617 100644 --- a/notebooks/tnc-edge-system-uptime.ipynb +++ b/notebooks/tnc-edge-system-uptime.ipynb @@ -50,14 +50,14 @@ "\n", "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')" ] @@ -75,19 +75,20 @@ "from dateutil import parser\n", "import pytz\n", "\n", + "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -217,19 +218,25 @@ } ], "source": [ - "sunrisesunset = pandas.read_csv('cr_sunrise_sunset.csv')\n", - "crtz = pytz.timezone('America/Costa_Rica')\n", + "sunrisesunset = pandas.read_csv(\"cr_sunrise_sunset.csv\")\n", + "crtz = pytz.timezone(\"America/Costa_Rica\")\n", "\n", - "# the following doesn't work for some reason? 
\n", + "# the following doesn't work for some reason?\n", "# so I need to replace tz in a separate step\n", "# sunrisesunset['sunrise'] = sunrisesunset.apply(lambda x: datetime.combine(parser.parse(x['date']).date(),parser.parse(x['sunrise']).time()).replace(tzinfo=crtz), axis=1)\n", "# sunrisesunset['sunset'] = sunrisesunset.apply(lambda x: datetime.combine(parser.parse(x['date']).date(),parser.parse(x['sunset']).time()).replace(tzinfo=crtz), axis=1)\n", "\n", - "sunrisesunset['sunrise'] = sunrisesunset.apply(lambda x: datetime.combine(parser.parse(x['date']).date(),parser.parse(x['sunrise']).time()), axis=1)\n", - "sunrisesunset['sunset'] = sunrisesunset.apply(lambda x: datetime.combine(parser.parse(x['date']).date(),parser.parse(x['sunset']).time()), axis=1)\n", + "sunrisesunset[\"sunrise\"] = sunrisesunset.apply(\n", + " lambda x: datetime.combine(parser.parse(x[\"date\"]).date(), parser.parse(x[\"sunrise\"]).time()),\n", + " axis=1,\n", + ")\n", + "sunrisesunset[\"sunset\"] = sunrisesunset.apply(\n", + " lambda x: datetime.combine(parser.parse(x[\"date\"]).date(), parser.parse(x[\"sunset\"]).time()),\n", + " axis=1,\n", + ")\n", "\n", - "sunrisesunset['sunrise'] = sunrisesunset['sunrise'].map(lambda x: x.replace(tzinfo=crtz))\n", - "sunrisesunset['sunset'] = sunrisesunset['sunset'].map(lambda x: x.replace(tzinfo=crtz))\n", + "sunrisesunset[\"sunrise\"] = sunrisesunset[\"sunrise\"].map(lambda x: x.replace(tzinfo=crtz))\n", + "sunrisesunset[\"sunset\"] = sunrisesunset[\"sunset\"].map(lambda x: x.replace(tzinfo=crtz))\n", "\n", "sunrisesunset" ] @@ -241,7 +248,10 @@ "metadata": {}, "outputs": [], "source": [ - "s = awswrangler.athena.read_sql_query(f\"SELECT brancol_v1_tests.* from brancol_v1_tests where vector_id in ('1', '3') and datetime > '2024-01-01' order by id asc limit 10000\", database='tnc_edge')\n", + "s = awswrangler.athena.read_sql_query(\n", + " f\"SELECT brancol_v1_tests.* from brancol_v1_tests where vector_id in ('1', '3') and datetime > '2024-01-01' order by id asc limit 10000\",\n", + " database=\"tnc_edge\",\n", + ")\n", "s.datetime = pandas.to_datetime(s.datetime, utc=True)\n", "display(s)" ] @@ -1781,9 +1791,9 @@ } ], "source": [ - "s['lan_errors'] = s.loc[s['vector_id'] == '3']['score']\n", - "s['wan_errors'] = s.loc[s['vector_id'] == '1']['score']\n", - "s = s.drop(columns=['score'])\n", + "s[\"lan_errors\"] = s.loc[s[\"vector_id\"] == \"3\"][\"score\"]\n", + "s[\"wan_errors\"] = s.loc[s[\"vector_id\"] == \"1\"][\"score\"]\n", + "s = s.drop(columns=[\"score\"])\n", "s" ] }, @@ -1836,31 +1846,31 @@ ], "source": [ "# dir(s['score'].dtype.num)\n", - "display(type(s['datetime'].dtype))\n", - "if not s['datetime'].dtype == 'datetime64':\n", - " s['datetime'] = pandas.to_datetime(s['datetime'], utc=True)\n", + "display(type(s[\"datetime\"].dtype))\n", + "if not s[\"datetime\"].dtype == \"datetime64\":\n", + " s[\"datetime\"] = pandas.to_datetime(s[\"datetime\"], utc=True)\n", "# s['datetime'] = pandas.to_datetime(s['datetime'], utc=True)\n", "\n", - " \n", + "\n", "# if not pandas.api.types.is_numeric_dtype(s['score'].dtype):\n", "# s['score'] = pandas.to_numeric(s['score'])\n", - "if not pandas.api.types.is_numeric_dtype(s['lan_errors'].dtype):\n", - " s['lan_errors'] = pandas.to_numeric(s['lan_errors'])\n", - "if not pandas.api.types.is_numeric_dtype(s['wan_errors'].dtype):\n", - " s['wan_errors'] = pandas.to_numeric(s['wan_errors'])\n", + "if not pandas.api.types.is_numeric_dtype(s[\"lan_errors\"].dtype):\n", + " s[\"lan_errors\"] = 
pandas.to_numeric(s[\"lan_errors\"])\n", + "if not pandas.api.types.is_numeric_dtype(s[\"wan_errors\"].dtype):\n", + " s[\"wan_errors\"] = pandas.to_numeric(s[\"wan_errors\"])\n", "\n", "# display(s)\n", - " \n", - "s_gb = s.groupby('datetime').mean(numeric_only=True)\n", + "\n", + "s_gb = s.groupby(\"datetime\").mean(numeric_only=True)\n", "\n", "# display(s_gb.index)\n", "cnt_ts = s_gb.resample(timedelta(minutes=60)).mean()\n", "\n", - "ax = cnt_ts.plot(figsize=(100,2))\n", + "ax = cnt_ts.plot(figsize=(100, 2))\n", "xticks = pandas.date_range(start=cnt_ts.index.min(), end=cnt_ts.index.max(), freq=timedelta(days=1))\n", "# display(xticks)\n", "ax.set_xticks(xticks.to_pydatetime())\n", - "ax.set_xticklabels([x.strftime('%D') for x in xticks], rotation=77)\n", + "ax.set_xticklabels([x.strftime(\"%D\") for x in xticks], rotation=77)\n", "# ax.xticks(rotation=90);\n", "display(ax)\n", "# ax" @@ -2287,43 +2297,61 @@ } ], "source": [ - "\n", - "\n", - "branc_box_cam1_filedatetimes_df = pandas.read_csv('brancol_box_cam1_filedatetimes.txt', names=['datetime'])\n", - "branc_box_cam1_filedatetimes_df['datetime'] = pandas.to_datetime(branc_box_cam1_filedatetimes_df['datetime'], utc=True)\n", - "branc_box_cam1_filedatetimes_df['cam1'] = 1\n", - "branc_box_cam1_filedatetimes_df.index = branc_box_cam1_filedatetimes_df['datetime']\n", - "branc_box_cam1_filedatetimes_df = branc_box_cam1_filedatetimes_df[['cam1']]\n", - "branc_box_cam2_filedatetimes_df = pandas.read_csv('brancol_box_cam2_filedatetimes.txt', names=['datetime'])\n", - "branc_box_cam2_filedatetimes_df['datetime'] = pandas.to_datetime(branc_box_cam2_filedatetimes_df['datetime'], utc=True)\n", - "branc_box_cam2_filedatetimes_df['cam2'] = 1\n", - "branc_box_cam2_filedatetimes_df.index = branc_box_cam2_filedatetimes_df['datetime']\n", - "branc_box_cam2_filedatetimes_df = branc_box_cam2_filedatetimes_df[['cam2']]\n", - "\n", - "branc_box_filedatetimes_df = branc_box_cam1_filedatetimes_df.join(branc_box_cam2_filedatetimes_df, how='outer')\n", - "branc_box_filedatetimes_df = branc_box_filedatetimes_df.loc['2024']\n", + "branc_box_cam1_filedatetimes_df = pandas.read_csv(\n", + " \"brancol_box_cam1_filedatetimes.txt\", names=[\"datetime\"]\n", + ")\n", + "branc_box_cam1_filedatetimes_df[\"datetime\"] = pandas.to_datetime(\n", + " branc_box_cam1_filedatetimes_df[\"datetime\"], utc=True\n", + ")\n", + "branc_box_cam1_filedatetimes_df[\"cam1\"] = 1\n", + "branc_box_cam1_filedatetimes_df.index = branc_box_cam1_filedatetimes_df[\"datetime\"]\n", + "branc_box_cam1_filedatetimes_df = branc_box_cam1_filedatetimes_df[[\"cam1\"]]\n", + "branc_box_cam2_filedatetimes_df = pandas.read_csv(\n", + " \"brancol_box_cam2_filedatetimes.txt\", names=[\"datetime\"]\n", + ")\n", + "branc_box_cam2_filedatetimes_df[\"datetime\"] = pandas.to_datetime(\n", + " branc_box_cam2_filedatetimes_df[\"datetime\"], utc=True\n", + ")\n", + "branc_box_cam2_filedatetimes_df[\"cam2\"] = 1\n", + "branc_box_cam2_filedatetimes_df.index = branc_box_cam2_filedatetimes_df[\"datetime\"]\n", + "branc_box_cam2_filedatetimes_df = branc_box_cam2_filedatetimes_df[[\"cam2\"]]\n", + "\n", + "branc_box_filedatetimes_df = branc_box_cam1_filedatetimes_df.join(\n", + " branc_box_cam2_filedatetimes_df, how=\"outer\"\n", + ")\n", + "branc_box_filedatetimes_df = branc_box_filedatetimes_df.loc[\"2024\"]\n", "branc_box_filedatetimes_df = branc_box_filedatetimes_df.fillna(0)\n", "\n", - "branc_box_filedatetimes_df.to_pickle('brancol_box_filedatetimes.pickle')\n", - "\n", - "stp_box_cam1_filedatetimes_df = 
pandas.read_csv('stpatrick_box_cam1_filedatetimes.txt', names=['datetime'])\n", - "stp_box_cam1_filedatetimes_df['datetime'] = pandas.to_datetime(stp_box_cam1_filedatetimes_df['datetime'], utc=True)\n", - "stp_box_cam1_filedatetimes_df['cam1'] = 1\n", - "stp_box_cam1_filedatetimes_df.index = stp_box_cam1_filedatetimes_df['datetime']\n", - "stp_box_cam1_filedatetimes_df = stp_box_cam1_filedatetimes_df[['cam1']]\n", - "\n", - "stp_box_cam2_filedatetimes_df = pandas.read_csv('stpatrick_box_cam2_filedatetimes.txt', names=['datetime'])\n", - "stp_box_cam2_filedatetimes_df['datetime'] = pandas.to_datetime(stp_box_cam2_filedatetimes_df['datetime'], utc=True)\n", - "stp_box_cam2_filedatetimes_df['cam2'] = 1\n", - "stp_box_cam2_filedatetimes_df.index = stp_box_cam2_filedatetimes_df['datetime']\n", - "stp_box_cam2_filedatetimes_df = stp_box_cam2_filedatetimes_df[['cam2']]\n", - "\n", - "\n", - "stp_box_filedatetimes_df = stp_box_cam1_filedatetimes_df.join(stp_box_cam2_filedatetimes_df, how='outer')\n", - "stp_box_filedatetimes_df = stp_box_filedatetimes_df.loc['2024']\n", + "branc_box_filedatetimes_df.to_pickle(\"brancol_box_filedatetimes.pickle\")\n", + "\n", + "stp_box_cam1_filedatetimes_df = pandas.read_csv(\n", + " \"stpatrick_box_cam1_filedatetimes.txt\", names=[\"datetime\"]\n", + ")\n", + "stp_box_cam1_filedatetimes_df[\"datetime\"] = pandas.to_datetime(\n", + " stp_box_cam1_filedatetimes_df[\"datetime\"], utc=True\n", + ")\n", + "stp_box_cam1_filedatetimes_df[\"cam1\"] = 1\n", + "stp_box_cam1_filedatetimes_df.index = stp_box_cam1_filedatetimes_df[\"datetime\"]\n", + "stp_box_cam1_filedatetimes_df = stp_box_cam1_filedatetimes_df[[\"cam1\"]]\n", + "\n", + "stp_box_cam2_filedatetimes_df = pandas.read_csv(\n", + " \"stpatrick_box_cam2_filedatetimes.txt\", names=[\"datetime\"]\n", + ")\n", + "stp_box_cam2_filedatetimes_df[\"datetime\"] = pandas.to_datetime(\n", + " stp_box_cam2_filedatetimes_df[\"datetime\"], utc=True\n", + ")\n", + "stp_box_cam2_filedatetimes_df[\"cam2\"] = 1\n", + "stp_box_cam2_filedatetimes_df.index = stp_box_cam2_filedatetimes_df[\"datetime\"]\n", + "stp_box_cam2_filedatetimes_df = stp_box_cam2_filedatetimes_df[[\"cam2\"]]\n", + "\n", + "\n", + "stp_box_filedatetimes_df = stp_box_cam1_filedatetimes_df.join(\n", + " stp_box_cam2_filedatetimes_df, how=\"outer\"\n", + ")\n", + "stp_box_filedatetimes_df = stp_box_filedatetimes_df.loc[\"2024\"]\n", "stp_box_filedatetimes_df = stp_box_filedatetimes_df.fillna(0)\n", "\n", - "stp_box_filedatetimes_df.to_pickle('stpatrick_box_filedatetimes.pickle')\n" + "stp_box_filedatetimes_df.to_pickle(\"stpatrick_box_filedatetimes.pickle\")\n" ] }, { @@ -2354,9 +2382,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": \"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2364,24 +2395,28 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "df = 
pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n", "p = df.resample(timedelta(minutes=5)).sum()\n", "\n", "# p = p.loc[p.index >= '2024-02-17 00:00:00-06']\n", "# p = p.loc[p.index <= '2024-02-21 00:00:00-06']\n", - "p.index = p.index.map(lambda x: x.tz_convert('America/Costa_Rica'))\n", + "p.index = p.index.map(lambda x: x.tz_convert(\"America/Costa_Rica\"))\n", "# p.index\n", - "ax = p.plot(figsize=(100,2), label='Video Output')\n", + "ax = p.plot(figsize=(100, 2), label=\"Video Output\")\n", "# display_full(rs['2023-12-27 19'])\n", "\n", "first = True\n", - "for time in sunrisesunset['sunrise']:\n", - " ax.axvline(x=pandas.to_datetime(time), color='b', linestyle='--', label='sunrise' if first else None)\n", + "for time in sunrisesunset[\"sunrise\"]:\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"b\", linestyle=\"--\", label=\"sunrise\" if first else None\n", + " )\n", " first = False\n", "first = True\n", - "for time in sunrisesunset['sunset']:\n", - " ax.axvline(x=pandas.to_datetime(time), color='r', linestyle='--', label='sunset' if first else None)\n", + "for time in sunrisesunset[\"sunset\"]:\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"r\", linestyle=\"--\", label=\"sunset\" if first else None\n", + " )\n", " first = False\n", "\n", "# Show the plot\n", @@ -2416,9 +2451,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": \"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2426,27 +2464,35 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n", "p = df.resample(timedelta(minutes=5)).sum()\n", "\n", - "p = p.loc[p.index >= '2024-02-17 00:00:00-06']\n", - "p = p.loc[p.index <= '2024-02-21 00:00:00-06']\n", - "p.index = p.index.map(lambda x: x.tz_convert('America/Costa_Rica'))\n", + "p = p.loc[p.index >= \"2024-02-17 00:00:00-06\"]\n", + "p = p.loc[p.index <= \"2024-02-21 00:00:00-06\"]\n", + "p.index = p.index.map(lambda x: x.tz_convert(\"America/Costa_Rica\"))\n", "# p.index\n", - "ax = p.plot(figsize=(6,2), label='Video Output')\n", + "ax = p.plot(figsize=(6, 2), label=\"Video Output\")\n", "# display_full(rs['2023-12-27 19'])\n", "\n", - "sunrises = sunrisesunset['sunrise'].loc[(sunrisesunset['sunrise'] > '2024-02-17') & (sunrisesunset['sunrise'] < '2024-02-21')]\n", + "sunrises = sunrisesunset[\"sunrise\"].loc[\n", + " (sunrisesunset[\"sunrise\"] > \"2024-02-17\") & (sunrisesunset[\"sunrise\"] < \"2024-02-21\")\n", + "]\n", "\n", - "sunsets = sunrisesunset['sunset'].loc[(sunrisesunset['sunrise'] > '2024-02-17') & 
(sunrisesunset['sunrise'] < '2024-02-21')]\n", + "sunsets = sunrisesunset[\"sunset\"].loc[\n", + " (sunrisesunset[\"sunrise\"] > \"2024-02-17\") & (sunrisesunset[\"sunrise\"] < \"2024-02-21\")\n", + "]\n", "first = True\n", "for time in sunrises:\n", - " ax.axvline(x=pandas.to_datetime(time), color='b', linestyle='--', label='sunrise' if first else None)\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"b\", linestyle=\"--\", label=\"sunrise\" if first else None\n", + " )\n", " first = False\n", "first = True\n", "for time in sunsets:\n", - " ax.axvline(x=pandas.to_datetime(time), color='r', linestyle='--', label='sunset' if first else None)\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"r\", linestyle=\"--\", label=\"sunset\" if first else None\n", + " )\n", " first = False\n", "\n", "# Show the plot\n", @@ -2481,9 +2527,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": \"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2491,32 +2540,40 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n", "p = df.resample(timedelta(minutes=5)).sum()\n", "\n", - "p = p.loc[p.index >= '2024-02-09 00:00:00-06']\n", - "p = p.loc[p.index <= '2024-02-13 00:00:00-06']\n", - "p.index = p.index.map(lambda x: x.tz_convert('America/Costa_Rica'))\n", + "p = p.loc[p.index >= \"2024-02-09 00:00:00-06\"]\n", + "p = p.loc[p.index <= \"2024-02-13 00:00:00-06\"]\n", + "p.index = p.index.map(lambda x: x.tz_convert(\"America/Costa_Rica\"))\n", "# p.index\n", - "ax = p.plot(figsize=(9,2), label='Video Output')\n", + "ax = p.plot(figsize=(9, 2), label=\"Video Output\")\n", "# display_full(rs['2023-12-27 19'])\n", "\n", - "sunrises = sunrisesunset['sunrise'].loc[(sunrisesunset['sunrise'] > '2024-02-09') & (sunrisesunset['sunrise'] < '2024-02-13')]\n", + "sunrises = sunrisesunset[\"sunrise\"].loc[\n", + " (sunrisesunset[\"sunrise\"] > \"2024-02-09\") & (sunrisesunset[\"sunrise\"] < \"2024-02-13\")\n", + "]\n", "\n", - "sunsets = sunrisesunset['sunset'].loc[(sunrisesunset['sunrise'] > '2024-02-09') & (sunrisesunset['sunrise'] < '2024-02-13')]\n", + "sunsets = sunrisesunset[\"sunset\"].loc[\n", + " (sunrisesunset[\"sunrise\"] > \"2024-02-09\") & (sunrisesunset[\"sunrise\"] < \"2024-02-13\")\n", + "]\n", "first = True\n", "for time in sunrises:\n", - " ax.axvline(x=pandas.to_datetime(time), color='g', linestyle='-', label='sunrise' if first else None)\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"g\", linestyle=\"-\", label=\"sunrise\" if first else None\n", + " )\n", " first = False\n", "first = True\n", "for time in sunsets:\n", - " ax.axvline(x=pandas.to_datetime(time), color='r', linestyle='-', 
label='sunset' if first else None)\n", + " ax.axvline(\n", + " x=pandas.to_datetime(time), color=\"r\", linestyle=\"-\", label=\"sunset\" if first else None\n", + " )\n", " first = False\n", "\n", "ax.legend()\n", "ax.set_yticks([])\n", - "ax.set_xlabel('')" + "ax.set_xlabel(\"\")" ] }, { @@ -2635,9 +2692,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": \"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2645,13 +2705,16 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "recorded_sum=0\n", - "triptime_sum=0\n", + "recorded_sum = 0\n", + "triptime_sum = 0\n", "\n", "\n", - "df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "brancol_trip1_startend = (parser.parse('2024-01-05 16:14:42Z'), parser.parse('2024-01-26 01:35:17Z'),)\n", + "brancol_trip1_startend = (\n", + " parser.parse(\"2024-01-05 16:14:42Z\"),\n", + " parser.parse(\"2024-01-26 01:35:17Z\"),\n", + ")\n", "brancoltrip1 = df.loc[df.index > brancol_trip1_startend[0]]\n", "brancoltrip1 = brancoltrip1.loc[brancoltrip1.index < brancol_trip1_startend[1]]\n", "brancoltrip1.loc[brancol_trip1_startend[0]] = 0\n", @@ -2660,9 +2723,12 @@ "p = brancoltrip1.resample(timedelta(minutes=5)).sum()\n", "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('brancol trip 1: ', p.sum() / len(p))\n", + "display(\"brancol trip 1: \", p.sum() / len(p))\n", "\n", - "brancol_trip2_startend = (parser.parse('2024-02-03 17:16:47Z'), parser.parse('2024-02-26 23:32:03Z') + timedelta(hours=7.0966),)\n", + "brancol_trip2_startend = (\n", + " parser.parse(\"2024-02-03 17:16:47Z\"),\n", + " parser.parse(\"2024-02-26 23:32:03Z\") + timedelta(hours=7.0966),\n", + ")\n", "\n", "brancoltrip2 = df.loc[df.index > brancol_trip2_startend[0]]\n", "brancoltrip2 = brancoltrip2.loc[brancoltrip2.index < brancol_trip2_startend[1]]\n", @@ -2670,12 +2736,15 @@ "brancoltrip2.loc[brancol_trip2_startend[1]] = 0\n", "# brancoltrip1\n", "p = brancoltrip2.resample(timedelta(minutes=5)).sum()\n", - "recorded_sum += p.sum() \n", + "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('brancol trip 2: ', p.sum() / len(p))\n", + "display(\"brancol trip 2: \", p.sum() / len(p))\n", "\n", "\n", - "brancol_trip3_startend = (parser.parse('2024-03-05 15:26:13Z'), parser.parse('2024-04-01 01:21:47Z') + timedelta(hours=8.4725),)\n", + "brancol_trip3_startend = (\n", + " parser.parse(\"2024-03-05 15:26:13Z\"),\n", + " parser.parse(\"2024-04-01 01:21:47Z\") + timedelta(hours=8.4725),\n", + ")\n", "\n", "brancoltrip3 = df.loc[df.index > brancol_trip3_startend[0]]\n", "brancoltrip3 = brancoltrip3.loc[brancoltrip3.index < brancol_trip3_startend[1]]\n", @@ -2683,15 +2752,18 @@ "brancoltrip3.loc[brancol_trip3_startend[1]] = 0\n", "# 
brancoltrip3\n", "p = brancoltrip3.resample(timedelta(minutes=5)).sum()\n", - "recorded_sum += p.sum() \n", + "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('brancol trip 3: ', p.sum() / len(p))\n", + "display(\"brancol trip 3: \", p.sum() / len(p))\n", "\n", "\n", - "df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", + "df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", "df = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", "\n", - "stpatrick_trip1_startend = (parser.parse('2024-01-03 13:22:25Z'), parser.parse('2024-01-28 06:40:04Z'),)\n", + "stpatrick_trip1_startend = (\n", + " parser.parse(\"2024-01-03 13:22:25Z\"),\n", + " parser.parse(\"2024-01-28 06:40:04Z\"),\n", + ")\n", "\n", "\n", "stpatricktrip1 = df.loc[df.index > stpatrick_trip1_startend[0]]\n", @@ -2701,12 +2773,15 @@ "# brancoltrip3\n", "p = stpatricktrip1.resample(timedelta(minutes=5)).sum()\n", "# display_full(p)\n", - "recorded_sum += p.sum() \n", + "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('stpatrick trip 1: ', p.sum() / len(p))\n", + "display(\"stpatrick trip 1: \", p.sum() / len(p))\n", "\n", "# 10.9 miles / 6 knots = 1.57 hours\n", - "stpatrick_trip2_startend = (parser.parse('2024-02-03 13:44:11Z'), parser.parse('2024-02-28 03:42:55Z') + timedelta(hours=1.578),)\n", + "stpatrick_trip2_startend = (\n", + " parser.parse(\"2024-02-03 13:44:11Z\"),\n", + " parser.parse(\"2024-02-28 03:42:55Z\") + timedelta(hours=1.578),\n", + ")\n", "\n", "stpatricktrip2 = df.loc[df.index > stpatrick_trip2_startend[0]]\n", "stpatricktrip2 = stpatricktrip2.loc[stpatricktrip2.index < stpatrick_trip2_startend[1]]\n", @@ -2714,12 +2789,12 @@ "stpatricktrip2.loc[stpatrick_trip2_startend[1]] = 0\n", "# brancoltrip3\n", "p = stpatricktrip2.resample(timedelta(minutes=5)).sum()\n", - "recorded_sum += p.sum() \n", + "recorded_sum += p.sum()\n", "triptime_sum += len(p)\n", - "display('stpatrick trip 2: ', p.sum() / len(p))\n", + "display(\"stpatrick trip 2: \", p.sum() / len(p))\n", "\n", "display(\"recording uptime for all trips:\")\n", - "display(f\"{(recorded_sum / triptime_sum * 100):.1f}%\" )" + "display(f\"{(recorded_sum / triptime_sum * 100):.1f}%\")" ] }, { @@ -2730,12 +2805,18 @@ "outputs": [], "source": [ "ts = pandas.DataFrame()\n", - "ts['ts'] = [sunrisesunset.index.min().replace(tzinfo=crtz), sunrisesunset.index.max().replace(tzinfo=crtz)] \n", + "ts[\"ts\"] = [\n", + " sunrisesunset.index.min().replace(tzinfo=crtz),\n", + " sunrisesunset.index.max().replace(tzinfo=crtz),\n", + "]\n", "\n", - "ts.index = ts['ts']\n", + "ts.index = ts[\"ts\"]\n", "ts = ts.resample(timedelta(minutes=5)).sum()\n", "type(sunrisesunset.index[0])\n", - "ts['is_daytime'] = ts.index.map(lambda x: sunrisesunset.loc[pandas.Timestamp(x.date())]['sunrise'] < x and sunrisesunset.loc[pandas.Timestamp(x.date())]['sunset'] > x )\n", + "ts[\"is_daytime\"] = ts.index.map(\n", + " lambda x: sunrisesunset.loc[pandas.Timestamp(x.date())][\"sunrise\"] < x\n", + " and sunrisesunset.loc[pandas.Timestamp(x.date())][\"sunset\"] > x\n", + ")\n", "is_daytime = ts" ] }, @@ -2837,9 +2918,12 @@ } ], "source": [ - "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle').rename(columns={'cam1': 'stp_cam1', 'cam2': 'stp_cam2'})\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle').rename(columns={'cam1': 'bra_cam1', 'cam2': 'bra_cam2'})\n", + "stp_box_filedatetimes_df = 
pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"stp_cam1\", \"cam2\": \"stp_cam2\"}\n", + ")\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\").rename(\n", + " columns={\"cam1\": \"bra_cam1\", \"cam2\": \"bra_cam2\"}\n", + ")\n", "\n", "# aa = branc_box_filedatetimes_df.join(stp_box_filedatetimes_df, how='outer')\n", "# aa = aa.fillna(0)\n", @@ -2847,55 +2931,72 @@ "# aa\n", "# branc_box_filedatetimes_df\n", "\n", - "recorded_sum=0\n", - "triptime_sum=0\n", - "\n", - "\n", - "df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", - "df['bothcams'] = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "brancol_trip1_startend = (parser.parse('2024-01-05 16:14:42Z'), parser.parse('2024-01-26 01:35:17Z'),)\n", - "brancol_trip2_startend = (parser.parse('2024-02-03 17:16:47Z'), parser.parse('2024-02-26 23:32:03Z') + timedelta(hours=7.0966),)\n", - "brancol_trip3_startend = (parser.parse('2024-03-05 15:26:13Z'), parser.parse('2024-04-01 01:21:47Z') + timedelta(hours=8.4725),)\n", + "recorded_sum = 0\n", + "triptime_sum = 0\n", + "\n", + "\n", + "df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", + "df[\"bothcams\"] = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", + "brancol_trip1_startend = (\n", + " parser.parse(\"2024-01-05 16:14:42Z\"),\n", + " parser.parse(\"2024-01-26 01:35:17Z\"),\n", + ")\n", + "brancol_trip2_startend = (\n", + " parser.parse(\"2024-02-03 17:16:47Z\"),\n", + " parser.parse(\"2024-02-26 23:32:03Z\") + timedelta(hours=7.0966),\n", + ")\n", + "brancol_trip3_startend = (\n", + " parser.parse(\"2024-03-05 15:26:13Z\"),\n", + " parser.parse(\"2024-04-01 01:21:47Z\") + timedelta(hours=8.4725),\n", + ")\n", "df.loc[brancol_trip1_startend[0]] = 0\n", "df.loc[brancol_trip3_startend[1]] = 0\n", "df = df.resample(timedelta(minutes=5)).sum()\n", - "df = df.loc[(df.index > brancol_trip1_startend[0] ) & (df.index < brancol_trip1_startend[1]) \\\n", - " | (df.index > brancol_trip2_startend[0] ) & (df.index < brancol_trip2_startend[1]) \\\n", - " | (df.index > brancol_trip3_startend[0] ) & (df.index < brancol_trip3_startend[1]) ]\n", - "\n", + "df = df.loc[\n", + " (df.index > brancol_trip1_startend[0]) & (df.index < brancol_trip1_startend[1])\n", + " | (df.index > brancol_trip2_startend[0]) & (df.index < brancol_trip2_startend[1])\n", + " | (df.index > brancol_trip3_startend[0]) & (df.index < brancol_trip3_startend[1])\n", + "]\n", "\n", "\n", "df = df.join(is_daytime)\n", "\n", "df = df.loc[df.is_daytime]\n", - "display('brancol bothcams upimecount', df['bothcams'].sum())\n", - "display('brancol bothcams daytimecount', len(df['bothcams']))\n", - "display(\"brancol uptime during sunrisesunset\", df['bothcams'].sum() / len(df['bothcams']))\n", - "\n", - "numeratorsum = df['bothcams'].sum()\n", - "denominatorsum = len(df['bothcams'])\n", - "\n", - "\n", - "df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", - "df['bothcams'] = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "stpatrick_trip1_startend = (parser.parse('2024-01-03 13:22:25Z'), parser.parse('2024-01-28 06:40:04Z'),)\n", - "stpatrick_trip2_startend = (parser.parse('2024-02-03 13:44:11Z'), parser.parse('2024-02-28 03:42:55Z') + timedelta(hours=1.578),)\n", + "display(\"brancol bothcams upimecount\", df[\"bothcams\"].sum())\n", + "display(\"brancol bothcams daytimecount\", len(df[\"bothcams\"]))\n", + "display(\"brancol uptime during 
sunrisesunset\", df[\"bothcams\"].sum() / len(df[\"bothcams\"]))\n", + "\n", + "numeratorsum = df[\"bothcams\"].sum()\n", + "denominatorsum = len(df[\"bothcams\"])\n", + "\n", + "\n", + "df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", + "df[\"bothcams\"] = df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", + "stpatrick_trip1_startend = (\n", + " parser.parse(\"2024-01-03 13:22:25Z\"),\n", + " parser.parse(\"2024-01-28 06:40:04Z\"),\n", + ")\n", + "stpatrick_trip2_startend = (\n", + " parser.parse(\"2024-02-03 13:44:11Z\"),\n", + " parser.parse(\"2024-02-28 03:42:55Z\") + timedelta(hours=1.578),\n", + ")\n", "df.loc[stpatrick_trip1_startend[0]] = 0\n", "df.loc[stpatrick_trip2_startend[1]] = 0\n", "df = df.resample(timedelta(minutes=5)).sum()\n", - "df = df.loc[(df.index > stpatrick_trip1_startend[0] ) & (df.index < stpatrick_trip1_startend[1]) \\\n", - " | (df.index > stpatrick_trip2_startend[0] ) & (df.index < stpatrick_trip2_startend[1]) ]\n", - "\n", + "df = df.loc[\n", + " (df.index > stpatrick_trip1_startend[0]) & (df.index < stpatrick_trip1_startend[1])\n", + " | (df.index > stpatrick_trip2_startend[0]) & (df.index < stpatrick_trip2_startend[1])\n", + "]\n", "\n", "\n", "df = df.join(is_daytime)\n", "\n", "df = df.loc[df.is_daytime]\n", "\n", - "display(\"stpatrick uptime during sunrisesunset\", df['bothcams'].sum() / len(df['bothcams']))\n", + "display(\"stpatrick uptime during sunrisesunset\", df[\"bothcams\"].sum() / len(df[\"bothcams\"]))\n", "\n", - "numeratorsum += df['bothcams'].sum()\n", - "denominatorsum += len(df['bothcams'])\n", + "numeratorsum += df[\"bothcams\"].sum()\n", + "denominatorsum += len(df[\"bothcams\"])\n", "\n", "\n", "display(\"alltrips uptime during sunrisesunset\", numeratorsum / denominatorsum)" @@ -2933,11 +3034,11 @@ "import re\n", "\n", "\n", - "brancol_fname = 'thalos_uptime_logs/sessions_brancol.csv'\n", + "brancol_fname = \"thalos_uptime_logs/sessions_brancol.csv\"\n", "# stpatrick_fname = 'thalos_uptime_logs/sessions_saintpatrick.csv'\n", "\n", "\n", - "r = re.compile('^\\d+(, .*){8}')\n", + "r = re.compile(\"^\\d+(, .*){8}\")\n", "brancol_lines = []\n", "with open(brancol_fname) as brancol_f:\n", " for l in brancol_f.readlines():\n", @@ -2946,31 +3047,48 @@ "# print(type(brancol_f), dir(brancol_f))\n", "\n", "# display(brancol_lines)\n", - " \n", - "thalos_uptime_logs_brancol = pandas.read_csv(io.StringIO(''.join(brancol_lines)), names=['id', 'datetime', 'boat', 'certus', 'status', 'i', 'j', 'name', 'ip'])\n", - " \n", + "\n", + "thalos_uptime_logs_brancol = pandas.read_csv(\n", + " io.StringIO(\"\".join(brancol_lines)),\n", + " names=[\"id\", \"datetime\", \"boat\", \"certus\", \"status\", \"i\", \"j\", \"name\", \"ip\"],\n", + ")\n", + "\n", "# thalos_uptime_logs_stpatrick = pandas.read_csv()\n", "\n", - "thalos_uptime_logs_brancol['datetime'] = pandas.to_datetime(thalos_uptime_logs_brancol['datetime'], utc=True)\n", - "thalos_uptime_logs_brancol = thalos_uptime_logs_brancol.loc[thalos_uptime_logs_brancol['datetime'] > '2024-01-01']\n", + "thalos_uptime_logs_brancol[\"datetime\"] = pandas.to_datetime(\n", + " thalos_uptime_logs_brancol[\"datetime\"], utc=True\n", + ")\n", + "thalos_uptime_logs_brancol = thalos_uptime_logs_brancol.loc[\n", + " thalos_uptime_logs_brancol[\"datetime\"] > \"2024-01-01\"\n", + "]\n", "\n", - "thalos_uptime_logs_brancol.index = thalos_uptime_logs_brancol['datetime']\n", + "thalos_uptime_logs_brancol.index = thalos_uptime_logs_brancol[\"datetime\"]\n", "\n", "# 
 "\n",
 "# display(thalos_uptime_logs_brancol.loc[parser.parse('2024-01-04 15:22:07')])\n",
 "\n",
- "thalos_uptime_logs_brancol.loc[parser.parse('2024-01-01 00:00Z')] = [0, parser.parse('2024-01-01'), ' brancol', ' cer1', ' down', 0, 0, ' pop-prd-pthalos000', ' ']\n",
+ "thalos_uptime_logs_brancol.loc[parser.parse(\"2024-01-01 00:00Z\")] = [\n",
+ "    0,\n",
+ "    parser.parse(\"2024-01-01\"),\n",
+ "    \" brancol\",\n",
+ "    \" cer1\",\n",
+ "    \" down\",\n",
+ "    0,\n",
+ "    0,\n",
+ "    \" pop-prd-pthalos000\",\n",
+ "    \" \",\n",
+ "]\n",
 "\n",
 "# dir(thalos_uptime_logs_brancol.resample(timedelta(minutes=5)))\n",
 "thalos_brancol_ts_df = thalos_uptime_logs_brancol.resample(timedelta(minutes=5)).last()\n",
 "# u\n",
- "thalos_brancol_ts_df['status'] = thalos_brancol_ts_df['status'].ffill()\n",
- "thalos_brancol_ts_df['up'] = thalos_brancol_ts_df['status'].map(lambda x: 1 if x==' up' else 0)\n",
+ "thalos_brancol_ts_df[\"status\"] = thalos_brancol_ts_df[\"status\"].ffill()\n",
+ "thalos_brancol_ts_df[\"up\"] = thalos_brancol_ts_df[\"status\"].map(lambda x: 1 if x == \" up\" else 0)\n",
 "\n",
- "thalos_brancol_ts_df.to_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n",
+ "thalos_brancol_ts_df.to_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n",
 "\n",
- "thalos_brancol_ts_df['up'].plot(figsize=(200,2))"
+ "thalos_brancol_ts_df[\"up\"].plot(figsize=(200, 2))"
 ]
 },
 {
@@ -3001,19 +3119,21 @@
 }
 ],
 "source": [
- "brancol_file_uptime = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n",
- "brancol_file_uptime.loc[parser.parse('2024-01-01 00:00Z')] = [0, 0]\n",
- "brancol_file_uptime['bothcamsup'] = brancol_file_uptime.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n",
+ "brancol_file_uptime = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n",
+ "brancol_file_uptime.loc[parser.parse(\"2024-01-01 00:00Z\")] = [0, 0]\n",
+ "brancol_file_uptime[\"bothcamsup\"] = brancol_file_uptime.apply(\n",
+ "    lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1\n",
+ ")\n",
 "# display(stpatrick_file_uptime)\n",
 "p = brancol_file_uptime.resample(timedelta(minutes=5)).last()\n",
- "p['bothcamsup'] = p['bothcamsup'].fillna(0)\n",
+ "p[\"bothcamsup\"] = p[\"bothcamsup\"].fillna(0)\n",
 "\n",
 "# p = p.loc[p.index >= '2024-02-17 00:00:00-06']\n",
 "# p = p.loc[p.index <= '2024-02-21 00:00:00-06']\n",
 "# p.index = p.index.map(lambda x: x.tz_convert('America/Costa_Rica'))\n",
 "\n",
- "p= p.join(thalos_brancol_ts_df['up'])\n",
- "p[['bothcamsup', 'up']].plot(figsize=(100,2))"
+ "p = p.join(thalos_brancol_ts_df[\"up\"])\n",
+ "p[[\"bothcamsup\", \"up\"]].plot(figsize=(100, 2))"
 ]
 },
 {
@@ -3038,7 +3158,7 @@
 }
 ],
 "source": [
- "disparities = p.loc[p['bothcamsup'] + p['up'] == 1]\n",
+ "disparities = p.loc[p[\"bothcamsup\"] + p[\"up\"] == 1]\n",
 "disparities.sum()"
 ]
 },
@@ -3075,10 +3195,10 @@
 "\n",
 "\n",
 "# brancol_fname = 'thalos_uptime_logs/sessions_brancol.csv'\n",
- "stpatrick_fname = 'thalos_uptime_logs/sessions_saintpatrick.csv'\n",
+ "stpatrick_fname = \"thalos_uptime_logs/sessions_saintpatrick.csv\"\n",
 "\n",
 "\n",
- "r = re.compile('^\d+(, .*){8}')\n",
+ "r = re.compile(r\"^\d+(, .*){8}\")\n",
 "stpatrick_lines = []\n",
 "with open(stpatrick_fname) as stpatrick_f:\n",
 "    for l in stpatrick_f.readlines():\n",
@@ -3087,31 +3207,50 @@
 "# print(type(brancol_f), dir(brancol_f))\n",
 "\n",
 "# display(brancol_lines)\n",
- "    \n",
- "thalos_uptime_logs_stpatrick = pandas.read_csv(io.StringIO(''.join(stpatrick_lines)), names=['id', 'datetime', 'boat', 'certus', 'status', 'i', 
'j', 'name', 'ip'])\n", - " \n", + "\n", + "thalos_uptime_logs_stpatrick = pandas.read_csv(\n", + " io.StringIO(\"\".join(stpatrick_lines)),\n", + " names=[\"id\", \"datetime\", \"boat\", \"certus\", \"status\", \"i\", \"j\", \"name\", \"ip\"],\n", + ")\n", + "\n", "# thalos_uptime_logs_stpatrick = pandas.read_csv()\n", "\n", - "thalos_uptime_logs_stpatrick['datetime'] = pandas.to_datetime(thalos_uptime_logs_stpatrick['datetime'], utc=True)\n", - "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[thalos_uptime_logs_stpatrick['datetime'] > '2024-01-01']\n", + "thalos_uptime_logs_stpatrick[\"datetime\"] = pandas.to_datetime(\n", + " thalos_uptime_logs_stpatrick[\"datetime\"], utc=True\n", + ")\n", + "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[\n", + " thalos_uptime_logs_stpatrick[\"datetime\"] > \"2024-01-01\"\n", + "]\n", "\n", - "thalos_uptime_logs_stpatrick.index = thalos_uptime_logs_stpatrick['datetime']\n", + "thalos_uptime_logs_stpatrick.index = thalos_uptime_logs_stpatrick[\"datetime\"]\n", "\n", "# display(thalos_uptime_logs_brancol.index[4] )\n", "\n", "# display(thalos_uptime_logs_brancol.loc[parser.parse('2024-01-04 15:22:07')])\n", "\n", - "thalos_uptime_logs_stpatrick.loc[parser.parse('2024-01-01 00:00Z')] = [0, parser.parse('2024-01-01'), ' saintpatrick', ' cer1', ' down', 0, 0, ' pop-prd-pthalos000', ' ']\n", + "thalos_uptime_logs_stpatrick.loc[parser.parse(\"2024-01-01 00:00Z\")] = [\n", + " 0,\n", + " parser.parse(\"2024-01-01\"),\n", + " \" saintpatrick\",\n", + " \" cer1\",\n", + " \" down\",\n", + " 0,\n", + " 0,\n", + " \" pop-prd-pthalos000\",\n", + " \" \",\n", + "]\n", "\n", "# dir(thalos_uptime_logs_brancol.resample(timedelta(minutes=5)))\n", "thalos_stpatrick_ts_df = thalos_uptime_logs_stpatrick.resample(timedelta(minutes=5)).last()\n", "# u\n", - "thalos_stpatrick_ts_df['status'] = thalos_stpatrick_ts_df['status'].ffill()\n", - "thalos_stpatrick_ts_df['up'] = thalos_stpatrick_ts_df['status'].map(lambda x: 1 if x==' up' else 0)\n", + "thalos_stpatrick_ts_df[\"status\"] = thalos_stpatrick_ts_df[\"status\"].ffill()\n", + "thalos_stpatrick_ts_df[\"up\"] = thalos_stpatrick_ts_df[\"status\"].map(\n", + " lambda x: 1 if x == \" up\" else 0\n", + ")\n", "\n", - "thalos_stpatrick_ts_df.to_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thalos_stpatrick_ts_df.to_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "\n", - "thalos_stpatrick_ts_df['up'].plot(figsize=(200,2))" + "thalos_stpatrick_ts_df[\"up\"].plot(figsize=(200, 2))" ] }, { @@ -3142,20 +3281,21 @@ } ], "source": [ - "\n", - "stpatrick_file_uptime = pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", - "stpatrick_file_uptime.loc[parser.parse('2024-01-01 00:00Z')] = [0, 0]\n", - "stpatrick_file_uptime['bothcamsup'] = stpatrick_file_uptime.apply(lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1)\n", + "stpatrick_file_uptime = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", + "stpatrick_file_uptime.loc[parser.parse(\"2024-01-01 00:00Z\")] = [0, 0]\n", + "stpatrick_file_uptime[\"bothcamsup\"] = stpatrick_file_uptime.apply(\n", + " lambda x: x.cam1 > 0 and x.cam2 > 0, axis=1\n", + ")\n", "# display(stpatrick_file_uptime)\n", "q = stpatrick_file_uptime.resample(timedelta(minutes=5)).last()\n", - "q['bothcamsup'] = q['bothcamsup'].fillna(0)\n", + "q[\"bothcamsup\"] = q[\"bothcamsup\"].fillna(0)\n", "\n", "# p = p.loc[p.index >= '2024-02-17 00:00:00-06']\n", "# p = p.loc[p.index <= '2024-02-21 00:00:00-06']\n", "# p.index = p.index.map(lambda x: 
x.tz_convert('America/Costa_Rica'))\n", "\n", - "q= q.join(thalos_stpatrick_ts_df['up'])\n", - "q[['bothcamsup', 'up']].plot(figsize=(100,2))" + "q = q.join(thalos_stpatrick_ts_df[\"up\"])\n", + "q[[\"bothcamsup\", \"up\"]].plot(figsize=(100, 2))" ] }, { @@ -3180,7 +3320,7 @@ } ], "source": [ - "disparities = q.loc[q['bothcamsup'] + q['up'] == 1]\n", + "disparities = q.loc[q[\"bothcamsup\"] + q[\"up\"] == 1]\n", "disparities.sum()" ] }, @@ -3201,17 +3341,22 @@ } ], "source": [ - "thalos_system_logs_as_baseline = p.loc[p['up'] == 1]\n", - "br_numerator = thalos_system_logs_as_baseline['bothcamsup'].sum()\n", + "thalos_system_logs_as_baseline = p.loc[p[\"up\"] == 1]\n", + "br_numerator = thalos_system_logs_as_baseline[\"bothcamsup\"].sum()\n", "br_denominator = len(thalos_system_logs_as_baseline)\n", - "print('brancol video uptime with thalos system logs as baseline', br_numerator / br_denominator)\n", + "print(\"brancol video uptime with thalos system logs as baseline\", br_numerator / br_denominator)\n", "\n", - "thalos_system_logs_as_baseline = q.loc[q['up'] == 1]\n", - "stp_numerator = thalos_system_logs_as_baseline['bothcamsup'].sum()\n", + "thalos_system_logs_as_baseline = q.loc[q[\"up\"] == 1]\n", + "stp_numerator = thalos_system_logs_as_baseline[\"bothcamsup\"].sum()\n", "stp_denominator = len(thalos_system_logs_as_baseline)\n", - "print('saintpatrick video uptime with thalos system logs as baseline', stp_numerator / stp_denominator)\n", - "\n", - "print('all trips with thalos system logs as a baseline', (br_numerator + stp_numerator) / (br_denominator+stp_denominator))" + "print(\n", + " \"saintpatrick video uptime with thalos system logs as baseline\", stp_numerator / stp_denominator\n", + ")\n", + "\n", + "print(\n", + " \"all trips with thalos system logs as a baseline\",\n", + " (br_numerator + stp_numerator) / (br_denominator + stp_denominator),\n", + ")" ] }, { @@ -3259,70 +3404,107 @@ } ], "source": [ - "\n", - "branc_box_filedatetimes_df = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", - "\n", - "brancol_trip1_startend = (parser.parse('2024-01-05 16:14:42Z'), parser.parse('2024-01-26 01:35:17Z'),)\n", - "brancol_trip2_startend = (parser.parse('2024-02-03 17:16:47Z'), parser.parse('2024-02-26 23:32:03Z') + timedelta(hours=7.0966),)\n", - "brancol_trip3_startend = (parser.parse('2024-03-05 15:26:13Z'), parser.parse('2024-04-01 01:21:47Z') + timedelta(hours=8.4725),)\n", + "branc_box_filedatetimes_df = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", + "\n", + "brancol_trip1_startend = (\n", + " parser.parse(\"2024-01-05 16:14:42Z\"),\n", + " parser.parse(\"2024-01-26 01:35:17Z\"),\n", + ")\n", + "brancol_trip2_startend = (\n", + " parser.parse(\"2024-02-03 17:16:47Z\"),\n", + " parser.parse(\"2024-02-26 23:32:03Z\") + timedelta(hours=7.0966),\n", + ")\n", + "brancol_trip3_startend = (\n", + " parser.parse(\"2024-03-05 15:26:13Z\"),\n", + " parser.parse(\"2024-04-01 01:21:47Z\") + timedelta(hours=8.4725),\n", + ")\n", "branc_box_filedatetimes_df.loc[brancol_trip1_startend[0]] = 0\n", "branc_box_filedatetimes_df.loc[brancol_trip3_startend[1]] = 0\n", "branc_box_filedatetimes_df = branc_box_filedatetimes_df.resample(timedelta(minutes=5)).last()\n", - "branc_box_filedatetimes_df['trip1'] = (branc_box_filedatetimes_df.index > brancol_trip1_startend[0] ) & (branc_box_filedatetimes_df.index < brancol_trip1_startend[1]) \n", - "branc_box_filedatetimes_df['trip2'] = (branc_box_filedatetimes_df.index > brancol_trip2_startend[0] ) & 
(branc_box_filedatetimes_df.index < brancol_trip2_startend[1]) \n", - "branc_box_filedatetimes_df['trip3'] = (branc_box_filedatetimes_df.index > brancol_trip3_startend[0] ) & (branc_box_filedatetimes_df.index < brancol_trip3_startend[1])\n", - "branc_box_filedatetimes_df['cam1'] = branc_box_filedatetimes_df['cam1'].fillna(0)\n", - "branc_box_filedatetimes_df['cam2'] = branc_box_filedatetimes_df['cam2'].fillna(0)\n", - "branc_box_filedatetimes_df['bothcams'] = branc_box_filedatetimes_df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "sr = sunrisesunset[['sunrise']]\n", - "sr['a'] = 1\n", - "sr = sr.set_index('sunrise')\n", + "branc_box_filedatetimes_df[\"trip1\"] = (\n", + " branc_box_filedatetimes_df.index > brancol_trip1_startend[0]\n", + ") & (branc_box_filedatetimes_df.index < brancol_trip1_startend[1])\n", + "branc_box_filedatetimes_df[\"trip2\"] = (\n", + " branc_box_filedatetimes_df.index > brancol_trip2_startend[0]\n", + ") & (branc_box_filedatetimes_df.index < brancol_trip2_startend[1])\n", + "branc_box_filedatetimes_df[\"trip3\"] = (\n", + " branc_box_filedatetimes_df.index > brancol_trip3_startend[0]\n", + ") & (branc_box_filedatetimes_df.index < brancol_trip3_startend[1])\n", + "branc_box_filedatetimes_df[\"cam1\"] = branc_box_filedatetimes_df[\"cam1\"].fillna(0)\n", + "branc_box_filedatetimes_df[\"cam2\"] = branc_box_filedatetimes_df[\"cam2\"].fillna(0)\n", + "branc_box_filedatetimes_df[\"bothcams\"] = branc_box_filedatetimes_df.apply(\n", + " lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1\n", + ")\n", + "sr = sunrisesunset[[\"sunrise\"]]\n", + "sr[\"a\"] = 1\n", + "sr = sr.set_index(\"sunrise\")\n", "sr = sr.resample(timedelta(minutes=5)).last()\n", - "ss = sunrisesunset[['sunset']]\n", - "ss['b'] = 0\n", - "ss = ss.set_index('sunset')\n", + "ss = sunrisesunset[[\"sunset\"]]\n", + "ss[\"b\"] = 0\n", + "ss = ss.set_index(\"sunset\")\n", "ss = ss.resample(timedelta(minutes=5)).last()\n", - "srss = sr.join(ss, how='outer')\n", + "srss = sr.join(ss, how=\"outer\")\n", "# sj['a'] = sj['a'].fillna(value=None)\n", "# sj['b'] = sj['b'].fillna(value=None)\n", "# display(sj)\n", "\n", - "srss['sunup'] = srss.apply(lambda x: x['a'] if pandas.notna(x['a']) else x['b'] if pandas.notna(x['b']) else None, axis=1)\n", - "srss['sunup'] = srss['sunup'].ffill()\n", + "srss[\"sunup\"] = srss.apply(\n", + " lambda x: x[\"a\"] if pandas.notna(x[\"a\"]) else x[\"b\"] if pandas.notna(x[\"b\"]) else None, axis=1\n", + ")\n", + "srss[\"sunup\"] = srss[\"sunup\"].ffill()\n", "\n", - "branc_box_filedatetimes_df = branc_box_filedatetimes_df.join(srss['sunup'])\n", + "branc_box_filedatetimes_df = branc_box_filedatetimes_df.join(srss[\"sunup\"])\n", "# display_full(branc_box_filedatetimes_df)\n", "\n", - "stp_box_filedatetimes_df = pandas.read_pickle('stpatrick_box_filedatetimes.pickle')\n", + "stp_box_filedatetimes_df = pandas.read_pickle(\"stpatrick_box_filedatetimes.pickle\")\n", "\n", - "stpatrick_trip1_startend = (parser.parse('2024-01-03 13:22:25Z'), parser.parse('2024-01-28 06:40:04Z'),)\n", - "stpatrick_trip2_startend = (parser.parse('2024-02-03 13:44:11Z'), parser.parse('2024-02-28 03:42:55Z') + timedelta(hours=1.578),)\n", + "stpatrick_trip1_startend = (\n", + " parser.parse(\"2024-01-03 13:22:25Z\"),\n", + " parser.parse(\"2024-01-28 06:40:04Z\"),\n", + ")\n", + "stpatrick_trip2_startend = (\n", + " parser.parse(\"2024-02-03 13:44:11Z\"),\n", + " parser.parse(\"2024-02-28 03:42:55Z\") + timedelta(hours=1.578),\n", + ")\n", 
"stp_box_filedatetimes_df.loc[stpatrick_trip1_startend[0]] = 0\n", "stp_box_filedatetimes_df.loc[stpatrick_trip2_startend[1]] = 0\n", "stp_box_filedatetimes_df = stp_box_filedatetimes_df.resample(timedelta(minutes=5)).last()\n", "\n", - "stp_box_filedatetimes_df['trip1'] = (stp_box_filedatetimes_df.index > stpatrick_trip1_startend[0] ) & (stp_box_filedatetimes_df.index < stpatrick_trip1_startend[1]) \n", - "stp_box_filedatetimes_df['trip2'] = (stp_box_filedatetimes_df.index > stpatrick_trip2_startend[0] ) & (stp_box_filedatetimes_df.index < stpatrick_trip2_startend[1]) \n", - "stp_box_filedatetimes_df['trip3'] = False\n", - "stp_box_filedatetimes_df['cam1'] = stp_box_filedatetimes_df['cam1'].fillna(0)\n", - "stp_box_filedatetimes_df['cam2'] = stp_box_filedatetimes_df['cam2'].fillna(0)\n", - "stp_box_filedatetimes_df['bothcams'] = stp_box_filedatetimes_df.apply(lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1)\n", - "\n", - "stp_box_filedatetimes_df = stp_box_filedatetimes_df.join(srss['sunup'])\n", + "stp_box_filedatetimes_df[\"trip1\"] = (\n", + " stp_box_filedatetimes_df.index > stpatrick_trip1_startend[0]\n", + ") & (stp_box_filedatetimes_df.index < stpatrick_trip1_startend[1])\n", + "stp_box_filedatetimes_df[\"trip2\"] = (\n", + " stp_box_filedatetimes_df.index > stpatrick_trip2_startend[0]\n", + ") & (stp_box_filedatetimes_df.index < stpatrick_trip2_startend[1])\n", + "stp_box_filedatetimes_df[\"trip3\"] = False\n", + "stp_box_filedatetimes_df[\"cam1\"] = stp_box_filedatetimes_df[\"cam1\"].fillna(0)\n", + "stp_box_filedatetimes_df[\"cam2\"] = stp_box_filedatetimes_df[\"cam2\"].fillna(0)\n", + "stp_box_filedatetimes_df[\"bothcams\"] = stp_box_filedatetimes_df.apply(\n", + " lambda x: 1 if x.cam1 > 0 and x.cam2 > 0 else 0, axis=1\n", + ")\n", + "\n", + "stp_box_filedatetimes_df = stp_box_filedatetimes_df.join(srss[\"sunup\"])\n", "# display_full(stp_box_filedatetimes_df)\n", "\n", + "\n", "class ReduceOutages:\n", " def __init__(self):\n", " self.outages = []\n", " self.outageStart = None\n", " self.last = None\n", + "\n", " def __call__(self, nx):\n", - "# print(dir(nx.index))\n", - "# if nx.Index == parser.parse('2024-02-06 23:25:00+00:00'):\n", - "# print(nx, self.last)\n", - " if not ((nx.trip1 or nx.trip2 or nx.trip3 ) and nx.sunup):\n", + " # print(dir(nx.index))\n", + " # if nx.Index == parser.parse('2024-02-06 23:25:00+00:00'):\n", + " # print(nx, self.last)\n", + " if not ((nx.trip1 or nx.trip2 or nx.trip3) and nx.sunup):\n", " if self.outageStart and self.last is not None:\n", - " self.outages.append((self.outageStart, nx.Index,))\n", + " self.outages.append(\n", + " (\n", + " self.outageStart,\n", + " nx.Index,\n", + " )\n", + " )\n", " self.outageStart = None\n", " self.last = None\n", " return\n", @@ -3334,51 +3516,68 @@ " if self.last.bothcams == 1 and nx.bothcams == 0:\n", " self.outageStart = nx.Index\n", " if self.last.bothcams == 0 and nx.bothcams == 1:\n", - " self.outages.append((self.outageStart, nx.Index,))\n", + " self.outages.append(\n", + " (\n", + " self.outageStart,\n", + " nx.Index,\n", + " )\n", + " )\n", " self.outageStart = None\n", " self.last = nx\n", - " \n", + "\n", + "\n", "branc_reduce_outages = ReduceOutages()\n", - "for i in branc_box_filedatetimes_df.itertuples(index=True, name='a'):\n", + "for i in branc_box_filedatetimes_df.itertuples(index=True, name=\"a\"):\n", " branc_reduce_outages(i)\n", "\n", "stp_reduce_outages = ReduceOutages()\n", - "for i in stp_box_filedatetimes_df.itertuples(index=True, name='a'):\n", + "for i in 
stp_box_filedatetimes_df.itertuples(index=True, name=\"a\"):\n", " stp_reduce_outages(i)\n", "\n", "# put the outages into a resample.first.ffill so that it can be plotted alongside uptime on the very long time graph\n", - "tmp = pandas.DataFrame([ x for (o_start, o_end) in branc_reduce_outages.outages for x in [{'datetime':o_start, 'outage':1, 'diff': o_end - o_start},{'datetime':o_end,'outage':0, 'diff':pandas.NA}]]).set_index('datetime')\n", + "tmp = pandas.DataFrame(\n", + " [\n", + " x\n", + " for (o_start, o_end) in branc_reduce_outages.outages\n", + " for x in [\n", + " {\"datetime\": o_start, \"outage\": 1, \"diff\": o_end - o_start},\n", + " {\"datetime\": o_end, \"outage\": 0, \"diff\": pandas.NA},\n", + " ]\n", + " ]\n", + ").set_index(\"datetime\")\n", "tmp = tmp.resample(timedelta(minutes=5)).first().ffill()\n", "\n", - "branc_box_filedatetimes_df['outage'] = tmp['outage']\n", - "branc_box_filedatetimes_df['diff'] = tmp['diff']\n", - "branc_box_filedatetimes_df['outage'] = branc_box_filedatetimes_df['outage'].fillna(0)\n", + "branc_box_filedatetimes_df[\"outage\"] = tmp[\"outage\"]\n", + "branc_box_filedatetimes_df[\"diff\"] = tmp[\"diff\"]\n", + "branc_box_filedatetimes_df[\"outage\"] = branc_box_filedatetimes_df[\"outage\"].fillna(0)\n", "# display(branc_box_filedatetimes_df[['bothcams','outage']].plot(figsize=(200,2)))\n", "\n", "all_outtages_df = pandas.DataFrame(branc_reduce_outages.outages + stp_reduce_outages.outages)\n", - "all_outtages_df['diff'] = all_outtages_df[1] - all_outtages_df[0]\n", - "binandlabels = [(timedelta(minutes=0), '0mins'),\n", - " (timedelta(minutes=5), '5mins'),\n", - " (timedelta(minutes=10),'10mins'),\n", - " (timedelta(minutes=15),'15mins'),\n", - " (timedelta(minutes=20), '20mins'),\n", - " (timedelta(minutes=40), '40mins'),\n", - " (timedelta(minutes=60), '60mins'),\n", - " (timedelta(minutes=80), '80mins'),\n", - " (timedelta(minutes=100), '100mins'),\n", - " (timedelta(minutes=120), '120mins'),\n", - " (timedelta(hours=3), '3hours'),\n", - " (timedelta(hours=4), '4hours'),\n", - " (timedelta(hours=5), '5hours'),\n", - " (timedelta(hours=6), '6hours'),\n", - " (timedelta(hours=8), '8hours'),\n", - " (timedelta(hours=10), '10hours'),\n", - " (timedelta(hours=12), '12hours'),]\n", + "all_outtages_df[\"diff\"] = all_outtages_df[1] - all_outtages_df[0]\n", + "binandlabels = [\n", + " (timedelta(minutes=0), \"0mins\"),\n", + " (timedelta(minutes=5), \"5mins\"),\n", + " (timedelta(minutes=10), \"10mins\"),\n", + " (timedelta(minutes=15), \"15mins\"),\n", + " (timedelta(minutes=20), \"20mins\"),\n", + " (timedelta(minutes=40), \"40mins\"),\n", + " (timedelta(minutes=60), \"60mins\"),\n", + " (timedelta(minutes=80), \"80mins\"),\n", + " (timedelta(minutes=100), \"100mins\"),\n", + " (timedelta(minutes=120), \"120mins\"),\n", + " (timedelta(hours=3), \"3hours\"),\n", + " (timedelta(hours=4), \"4hours\"),\n", + " (timedelta(hours=5), \"5hours\"),\n", + " (timedelta(hours=6), \"6hours\"),\n", + " (timedelta(hours=8), \"8hours\"),\n", + " (timedelta(hours=10), \"10hours\"),\n", + " (timedelta(hours=12), \"12hours\"),\n", + "]\n", "bins = [i[0] for i in binandlabels]\n", "labels = [i[1] for i in binandlabels][1:]\n", - "all_outtages_df['cut'] = pandas.cut(all_outtages_df['diff'], bins, labels=labels)\n", + "all_outtages_df[\"cut\"] = pandas.cut(all_outtages_df[\"diff\"], bins, labels=labels)\n", "\n", - "all_outtages_df.groupby('cut').count()['diff'].plot(kind='bar', figsize=(7,3), xlabel='' )\n" + 
"all_outtages_df.groupby(\"cut\").count()[\"diff\"].plot(kind=\"bar\", figsize=(7, 3), xlabel=\"\")\n" ] }, { @@ -3409,22 +3608,33 @@ } ], "source": [ - "# try to prove that captain behavior (long outages) accounted for most of the difference \n", + "# try to prove that captain behavior (long outages) accounted for most of the difference\n", "# between videoon/systemon and videoon/daylighton\n", "\n", - "sunupcount = ((branc_box_filedatetimes_df['trip1'] | branc_box_filedatetimes_df['trip2'] | branc_box_filedatetimes_df['trip3'] ) & branc_box_filedatetimes_df['sunup'] == 1).sum()\n", - "print('outagecount', (branc_box_filedatetimes_df['outage'] == 1).sum())\n", - "print('sunupcount', sunupcount)\n", + "sunupcount = (\n", + " (\n", + " branc_box_filedatetimes_df[\"trip1\"]\n", + " | branc_box_filedatetimes_df[\"trip2\"]\n", + " | branc_box_filedatetimes_df[\"trip3\"]\n", + " )\n", + " & branc_box_filedatetimes_df[\"sunup\"]\n", + " == 1\n", + ").sum()\n", + "print(\"outagecount\", (branc_box_filedatetimes_df[\"outage\"] == 1).sum())\n", + "print(\"sunupcount\", sunupcount)\n", "\n", - "print((sunupcount-1079)/sunupcount)\n", + "print((sunupcount - 1079) / sunupcount)\n", "\n", "\n", "# branc_box_filedatetimes_df.loc[branc_box_filedatetimes_df['diff'].map(pandas.notna)]\n", - "outagecount_minuslongoutages = ((branc_box_filedatetimes_df['outage'] == 1 ) & ((branc_box_filedatetimes_df['diff'] < timedelta(hours=2) ))).sum()\n", + "outagecount_minuslongoutages = (\n", + " (branc_box_filedatetimes_df[\"outage\"] == 1)\n", + " & (branc_box_filedatetimes_df[\"diff\"] < timedelta(hours=2))\n", + ").sum()\n", "\n", - "print('outagecount_minuslongoutages', outagecount_minuslongoutages)\n", + "print(\"outagecount_minuslongoutages\", outagecount_minuslongoutages)\n", "# print('sunupcount', ((branc_box_filedatetimes_df['trip1'] | branc_box_filedatetimes_df['trip2'] | branc_box_filedatetimes_df['trip3'] ) & branc_box_filedatetimes_df['sunup'] == 1).sum())\n", - "(sunupcount-outagecount_minuslongoutages)/sunupcount\n", + "(sunupcount - outagecount_minuslongoutages) / sunupcount\n", "\n", "# not compelling. 
89% -> 95% is not enough for me\n", "# I expected it to be closer to the 98% (from brancol video uptime with thalos system logs as baseline)" @@ -3437,36 +3647,42 @@ "metadata": {}, "outputs": [], "source": [ - "brancol_file_uptime = pandas.read_pickle('brancol_box_filedatetimes.pickle')\n", + "brancol_file_uptime = pandas.read_pickle(\"brancol_box_filedatetimes.pickle\")\n", "brancol_file_uptime = brancol_file_uptime.resample(timedelta(minutes=5)).sum()\n", - "branc_list_of_state_changes = brancol_file_uptime.join(brancol_file_uptime.shift(1),lsuffix='_next', rsuffix='_prev')\n", + "branc_list_of_state_changes = brancol_file_uptime.join(\n", + " brancol_file_uptime.shift(1), lsuffix=\"_next\", rsuffix=\"_prev\"\n", + ")\n", "# print(dir(branc_list_of_state_changes.index))\n", "branc_list_of_state_changes = branc_list_of_state_changes.loc[branc_list_of_state_changes.index[1:]]\n", + "\n", + "\n", "# branc_list_of_state_changes\n", "def newstate(x):\n", " retA = None\n", - " if x['cam1_prev'] < x['cam1_next']:\n", - " retA='up'\n", - " elif x['cam1_prev'] > x['cam1_next']:\n", - " retA='down'\n", - " \n", + " if x[\"cam1_prev\"] < x[\"cam1_next\"]:\n", + " retA = \"up\"\n", + " elif x[\"cam1_prev\"] > x[\"cam1_next\"]:\n", + " retA = \"down\"\n", + "\n", " retB = None\n", - " if x['cam2_prev'] < x['cam2_next']:\n", - " retB='up'\n", - " elif x['cam2_prev'] > x['cam2_next']:\n", - " retB='down'\n", - " \n", + " if x[\"cam2_prev\"] < x[\"cam2_next\"]:\n", + " retB = \"up\"\n", + " elif x[\"cam2_prev\"] > x[\"cam2_next\"]:\n", + " retB = \"down\"\n", + "\n", " if retA and retB and retA != retB:\n", - " return 'mixed'\n", + " return \"mixed\"\n", " elif retA:\n", " return retA\n", " elif retB:\n", " return retB\n", " else:\n", " return pandas.NA\n", - "branc_list_of_state_changes['newstate'] = branc_list_of_state_changes.apply(newstate, axis=1)\n", + "\n", + "\n", + "branc_list_of_state_changes[\"newstate\"] = branc_list_of_state_changes.apply(newstate, axis=1)\n", "# branc_list_of_state_changes\n", - "display(branc_list_of_state_changes.loc[pandas.notna(branc_list_of_state_changes['newstate'])])\n", + "display(branc_list_of_state_changes.loc[pandas.notna(branc_list_of_state_changes[\"newstate\"])])\n", "\n", "# brancol_file_uptime.loc[ (brancol_file_uptime['cam1'] == 0) & (brancol_file_uptime['cam2'] == 0) ]\n" ] diff --git a/notebooks/tnc-edge-vectorprocessing.ipynb b/notebooks/tnc-edge-vectorprocessing.ipynb index 8e970b9..78b776f 100644 --- a/notebooks/tnc-edge-vectorprocessing.ipynb +++ b/notebooks/tnc-edge-vectorprocessing.ipynb @@ -38,14 +38,14 @@ "\n", "aws_config = {}\n", "\n", - "aws_config['profile_name'] ='XXXXXXXX'\n", - "aws_config['region_name'] = 'us-east-1'\n", + "aws_config[\"profile_name\"] = \"XXXXXXXX\"\n", + "aws_config[\"region_name\"] = \"us-east-1\"\n", "\n", "import boto3\n", "\n", "boto3.setup_default_session(**aws_config)\n", "\n", - "s3 = boto3.client('s3')\n", + "s3 = boto3.client(\"s3\")\n", "\n", "# s3.list_objects(Bucket='51-gema-dev-dp-raw' , Prefix='tnc_edge/')\n", "\n", @@ -57,19 +57,20 @@ "from dateutil import parser\n", "import pytz\n", "\n", + "\n", "def display_full(x):\n", - " pandas.set_option('display.max_rows', 5000)\n", - " pandas.set_option('display.min_rows', 1000)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 5000)\n", 
+ " pandas.set_option(\"display.min_rows\", 1000)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -79,11 +80,14 @@ "metadata": {}, "outputs": [], "source": [ - "branc_equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where datetime > '2024-01-01' and vector_id='4'\", database='tnc_edge')\n", + "branc_equip_agg_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_tests where datetime > '2024-01-01' and vector_id='4'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "# equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '4' limit 10\", database='tnc_edge')\n", - "branc_equip_agg_df['datetime'] = pandas.to_datetime(branc_equip_agg_df['datetime'], utc=True)\n", + "branc_equip_agg_df[\"datetime\"] = pandas.to_datetime(branc_equip_agg_df[\"datetime\"], utc=True)\n", "\n", - "branc_equip_agg_df = branc_equip_agg_df.sort_values('datetime')\n" + "branc_equip_agg_df = branc_equip_agg_df.sort_values(\"datetime\")\n" ] }, { @@ -93,11 +97,14 @@ "metadata": {}, "outputs": [], "source": [ - "stpat_equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where datetime > '2024-01-01' and vector_id='4'\", database='tnc_edge')\n", + "stpat_equip_agg_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_tests where datetime > '2024-01-01' and vector_id='4'\",\n", + " database=\"tnc_edge\",\n", + ")\n", "# equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '4' limit 10\", database='tnc_edge')\n", - "stpat_equip_agg_df['datetime'] = pandas.to_datetime(stpat_equip_agg_df['datetime'], utc=True)\n", + "stpat_equip_agg_df[\"datetime\"] = pandas.to_datetime(stpat_equip_agg_df[\"datetime\"], utc=True)\n", "\n", - "stpat_equip_agg_df = stpat_equip_agg_df.sort_values('datetime')\n" + "stpat_equip_agg_df = stpat_equip_agg_df.sort_values(\"datetime\")\n" ] }, { @@ -1603,17 +1610,25 @@ } ], "source": [ - "branc_equip_agg_df['diff'] = branc_equip_agg_df['datetime'] - branc_equip_agg_df.shift(1)['datetime']\n", + "branc_equip_agg_df[\"diff\"] = (\n", + " branc_equip_agg_df[\"datetime\"] - branc_equip_agg_df.shift(1)[\"datetime\"]\n", + ")\n", "# branc_equip_agg_df\n", "\n", - "min_lateness = 3.9995*60\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+branc_equip_agg_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=150)\n", + "min_lateness = 3.9995 * 60\n", + "bins = numpy.logspace(\n", + " math.log(min_lateness, 10),\n", + " math.log(1 + branc_equip_agg_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max(), 10),\n", + " num=150,\n", + ")\n", "# bins = numpy.logspace(math.log(min_lateness,10), math.log(4.0005*60,10), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), 
bins))\n", "bins\n", - "branc_equip_agg_df['diffbucket'] = pandas.cut(branc_equip_agg_df.loc[branc_equip_agg_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "branc_equip_agg_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "branc_equip_agg_df[\"diffbucket\"] = pandas.cut(\n", + " branc_equip_agg_df.loc[branc_equip_agg_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "branc_equip_agg_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n" ] }, { @@ -1644,18 +1659,26 @@ } ], "source": [ - "stpat_equip_agg_df['diff'] = stpat_equip_agg_df['datetime'] - stpat_equip_agg_df.shift(1)['datetime']\n", + "stpat_equip_agg_df[\"diff\"] = (\n", + " stpat_equip_agg_df[\"datetime\"] - stpat_equip_agg_df.shift(1)[\"datetime\"]\n", + ")\n", "# branc_equip_agg_df\n", "\n", "# min_lateness=3.9*60\n", - "min_lateness = 3.9995*60\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_equip_agg_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=150)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(4.0005*60,10), num=50)\n", + "min_lateness = 3.9995 * 60\n", + "bins = numpy.logspace(\n", + " math.log(min_lateness, 10),\n", + " math.log(1 + stpat_equip_agg_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max(), 10),\n", + " num=150,\n", + ")\n", + "bins = numpy.logspace(math.log(min_lateness, 10), math.log(4.0005 * 60, 10), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_equip_agg_df['diffbucket'] = pandas.cut(stpat_equip_agg_df.loc[stpat_equip_agg_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "stpat_equip_agg_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "stpat_equip_agg_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_equip_agg_df.loc[stpat_equip_agg_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "stpat_equip_agg_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n" ] }, { @@ -1666,13 +1689,17 @@ "outputs": [], "source": [ "# stpat_equip_agg_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where datetime > '2024-01-01' and vector_id='4'\", database='tnc_edge')\n", - "branc_thalosmount_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '3'\", database='tnc_edge')\n", - "branc_thalosmount_df['datetime'] = pandas.to_datetime(branc_thalosmount_df['datetime'], utc=True)\n", - "branc_thalosmount_df = branc_thalosmount_df.sort_values('datetime')\n", + "branc_thalosmount_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_tests where vector_id = '3'\", database=\"tnc_edge\"\n", + ")\n", + "branc_thalosmount_df[\"datetime\"] = pandas.to_datetime(branc_thalosmount_df[\"datetime\"], utc=True)\n", + "branc_thalosmount_df = branc_thalosmount_df.sort_values(\"datetime\")\n", "\n", - "stpat_thalosmount_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '3'\", database='tnc_edge')\n", - "stpat_thalosmount_df['datetime'] = pandas.to_datetime(stpat_thalosmount_df['datetime'], utc=True)\n", - "stpat_thalosmount_df = stpat_thalosmount_df.sort_values('datetime')" + "stpat_thalosmount_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_tests where vector_id = '3'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_thalosmount_df[\"datetime\"] = 
pandas.to_datetime(stpat_thalosmount_df[\"datetime\"], utc=True)\n", + "stpat_thalosmount_df = stpat_thalosmount_df.sort_values(\"datetime\")" ] }, { @@ -1693,19 +1720,36 @@ } ], "source": [ - "branc_thalosmount_df['diff'] = branc_thalosmount_df['datetime'] - branc_thalosmount_df.shift(1)['datetime']\n", - "stpat_thalosmount_df['diff'] = stpat_thalosmount_df['datetime'] - stpat_thalosmount_df.shift(1)['datetime']\n", + "branc_thalosmount_df[\"diff\"] = (\n", + " branc_thalosmount_df[\"datetime\"] - branc_thalosmount_df.shift(1)[\"datetime\"]\n", + ")\n", + "stpat_thalosmount_df[\"diff\"] = (\n", + " stpat_thalosmount_df[\"datetime\"] - stpat_thalosmount_df.shift(1)[\"datetime\"]\n", + ")\n", "\n", "min_lateness = 9.995\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_thalosmount_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=120)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(10.005,10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log(min_lateness, 10),\n", + " math.log(1 + stpat_thalosmount_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max(), 10),\n", + " num=120,\n", + ")\n", + "bins = numpy.logspace(math.log(min_lateness, 10), math.log(10.005, 10), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_thalosmount_df['diffbucket'] = pandas.cut(stpat_thalosmount_df.loc[stpat_thalosmount_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "stpat_thalosmount_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_thalosmount_df['diffbucket'] = pandas.cut(branc_thalosmount_df.loc[branc_thalosmount_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "# branc_thalosmount_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "stpat_thalosmount_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_thalosmount_df.loc[stpat_thalosmount_df[\"diff\"] > timedelta(minutes=min_lateness)][\n", + " \"diff\"\n", + " ],\n", + " bins=bins,\n", + ")\n", + "stpat_thalosmount_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n", + "branc_thalosmount_df[\"diffbucket\"] = pandas.cut(\n", + " branc_thalosmount_df.loc[branc_thalosmount_df[\"diff\"] > timedelta(minutes=min_lateness)][\n", + " \"diff\"\n", + " ],\n", + " bins=bins,\n", + ")\n", + "# branc_thalosmount_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n" ] }, { @@ -3659,30 +3703,55 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=10))[['up']].min()\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=10))[[\"up\"]].min()\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_thalosmount_ts_df = stpat_thalosmount_df.set_index('datetime')\n", + "stpat_thalosmount_ts_df = stpat_thalosmount_df.set_index(\"datetime\")\n", "# stpat_thalosmount_ts_df = 
stpat_thalosmount_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.resample(timedelta(minutes=10)).last()\n", "display()\n", - "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.loc[stpat_thalosmount_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.loc[\n", + " stpat_thalosmount_ts_df.index >= thaloslogs_stpatrick_isup.index[0]\n", + "]\n", + "stpat_thalosmount_ts_df = stpat_thalosmount_ts_df.join(\n", + " thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\"\n", + ")\n", "\n", "# stpat_thalosmount_ts_df.loc[ ( stpat_thalosmount_ts_df['id'].notna() ) & ( stpat_thalosmount_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_thalosmount_ts_df['id'].isna() ) & ( stpat_thalosmount_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_thalosmount_ts_df['id'].notna() ) & ( stpat_thalosmount_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_thalosmount_ts_df['id'].isna() ) & ( stpat_thalosmount_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_thalosmount_ts_df['id'].notna() ) & ( stpat_thalosmount_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_thalosmount_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\",\n", + " ((stpat_thalosmount_ts_df[\"id\"].isna()) & (stpat_thalosmount_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_thalosmount_ts_df[\"id\"].notna()) & (stpat_thalosmount_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\",\n", + " ((stpat_thalosmount_ts_df[\"id\"].isna()) & (stpat_thalosmount_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((stpat_thalosmount_ts_df[\"id\"].notna()) & (stpat_thalosmount_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (stpat_thalosmount_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_thalosmount_ts_df['prevup'] = stpat_thalosmount_ts_df['up'].shift(1)\n", - "display(stpat_thalosmount_ts_df.loc[(( stpat_thalosmount_ts_df['id'].isna() ) & (( stpat_thalosmount_ts_df['up'] == 1 )) | ( stpat_thalosmount_ts_df['up'] != stpat_thalosmount_ts_df['prevup'] ) ) ])" + "stpat_thalosmount_ts_df[\"prevup\"] = stpat_thalosmount_ts_df[\"up\"].shift(1)\n", + "display(\n", + " stpat_thalosmount_ts_df.loc[\n", + " (\n", + " (stpat_thalosmount_ts_df[\"id\"].isna()) & (stpat_thalosmount_ts_df[\"up\"] == 1)\n", + " | (stpat_thalosmount_ts_df[\"up\"] != stpat_thalosmount_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")" ] }, { @@ -5700,30 +5769,51 @@ } ], "source": [ - "thaloslogs_brancol_isup = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=10))[['up']].min()\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=10))[[\"up\"]].min()\n", 
"thaloslogs_brancol_isup\n", "\n", - "branc_thalosmount_ts_df = branc_thalosmount_df.set_index('datetime')\n", + "branc_thalosmount_ts_df = branc_thalosmount_df.set_index(\"datetime\")\n", "# branc_thalosmount_ts_df = branc_thalosmount_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_thalosmount_ts_df = branc_thalosmount_ts_df.resample(timedelta(minutes=10)).last()\n", "display()\n", - "branc_thalosmount_ts_df = branc_thalosmount_ts_df.loc[branc_thalosmount_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_thalosmount_ts_df = branc_thalosmount_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_thalosmount_ts_df = branc_thalosmount_ts_df.loc[\n", + " branc_thalosmount_ts_df.index >= thaloslogs_brancol_isup.index[0]\n", + "]\n", + "branc_thalosmount_ts_df = branc_thalosmount_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_thalosmount_ts_df.loc[ ( branc_thalosmount_ts_df['id'].notna() ) & ( branc_thalosmount_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_thalosmount_ts_df['id'].isna() ) & ( branc_thalosmount_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_thalosmount_ts_df['id'].notna() ) & ( branc_thalosmount_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_thalosmount_ts_df['id'].isna() ) & ( branc_thalosmount_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_thalosmount_ts_df['id'].notna() ) & ( branc_thalosmount_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_thalosmount_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\",\n", + " ((branc_thalosmount_ts_df[\"id\"].isna()) & (branc_thalosmount_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_thalosmount_ts_df[\"id\"].notna()) & (branc_thalosmount_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\",\n", + " ((branc_thalosmount_ts_df[\"id\"].isna()) & (branc_thalosmount_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((branc_thalosmount_ts_df[\"id\"].notna()) & (branc_thalosmount_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (branc_thalosmount_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_thalosmount_ts_df['prevup'] = branc_thalosmount_ts_df['up'].shift(1)\n", - "display(branc_thalosmount_ts_df.loc[(( branc_thalosmount_ts_df['id'].isna() ) & (( branc_thalosmount_ts_df['up'] == 1 )) | ( branc_thalosmount_ts_df['up'] != branc_thalosmount_ts_df['prevup'] ) ) ])" + "branc_thalosmount_ts_df[\"prevup\"] = branc_thalosmount_ts_df[\"up\"].shift(1)\n", + "display(\n", + " branc_thalosmount_ts_df.loc[\n", + " (\n", + " (branc_thalosmount_ts_df[\"id\"].isna()) & (branc_thalosmount_ts_df[\"up\"] == 1)\n", + " | (branc_thalosmount_ts_df[\"up\"] != branc_thalosmount_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")" ] }, { @@ -5753,7 +5843,6 @@ "# stpat 627 during 5th-12th downtime\n", "\n", "\n", - "\n", "# off, no vector 7230\n", "# off, vector ran somehow? 167\n", "# on, no vector? 
1234\n", @@ -5772,13 +5861,17 @@ "metadata": {}, "outputs": [], "source": [ - "branc_gpstest_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '2'\", database='tnc_edge')\n", - "branc_gpstest_df['datetime'] = pandas.to_datetime(branc_gpstest_df['datetime'], utc=True)\n", - "branc_gpstest_df = branc_gpstest_df.sort_values('datetime')\n", + "branc_gpstest_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_tests where vector_id = '2'\", database=\"tnc_edge\"\n", + ")\n", + "branc_gpstest_df[\"datetime\"] = pandas.to_datetime(branc_gpstest_df[\"datetime\"], utc=True)\n", + "branc_gpstest_df = branc_gpstest_df.sort_values(\"datetime\")\n", "\n", - "stpat_gpstest_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '2'\", database='tnc_edge')\n", - "stpat_gpstest_df['datetime'] = pandas.to_datetime(stpat_gpstest_df['datetime'], utc=True)\n", - "stpat_gpstest_df = stpat_gpstest_df.sort_values('datetime')" + "stpat_gpstest_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_tests where vector_id = '2'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_gpstest_df[\"datetime\"] = pandas.to_datetime(stpat_gpstest_df[\"datetime\"], utc=True)\n", + "stpat_gpstest_df = stpat_gpstest_df.sort_values(\"datetime\")" ] }, { @@ -5799,19 +5892,28 @@ } ], "source": [ - "branc_gpstest_df['diff'] = branc_gpstest_df['datetime'] - branc_gpstest_df.shift(1)['datetime']\n", - "stpat_gpstest_df['diff'] = stpat_gpstest_df['datetime'] - stpat_gpstest_df.shift(1)['datetime']\n", + "branc_gpstest_df[\"diff\"] = branc_gpstest_df[\"datetime\"] - branc_gpstest_df.shift(1)[\"datetime\"]\n", + "stpat_gpstest_df[\"diff\"] = stpat_gpstest_df[\"datetime\"] - stpat_gpstest_df.shift(1)[\"datetime\"]\n", "\n", "min_lateness = 29.995\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_gpstest_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=120)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(30.005,10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log(min_lateness, 10),\n", + " math.log(1 + stpat_gpstest_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max(), 10),\n", + " num=120,\n", + ")\n", + "bins = numpy.logspace(math.log(min_lateness, 10), math.log(30.005, 10), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_gpstest_df['diffbucket'] = pandas.cut(stpat_gpstest_df.loc[stpat_gpstest_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "stpat_gpstest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_gpstest_df['diffbucket'] = pandas.cut(branc_gpstest_df.loc[branc_gpstest_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "# branc_gpstest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "stpat_gpstest_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_gpstest_df.loc[stpat_gpstest_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "stpat_gpstest_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n", + "branc_gpstest_df[\"diffbucket\"] = pandas.cut(\n", + " branc_gpstest_df.loc[branc_gpstest_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "# branc_gpstest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n" ] }, { @@ -7765,30 +7867,49 @@ } ], "source": [ - "thaloslogs_brancol_isup = 
pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=30))[['up']].max()\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=30))[[\"up\"]].max()\n", "thaloslogs_brancol_isup\n", "\n", - "branc_gpstest_ts_df = branc_gpstest_df.set_index('datetime')\n", + "branc_gpstest_ts_df = branc_gpstest_df.set_index(\"datetime\")\n", "# branc_gpstest_ts_df = branc_gpstest_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_gpstest_ts_df = branc_gpstest_ts_df.resample(timedelta(minutes=30)).last()\n", "display()\n", - "branc_gpstest_ts_df = branc_gpstest_ts_df.loc[branc_gpstest_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_gpstest_ts_df = branc_gpstest_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_gpstest_ts_df = branc_gpstest_ts_df.loc[\n", + " branc_gpstest_ts_df.index >= thaloslogs_brancol_isup.index[0]\n", + "]\n", + "branc_gpstest_ts_df = branc_gpstest_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_gpstest_ts_df.loc[ ( branc_gpstest_ts_df['id'].notna() ) & ( branc_gpstest_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_gpstest_ts_df['id'].isna() ) & ( branc_gpstest_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_gpstest_ts_df['id'].notna() ) & ( branc_gpstest_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_gpstest_ts_df['id'].isna() ) & ( branc_gpstest_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_gpstest_ts_df['id'].notna() ) & ( branc_gpstest_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_gpstest_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\", ((branc_gpstest_ts_df[\"id\"].isna()) & (branc_gpstest_ts_df[\"up\"] == 0)).sum()\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_gpstest_ts_df[\"id\"].notna()) & (branc_gpstest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\", ((branc_gpstest_ts_df[\"id\"].isna()) & (branc_gpstest_ts_df[\"up\"] == 1)).sum()\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((branc_gpstest_ts_df[\"id\"].notna()) & (branc_gpstest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (branc_gpstest_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_gpstest_ts_df['prevup'] = branc_gpstest_ts_df['up'].shift(1)\n", - "display(branc_gpstest_ts_df.loc[(( branc_gpstest_ts_df['id'].isna() ) & (( branc_gpstest_ts_df['up'] == 1 )) | ( branc_gpstest_ts_df['up'] != branc_gpstest_ts_df['prevup'] ) ) ])" + "branc_gpstest_ts_df[\"prevup\"] = branc_gpstest_ts_df[\"up\"].shift(1)\n", + "display(\n", + " branc_gpstest_ts_df.loc[\n", + " (\n", + " (branc_gpstest_ts_df[\"id\"].isna()) & (branc_gpstest_ts_df[\"up\"] == 1)\n", + " | (branc_gpstest_ts_df[\"up\"] != branc_gpstest_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")" ] }, { @@ -9742,30 +9863,51 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = 
pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=30))[['up']].min()\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=30))[[\"up\"]].min()\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_gpstest_ts_df = stpat_gpstest_df.set_index('datetime')\n", + "stpat_gpstest_ts_df = stpat_gpstest_df.set_index(\"datetime\")\n", "# stpat_gpstest_ts_df = stpat_gpstest_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_gpstest_ts_df = stpat_gpstest_ts_df.resample(timedelta(minutes=30)).last()\n", "display()\n", - "stpat_gpstest_ts_df = stpat_gpstest_ts_df.loc[stpat_gpstest_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_gpstest_ts_df = stpat_gpstest_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_gpstest_ts_df = stpat_gpstest_ts_df.loc[\n", + " stpat_gpstest_ts_df.index >= thaloslogs_stpatrick_isup.index[0]\n", + "]\n", + "stpat_gpstest_ts_df = stpat_gpstest_ts_df.join(thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\")\n", "\n", "# stpat_gpstest_ts_df.loc[ ( stpat_gpstest_ts_df['id'].notna() ) & ( stpat_gpstest_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_gpstest_ts_df['id'].isna() ) & ( stpat_gpstest_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_gpstest_ts_df['id'].notna() ) & ( stpat_gpstest_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_gpstest_ts_df['id'].isna() ) & ( stpat_gpstest_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_gpstest_ts_df['id'].notna() ) & ( stpat_gpstest_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_gpstest_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\", ((stpat_gpstest_ts_df[\"id\"].isna()) & (stpat_gpstest_ts_df[\"up\"] == 0)).sum()\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_gpstest_ts_df[\"id\"].notna()) & (stpat_gpstest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\", ((stpat_gpstest_ts_df[\"id\"].isna()) & (stpat_gpstest_ts_df[\"up\"] == 1)).sum()\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((stpat_gpstest_ts_df[\"id\"].notna()) & (stpat_gpstest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (stpat_gpstest_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_gpstest_ts_df['prevup'] = stpat_gpstest_ts_df['up'].shift(1)\n", - "display(stpat_gpstest_ts_df.loc[(( stpat_gpstest_ts_df['id'].isna() ) & (( stpat_gpstest_ts_df['up'] == 1 )) | ( stpat_gpstest_ts_df['up'] != stpat_gpstest_ts_df['prevup'] ) ) ])" + "stpat_gpstest_ts_df[\"prevup\"] = stpat_gpstest_ts_df[\"up\"].shift(1)\n", + "display(\n", + " stpat_gpstest_ts_df.loc[\n", + " (\n", + " (stpat_gpstest_ts_df[\"id\"].isna()) & (stpat_gpstest_ts_df[\"up\"] == 1)\n", + " | (stpat_gpstest_ts_df[\"up\"] != stpat_gpstest_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")" ] }, { @@ -9804,7 +9946,7 @@ "# 207 during 5th-12th 
downtime\n", "\n", "\n", - "(1776 + 1985 ) / ( 2148 - 336 + 2202 - 207)" + "(1776 + 1985) / (2148 - 336 + 2202 - 207)" ] }, { @@ -9814,14 +9956,18 @@ "metadata": {}, "outputs": [], "source": [ - "branc_inettest_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '1'\", database='tnc_edge')\n", + "branc_inettest_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_tests where vector_id = '1'\", database=\"tnc_edge\"\n", + ")\n", "# branc_inettest_df\n", - "branc_inettest_df['datetime'] = pandas.to_datetime(branc_inettest_df['datetime'], utc=True)\n", - "branc_inettest_df = branc_inettest_df.sort_values('datetime')\n", + "branc_inettest_df[\"datetime\"] = pandas.to_datetime(branc_inettest_df[\"datetime\"], utc=True)\n", + "branc_inettest_df = branc_inettest_df.sort_values(\"datetime\")\n", "\n", - "stpat_inettest_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '1'\", database='tnc_edge')\n", - "stpat_inettest_df['datetime'] = pandas.to_datetime(stpat_inettest_df['datetime'], utc=True)\n", - "stpat_inettest_df = stpat_inettest_df.sort_values('datetime')" + "stpat_inettest_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_tests where vector_id = '1'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_inettest_df[\"datetime\"] = pandas.to_datetime(stpat_inettest_df[\"datetime\"], utc=True)\n", + "stpat_inettest_df = stpat_inettest_df.sort_values(\"datetime\")" ] }, { @@ -9842,19 +9988,28 @@ } ], "source": [ - "branc_inettest_df['diff'] = branc_inettest_df['datetime'] - branc_inettest_df.shift(1)['datetime']\n", - "stpat_inettest_df['diff'] = stpat_inettest_df['datetime'] - stpat_inettest_df.shift(1)['datetime']\n", + "branc_inettest_df[\"diff\"] = branc_inettest_df[\"datetime\"] - branc_inettest_df.shift(1)[\"datetime\"]\n", + "stpat_inettest_df[\"diff\"] = stpat_inettest_df[\"datetime\"] - stpat_inettest_df.shift(1)[\"datetime\"]\n", "\n", "min_lateness = 29.995\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_inettest_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=120)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(30.005,10), num=50)\n", + "bins = numpy.logspace(\n", + " math.log(min_lateness, 10),\n", + " math.log(1 + stpat_inettest_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max(), 10),\n", + " num=120,\n", + ")\n", + "bins = numpy.logspace(math.log(min_lateness, 10), math.log(30.005, 10), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_inettest_df['diffbucket'] = pandas.cut(stpat_inettest_df.loc[stpat_inettest_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "stpat_inettest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_inettest_df['diffbucket'] = pandas.cut(branc_inettest_df.loc[branc_inettest_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "# branc_inettest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "stpat_inettest_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_inettest_df.loc[stpat_inettest_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "stpat_inettest_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n", + "branc_inettest_df[\"diffbucket\"] = pandas.cut(\n", + " branc_inettest_df.loc[branc_inettest_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " 
bins=bins,\n", + ")\n", + "# branc_inettest_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n" ] }, { @@ -11616,30 +11771,53 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=30))[['up']].min()\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(minutes=30))[[\"up\"]].min()\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_inettest_ts_df = stpat_inettest_df.set_index('datetime')\n", + "stpat_inettest_ts_df = stpat_inettest_df.set_index(\"datetime\")\n", "# stpat_inettest_ts_df = stpat_inettest_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_inettest_ts_df = stpat_inettest_ts_df.resample(timedelta(minutes=30)).last()\n", "display()\n", - "stpat_inettest_ts_df = stpat_inettest_ts_df.loc[stpat_inettest_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_inettest_ts_df = stpat_inettest_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_inettest_ts_df = stpat_inettest_ts_df.loc[\n", + " stpat_inettest_ts_df.index >= thaloslogs_stpatrick_isup.index[0]\n", + "]\n", + "stpat_inettest_ts_df = stpat_inettest_ts_df.join(thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\")\n", "\n", "# stpat_inettest_ts_df.loc[ ( stpat_inettest_ts_df['id'].notna() ) & ( stpat_inettest_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_inettest_ts_df['id'].isna() ) & ( stpat_inettest_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_inettest_ts_df['id'].notna() ) & ( stpat_inettest_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_inettest_ts_df['id'].isna() ) & ( stpat_inettest_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_inettest_ts_df['id'].notna() ) & ( stpat_inettest_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_inettest_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\",\n", + " ((stpat_inettest_ts_df[\"id\"].isna()) & (stpat_inettest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_inettest_ts_df[\"id\"].notna()) & (stpat_inettest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\",\n", + " ((stpat_inettest_ts_df[\"id\"].isna()) & (stpat_inettest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((stpat_inettest_ts_df[\"id\"].notna()) & (stpat_inettest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (stpat_inettest_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_inettest_ts_df['prevup'] = stpat_inettest_ts_df['up'].shift(1)\n", - "display(stpat_inettest_ts_df.loc[(( stpat_inettest_ts_df['id'].isna() ) & (( stpat_inettest_ts_df['up'] == 1 )) | ( stpat_inettest_ts_df['up'] != stpat_inettest_ts_df['prevup'] ) ) ])\n", + "stpat_inettest_ts_df[\"prevup\"] = stpat_inettest_ts_df[\"up\"].shift(1)\n", + "display(\n", + " 
stpat_inettest_ts_df.loc[\n", + " (\n", + " (stpat_inettest_ts_df[\"id\"].isna()) & (stpat_inettest_ts_df[\"up\"] == 1)\n", + " | (stpat_inettest_ts_df[\"up\"] != stpat_inettest_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 207" ] @@ -13467,32 +13645,53 @@ } ], "source": [ - "thaloslogs_brancol_isup = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=30))[['up']].min()\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(minutes=30))[[\"up\"]].min()\n", "thaloslogs_brancol_isup\n", "\n", - "branc_inettest_ts_df = branc_inettest_df.set_index('datetime')\n", + "branc_inettest_ts_df = branc_inettest_df.set_index(\"datetime\")\n", "# branc_inettest_ts_df = branc_inettest_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_inettest_ts_df = branc_inettest_ts_df.resample(timedelta(minutes=30)).last()\n", "display()\n", - "branc_inettest_ts_df = branc_inettest_ts_df.loc[branc_inettest_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_inettest_ts_df = branc_inettest_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_inettest_ts_df = branc_inettest_ts_df.loc[\n", + " branc_inettest_ts_df.index >= thaloslogs_brancol_isup.index[0]\n", + "]\n", + "branc_inettest_ts_df = branc_inettest_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_inettest_ts_df.loc[ ( branc_inettest_ts_df['id'].notna() ) & ( branc_inettest_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_inettest_ts_df['id'].isna() ) & ( branc_inettest_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_inettest_ts_df['id'].notna() ) & ( branc_inettest_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_inettest_ts_df['id'].isna() ) & ( branc_inettest_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_inettest_ts_df['id'].notna() ) & ( branc_inettest_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_inettest_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\",\n", + " ((branc_inettest_ts_df[\"id\"].isna()) & (branc_inettest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_inettest_ts_df[\"id\"].notna()) & (branc_inettest_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\",\n", + " ((branc_inettest_ts_df[\"id\"].isna()) & (branc_inettest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((branc_inettest_ts_df[\"id\"].notna()) & (branc_inettest_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (branc_inettest_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_inettest_ts_df['prevup'] = branc_inettest_ts_df['up'].shift(1)\n", - "display(branc_inettest_ts_df.loc[(( branc_inettest_ts_df['id'].isna() ) & (( branc_inettest_ts_df['up'] == 1 )) | ( branc_inettest_ts_df['up'] != branc_inettest_ts_df['prevup'] ) ) ])\n", + "branc_inettest_ts_df[\"prevup\"] = 
branc_inettest_ts_df[\"up\"].shift(1)\n", + "display(\n", + " branc_inettest_ts_df.loc[\n", + " (\n", + " (branc_inettest_ts_df[\"id\"].isna()) & (branc_inettest_ts_df[\"up\"] == 1)\n", + " | (branc_inettest_ts_df[\"up\"] != branc_inettest_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", - "#336" + "# 336" ] }, { @@ -13537,10 +13736,10 @@ "\n", "\n", "# brancol_fname = 'misc/data/thalos_uptime_logs/sessions_brancol.csv'\n", - "stpatrick_fname = 'misc/data/thalos_uptime_logs/sessions_saintpatrick.csv'\n", + "stpatrick_fname = \"misc/data/thalos_uptime_logs/sessions_saintpatrick.csv\"\n", "\n", "\n", - "r = re.compile('^\\d+(, .*){8}')\n", + "r = re.compile(\"^\\d+(, .*){8}\")\n", "stpatrick_lines = []\n", "with open(stpatrick_fname) as stpatrick_f:\n", " for l in stpatrick_f.readlines():\n", @@ -13549,26 +13748,53 @@ "# print(type(brancol_f), dir(brancol_f))\n", "\n", "# display(brancol_lines)\n", - " \n", - "thalos_uptime_logs_stpatrick = pandas.read_csv(io.StringIO(''.join(stpatrick_lines)), names=['id', 'datetime', 'boat', 'certus', 'status', 'i', 'j', 'name', 'ip'])\n", - " \n", + "\n", + "thalos_uptime_logs_stpatrick = pandas.read_csv(\n", + " io.StringIO(\"\".join(stpatrick_lines)),\n", + " names=[\"id\", \"datetime\", \"boat\", \"certus\", \"status\", \"i\", \"j\", \"name\", \"ip\"],\n", + ")\n", + "\n", "# thalos_uptime_logs_stpatrick = pandas.read_csv()\n", "\n", - "thalos_uptime_logs_stpatrick['datetime'] = pandas.to_datetime(thalos_uptime_logs_stpatrick['datetime'], utc=True)\n", - "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[thalos_uptime_logs_stpatrick['datetime'] > '2024-01-01']\n", + "thalos_uptime_logs_stpatrick[\"datetime\"] = pandas.to_datetime(\n", + " thalos_uptime_logs_stpatrick[\"datetime\"], utc=True\n", + ")\n", + "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[\n", + " thalos_uptime_logs_stpatrick[\"datetime\"] > \"2024-01-01\"\n", + "]\n", "# thalos_uptime_logs_stpatrick.loc[666]\n", - "thalos_uptime_logs_stpatrick.loc[0] = [0,parser.parse('2024-01-01 00:00:00+00:00'),'saintpatrick','cer1',' down',46668,63663 ,'pop-prd-pthalos000','']\n", + "thalos_uptime_logs_stpatrick.loc[0] = [\n", + " 0,\n", + " parser.parse(\"2024-01-01 00:00:00+00:00\"),\n", + " \"saintpatrick\",\n", + " \"cer1\",\n", + " \" down\",\n", + " 46668,\n", + " 63663,\n", + " \"pop-prd-pthalos000\",\n", + " \"\",\n", + "]\n", "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.sort_index()\n", - "thalos_uptime_logs_stpatrick['prev_datetime'] = thalos_uptime_logs_stpatrick['datetime'].shift(1)\n", - "thalos_uptime_logs_stpatrick['diff'] = thalos_uptime_logs_stpatrick['datetime'] - thalos_uptime_logs_stpatrick['prev_datetime']\n", - "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[thalos_uptime_logs_stpatrick['diff'].notna()]\n", - "thalos_uptime_logs_stpatrick['tenmin'] = (thalos_uptime_logs_stpatrick['diff'] / timedelta(minutes=10)).apply(math.floor)\n", - "thalos_uptime_logs_stpatrick['thirtymin'] = (thalos_uptime_logs_stpatrick['diff'] / timedelta(minutes=30)).apply(math.floor)\n", - "thalos_uptime_logs_stpatrick['fourhours'] = (thalos_uptime_logs_stpatrick['diff'] / timedelta(hours=4)).apply(math.floor)\n", - "thalos_uptime_logs_stpatrick\\\n", - ".loc[thalos_uptime_logs_stpatrick['status'] == ' up']\\\n", - ".loc[(thalos_uptime_logs_stpatrick['datetime'] < parser.parse('2024-01-05 12:30:00+00:00') ) | (thalos_uptime_logs_stpatrick['datetime'] > parser.parse('2024-01-12 21:30:00+00:00') )]\\\n", - ".sum()\n", + 
"thalos_uptime_logs_stpatrick[\"prev_datetime\"] = thalos_uptime_logs_stpatrick[\"datetime\"].shift(1)\n", + "thalos_uptime_logs_stpatrick[\"diff\"] = (\n", + " thalos_uptime_logs_stpatrick[\"datetime\"] - thalos_uptime_logs_stpatrick[\"prev_datetime\"]\n", + ")\n", + "thalos_uptime_logs_stpatrick = thalos_uptime_logs_stpatrick.loc[\n", + " thalos_uptime_logs_stpatrick[\"diff\"].notna()\n", + "]\n", + "thalos_uptime_logs_stpatrick[\"tenmin\"] = (\n", + " thalos_uptime_logs_stpatrick[\"diff\"] / timedelta(minutes=10)\n", + ").apply(math.floor)\n", + "thalos_uptime_logs_stpatrick[\"thirtymin\"] = (\n", + " thalos_uptime_logs_stpatrick[\"diff\"] / timedelta(minutes=30)\n", + ").apply(math.floor)\n", + "thalos_uptime_logs_stpatrick[\"fourhours\"] = (\n", + " thalos_uptime_logs_stpatrick[\"diff\"] / timedelta(hours=4)\n", + ").apply(math.floor)\n", + "thalos_uptime_logs_stpatrick.loc[thalos_uptime_logs_stpatrick[\"status\"] == \" up\"].loc[\n", + " (thalos_uptime_logs_stpatrick[\"datetime\"] < parser.parse(\"2024-01-05 12:30:00+00:00\"))\n", + " | (thalos_uptime_logs_stpatrick[\"datetime\"] > parser.parse(\"2024-01-12 21:30:00+00:00\"))\n", + "].sum()\n", "\n", "# I can't use this method, because enough vectors run outside of thalos's 'on' windows, such that the efficiency is over 100%\n" ] @@ -13580,14 +13806,18 @@ "metadata": {}, "outputs": [], "source": [ - "branc_eloggap_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '6'\", database='tnc_edge')\n", + "branc_eloggap_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_tests where vector_id = '6'\", database=\"tnc_edge\"\n", + ")\n", "branc_eloggap_df\n", - "branc_eloggap_df['datetime'] = pandas.to_datetime(branc_eloggap_df['datetime'], utc=True)\n", - "branc_eloggap_df = branc_eloggap_df.sort_values('datetime')\n", + "branc_eloggap_df[\"datetime\"] = pandas.to_datetime(branc_eloggap_df[\"datetime\"], utc=True)\n", + "branc_eloggap_df = branc_eloggap_df.sort_values(\"datetime\")\n", "\n", - "stpat_eloggap_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '6'\", database='tnc_edge')\n", - "stpat_eloggap_df['datetime'] = pandas.to_datetime(stpat_eloggap_df['datetime'], utc=True)\n", - "stpat_eloggap_df = stpat_eloggap_df.sort_values('datetime')" + "stpat_eloggap_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_tests where vector_id = '6'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_eloggap_df[\"datetime\"] = pandas.to_datetime(stpat_eloggap_df[\"datetime\"], utc=True)\n", + "stpat_eloggap_df = stpat_eloggap_df.sort_values(\"datetime\")" ] }, { @@ -13618,19 +13848,28 @@ } ], "source": [ - "branc_eloggap_df['diff'] = branc_eloggap_df['datetime'] - branc_eloggap_df.shift(1)['datetime']\n", - "stpat_eloggap_df['diff'] = stpat_eloggap_df['datetime'] - stpat_eloggap_df.shift(1)['datetime']\n", + "branc_eloggap_df[\"diff\"] = branc_eloggap_df[\"datetime\"] - branc_eloggap_df.shift(1)[\"datetime\"]\n", + "stpat_eloggap_df[\"diff\"] = stpat_eloggap_df[\"datetime\"] - stpat_eloggap_df.shift(1)[\"datetime\"]\n", "\n", - "min_lateness = 3.995*60\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_eloggap_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=120)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(4.01*60,10), num=50)\n", + "min_lateness = 3.995 * 60\n", + "bins = numpy.logspace(\n", + " math.log(min_lateness, 10),\n", + " math.log(1 + 
stpat_eloggap_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max(), 10),\n", + " num=120,\n", + ")\n", + "bins = numpy.logspace(math.log(min_lateness, 10), math.log(4.01 * 60, 10), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_eloggap_df['diffbucket'] = pandas.cut(stpat_eloggap_df.loc[stpat_eloggap_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", + "stpat_eloggap_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_eloggap_df.loc[stpat_eloggap_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", "# stpat_eloggap_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_eloggap_df['diffbucket'] = pandas.cut(branc_eloggap_df.loc[branc_eloggap_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "branc_eloggap_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "branc_eloggap_df[\"diffbucket\"] = pandas.cut(\n", + " branc_eloggap_df.loc[branc_eloggap_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"],\n", + " bins=bins,\n", + ")\n", + "branc_eloggap_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n" ] }, { @@ -15584,30 +15823,49 @@ } ], "source": [ - "thaloslogs_brancol_isup = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(hours=4))[['up']].sum() >= 48\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(hours=4))[[\"up\"]].sum() >= 48\n", "thaloslogs_brancol_isup\n", "\n", - "branc_eloggap_ts_df = branc_eloggap_df.set_index('datetime')\n", + "branc_eloggap_ts_df = branc_eloggap_df.set_index(\"datetime\")\n", "# branc_eloggap_ts_df = branc_eloggap_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_eloggap_ts_df = branc_eloggap_ts_df.resample(timedelta(hours=4)).last()\n", "display()\n", - "branc_eloggap_ts_df = branc_eloggap_ts_df.loc[branc_eloggap_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_eloggap_ts_df = branc_eloggap_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_eloggap_ts_df = branc_eloggap_ts_df.loc[\n", + " branc_eloggap_ts_df.index >= thaloslogs_brancol_isup.index[0]\n", + "]\n", + "branc_eloggap_ts_df = branc_eloggap_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_eloggap_ts_df.loc[ ( branc_eloggap_ts_df['id'].notna() ) & ( branc_eloggap_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_eloggap_ts_df['id'].isna() ) & ( branc_eloggap_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_eloggap_ts_df['id'].notna() ) & ( branc_eloggap_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_eloggap_ts_df['id'].isna() ) & ( branc_eloggap_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_eloggap_ts_df['id'].notna() ) & ( branc_eloggap_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_eloggap_ts_df['up'] == 1 )).sum())\n", + "print(\n", + " \"off, no vector\", 
((branc_eloggap_ts_df[\"id\"].isna()) & (branc_eloggap_ts_df[\"up\"] == 0)).sum()\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_eloggap_ts_df[\"id\"].notna()) & (branc_eloggap_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\", ((branc_eloggap_ts_df[\"id\"].isna()) & (branc_eloggap_ts_df[\"up\"] == 1)).sum()\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((branc_eloggap_ts_df[\"id\"].notna()) & (branc_eloggap_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (branc_eloggap_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_eloggap_ts_df['prevup'] = branc_eloggap_ts_df['up'].shift(1)\n", - "display(branc_eloggap_ts_df.loc[(( branc_eloggap_ts_df['id'].isna() ) & (( branc_eloggap_ts_df['up'] == 1 )) | ( branc_eloggap_ts_df['up'] != branc_eloggap_ts_df['prevup'] ) ) ])\n", + "branc_eloggap_ts_df[\"prevup\"] = branc_eloggap_ts_df[\"up\"].shift(1)\n", + "display(\n", + " branc_eloggap_ts_df.loc[\n", + " (\n", + " (branc_eloggap_ts_df[\"id\"].isna()) & (branc_eloggap_ts_df[\"up\"] == 1)\n", + " | (branc_eloggap_ts_df[\"up\"] != branc_eloggap_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 27" ] @@ -17563,30 +17821,51 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(hours=4))[['up']].min()\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(hours=4))[[\"up\"]].min()\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_eloggap_ts_df = stpat_eloggap_df.set_index('datetime')\n", + "stpat_eloggap_ts_df = stpat_eloggap_df.set_index(\"datetime\")\n", "# stpat_eloggap_ts_df = stpat_eloggap_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_eloggap_ts_df = stpat_eloggap_ts_df.resample(timedelta(hours=4)).last()\n", "display()\n", - "stpat_eloggap_ts_df = stpat_eloggap_ts_df.loc[stpat_eloggap_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_eloggap_ts_df = stpat_eloggap_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_eloggap_ts_df = stpat_eloggap_ts_df.loc[\n", + " stpat_eloggap_ts_df.index >= thaloslogs_stpatrick_isup.index[0]\n", + "]\n", + "stpat_eloggap_ts_df = stpat_eloggap_ts_df.join(thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\")\n", "\n", "# stpat_eloggap_ts_df.loc[ ( stpat_eloggap_ts_df['id'].notna() ) & ( stpat_eloggap_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_eloggap_ts_df['id'].isna() ) & ( stpat_eloggap_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_eloggap_ts_df['id'].notna() ) & ( stpat_eloggap_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_eloggap_ts_df['id'].isna() ) & ( stpat_eloggap_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_eloggap_ts_df['id'].notna() ) & ( stpat_eloggap_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_eloggap_ts_df['up'] == 1 )).sum())\n", + 
"print(\n", + " \"off, no vector\", ((stpat_eloggap_ts_df[\"id\"].isna()) & (stpat_eloggap_ts_df[\"up\"] == 0)).sum()\n", + ")\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_eloggap_ts_df[\"id\"].notna()) & (stpat_eloggap_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\n", + " \"on, no vector?\", ((stpat_eloggap_ts_df[\"id\"].isna()) & (stpat_eloggap_ts_df[\"up\"] == 1)).sum()\n", + ")\n", + "print(\n", + " \"on, vector worked.\",\n", + " ((stpat_eloggap_ts_df[\"id\"].notna()) & (stpat_eloggap_ts_df[\"up\"] == 1)).sum(),\n", + ")\n", + "print(\"total on\", (stpat_eloggap_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_eloggap_ts_df['prevup'] = stpat_eloggap_ts_df['up'].shift(1)\n", - "display(stpat_eloggap_ts_df.loc[(( stpat_eloggap_ts_df['id'].isna() ) & (( stpat_eloggap_ts_df['up'] == 1 )) | ( stpat_eloggap_ts_df['up'] != stpat_eloggap_ts_df['prevup'] ) ) ])\n", + "stpat_eloggap_ts_df[\"prevup\"] = stpat_eloggap_ts_df[\"up\"].shift(1)\n", + "display(\n", + " stpat_eloggap_ts_df.loc[\n", + " (\n", + " (stpat_eloggap_ts_df[\"id\"].isna()) & (stpat_eloggap_ts_df[\"up\"] == 1)\n", + " | (stpat_eloggap_ts_df[\"up\"] != stpat_eloggap_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 4" ] @@ -17609,7 +17888,7 @@ } ], "source": [ - "(132+126) / (139-4+177-27)" + "(132 + 126) / (139 - 4 + 177 - 27)" ] }, { @@ -17619,14 +17898,18 @@ "metadata": {}, "outputs": [], "source": [ - "branc_cca_df = awswrangler.athena.read_sql_query(f\"SELECT * from brancol_v1_tests where vector_id = '7'\", database='tnc_edge')\n", + "branc_cca_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from brancol_v1_tests where vector_id = '7'\", database=\"tnc_edge\"\n", + ")\n", "branc_cca_df\n", - "branc_cca_df['datetime'] = pandas.to_datetime(branc_cca_df['datetime'], utc=True)\n", - "branc_cca_df = branc_cca_df.sort_values('datetime')\n", + "branc_cca_df[\"datetime\"] = pandas.to_datetime(branc_cca_df[\"datetime\"], utc=True)\n", + "branc_cca_df = branc_cca_df.sort_values(\"datetime\")\n", "\n", - "stpat_cca_df = awswrangler.athena.read_sql_query(f\"SELECT * from stpatrick_v1_tests where vector_id = '7'\", database='tnc_edge')\n", - "stpat_cca_df['datetime'] = pandas.to_datetime(stpat_cca_df['datetime'], utc=True)\n", - "stpat_cca_df = stpat_cca_df.sort_values('datetime')" + "stpat_cca_df = awswrangler.athena.read_sql_query(\n", + " f\"SELECT * from stpatrick_v1_tests where vector_id = '7'\", database=\"tnc_edge\"\n", + ")\n", + "stpat_cca_df[\"datetime\"] = pandas.to_datetime(stpat_cca_df[\"datetime\"], utc=True)\n", + "stpat_cca_df = stpat_cca_df.sort_values(\"datetime\")" ] }, { @@ -17657,19 +17940,26 @@ } ], "source": [ - "branc_cca_df['diff'] = branc_cca_df['datetime'] - branc_cca_df.shift(1)['datetime']\n", - "stpat_cca_df['diff'] = stpat_cca_df['datetime'] - stpat_cca_df.shift(1)['datetime']\n", + "branc_cca_df[\"diff\"] = branc_cca_df[\"datetime\"] - branc_cca_df.shift(1)[\"datetime\"]\n", + "stpat_cca_df[\"diff\"] = stpat_cca_df[\"datetime\"] - stpat_cca_df.shift(1)[\"datetime\"]\n", "\n", - "min_lateness = 0.995*60\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1+stpat_cca_df['diff'].map(lambda x: x.total_seconds()/60).max(),10), num=90)\n", - "bins = numpy.logspace(math.log(min_lateness,10), math.log(1.005*60,10), num=50)\n", + "min_lateness = 0.995 * 60\n", + "bins = numpy.logspace(\n", + " math.log(min_lateness, 10),\n", + " math.log(1 + stpat_cca_df[\"diff\"].map(lambda x: x.total_seconds() / 60).max(), 10),\n", + " num=90,\n", + ")\n", + "bins = 
numpy.logspace(math.log(min_lateness, 10), math.log(1.005 * 60, 10), num=50)\n", "bins = list(map(lambda x: timedelta(minutes=x), bins))\n", "bins\n", - "stpat_cca_df['diffbucket'] = pandas.cut(stpat_cca_df.loc[stpat_cca_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", + "stpat_cca_df[\"diffbucket\"] = pandas.cut(\n", + " stpat_cca_df.loc[stpat_cca_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"], bins=bins\n", + ")\n", "# stpat_cca_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "branc_cca_df['diffbucket'] = pandas.cut(branc_cca_df.loc[branc_cca_df['diff'] > timedelta(minutes=min_lateness)]['diff'], bins=bins)\n", - "branc_cca_df.groupby('diffbucket')['diff'].count().plot.bar(figsize=(10,3))\n", - "\n" + "branc_cca_df[\"diffbucket\"] = pandas.cut(\n", + " branc_cca_df.loc[branc_cca_df[\"diff\"] > timedelta(minutes=min_lateness)][\"diff\"], bins=bins\n", + ")\n", + "branc_cca_df.groupby(\"diffbucket\")[\"diff\"].count().plot.bar(figsize=(10, 3))\n" ] }, { @@ -19689,30 +19979,40 @@ } ], "source": [ - "thaloslogs_brancol_isup = pandas.read_pickle('thaloslogs_brancol_uptime_ts_df.pickle')\n", + "thaloslogs_brancol_isup = pandas.read_pickle(\"thaloslogs_brancol_uptime_ts_df.pickle\")\n", "# thaloslogs_brancol_isup = thaloslogs_brancol_uptime_ts_df.loc[thaloslogs_brancol_uptime_ts_df['up'] == 1]\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < '2024-04-08']\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.loc[thaloslogs_brancol_isup.index < \"2024-04-08\"]\n", "thaloslogs_brancol_isup.sort_index()\n", - "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(hours=1))[['up']].sum() >= 10\n", + "thaloslogs_brancol_isup = thaloslogs_brancol_isup.resample(timedelta(hours=1))[[\"up\"]].sum() >= 10\n", "thaloslogs_brancol_isup\n", "\n", - "branc_cca_ts_df = branc_cca_df.set_index('datetime')\n", + "branc_cca_ts_df = branc_cca_df.set_index(\"datetime\")\n", "# branc_cca_ts_df = branc_cca_ts_df.resample(timedelta(minutes=5)).last()\n", "branc_cca_ts_df = branc_cca_ts_df.resample(timedelta(hours=1)).last()\n", "display()\n", "branc_cca_ts_df = branc_cca_ts_df.loc[branc_cca_ts_df.index >= thaloslogs_brancol_isup.index[0]]\n", - "branc_cca_ts_df = branc_cca_ts_df.join(thaloslogs_brancol_isup[['up']] , how='outer')\n", + "branc_cca_ts_df = branc_cca_ts_df.join(thaloslogs_brancol_isup[[\"up\"]], how=\"outer\")\n", "\n", "# branc_cca_ts_df.loc[ ( branc_cca_ts_df['id'].notna() ) & ( branc_cca_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( branc_cca_ts_df['id'].isna() ) & ( branc_cca_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( branc_cca_ts_df['id'].notna() ) & ( branc_cca_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( branc_cca_ts_df['id'].isna() ) & ( branc_cca_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( branc_cca_ts_df['id'].notna() ) & ( branc_cca_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( branc_cca_ts_df['up'] == 1 )).sum())\n", + "print(\"off, no vector\", ((branc_cca_ts_df[\"id\"].isna()) & (branc_cca_ts_df[\"up\"] == 0)).sum())\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((branc_cca_ts_df[\"id\"].notna()) & (branc_cca_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\"on, no vector?\", ((branc_cca_ts_df[\"id\"].isna()) & (branc_cca_ts_df[\"up\"] == 1)).sum())\n", + "print(\"on, vector worked.\", ((branc_cca_ts_df[\"id\"].notna()) & (branc_cca_ts_df[\"up\"] == 1)).sum())\n", + 
"print(\"total on\", (branc_cca_ts_df[\"up\"] == 1).sum())\n", "\n", - "branc_cca_ts_df['prevup'] = branc_cca_ts_df['up'].shift(1)\n", - "display(branc_cca_ts_df.loc[(( branc_cca_ts_df['id'].isna() ) & (( branc_cca_ts_df['up'] == 1 )) | ( branc_cca_ts_df['up'] != branc_cca_ts_df['prevup'] ) ) ])\n", + "branc_cca_ts_df[\"prevup\"] = branc_cca_ts_df[\"up\"].shift(1)\n", + "display(\n", + " branc_cca_ts_df.loc[\n", + " (\n", + " (branc_cca_ts_df[\"id\"].isna()) & (branc_cca_ts_df[\"up\"] == 1)\n", + " | (branc_cca_ts_df[\"up\"] != branc_cca_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 169" ] @@ -21732,30 +22032,44 @@ } ], "source": [ - "thaloslogs_stpatrick_isup = pandas.read_pickle('thaloslogs_stpatrick_uptime_ts_df.pickle')\n", + "thaloslogs_stpatrick_isup = pandas.read_pickle(\"thaloslogs_stpatrick_uptime_ts_df.pickle\")\n", "# thaloslogs_stpatrick_isup = thaloslogs_stpatrick_uptime_ts_df.loc[thaloslogs_stpatrick_uptime_ts_df['up'] == 1]\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[thaloslogs_stpatrick_isup.index < '2024-04-08']\n", + "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.loc[\n", + " thaloslogs_stpatrick_isup.index < \"2024-04-08\"\n", + "]\n", "thaloslogs_stpatrick_isup.sort_index()\n", - "thaloslogs_stpatrick_isup = thaloslogs_stpatrick_isup.resample(timedelta(hours=1))[['up']].sum() >= 10\n", + "thaloslogs_stpatrick_isup = (\n", + " thaloslogs_stpatrick_isup.resample(timedelta(hours=1))[[\"up\"]].sum() >= 10\n", + ")\n", "thaloslogs_stpatrick_isup\n", "\n", - "stpat_cca_ts_df = stpat_cca_df.set_index('datetime')\n", + "stpat_cca_ts_df = stpat_cca_df.set_index(\"datetime\")\n", "# stpat_cca_ts_df = stpat_cca_ts_df.resample(timedelta(minutes=5)).last()\n", "stpat_cca_ts_df = stpat_cca_ts_df.resample(timedelta(hours=1)).last()\n", "display()\n", "stpat_cca_ts_df = stpat_cca_ts_df.loc[stpat_cca_ts_df.index >= thaloslogs_stpatrick_isup.index[0]]\n", - "stpat_cca_ts_df = stpat_cca_ts_df.join(thaloslogs_stpatrick_isup[['up']] , how='outer')\n", + "stpat_cca_ts_df = stpat_cca_ts_df.join(thaloslogs_stpatrick_isup[[\"up\"]], how=\"outer\")\n", "\n", "# stpat_cca_ts_df.loc[ ( stpat_cca_ts_df['id'].notna() ) & ( stpat_cca_ts_df['up'] == 1 ) ]\n", "\n", - "print('off, no vector', (( stpat_cca_ts_df['id'].isna() ) & ( stpat_cca_ts_df['up'] == 0 )).sum())\n", - "print('off, vector ran somehow?', (( stpat_cca_ts_df['id'].notna() ) & ( stpat_cca_ts_df['up'] == 0 )).sum())\n", - "print('on, no vector?', (( stpat_cca_ts_df['id'].isna() ) & ( stpat_cca_ts_df['up'] == 1 )).sum())\n", - "print('on, vector worked.', (( stpat_cca_ts_df['id'].notna() ) & ( stpat_cca_ts_df['up'] == 1 )).sum())\n", - "print('total on', (( stpat_cca_ts_df['up'] == 1 )).sum())\n", + "print(\"off, no vector\", ((stpat_cca_ts_df[\"id\"].isna()) & (stpat_cca_ts_df[\"up\"] == 0)).sum())\n", + "print(\n", + " \"off, vector ran somehow?\",\n", + " ((stpat_cca_ts_df[\"id\"].notna()) & (stpat_cca_ts_df[\"up\"] == 0)).sum(),\n", + ")\n", + "print(\"on, no vector?\", ((stpat_cca_ts_df[\"id\"].isna()) & (stpat_cca_ts_df[\"up\"] == 1)).sum())\n", + "print(\"on, vector worked.\", ((stpat_cca_ts_df[\"id\"].notna()) & (stpat_cca_ts_df[\"up\"] == 1)).sum())\n", + "print(\"total on\", (stpat_cca_ts_df[\"up\"] == 1).sum())\n", "\n", - "stpat_cca_ts_df['prevup'] = stpat_cca_ts_df['up'].shift(1)\n", - "display(stpat_cca_ts_df.loc[(( stpat_cca_ts_df['id'].isna() ) & (( stpat_cca_ts_df['up'] == 1 )) | ( stpat_cca_ts_df['up'] != stpat_cca_ts_df['prevup'] ) ) ])\n", + "stpat_cca_ts_df[\"prevup\"] = 
stpat_cca_ts_df[\"up\"].shift(1)\n", + "display(\n", + " stpat_cca_ts_df.loc[\n", + " (\n", + " (stpat_cca_ts_df[\"id\"].isna()) & (stpat_cca_ts_df[\"up\"] == 1)\n", + " | (stpat_cca_ts_df[\"up\"] != stpat_cca_ts_df[\"prevup\"])\n", + " )\n", + " ]\n", + ")\n", "\n", "# 108" ] @@ -21795,7 +22109,7 @@ "# 108\n", "\n", "# (863+976)/(1186-169+1126-108)\n", - "(863+976)/(1086-169+1126-108)" + "(863 + 976) / (1086 - 169 + 1126 - 108)" ] }, { @@ -23677,7 +23991,7 @@ } ], "source": [ - "tmp = stpat_eloggap_df.set_index('datetime')\n", + "tmp = stpat_eloggap_df.set_index(\"datetime\")\n", "tmp.sort_index()" ] } diff --git a/notebooks/tnc_edge_bv_excel_parsing.ipynb b/notebooks/tnc_edge_bv_excel_parsing.ipynb index 03e9e2e..82c7567 100644 --- a/notebooks/tnc_edge_bv_excel_parsing.ipynb +++ b/notebooks/tnc_edge_bv_excel_parsing.ipynb @@ -29,6 +29,7 @@ "import numpy as np\n", "from datetime import datetime, date, time, timezone, timedelta\n", "from dateutil.parser import parse as parse_datetime\n", + "\n", "# help(np.argwhere)\n", "import re" ] @@ -52,14 +53,14 @@ "# dir(pandas)\n", "# help(pandas.read_excel)\n", "\n", - "fname = '20240308_SAINT PATRICK_FO6_FO7_FO8_FO9_FO10_FO11_FO12_FO13.xlsx'\n", + "fname = \"20240308_SAINT PATRICK_FO6_FO7_FO8_FO9_FO10_FO11_FO12_FO13.xlsx\"\n", "\n", "# boat = 'brancol'\n", - "boat = 'stpatrick'\n", + "boat = \"stpatrick\"\n", "\n", "all_sheets = pandas.read_excel(fname, sheet_name=None)\n", "\n", - "sheet_names = list(filter(lambda k: re.match('^FO \\d+', k), all_sheets.keys()))\n", + "sheet_names = list(filter(lambda k: re.match(\"^FO \\d+\", k), all_sheets.keys()))\n", "\n", "curr_sheet = all_sheets[sheet_names[1]]\n" ] @@ -72,18 +73,18 @@ "outputs": [], "source": [ "def display_full(x):\n", - " pandas.set_option('display.max_rows', 1000)\n", - " pandas.set_option('display.min_rows', 400)\n", - " pandas.set_option('display.max_columns', None)\n", - " pandas.set_option('display.width', 2000)\n", - " pandas.set_option('display.float_format', '{:20,.2f}'.format)\n", - " pandas.set_option('display.max_colwidth', None)\n", + " pandas.set_option(\"display.max_rows\", 1000)\n", + " pandas.set_option(\"display.min_rows\", 400)\n", + " pandas.set_option(\"display.max_columns\", None)\n", + " pandas.set_option(\"display.width\", 2000)\n", + " pandas.set_option(\"display.float_format\", \"{:20,.2f}\".format)\n", + " pandas.set_option(\"display.max_colwidth\", None)\n", " display(x)\n", - " pandas.reset_option('display.max_rows')\n", - " pandas.reset_option('display.max_columns')\n", - " pandas.reset_option('display.width')\n", - " pandas.reset_option('display.float_format')\n", - " pandas.reset_option('display.max_colwidth')\n" + " pandas.reset_option(\"display.max_rows\")\n", + " pandas.reset_option(\"display.max_columns\")\n", + " pandas.reset_option(\"display.width\")\n", + " pandas.reset_option(\"display.float_format\")\n", + " pandas.reset_option(\"display.max_colwidth\")\n" ] }, { @@ -93,55 +94,78 @@ "metadata": {}, "outputs": [], "source": [ - "\n", - "\n", "def findcell(sheet, needle):\n", " for col_name in list(sheet.keys()):\n", " try:\n", " start_idx = sheet[col_name].to_list().index(needle)\n", - " return (col_name, start_idx+1)\n", + " return (col_name, start_idx + 1)\n", " except ValueError:\n", " pass\n", " return None\n", "\n", + "\n", "def set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, cell_str, cell_offset):\n", " cols_index = list(curr_sheet.keys())\n", - " \n", + "\n", " if cols_index.index(set_haul_title_cell[0]) + cell_offset[0] >= 
len(cols_index):\n", - " raise ValueError(f'index offset {cell_offset[0]} out of bounds in sheet {cols_index}')\n", - " \n", - "# print(set_haul_title_cell)\n", - "# print(cell_offset)\n", - "# print(cols_index.index(set_haul_title_cell[0]))\n", - "# print(cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]])\n", - "# print(curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]])\n", - "# print(curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][set_haul_title_cell[1] + cell_offset[1]])\n", - " \n", - " if curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][set_haul_title_cell[1] + cell_offset[1]] != cell_str:\n", + " raise ValueError(f\"index offset {cell_offset[0]} out of bounds in sheet {cols_index}\")\n", + "\n", + " # print(set_haul_title_cell)\n", + " # print(cell_offset)\n", + " # print(cols_index.index(set_haul_title_cell[0]))\n", + " # print(cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]])\n", + " # print(curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]])\n", + " # print(curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][set_haul_title_cell[1] + cell_offset[1]])\n", + "\n", + " if (\n", + " curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][\n", + " set_haul_title_cell[1] + cell_offset[1]\n", + " ]\n", + " != cell_str\n", + " ):\n", " raise ValueError(f\"can't find {cell_str}\")\n", - " return curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][set_haul_title_cell[1] + cell_offset[1]+1]\n", + " return curr_sheet[cols_index[cols_index.index(set_haul_title_cell[0]) + cell_offset[0]]][\n", + " set_haul_title_cell[1] + cell_offset[1] + 1\n", + " ]\n", "\n", "\n", "def set_haul_grid_fetch_all(curr_sheet, set_haul_title_cell):\n", - " start_date = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'start date', (0, 0))\n", + " start_date = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"start date\", (0, 0))\n", " try:\n", - " start_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'start time (UTC)', (1, 0))\n", + " start_time = set_haul_grid_fetch_one(\n", + " curr_sheet, set_haul_title_cell, \"start time (UTC)\", (1, 0)\n", + " )\n", " except:\n", - " start_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'start time', (1, 0))\n", - " start_lat = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'latitude', (2, 0))\n", - " start_lon = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'longitude', (3, 0))\n", - " finish_date = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'finish date', (0, 2))\n", + " start_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"start time\", (1, 0))\n", + " start_lat = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"latitude\", (2, 0))\n", + " start_lon = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"longitude\", (3, 0))\n", + " finish_date = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"finish date\", (0, 2))\n", " try:\n", - " finish_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'finish time (UTC)', (1, 2))\n", + " finish_time = set_haul_grid_fetch_one(\n", + " curr_sheet, set_haul_title_cell, \"finish time (UTC)\", (1, 2)\n", + " )\n", " except:\n", " try:\n", - " finish_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'finish time (UTC', (1, 2))\n", + " 
finish_time = set_haul_grid_fetch_one(\n", + " curr_sheet, set_haul_title_cell, \"finish time (UTC\", (1, 2)\n", + " )\n", " except:\n", - " finish_time = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'finish time', (1, 2))\n", - " finish_lat = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'latitude', (2, 2))\n", - " finish_lon = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, 'longitude', (3, 2))\n", + " finish_time = set_haul_grid_fetch_one(\n", + " curr_sheet, set_haul_title_cell, \"finish time\", (1, 2)\n", + " )\n", + " finish_lat = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"latitude\", (2, 2))\n", + " finish_lon = set_haul_grid_fetch_one(curr_sheet, set_haul_title_cell, \"longitude\", (3, 2))\n", "\n", - " return (start_date, start_time, start_lat, start_lon, finish_date, finish_time, finish_lat, finish_lon)\n" + " return (\n", + " start_date,\n", + " start_time,\n", + " start_lat,\n", + " start_lon,\n", + " finish_date,\n", + " finish_time,\n", + " finish_lat,\n", + " finish_lon,\n", + " )\n" ] }, { @@ -2159,65 +2183,73 @@ } ], "source": [ - "\n", - "\n", "# trip info\n", "\n", "\n", - "if list(all_sheets.keys())[0] != 'TRIP':\n", - " raise ValueError('first sheet should be trip info')\n", + "if list(all_sheets.keys())[0] != \"TRIP\":\n", + " raise ValueError(\"first sheet should be trip info\")\n", "\n", - "trip_sheet = all_sheets['TRIP']\n", + "trip_sheet = all_sheets[\"TRIP\"]\n", "\n", - "if 'Fishing trip' in trip_sheet.keys():\n", - " trip_start_date_cell = ('Fishing trip', 0)\n", + "if \"Fishing trip\" in trip_sheet.keys():\n", + " trip_start_date_cell = (\"Fishing trip\", 0)\n", "else:\n", - " trip_start_date_cell = findcell(curr_sheet, 'Fishing trip')\n", + " trip_start_date_cell = findcell(curr_sheet, \"Fishing trip\")\n", " if not setting_cell:\n", " raise ValueError(\"no 'Fishing trip' block in sheet\")\n", - " \n", "\n", - "trip_notes = ''\n", + "\n", + "trip_notes = \"\"\n", "try:\n", - " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'General notes ', (7,0)) \n", + " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"General notes \", (7, 0))\n", "except:\n", " try:\n", - " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Notes', (7,0)) \n", + " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Notes\", (7, 0))\n", " except:\n", " try:\n", - " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Notes', (7,1)) \n", + " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Notes\", (7, 1))\n", " except:\n", " try:\n", - " trip_notes = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Note ', (8,0)) \n", + " trip_notes = set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Note \", (8, 0)\n", + " )\n", " except:\n", " pass\n", "try:\n", - " trip_start_date = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Start date', (0,1)).date()\n", + " trip_start_date = set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Start date\", (0, 1)\n", + " ).date()\n", "except:\n", - " trip_start_date = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Start date', (0,3)).date()\n", + " trip_start_date = set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Start date\", (0, 3)\n", + " ).date()\n", "\n", "try:\n", - " trip_end_date = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Finish date', (2,1)).date()\n", + " trip_end_date 
= set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Finish date\", (2, 1)\n", + " ).date()\n", "except:\n", - " trip_end_date = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, 'Finish date', (2,3)).date()\n", + " trip_end_date = set_haul_grid_fetch_one(\n", + " trip_sheet, trip_start_date_cell, \"Finish date\", (2, 3)\n", + " ).date()\n", "trip_id = boat + \"_\" + str(trip_start_date)\n", "\n", "try:\n", - " obsv_name = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Observer's name\", (0,4))\n", + " obsv_name = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Observer's name\", (0, 4))\n", "except ValueError as e:\n", - " obsv_name = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Observer's name\", (0,6))\n", - " \n", + " obsv_name = set_haul_grid_fetch_one(trip_sheet, trip_start_date_cell, \"Observer's name\", (0, 6))\n", + "\n", "\n", "trip_data = dict(\n", - "trip_id = trip_id,\n", - "trip_start_date = trip_start_date,\n", - "trip_end_date = trip_end_date,\n", - "trip_notes = trip_notes,\n", - "obsv_name = obsv_name,\n", + " trip_id=trip_id,\n", + " trip_start_date=trip_start_date,\n", + " trip_end_date=trip_end_date,\n", + " trip_notes=trip_notes,\n", + " obsv_name=obsv_name,\n", ")\n", - "trip_data = {k:[v] for k,v in trip_data.items()}\n", + "trip_data = {k: [v] for k, v in trip_data.items()}\n", "trip_df = pandas.DataFrame(trip_data)\n", - "trip_df = trip_df.set_index('trip_id')\n", + "trip_df = trip_df.set_index(\"trip_id\")\n", "# print(trip_df)\n", "\n", "\n", @@ -2233,120 +2265,147 @@ " curr_sheet = all_sheets[sheet_name]\n", "\n", " # look for 'latitude' in this sheet. If found, it's probably a sheet with set/haul data\n", - " fao_code_cell = findcell(curr_sheet, 'FAO code')\n", + " fao_code_cell = findcell(curr_sheet, \"FAO code\")\n", " if not fao_code_cell:\n", " continue\n", - " catchcondition_cell = findcell(curr_sheet, 'catch condition')\n", + " catchcondition_cell = findcell(curr_sheet, \"catch condition\")\n", " if not catchcondition_cell:\n", " continue\n", - " discardreason_cell = findcell(curr_sheet, 'reason for discard')\n", + " discardreason_cell = findcell(curr_sheet, \"reason for discard\")\n", " if not discardreason_cell:\n", " continue\n", - " if discardreason_cell[1] != catchcondition_cell[1] or catchcondition_cell[1] != discardreason_cell[1]:\n", + " if (\n", + " discardreason_cell[1] != catchcondition_cell[1]\n", + " or catchcondition_cell[1] != discardreason_cell[1]\n", + " ):\n", " raise ValueError(f\"cannot find header row for fish data in sheet {sheet_name}\")\n", "\n", " # find other metadata values\n", "\n", " cols_index = list(curr_sheet.keys())\n", - " if 'Setting' in cols_index:\n", - " setting_cell = ('Setting', 0)\n", + " if \"Setting\" in cols_index:\n", + " setting_cell = (\"Setting\", 0)\n", " else:\n", - " setting_cell = findcell(curr_sheet, 'Setting')\n", + " setting_cell = findcell(curr_sheet, \"Setting\")\n", " if not setting_cell:\n", " raise ValueError(\"no 'Setting' block in sheet\")\n", "\n", - " (set_start_date, set_start_time, set_start_lat ,\n", - " set_start_lon ,set_end_date, set_end_time,set_end_lat ,\n", - " set_end_lon,) = set_haul_grid_fetch_all(curr_sheet, setting_cell)\n", - "\n", + " (\n", + " set_start_date,\n", + " set_start_time,\n", + " set_start_lat,\n", + " set_start_lon,\n", + " set_end_date,\n", + " set_end_time,\n", + " set_end_lat,\n", + " set_end_lon,\n", + " ) = set_haul_grid_fetch_all(curr_sheet, setting_cell)\n", "\n", - " if 'Hauling' in 
cols_index:\n", - " hauling_cell = ('Hauling', 0)\n", + " if \"Hauling\" in cols_index:\n", + " hauling_cell = (\"Hauling\", 0)\n", " else:\n", - " hauling_cell = findcell(curr_sheet, 'Hauling')\n", + " hauling_cell = findcell(curr_sheet, \"Hauling\")\n", " if not hauling_cell:\n", " raise ValueError(\"no 'Hauling' block in sheet\")\n", "\n", - " (haul_start_date, haul_start_time, haul_start_lat ,\n", - " haul_start_lon ,haul_end_date, haul_end_time,haul_end_lat ,\n", - " haul_end_lon,) = set_haul_grid_fetch_all(curr_sheet, hauling_cell)\n", + " (\n", + " haul_start_date,\n", + " haul_start_time,\n", + " haul_start_lat,\n", + " haul_start_lon,\n", + " haul_end_date,\n", + " haul_end_time,\n", + " haul_end_lat,\n", + " haul_end_lon,\n", + " ) = set_haul_grid_fetch_all(curr_sheet, hauling_cell)\n", "\n", " set_number += 1\n", - " set_id = trip_id + \"_set_\"+str(set_number).zfill(2)\n", - " # print(set_id)\n", + " set_id = trip_id + \"_set_\" + str(set_number).zfill(2)\n", + " # print(set_id)\n", " set_row = dict(\n", " set_id=set_id,\n", " trip_id=trip_id,\n", " set_number=set_number,\n", - "\n", - " set_start_datetime = datetime.combine(set_start_date.date(), set_start_time).replace(tzinfo=timezone.utc),\n", - " set_start_lat = set_start_lat ,\n", - " set_start_lon = set_start_lon ,\n", - "\n", - " set_end_datetime = datetime.combine(set_end_date.date(), set_end_time).replace(tzinfo=timezone.utc),\n", - " set_end_lat = set_end_lat ,\n", - " set_end_lon = set_end_lon ,\n", - "\n", - " haul_start_datetime = datetime.combine(haul_start_date.date(), haul_start_time).replace(tzinfo=timezone.utc),\n", - " haul_start_lat = haul_start_lat,\n", - " haul_start_lon = haul_start_lon ,\n", - "\n", - " haul_end_datetime = datetime.combine(haul_end_date.date(), haul_end_time).replace(tzinfo=timezone.utc),\n", - " haul_end_lat = haul_end_lat,\n", - " haul_end_lon = haul_end_lon\n", + " set_start_datetime=datetime.combine(set_start_date.date(), set_start_time).replace(\n", + " tzinfo=timezone.utc\n", + " ),\n", + " set_start_lat=set_start_lat,\n", + " set_start_lon=set_start_lon,\n", + " set_end_datetime=datetime.combine(set_end_date.date(), set_end_time).replace(\n", + " tzinfo=timezone.utc\n", + " ),\n", + " set_end_lat=set_end_lat,\n", + " set_end_lon=set_end_lon,\n", + " haul_start_datetime=datetime.combine(haul_start_date.date(), haul_start_time).replace(\n", + " tzinfo=timezone.utc\n", + " ),\n", + " haul_start_lat=haul_start_lat,\n", + " haul_start_lon=haul_start_lon,\n", + " haul_end_datetime=datetime.combine(haul_end_date.date(), haul_end_time).replace(\n", + " tzinfo=timezone.utc\n", + " ),\n", + " haul_end_lat=haul_end_lat,\n", + " haul_end_lon=haul_end_lon,\n", " )\n", - " # print({k:v for k,v in set_row.items()})\n", - " set_row = {k:[v] for k,v in set_row.items()}\n", + " # print({k:v for k,v in set_row.items()})\n", + " set_row = {k: [v] for k, v in set_row.items()}\n", "\n", - " # print(set_row)\n", + " # print(set_row)\n", " set_df = pandas.DataFrame(set_row)\n", - " set_df = set_df.set_index('set_id')\n", - " # print(set_df)\n", + " set_df = set_df.set_index(\"set_id\")\n", + " # print(set_df)\n", " if sets_df is None:\n", " sets_df = set_df\n", " else:\n", " sets_df = sets_df.append(set_df)\n", "\n", - " # print(sheet_name, fao_code_cell[1])\n", + " # print(sheet_name, fao_code_cell[1])\n", "\n", " reimport_sheet = pandas.read_excel(fname, sheet_name=sheet_name, skiprows=fao_code_cell[1])\n", - " reimport_sheet = reimport_sheet.loc[:, ~reimport_sheet.columns.str.contains('^Unnamed: 
')]\n", + " reimport_sheet = reimport_sheet.loc[:, ~reimport_sheet.columns.str.contains(\"^Unnamed: \")]\n", + "\n", " def replace_catch_hour(catch_hour):\n", " if type(catch_hour) == str:\n", - " # print(catch_hour)\n", + " # print(catch_hour)\n", " catch_hour = parse_datetime(catch_hour).time()\n", - " haul_datetime = set_row['haul_start_datetime'][0]\n", + " haul_datetime = set_row[\"haul_start_datetime\"][0]\n", " catch_datetime = haul_datetime.replace(hour=catch_hour.hour, minute=catch_hour.minute)\n", " if haul_datetime - catch_datetime > timedelta(hours=2):\n", " # the catch_datetime is somehow smaller than the start of the haul\n", " # this is outside of the haul window\n", " # this is probably because the haul started just before midnight, and continued to the next day\n", " catch_datetime += timedelta(days=1)\n", - " end_datetime = set_row['haul_end_datetime'][0]\n", + " end_datetime = set_row[\"haul_end_datetime\"][0]\n", " if catch_datetime - end_datetime > timedelta(hours=2):\n", " # adding a day didn't work, now it's outside of the haul window on the other side\n", - " raise ValueError(f'catch time {catch_hour} cannot fit between haul times {haul_datetime} - {end_datetime}')\n", + " raise ValueError(\n", + " f\"catch time {catch_hour} cannot fit between haul times {haul_datetime} - {end_datetime}\"\n", + " )\n", " return catch_datetime\n", "\n", " try:\n", - " reimport_sheet['catch_datetime'] = reimport_sheet['hour'].map(replace_catch_hour)\n", + " reimport_sheet[\"catch_datetime\"] = reimport_sheet[\"hour\"].map(replace_catch_hour)\n", " except BaseException as e:\n", " print(\"error on sheetname\", sheet_name)\n", " raise e\n", "\n", - " # print(reimport_sheet)\n", + " # print(reimport_sheet)\n", "\n", - " reimport_sheet['set_id'] = set_id\n", - " reimport_sheet.insert(0, 'set_id', reimport_sheet.pop('set_id'))\n", - " reimport_sheet['fish_id'] = reimport_sheet['set_id'] + \"_fish_\" + pandas.Series(map(lambda i: str(i).zfill(3), reimport_sheet.index.values))\n", - " reimport_sheet = reimport_sheet.set_index('fish_id')\n", + " reimport_sheet[\"set_id\"] = set_id\n", + " reimport_sheet.insert(0, \"set_id\", reimport_sheet.pop(\"set_id\"))\n", + " reimport_sheet[\"fish_id\"] = (\n", + " reimport_sheet[\"set_id\"]\n", + " + \"_fish_\"\n", + " + pandas.Series(map(lambda i: str(i).zfill(3), reimport_sheet.index.values))\n", + " )\n", + " reimport_sheet = reimport_sheet.set_index(\"fish_id\")\n", "\n", " if fish_df is None:\n", " fish_df = reimport_sheet\n", " else:\n", " fish_df = fish_df.append(reimport_sheet)\n", " except BaseException as e:\n", - " print('debug - in sheet', sheet_name)\n", + " print(\"debug - in sheet\", sheet_name)\n", " raise e\n", "\n", "\n", @@ -2358,8 +2417,7 @@ "\n", "# fish_df.pop('fish_id')\n", "\n", - "display(fish_df)\n", - "\n" + "display(fish_df)\n" ] }, { @@ -2381,7 +2439,8 @@ "source": [ "import awswrangler as wr\n", "import boto3\n", - "boto3.setup_default_session(profile_name='XXXXXXXX')" + "\n", + "boto3.setup_default_session(profile_name=\"XXXXXXXX\")" ] }, { @@ -2393,15 +2452,15 @@ "source": [ "# dir(wr.s3)\n", "# wr.s3.list_buckets()\n", - "bucket='51-gema-dev-dp-raw'\n", + "bucket = \"51-gema-dev-dp-raw\"\n", "# wr.s3.list_directories(f's3://{bucket}/tnc_edge/')\n", "# help(wr.s3.to_csv)\n", "\n", "\n", "print(\n", - " wr.s3.to_csv(trip_df, f's3://{bucket}/tnc_edge/{boat}_v1_bv_trips/{trip_id}.csv'),\n", - " wr.s3.to_csv(sets_df, f's3://{bucket}/tnc_edge/{boat}_v1_bv_sets/{trip_id}.csv'),\n", - " wr.s3.to_csv(fish_df, 
f's3://{bucket}/tnc_edge/{boat}_v1_bv_fish/{trip_id}.csv')\n", + " wr.s3.to_csv(trip_df, f\"s3://{bucket}/tnc_edge/{boat}_v1_bv_trips/{trip_id}.csv\"),\n", + " wr.s3.to_csv(sets_df, f\"s3://{bucket}/tnc_edge/{boat}_v1_bv_sets/{trip_id}.csv\"),\n", + " wr.s3.to_csv(fish_df, f\"s3://{bucket}/tnc_edge/{boat}_v1_bv_fish/{trip_id}.csv\"),\n", ")\n" ] }, diff --git a/reencode.py b/reencode.py index 98991ab..6df841a 100644 --- a/reencode.py +++ b/reencode.py @@ -1,62 +1,70 @@ - -import shutil -import click -import json import os -from pathlib import Path -import re -import schedule import subprocess -from subprocess import CompletedProcess import time +from pathlib import Path +from subprocess import CompletedProcess -from model import Base as ModelBase, VideoFile, OndeckData +import click +import schedule import sqlalchemy as sa -from sqlalchemy.orm import sessionmaker as SessionMaker, Query +from flask.config import Config as FlaskConfig +from sqlalchemy.orm import Query +from sqlalchemy.orm import sessionmaker as SessionMaker from sqlalchemy.orm.session import Session -from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +from model import Base as ModelBase +from model import VideoFile + +flaskconfig = FlaskConfig(root_path="") + +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') def system_gst_check() -> str: # nvidia hw encoder - p: CompletedProcess[str] = subprocess.run("gst-inspect-1.0 | grep -q 'nvv4l2h265enc:'", shell=True, capture_output=False) + p: CompletedProcess[str] = subprocess.run( + "gst-inspect-1.0 | grep -q 'nvv4l2h265enc:'", shell=True, capture_output=False + ) if p.returncode == 0: - return ' nvv4l2decoder mjpeg=true ! nvv4l2h265enc bitrate=2000000 ' - + return " nvv4l2decoder mjpeg=true ! nvv4l2h265enc bitrate=2000000 " + # osx hw encoder - p: CompletedProcess[str] = subprocess.run("gst-inspect-1.0 | grep -q 'vtenc_h265_hw:'", shell=True, capture_output=False) + p: CompletedProcess[str] = subprocess.run( + "gst-inspect-1.0 | grep -q 'vtenc_h265_hw:'", shell=True, capture_output=False + ) if p.returncode == 0: - return ' jpegdec ! vtenc_h265_hw bitrate=2000 ' + return " jpegdec ! 
vtenc_h265_hw bitrate=2000 " raise Exception("unknown gst plugins") + gst_internal_plugins = system_gst_check() + def next_videos(session: Session): - results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text( - """ - select video_files.* from video_files + results: Query[VideoFile] = session.query(VideoFile).from_statement( + sa.text( + """ + select video_files.* from video_files cross join ( - select coalesce(max(start_datetime), to_timestamp(0)) as latest_reencoded - from video_files + select coalesce(max(start_datetime), to_timestamp(0)) as latest_reencoded + from video_files where video_files.reencoded_stdout is not null or video_files.reencoded_stderr is not null ) latest_reencoded - where video_files.decrypted_path is not null + where video_files.decrypted_path is not null and video_files.reencoded_stdout is null and video_files.reencoded_stderr is null and video_files.start_datetime >= latest_reencoded.latest_reencoded order by video_files.start_datetime asc; - """)) - return list(results) + """ + ) + ) + return list(results) def run_reencode(output_dir: Path, sessionmaker: SessionMaker): - video_files: list[VideoFile] = [] with sessionmaker() as session: @@ -67,25 +75,28 @@ def run_reencode(output_dir: Path, sessionmaker: SessionMaker): video_file: VideoFile = video_files.pop(0) # print(video_file) decrypted_path = Path(video_file.decrypted_path) - last_dot_index: int = decrypted_path.name.index('.') + last_dot_index: int = decrypted_path.name.index(".") if last_dot_index < 0: last_dot_index = None mkv_out_file: Path = output_dir / Path(decrypted_path.name[0:last_dot_index] + "_reenc.mkv") - - cmd: str = "gst-launch-1.0 filesrc location='%s' ! avidemux ! \ + + cmd: str = ( + "gst-launch-1.0 filesrc location='%s' ! avidemux ! \ %s ! \ - h265parse ! matroskamux ! filesink location='%s'"%( - str(decrypted_path.absolute()), - gst_internal_plugins, - str(mkv_out_file.absolute()) - ) - + h265parse ! matroskamux ! filesink location='%s'" + % (str(decrypted_path.absolute()), gst_internal_plugins, str(mkv_out_file.absolute())) + ) + update_reencoded_path = None p: CompletedProcess[str] = subprocess.run(cmd, shell=True, capture_output=True, text=True) - if p.returncode == 0 and p.stderr.find("No such file") < 0 and p.stderr.find("Failed to start") < 0: + if ( + p.returncode == 0 + and p.stderr.find("No such file") < 0 + and p.stderr.find("Failed to start") < 0 + ): update_reencoded_path = str(mkv_out_file.absolute()) - + try: # shutil.copy(mkv_out_file, Path('/usbdrive/') / mkv_out_file.name ) pass @@ -93,43 +104,40 @@ def run_reencode(output_dir: Path, sessionmaker: SessionMaker): # FileNotFoundError or some other permissions error. Drive must not be inserted. Ignore. 
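+            # (The copy itself is commented out above, so this try/except is
+            # currently a no-op; presumably kept so the USB copy can be
+            # re-enabled without re-plumbing the error handling.)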
pass - with sessionmaker() as session: - session.execute(sa.text("update video_files set \ + session.execute( + sa.text( + "update video_files set \ reencoded_path = :reencoded_path, reencoded_datetime = current_timestamp, \ reencoded_stdout = :reencoded_stdout, reencoded_stderr = :reencoded_stderr \ - where decrypted_path = :decrypted_path;"), { - "reencoded_path": update_reencoded_path, - "reencoded_stdout":p.stdout, - "reencoded_stderr":p.stderr, - "decrypted_path": str(decrypted_path.absolute()), - } - ) + where decrypted_path = :decrypted_path;" + ), + { + "reencoded_path": update_reencoded_path, + "reencoded_stdout": p.stdout, + "reencoded_stderr": p.stderr, + "decrypted_path": str(decrypted_path.absolute()), + }, + ) session.commit() - + with sessionmaker() as session: video_files = next_videos(session) - - @click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) -@click.option('--output_dir', default=flaskconfig.get('VIDEO_OUTPUT_DIR')) -@click.option('--print_queue', is_flag=True) +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) +@click.option("--output_dir", default=flaskconfig.get("VIDEO_OUTPUT_DIR")) +@click.option("--print_queue", is_flag=True) def main(dbname, dbuser, output_dir, print_queue): - output_dir = Path(output_dir) - - - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) sessionmaker = SessionMaker(sa_engine) ModelBase.metadata.create_all(sa_engine) - if print_queue: with sessionmaker() as session: video_files = next_videos(session) @@ -140,10 +148,10 @@ def main(dbname, dbuser, output_dir, print_queue): def runonce(output_dir, sessionmaker): run_reencode(output_dir, sessionmaker) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce, output_dir, sessionmaker) - schedule.every(5).minutes.do(run_reencode, output_dir, sessionmaker ) + schedule.every(5).minutes.do(run_reencode, output_dir, sessionmaker) while 1: n = schedule.idle_seconds() @@ -152,10 +160,10 @@ def runonce(output_dir, sessionmaker): break elif n > 0: # sleep exactly the right amount of time - click.echo(f'sleeping for: {n}') + click.echo(f"sleeping for: {n}") time.sleep(n) schedule.run_pending() -if __name__ == '__main__': - main() +if __name__ == "__main__": + main() diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 441acca..0000000 --- a/requirements.txt +++ /dev/null @@ -1,14 +0,0 @@ -flask -flask_admin -sqlalchemy<2.0 -click -nmeasim -geographiclib -pynmeagps -wheel -psycopg2-binary -Flask-SQLAlchemy==3.0.3 -alembic -requests -schedule -boto3 diff --git a/run_aifish.py b/run_aifish.py index f6a6fce..4b47bb9 100644 --- a/run_aifish.py +++ b/run_aifish.py @@ -1,104 +1,122 @@ - -from datetime import datetime, timezone, timedelta -from dateutil import parser -import click -from collections import defaultdict import json import os -from pathlib import Path -import re -import requests -from requests import Response -import schedule import shutil import subprocess -from subprocess import CompletedProcess import sys import time +from collections import defaultdict +from datetime import datetime, timedelta, timezone +from pathlib import Path -from model import Base as ModelBase, VideoFile, AifishData, Track +import click +import requests +import schedule import sqlalchemy as sa 
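+# Orientation note: run_aifish.py is a long-running poller. It queues decrypted
+# videos for the aifish model, parses the model's JSON output into aifishdata
+# and track rows, and polls the model's local HTTP endpoint for errors.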
-from sqlalchemy.orm import sessionmaker as SessionMaker, Query +from dateutil import parser +from flask.config import Config as FlaskConfig +from requests import Response +from sqlalchemy.orm import Query +from sqlalchemy.orm import sessionmaker as SessionMaker from sqlalchemy.orm.session import Session -from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +from model import AifishData, Track, VideoFile +from model import Base as ModelBase + +flaskconfig = FlaskConfig(root_path="") -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -# select video_files.* from video_files +# select video_files.* from video_files # join ( -# select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday +# select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday # from ( # select date(start_datetime AT TIME ZONE 'utc' - interval '8 hours' ) as workday, -# count(*) as count -# from video_files -# where decrypted_path is not null +# count(*) as count +# from video_files +# where decrypted_path is not null # group by workday -# ) workday_counts +# ) workday_counts # where workday_counts.count > 4 -# ) workdays +# ) workdays # on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone '08:00Z' -# left join aifishdata -# on video_files.decrypted_path = aifishdata.video_uri -# where video_files.decrypted_path is not null +# left join aifishdata +# on video_files.decrypted_path = aifishdata.video_uri +# where video_files.decrypted_path is not null # and aifishdata.video_uri is null # and video_files.cam_name = 'cam1' # order by video_files.decrypted_datetime asc; + def next_videos(session: Session, thalos_cam_name): - workday_start_hour_at_utc_interval = '8 hours'; - workday_start_hour_at_utc_timestr = '08:00Z'; - num_vids_required = 4; - results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text( - """ - select video_files.* from video_files + workday_start_hour_at_utc_interval = "8 hours" + workday_start_hour_at_utc_timestr = "08:00Z" + num_vids_required = 4 + results: Query[VideoFile] = ( + session.query(VideoFile) + .from_statement( + sa.text( + """ + select video_files.* from video_files join ( - select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday + select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday from ( select date(start_datetime AT TIME ZONE 'utc' - interval :timei ) as workday, - count(*) as count - from video_files - where decrypted_path is not null + count(*) as count + from video_files + where decrypted_path is not null group by workday - ) workday_counts + ) workday_counts where workday_counts.count > :numvids - ) workdays - on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone :times - left join aifishdata - on video_files.decrypted_path = aifishdata.video_uri - where video_files.decrypted_path is not null + ) workdays + on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone :times + left join aifishdata + on video_files.decrypted_path = aifishdata.video_uri + where video_files.decrypted_path is not null and aifishdata.video_uri is null and video_files.cam_name = :cam_name order by video_files.decrypted_datetime asc; - """)).params( - { - 
"timei": workday_start_hour_at_utc_interval, - "times": workday_start_hour_at_utc_timestr, - "numvids": num_vids_required, - "cam_name": thalos_cam_name, - }) - return list(results) + """ + ) + ) + .params( + { + "timei": workday_start_hour_at_utc_interval, + "times": workday_start_hour_at_utc_timestr, + "numvids": num_vids_required, + "cam_name": thalos_cam_name, + } + ) + ) + return list(results) + def v2_next_videos(session: Session, thalos_cam_name): - results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text( - """ - select video_files.* from video_files - left join aifishdata - on video_files.decrypted_path = aifishdata.video_uri - where video_files.decrypted_path is not null + results: Query[VideoFile] = ( + session.query(VideoFile) + .from_statement( + sa.text( + """ + select video_files.* from video_files + left join aifishdata + on video_files.decrypted_path = aifishdata.video_uri + where video_files.decrypted_path is not null and video_files.start_datetime is not null and aifishdata.video_uri is null and video_files.cam_name = :cam_name order by video_files.start_datetime asc; - """)).params( - { - "cam_name": thalos_cam_name, - }) - return list(results) + """ + ) + ) + .params( + { + "cam_name": thalos_cam_name, + } + ) + ) + return list(results) MAGIC_VALUE_5_MiB = 5 * 1024 * 1024 @@ -111,47 +129,53 @@ def parse_json(session: Session, decrypted_path: Path, json_out_file: Path, only if len(detections) == 0: # error handling here pass - - fish_detections = list(filter(lambda d: d.get('class_name') == 'fish', detections)) + + fish_detections = list(filter(lambda d: d.get("class_name") == "fish", detections)) if len(fish_detections) == 0: # error handling here if only_tracks: return - session.execute(sa.text("""insert into aifishdata ( video_uri, output_uri, - count, detection_confidence ) - values ( :decrypted_path, :json_out_file , :cnt, :mean_c) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into aifishdata ( video_uri, output_uri, + count, detection_confidence ) + values ( :decrypted_path, :json_out_file , :cnt, :mean_c) + on conflict (video_uri) do update set output_uri = :json_out_file, count = :cnt, detection_confidence = :mean_c - ;"""), { - "decrypted_path": str(decrypted_path.absolute()), - "json_out_file":str(json_out_file.absolute()), - "cnt": 0, - "mean_c": 0, - } - ) + ;"""), + { + "decrypted_path": str(decrypted_path.absolute()), + "json_out_file": str(json_out_file.absolute()), + "cnt": 0, + "mean_c": 0, + }, + ) session.commit() return - last_frame = max(map(lambda d: d.get('frame'), detections)) + last_frame = max(map(lambda d: d.get("frame"), detections)) frames = [] - detectionconfidences = list(filter(lambda x: x is not None, map(lambda d: d.get('object_confidence'), fish_detections))) + detectionconfidences = list( + filter( + lambda x: x is not None, map(lambda d: d.get("object_confidence"), fish_detections) + ) + ) # = max(map(lambda detection: detection.get('object_confidence'), detections)) # trackedconfidences = [] tracks = defaultdict(list) for d in fish_detections: - tracks[d.get('track')].append(d) - + tracks[d.get("track")].append(d) + cnt = len(tracks.keys()) done_tracks = [] for track_id, detections in tracks.items(): - frame_nums = list(map(lambda d: d.get('frame'), detections)) + frame_nums = list(map(lambda d: d.get("frame"), detections)) min_frame = min(frame_nums) max_frame = max(frame_nums) @@ -163,41 +187,45 @@ def parse_json(session: Session, decrypted_path: Path, json_out_file: Path, only 
t.last_framenum = max_frame t.confidences = [0 for i in range(1 + max_frame - min_frame)] for d in detections: - t.confidences[d.get('frame') - min_frame] = d.get('object_confidence') or 0 + t.confidences[d.get("frame") - min_frame] = d.get("object_confidence") or 0 done_tracks.append(t) session.add_all(done_tracks) session.commit() if only_tracks: return - + if len(detectionconfidences) > 0: - meandetectionconfidence = float(sum(detectionconfidences)) / float(len(detectionconfidences)) + meandetectionconfidence = float(sum(detectionconfidences)) / float( + len(detectionconfidences) + ) else: meandetectionconfidence = 0 - # with sessionmaker() as session: - session.execute(sa.text("""insert into aifishdata ( video_uri, output_uri, - count, detection_confidence ) - values ( :decrypted_path, :json_out_file , :cnt, :mean_c) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into aifishdata ( video_uri, output_uri, + count, detection_confidence ) + values ( :decrypted_path, :json_out_file , :cnt, :mean_c) + on conflict (video_uri) do update set output_uri = :json_out_file, count = :cnt, detection_confidence = :mean_c - ;"""), { - "decrypted_path": str(decrypted_path.absolute()), - "json_out_file":str(json_out_file.absolute()), - "cnt":cnt, - "mean_c":meandetectionconfidence, - } - ) + ;"""), + { + "decrypted_path": str(decrypted_path.absolute()), + "json_out_file": str(json_out_file.absolute()), + "cnt": cnt, + "mean_c": meandetectionconfidence, + }, + ) session.commit() -VIDEO_TOO_SMALL = 1024*1024 + +VIDEO_TOO_SMALL = 1024 * 1024 + def enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: str): - video_files: list[VideoFile] = [] with sessionmaker() as session: @@ -212,8 +240,14 @@ def enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: str): # use_reencoded = False v_source_path = str(decrypted_path.absolute()) v_source_name = decrypted_path.name - if not decrypted_path.exists() or not decrypted_path.is_file() or decrypted_path.stat().st_size < VIDEO_TOO_SMALL: - click.echo(f"original video file {decrypted_path.name} failed basic checks. Using reencoded") + if ( + not decrypted_path.exists() + or not decrypted_path.is_file() + or decrypted_path.stat().st_size < VIDEO_TOO_SMALL + ): + click.echo( + f"original video file {decrypted_path.name} failed basic checks. Using reencoded" + ) # use_reencoded = True if video_file.reencoded_path is None: click.echo(f"video not reencoded, skipping video") @@ -221,18 +255,26 @@ def enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: str): reencoded_path = Path(video_file.reencoded_path) v_source_path = str(reencoded_path.absolute()) v_source_name = reencoded_path.name - if not reencoded_path.exists() or not reencoded_path.is_file() or reencoded_path.stat().st_size < VIDEO_TOO_SMALL: - click.echo(f"reencoded_video {reencoded_path.name} fails basic checks. skipping video") + if ( + not reencoded_path.exists() + or not reencoded_path.is_file() + or reencoded_path.stat().st_size < VIDEO_TOO_SMALL + ): + click.echo( + f"reencoded_video {reencoded_path.name} fails basic checks. 
skipping video" + ) continue rname = v_source_name[::-1] - last_dot_index: int = rname.find('.') + last_dot_index: int = rname.find(".") if last_dot_index < 0: json_out_file: Path = output_dir / Path(v_source_name + ".json") else: - json_out_file: Path = output_dir / Path(v_source_name[0:-last_dot_index-1] + ".json") + json_out_file: Path = output_dir / Path( + v_source_name[0 : -last_dot_index - 1] + ".json" + ) - aifish_processing_path = decrypted_path.parent / 'processing' / v_source_name + aifish_processing_path = decrypted_path.parent / "processing" / v_source_name # decrypted_path.rename(aifish_processing_path) @@ -243,42 +285,44 @@ def enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: str): shutil.copy(v_source_path, aifish_processing_path) with sessionmaker() as session: - session.execute(sa.text("""insert into aifishdata ( video_uri, processing_uri, output_uri, status ) + session.execute( + sa.text("""insert into aifishdata ( video_uri, processing_uri, output_uri, status ) values ( :video_uri, :processing_uri, :output_uri, :status ) - on conflict (video_uri) DO UPDATE SET status = :status ;"""), { - "video_uri": str(decrypted_path.absolute()), - "processing_uri": str(aifish_processing_path.absolute()), - "output_uri": str(json_out_file.absolute()), - "status": "queued" - } + on conflict (video_uri) DO UPDATE SET status = :status ;"""), + { + "video_uri": str(decrypted_path.absolute()), + "processing_uri": str(aifish_processing_path.absolute()), + "output_uri": str(json_out_file.absolute()), + "status": "queued", + }, ) session.commit() + MAGIC_VALUE_1_MINUTE = 60 + def parse(output_dir: Path, sessionmaker: SessionMaker): # only pick files that end with .json - a = filter(lambda x: x.is_file() and x.name.endswith('.json'), output_dir.iterdir()) + a = filter(lambda x: x.is_file() and x.name.endswith(".json"), output_dir.iterdir()) epoch_now = int(time.time()) # only pick files that haven't been modified in the last minute b = filter(lambda x: x.stat().st_mtime + MAGIC_VALUE_1_MINUTE < epoch_now, a) # get the filenames - c = map(lambda x: str(x.absolute()) , b) + c = map(lambda x: str(x.absolute()), b) found_aifish_files = list(c) click.echo("found {} .json files".format(str(len(found_aifish_files)))) with sessionmaker() as session: - results: Query[AifishData] = session.query(AifishData).where( AifishData.status == 'queued' ) + results: Query[AifishData] = session.query(AifishData).where(AifishData.status == "queued") for pending_aifishdata in results: - # click.echo("found {} queued row".format(str(pending_aifishdata))) if pending_aifishdata.output_uri in found_aifish_files: - video = Path(pending_aifishdata.video_uri) processing = Path(pending_aifishdata.processing_uri) output = Path(pending_aifishdata.output_uri) @@ -293,48 +337,47 @@ def parse(output_dir: Path, sessionmaker: SessionMaker): if processing.exists(): processing.unlink() - + parse_json(session, video, output) pending_aifishdata.status = "done" session.commit() - - def errors(sessionmaker: SessionMaker): try: - r: Response = requests.get('http://127.0.0.1:5000/errors') + r: Response = requests.get("http://127.0.0.1:5000/errors") click.echo("errors resp: {} body: {}".format(repr(r), repr(r.json()))) for error in r.json(): - input_path = error.get('input_path') - error_message = error.get('error_message') + input_path = error.get("input_path") + error_message = error.get("error_message") - if error_message.startswith('Task performance mode set to SKIP'): + if error_message.startswith("Task 
performance mode set to SKIP"): with sessionmaker() as session: - session.execute(sa.text("""insert into ondeckdata ( video_uri, status ) - values ( :decrypted_path, :skiphalfstatus ) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into ondeckdata ( video_uri, status ) + values ( :decrypted_path, :skiphalfstatus ) + on conflict (video_uri) do update set status = :skiphalfstatus - ;"""), { - "decrypted_path": input_path, - "skiphalfstatus": "runningskiphalf" - } + ;"""), + {"decrypted_path": input_path, "skiphalfstatus": "runningskiphalf"}, ) session.commit() continue with sessionmaker() as session: - session.execute(sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri ) - values ( :decrypted_path, :error_str ) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri ) + values ( :decrypted_path, :error_str ) + on conflict (video_uri) do update set status = 'errored', cocoannotations_uri = :error_str - ;"""), { - "decrypted_path": input_path, - "error_str": "ondeck model failure. stdout, stderr: " + error_message - } + ;"""), + { + "decrypted_path": input_path, + "error_str": "ondeck model failure. stdout, stderr: " + error_message, + }, ) session.commit() @@ -342,31 +385,41 @@ def errors(sessionmaker: SessionMaker): click.echo("ondeck model errors request exception: {}".format(e)) return + LOST_TIME_BUFFER = timedelta(minutes=30) + def lost_inprogress(sessionmaker: SessionMaker, aifish_processing_dir: Path): - last_start_time_s = subprocess.run('journalctl -o short-iso -u aifish_model.service | grep systemd | grep Started | tail -n 1 | sed "s/edge.*//"', shell=True, text=True, capture_output=True) + last_start_time_s = subprocess.run( + 'journalctl -o short-iso -u aifish_model.service | grep systemd | grep Started | tail -n 1 | sed "s/edge.*//"', + shell=True, + text=True, + capture_output=True, + ) last_start_time_dt = parser.parse(last_start_time_s.stdout) - - check_these = list(filter( - lambda f: f.is_file() - and (f.name.endswith('.avi') - or f.name.endswith('.mkv')) - and datetime.fromtimestamp(f.stat().st_mtime, tz=timezone.utc) + LOST_TIME_BUFFER < last_start_time_dt, - aifish_processing_dir.iterdir() - )) + check_these = list( + filter( + lambda f: f.is_file() + and (f.name.endswith(".avi") or f.name.endswith(".mkv")) + and datetime.fromtimestamp(f.stat().st_mtime, tz=timezone.utc) + LOST_TIME_BUFFER + < last_start_time_dt, + aifish_processing_dir.iterdir(), + ) + ) if len(check_these) > 0: abs_names = list(map(lambda f: str(f.absolute()), check_these)) with sessionmaker() as session: - rows: Query = session.query(AifishData) \ - .filter(AifishData.processing_uri.in_(abs_names)) \ - .filter(AifishData.status == 'queued') + rows: Query = ( + session.query(AifishData) + .filter(AifishData.processing_uri.in_(abs_names)) + .filter(AifishData.status == "queued") + ) for lost_file in rows.all(): - click.echo(f'found lost file in progress - deleting: {lost_file.processing_uri}') + click.echo(f"found lost file in progress - deleting: {lost_file.processing_uri}") Path(lost_file.processing_uri).unlink() - lost_file.status = 'errored' - session.commit() + lost_file.status = "errored" + session.commit() def ensure_is_dir(p: Path): @@ -383,21 +436,31 @@ def ensure_is_dir(p: Path): click.echo(f"Could not create folder {a}. 
Exiting") sys.exit(1) -@click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) -@click.option('--output_dir', default=flaskconfig.get('VIDEO_OUTPUT_DIR')) -@click.option('--engine', default=flaskconfig.get('ONDECK_MODEL_ENGINE')) -@click.option('--thalos_cam_name', default=flaskconfig.get('THALOS_CAM_NAME')) -@click.option('--print_queue', is_flag=True) -@click.option('--parsetesta') -@click.option('--parsetestb') -@click.option('--testlostinprogress', is_flag=True) -def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parsetesta, parsetestb, testlostinprogress): +@click.command() +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) +@click.option("--output_dir", default=flaskconfig.get("VIDEO_OUTPUT_DIR")) +@click.option("--engine", default=flaskconfig.get("ONDECK_MODEL_ENGINE")) +@click.option("--thalos_cam_name", default=flaskconfig.get("THALOS_CAM_NAME")) +@click.option("--print_queue", is_flag=True) +@click.option("--parsetesta") +@click.option("--parsetestb") +@click.option("--testlostinprogress", is_flag=True) +def main( + dbname, + dbuser, + output_dir, + engine, + thalos_cam_name, + print_queue, + parsetesta, + parsetestb, + testlostinprogress, +): video_output_dir = Path(output_dir) - aifish_processing_dir = video_output_dir / 'processing' - aifish_output_dir = video_output_dir / 'output' + aifish_processing_dir = video_output_dir / "processing" + aifish_output_dir = video_output_dir / "output" ensure_is_dir(aifish_processing_dir) ensure_is_dir(aifish_output_dir) @@ -405,8 +468,7 @@ def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parse if engine: engine = Path(engine) - - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) sessionmaker = SessionMaker(sa_engine) ModelBase.metadata.create_all(sa_engine) @@ -427,31 +489,29 @@ def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parse lost_inprogress(sessionmaker, aifish_processing_dir) return - def runonce_enqueue(aifish_output_dir, sessionmaker, thalos_cam_name): enqueue(aifish_output_dir, sessionmaker, thalos_cam_name) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce_enqueue, aifish_output_dir, sessionmaker, thalos_cam_name) - schedule.every(5).minutes.do(enqueue, aifish_output_dir, sessionmaker, thalos_cam_name ) + schedule.every(5).minutes.do(enqueue, aifish_output_dir, sessionmaker, thalos_cam_name) def runonce_errors(sessionmaker): errors(sessionmaker) return schedule.CancelJob - - schedule.every(1).seconds.do(runonce_errors, sessionmaker) + + schedule.every(1).seconds.do(runonce_errors, sessionmaker) schedule.every(1).minutes.do(errors, sessionmaker) def runonce_parse(aifish_output_dir, sessionmaker): parse(aifish_output_dir, sessionmaker) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce_parse, aifish_output_dir, sessionmaker) - schedule.every(1).minutes.do(parse, aifish_output_dir, sessionmaker ) - + schedule.every(1).minutes.do(parse, aifish_output_dir, sessionmaker) # def runonce_lost_inprogress(sessionmaker, aifish_processing_dir): # lost_inprogress(sessionmaker, aifish_processing_dir) @@ -459,8 +519,6 @@ def runonce_parse(aifish_output_dir, sessionmaker): # schedule.every(1).seconds.do(runonce_lost_inprogress, sessionmaker, 
aifish_processing_dir) # schedule.every(5).minutes.do(lost_inprogress, sessionmaker, aifish_processing_dir ) - - while 1: n = schedule.idle_seconds() if n is None: @@ -472,5 +530,6 @@ def runonce_parse(aifish_output_dir, sessionmaker): time.sleep(n) schedule.run_pending() -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/run_ondeck.py b/run_ondeck.py index 3401ff8..e7562ba 100644 --- a/run_ondeck.py +++ b/run_ondeck.py @@ -1,67 +1,78 @@ - -from datetime import datetime, timezone -import click import json import os -from pathlib import Path -import re -import requests -from requests import Response -import schedule import subprocess -from subprocess import CompletedProcess import time +from datetime import timezone +from pathlib import Path +from subprocess import CompletedProcess -from model import Base as ModelBase, VideoFile, OndeckData, Track +import click +import requests +import schedule import sqlalchemy as sa -from sqlalchemy.orm import sessionmaker as SessionMaker, Query +from flask.config import Config as FlaskConfig +from requests import Response +from sqlalchemy.orm import Query +from sqlalchemy.orm import sessionmaker as SessionMaker from sqlalchemy.orm.session import Session -from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +from model import Base as ModelBase +from model import OndeckData, Track, VideoFile + +flaskconfig = FlaskConfig(root_path="") + +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') def next_videos(session: Session, thalos_cam_name): - workday_start_hour_at_utc_interval = '8 hours'; - workday_start_hour_at_utc_timestr = '08:00Z'; - num_vids_required = 4; - results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text( - """ - select video_files.* from video_files + workday_start_hour_at_utc_interval = "8 hours" + workday_start_hour_at_utc_timestr = "08:00Z" + num_vids_required = 4 + results: Query[VideoFile] = ( + session.query(VideoFile) + .from_statement( + sa.text( + """ + select video_files.* from video_files join ( - select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday + select COALESCE(max(workday_counts.workday), '1970-01-01') most_recent_active_workday from ( select date(start_datetime AT TIME ZONE 'utc' - interval :timei ) as workday, - count(*) as count - from video_files - where decrypted_path is not null + count(*) as count + from video_files + where decrypted_path is not null group by workday - ) workday_counts + ) workday_counts where workday_counts.count > :numvids - ) workdays - on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone :times - left join ondeckdata - on video_files.decrypted_path = ondeckdata.video_uri - where video_files.decrypted_path is not null + ) workdays + on video_files.start_datetime >= workdays.most_recent_active_workday + time with time zone :times + left join ondeckdata + on video_files.decrypted_path = ondeckdata.video_uri + where video_files.decrypted_path is not null and ondeckdata.video_uri is null and video_files.cam_name = :cam_name order by video_files.decrypted_datetime asc; - """)).params( - { - "timei": workday_start_hour_at_utc_interval, - "times": workday_start_hour_at_utc_timestr, - "numvids": num_vids_required, - "cam_name": thalos_cam_name, - }) - return list(results) + """ 
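+            # :timei ("8 hours") and :times ("08:00Z") encode the same 08:00 UTC
+            # workday boundary: the interval shifts timestamps when bucketing
+            # rows into workdays, and the time-with-time-zone value anchors the
+            # start_datetime cutoff in the join above.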
+ ) + ) + .params( + { + "timei": workday_start_hour_at_utc_interval, + "times": workday_start_hour_at_utc_timestr, + "numvids": num_vids_required, + "cam_name": thalos_cam_name, + } + ) + ) + return list(results) + MAGIC_VALUE_5_MiB = 5 * 1024 * 1024 + def run_ondeck(output_dir: Path, engine: Path, sessionmaker: SessionMaker, thalos_cam_name): - video_files: list[VideoFile] = [] with sessionmaker() as session: @@ -72,10 +83,12 @@ def run_ondeck(output_dir: Path, engine: Path, sessionmaker: SessionMaker, thalo video_file: VideoFile = video_files.pop(0) # click.echo(video_file) decrypted_path = Path(video_file.decrypted_path) - last_dot_index: int = decrypted_path.name.index('.') + last_dot_index: int = decrypted_path.name.index(".") if last_dot_index < 0: last_dot_index = None - json_out_file: Path = output_dir / Path(decrypted_path.name[0:last_dot_index] + "_ondeck.json") + json_out_file: Path = output_dir / Path( + decrypted_path.name[0:last_dot_index] + "_ondeck.json" + ) ondeck_input = str(decrypted_path.absolute()) try: @@ -86,177 +99,193 @@ def run_ondeck(output_dir: Path, engine: Path, sessionmaker: SessionMaker, thalo pass # sudo /usr/bin/docker run --rm -v /videos:/videos --runtime=nvidia --network none gcr.io/edge-gcr/edge-service-image:latest --output /videos --input /videos/21-07-2023-09-55.avi - cmd: str = "sudo /usr/bin/docker run --rm -v /videos:/videos --runtime=nvidia --network none \ + cmd: str = ( + "sudo /usr/bin/docker run --rm -v /videos:/videos --runtime=nvidia --network none \ gcr.io/edge-gcr/edge-service-image:latest \ - --output %s --input %s"%( - str(json_out_file.absolute()), - ondeck_input - ) + --output %s --input %s" + % (str(json_out_file.absolute()), ondeck_input) + ) if engine: - cmd += " --model %s"%( str(engine.absolute()), ) + cmd += " --model %s" % (str(engine.absolute()),) p: CompletedProcess[str] = subprocess.run(cmd, shell=True, capture_output=True, text=True) if p.returncode == 0: - with sessionmaker() as session: parse_json(session, decrypted_path, json_out_file) else: # click.echo("ondeck model failure. stdout, stderr: {} {}".format( p.stdout, p.stderr)) with sessionmaker() as session: - session.execute(sa.text("insert into ondeckdata ( video_uri, cocoannotations_uri ) \ - values ( :decrypted_path, :error_str ) ;"), { - "decrypted_path": str(decrypted_path.absolute()), - "error_str": "ondeck model failure. stdout, stderr: " + p.stdout + p.stderr - } + session.execute( + sa.text( + "insert into ondeckdata ( video_uri, cocoannotations_uri ) \ + values ( :decrypted_path, :error_str ) ;" + ), + { + "decrypted_path": str(decrypted_path.absolute()), + "error_str": "ondeck model failure. 
stdout, stderr: " + p.stdout + p.stderr, + }, ) session.commit() with sessionmaker() as session: video_files = next_videos(session, thalos_cam_name) + def parse_json(session: Session, decrypted_path: Path, json_out_file: Path, only_tracks=False): with json_out_file.open() as f: o: dict = json.load(f) - if 'overallRuntimeMs' in o.keys(): + if "overallRuntimeMs" in o.keys(): if only_tracks: return v1_parse_json(session, decrypted_path, json_out_file, o) - elif 'overallRuntimeSeconds' in o.keys(): + elif "overallRuntimeSeconds" in o.keys(): v2_parse_json(session, decrypted_path, json_out_file, o, only_tracks=only_tracks) + def v1_parse_json(session: Session, decrypted_path: Path, json_out_file: Path, o: dict): - cnt = o.get('overallCount') - runtime = o.get('overallRuntimeMs') - frames = o.get('frames', []) + cnt = o.get("overallCount") + runtime = o.get("overallRuntimeMs") + frames = o.get("frames", []) - ## stats - trackedframes = filter(lambda frame: len(frame.get('trackingIds'))>0, frames) - confidencesarrs = map(lambda frame: frame.get('confidence'), trackedframes) + ## stats + trackedframes = filter(lambda frame: len(frame.get("trackingIds")) > 0, frames) + confidencesarrs = map(lambda frame: frame.get("confidence"), trackedframes) confidences = [c for confidencesarr in confidencesarrs for c in confidencesarr] if len(confidences) > 0: meanconf = float(sum(confidences)) / float(len(confidences)) else: meanconf = 0 - ## tracks + ## tracks tracks = {} for f in frames: - frame_confidences = f.get('confidence') + frame_confidences = f.get("confidence") i = 0 - for trackid in f.get('trackingIds'): + for trackid in f.get("trackingIds"): if trackid not in tracks: t = { - "first_frame": f.get('frameNum'), - "first_timestamp": f.get('timestamp'), - "confidences": [] - } + "first_frame": f.get("frameNum"), + "first_timestamp": f.get("timestamp"), + "confidences": [], + } tracks[trackid] = t t = tracks[trackid] if len(frame_confidences) > i: - t['confidences'].append(frame_confidences[i]) + t["confidences"].append(frame_confidences[i]) else: - t['confidences'].append(0) + t["confidences"].append(0) i += 1 - # with sessionmaker() as session: - session.execute(sa.text("insert into ondeckdata ( video_uri, cocoannotations_uri, \ + session.execute( + sa.text( + "insert into ondeckdata ( video_uri, cocoannotations_uri, \ overallcount, overallruntimems, tracked_confidence ) \ - values ( :decrypted_path, :json_out_file , :cnt, :runt, :mean_c) ;"), { - "decrypted_path": str(decrypted_path.absolute()), - "json_out_file":str(json_out_file.absolute()), - "cnt":cnt, - "runt":runtime, - "mean_c":meanconf, - } - ) + values ( :decrypted_path, :json_out_file , :cnt, :runt, :mean_c) ;" + ), + { + "decrypted_path": str(decrypted_path.absolute()), + "json_out_file": str(json_out_file.absolute()), + "cnt": cnt, + "runt": runtime, + "mean_c": meanconf, + }, + ) session.commit() -def v2_parse_json(session: Session, decrypted_path: Path, json_out_file: Path, o: dict, only_tracks=False): - - cnt = o.get('overallCount') - catches = o.get('overallCatches') - discards = o.get('overallDiscards') - runtime = o.get('overallRuntimeSeconds') - frames = o.get('frames', []) - +def v2_parse_json( + session: Session, decrypted_path: Path, json_out_file: Path, o: dict, only_tracks=False +): + cnt = o.get("overallCount") + catches = o.get("overallCatches") + discards = o.get("overallDiscards") + runtime = o.get("overallRuntimeSeconds") + frames = o.get("frames", []) detectionconfidences = [] # trackedconfidences = [] active_tracks = {} 
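+    # active_tracks maps a trackingId to the Track being accumulated for it;
+    # ids that drop out of allActiveTrackingIds are finalized into done_tracks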
done_tracks: list[Track] = [] - - for frame in frames: - detectionconfidences.extend(frame.get('confidence')) - + detectionconfidences.extend(frame.get("confidence")) + # idx = 0 # for trackingId in frame.get('trackingIds'): # if trackingId in frame.get('allActiveTrackingIds'): # trackedconfidences.append(frame.get('confidence')[idx]) # idx += 1 - - if 'allActiveTrackingIds' not in frame: + + if "allActiveTrackingIds" not in frame: continue - for activeTrackingId_str in frame['allActiveTrackingIds']: + for activeTrackingId_str in frame["allActiveTrackingIds"]: activeTrackingId = int(activeTrackingId_str) if activeTrackingId not in active_tracks.keys(): active_tracks[activeTrackingId] = Track() active_tracks[activeTrackingId].video_uri = str(decrypted_path.absolute()) active_tracks[activeTrackingId].cocoannotations_uri = str(json_out_file.absolute()) active_tracks[activeTrackingId].track_id = activeTrackingId - active_tracks[activeTrackingId].first_framenum = frame['frameNum'] + active_tracks[activeTrackingId].first_framenum = frame["frameNum"] active_tracks[activeTrackingId].confidences = [] t = active_tracks[activeTrackingId] - try: - idx = frame['trackingIds'].index(activeTrackingId_str) - t.confidences.append(frame['confidence'][idx]) + try: + idx = frame["trackingIds"].index(activeTrackingId_str) + t.confidences.append(frame["confidence"][idx]) except: t.confidences.append(0.0) for track_id in list(active_tracks.keys()): track = active_tracks[track_id] - if str(track_id) not in frame['allActiveTrackingIds']: + if str(track_id) not in frame["allActiveTrackingIds"]: # the confidences will probably have a long trail of 0s at the end, which are not useful # cut them out track.confidences.reverse() - last_nonzero_index = next((i for (i,x) in enumerate(track.confidences) if x), None) + last_nonzero_index = next((i for (i, x) in enumerate(track.confidences) if x), None) track.confidences.reverse() if last_nonzero_index: track.confidences = track.confidences[:-last_nonzero_index] - track.last_framenum = frame['frameNum'] + track.last_framenum = frame["frameNum"] done_tracks.append(track) active_tracks.pop(track_id) - session.add_all(done_tracks) - session.commit() if only_tracks: return - + if len(detectionconfidences) > 0: - meandetectionconfidence = float(sum(detectionconfidences)) / float(len(detectionconfidences)) + meandetectionconfidence = float(sum(detectionconfidences)) / float( + len(detectionconfidences) + ) else: meandetectionconfidence = 0 - if len(done_tracks) > 0: - tracks_avg_conf = list(map(lambda t: float(sum(t.confidences)) / float(len(t.confidences)) if len(t.confidences) else 0.0, done_tracks)) - meantrackedconfidence = float(sum(tracks_avg_conf)) / float(len(tracks_avg_conf)) if len(tracks_avg_conf) else 0.0 + tracks_avg_conf = list( + map( + lambda t: float(sum(t.confidences)) / float(len(t.confidences)) + if len(t.confidences) + else 0.0, + done_tracks, + ) + ) + meantrackedconfidence = ( + float(sum(tracks_avg_conf)) / float(len(tracks_avg_conf)) + if len(tracks_avg_conf) + else 0.0 + ) else: meantrackedconfidence = 0 - # with sessionmaker() as session: - session.execute(sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri, - overallcount, overallcatches, overalldiscards, overallruntimems, detection_confidence, tracked_confidence ) - values ( :decrypted_path, :json_out_file , :cnt, :catches, :discards, :runt, :mean_c, :mean_t) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri, + 
overallcount, overallcatches, overalldiscards, overallruntimems, detection_confidence, tracked_confidence ) + values ( :decrypted_path, :json_out_file , :cnt, :catches, :discards, :runt, :mean_c, :mean_t) + on conflict (video_uri) do update set cocoannotations_uri = :json_out_file, overallcount = :cnt, overallruntimems = :runt, @@ -264,38 +293,48 @@ def v2_parse_json(session: Session, decrypted_path: Path, json_out_file: Path, o overallcatches = :catches, overalldiscards = :discards, detection_confidence = :mean_c - ;"""), { - "decrypted_path": str(decrypted_path.absolute()), - "json_out_file":str(json_out_file.absolute()), - "cnt":cnt, - "catches":catches, - "discards":discards, - "runt":runtime, - "mean_c":meandetectionconfidence, - "mean_t":meantrackedconfidence, - } - ) + ;"""), + { + "decrypted_path": str(decrypted_path.absolute()), + "json_out_file": str(json_out_file.absolute()), + "cnt": cnt, + "catches": catches, + "discards": discards, + "runt": runtime, + "mean_c": meandetectionconfidence, + "mean_t": meantrackedconfidence, + }, + ) session.commit() + def v2_next_videos(session: Session, thalos_cam_name): - results: Query[VideoFile] = session.query(VideoFile).from_statement(sa.text( - """ - select video_files.* from video_files - left join ondeckdata - on video_files.decrypted_path = ondeckdata.video_uri - where video_files.decrypted_path is not null + results: Query[VideoFile] = ( + session.query(VideoFile) + .from_statement( + sa.text( + """ + select video_files.* from video_files + left join ondeckdata + on video_files.decrypted_path = ondeckdata.video_uri + where video_files.decrypted_path is not null and video_files.start_datetime is not null and ondeckdata.video_uri is null and video_files.cam_name = :cam_name order by video_files.start_datetime asc; - """)).params( - { - "cam_name": thalos_cam_name, - }) - return list(results) + """ + ) + ) + .params( + { + "cam_name": thalos_cam_name, + } + ) + ) + return list(results) + def v2_enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: str): - video_files: list[VideoFile] = [] with sessionmaker() as session: @@ -306,10 +345,12 @@ def v2_enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: st video_file: VideoFile = video_files.pop(0) # print(video_file) decrypted_path = Path(video_file.decrypted_path) - last_dot_index: int = decrypted_path.name.index('.') + last_dot_index: int = decrypted_path.name.index(".") if last_dot_index < 0: last_dot_index = None - json_out_file: Path = output_dir / Path(decrypted_path.name[0:last_dot_index] + "_ondeck.json") + json_out_file: Path = output_dir / Path( + decrypted_path.name[0:last_dot_index] + "_ondeck.json" + ) ondeck_input = str(decrypted_path.absolute()) # try: @@ -320,95 +361,110 @@ def v2_enqueue(output_dir: Path, sessionmaker: SessionMaker, thalos_cam_name: st # pass try: - r: Response = requests.post('http://127.0.0.1:5000/inference', json={ - "input_path":ondeck_input, - "output_path":str(json_out_file.absolute()), - "current_timestamp": video_file.start_datetime.astimezone(timezone.utc).replace(tzinfo=None).isoformat() + ".00Z" - }) + r: Response = requests.post( + "http://127.0.0.1:5000/inference", + json={ + "input_path": ondeck_input, + "output_path": str(json_out_file.absolute()), + "current_timestamp": video_file.start_datetime.astimezone(timezone.utc) + .replace(tzinfo=None) + .isoformat() + + ".00Z", + }, + ) click.echo("resp: {} body: {}".format(repr(r), repr(r.json()))) with sessionmaker() as session: - 
session.execute(sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri, status ) + session.execute( + sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri, status ) values ( :ondeck_input, :ondeck_output, :status ) - on conflict (video_uri) DO UPDATE SET status = :status ;"""), { - "ondeck_input": ondeck_input, - "ondeck_output": str(json_out_file.absolute()), - "status": "queued" - } + on conflict (video_uri) DO UPDATE SET status = :status ;"""), + { + "ondeck_input": ondeck_input, + "ondeck_output": str(json_out_file.absolute()), + "status": "queued", + }, ) session.commit() except requests.exceptions.RequestException as e: click.echo("ondeck model request exception: {}".format(e)) return + MAGIC_VALUE_1_MINUTE = 60 + def v2_parse(output_dir: Path, sessionmaker: SessionMaker): # only pick files that end with _ondeck.json - a = filter(lambda x: x.is_file() and x.name.endswith('_ondeck.json'), output_dir.iterdir()) + a = filter(lambda x: x.is_file() and x.name.endswith("_ondeck.json"), output_dir.iterdir()) epoch_now = int(time.time()) # only pick files that haven't been modified in the last minute b = filter(lambda x: x.stat().st_mtime + MAGIC_VALUE_1_MINUTE < epoch_now, a) # get the filenames - c = map(lambda x: str(x.absolute()) , b) + c = map(lambda x: str(x.absolute()), b) found_ondeck_files = list(c) click.echo("found {} _ondeck.json files".format(str(len(found_ondeck_files)))) with sessionmaker() as session: - results: Query[OndeckData] = session.query(OndeckData).where( sa.or_( OndeckData.status == 'queued' , OndeckData.status == 'runningskiphalf' )) + results: Query[OndeckData] = session.query(OndeckData).where( + sa.or_(OndeckData.status == "queued", OndeckData.status == "runningskiphalf") + ) for pending_ondeckdata in results: is_skiphalf = pending_ondeckdata.status == "runningskiphalf" # click.echo("found {} queued row".format(str(pending_ondeckdata))) if pending_ondeckdata.cocoannotations_uri in found_ondeck_files: pending_ondeckdata.status = "parsing" session.commit() - - parse_json(session, Path(pending_ondeckdata.video_uri), Path(pending_ondeckdata.cocoannotations_uri)) + + parse_json( + session, + Path(pending_ondeckdata.video_uri), + Path(pending_ondeckdata.cocoannotations_uri), + ) pending_ondeckdata.status = "doneskiphalf" if is_skiphalf else "done" session.commit() - - def v2_errors(sessionmaker: SessionMaker): try: - r: Response = requests.get('http://127.0.0.1:5000/errors') + r: Response = requests.get("http://127.0.0.1:5000/errors") click.echo("errors resp: {} body: {}".format(repr(r), repr(r.json()))) for error in r.json(): - input_path = error.get('input_path') - error_message = error.get('error_message') + input_path = error.get("input_path") + error_message = error.get("error_message") - if error_message.startswith('Task performance mode set to SKIP'): + if error_message.startswith("Task performance mode set to SKIP"): with sessionmaker() as session: - session.execute(sa.text("""insert into ondeckdata ( video_uri, status ) - values ( :decrypted_path, :skiphalfstatus ) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into ondeckdata ( video_uri, status ) + values ( :decrypted_path, :skiphalfstatus ) + on conflict (video_uri) do update set status = :skiphalfstatus - ;"""), { - "decrypted_path": input_path, - "skiphalfstatus": "runningskiphalf" - } + ;"""), + {"decrypted_path": input_path, "skiphalfstatus": "runningskiphalf"}, ) session.commit() continue with sessionmaker() as session: - 
session.execute(sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri ) - values ( :decrypted_path, :error_str ) - on conflict (video_uri) do update set + session.execute( + sa.text("""insert into ondeckdata ( video_uri, cocoannotations_uri ) + values ( :decrypted_path, :error_str ) + on conflict (video_uri) do update set status = 'errored', cocoannotations_uri = :error_str - ;"""), { - "decrypted_path": input_path, - "error_str": "ondeck model failure. stdout, stderr: " + error_message - } + ;"""), + { + "decrypted_path": input_path, + "error_str": "ondeck model failure. stdout, stderr: " + error_message, + }, ) session.commit() @@ -416,25 +472,34 @@ def v2_errors(sessionmaker: SessionMaker): click.echo("ondeck model errors request exception: {}".format(e)) return -@click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) -@click.option('--output_dir', default=flaskconfig.get('VIDEO_OUTPUT_DIR')) -@click.option('--engine', default=flaskconfig.get('ONDECK_MODEL_ENGINE')) -@click.option('--thalos_cam_name', default=flaskconfig.get('THALOS_CAM_NAME')) -@click.option('--print_queue', is_flag=True) -@click.option('--parsetesta') -@click.option('--parsetestb') -@click.option('--force_v2', is_flag=True) -def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parsetesta, parsetestb, force_v2: bool): +@click.command() +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) +@click.option("--output_dir", default=flaskconfig.get("VIDEO_OUTPUT_DIR")) +@click.option("--engine", default=flaskconfig.get("ONDECK_MODEL_ENGINE")) +@click.option("--thalos_cam_name", default=flaskconfig.get("THALOS_CAM_NAME")) +@click.option("--print_queue", is_flag=True) +@click.option("--parsetesta") +@click.option("--parsetestb") +@click.option("--force_v2", is_flag=True) +def main( + dbname, + dbuser, + output_dir, + engine, + thalos_cam_name, + print_queue, + parsetesta, + parsetestb, + force_v2: bool, +): output_dir = Path(output_dir) if engine: engine = Path(engine) - - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) sessionmaker = SessionMaker(sa_engine) ModelBase.metadata.create_all(sa_engine) @@ -452,46 +517,46 @@ def main(dbname, dbuser, output_dir, engine, thalos_cam_name, print_queue, parse use_v2 = False try: - r: Response = requests.get('http://127.0.0.1:5000/queueSummary') + r: Response = requests.get("http://127.0.0.1:5000/queueSummary") use_v2 = r.status_code == 200 click.echo("resp: {} body: {}".format(repr(r), repr(r.json()))) except requests.exceptions.RequestException as e: click.echo("ondeck model request exception: {}".format(e)) if force_v2 or use_v2: - + def runonce_enqueue(output_dir, sessionmaker, thalos_cam_name): v2_enqueue(output_dir, sessionmaker, thalos_cam_name) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce_enqueue, output_dir, sessionmaker, thalos_cam_name) - schedule.every(5).minutes.do(v2_enqueue, output_dir, sessionmaker, thalos_cam_name ) + schedule.every(5).minutes.do(v2_enqueue, output_dir, sessionmaker, thalos_cam_name) def runonce_errors(sessionmaker): v2_errors(sessionmaker) return schedule.CancelJob - - schedule.every(1).seconds.do(runonce_errors, sessionmaker) + + schedule.every(1).seconds.do(runonce_errors, sessionmaker) 
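+        # query the model's /errors endpoint once at startup, then every
+        # minute, so failed videos get their ondeckdata status updated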
schedule.every(1).minutes.do(v2_errors, sessionmaker) def runonce_parse(output_dir, sessionmaker): v2_parse(output_dir, sessionmaker) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce_parse, output_dir, sessionmaker) - schedule.every(1).minutes.do(v2_parse, output_dir, sessionmaker ) + schedule.every(1).minutes.do(v2_parse, output_dir, sessionmaker) else: def runonce(output_dir, engine, sessionmaker, thalos_cam_name): run_ondeck(output_dir, engine, sessionmaker, thalos_cam_name) return schedule.CancelJob - + schedule.every(1).seconds.do(runonce, output_dir, engine, sessionmaker, thalos_cam_name) - schedule.every(5).minutes.do(run_ondeck, output_dir, engine, sessionmaker, thalos_cam_name ) + schedule.every(5).minutes.do(run_ondeck, output_dir, engine, sessionmaker, thalos_cam_name) while 1: n = schedule.idle_seconds() @@ -504,6 +569,6 @@ def runonce(output_dir, engine, sessionmaker, thalos_cam_name): time.sleep(n) schedule.run_pending() -if __name__ == '__main__': - main() +if __name__ == "__main__": + main() diff --git a/s3_uploader.py b/s3_uploader.py index 77cdbdb..1b7b0ca 100644 --- a/s3_uploader.py +++ b/s3_uploader.py @@ -1,106 +1,127 @@ -import json import io - -from flask import Flask -from flask_admin import Admin - -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, Session -import psycopg2 -from psycopg2.pool import SimpleConnectionPool import os - -from model import Base as ModelBase, RiskVector, RiskVectorModelView, Test, TestModelView -from vector import GpsVector, FishAiEventsComeInFourHourBurstsVector, InternetVector, EquipmentOutageAggVector - -import sqlite3 +import string +import time from datetime import datetime, timedelta, timezone +import boto3 import click - +import psycopg2 import schedule -import re -import time -import string +from flask.config import Config as FlaskConfig +from psycopg2.pool import SimpleConnectionPool +from sqlalchemy.orm import Session +from model import Test -from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +flaskconfig = FlaskConfig(root_path="") -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -import boto3 +s3 = boto3.resource("s3") +bucket = s3.Bucket("51-gema-dev-dp-raw") -s3 = boto3.resource('s3') -bucket = s3.Bucket('51-gema-dev-dp-raw') + +csvprintable = string.printable +csvprintable = csvprintable[0 : 1 + csvprintable.index("\t")] +csvprintable = csvprintable.replace(",", "") -csvprintable=string.printable -csvprintable = csvprintable[0:1+csvprintable.index("\t")] -csvprintable = csvprintable.replace(',', '') def csvfilter(s): - return ''.join(filter(lambda c: c in csvprintable, s)) + return "".join(filter(lambda c: c in csvprintable, s)) -def DEPRECATED_export_method_with_sqlalchemy_models(session: Session): +def DEPRECATED_export_method_with_sqlalchemy_models(session: Session): try: now = datetime.now().astimezone(timezone.utc) - - result = session.query(Test)\ - .where(Test.datetime_from > now - timedelta(days=13), Test.vector_id == 2)\ - .order_by(Test.datetime.desc())\ - .limit(1).all() + + result = ( + session.query(Test) + .where(Test.datetime_from > now - timedelta(days=13), Test.vector_id == 2) + .order_by(Test.datetime.desc()) + .limit(1) + .all() + ) rows = list(result) if len(rows) > 0: - partition = str(now.year) + "/" + str(now.month) + "/" + 
str(now.day) - + body = io.BytesIO() - body.write((','.join([column.name for column in Test.__mapper__.columns]) + '\n').encode()) - [body.write((','.join([str(getattr(row, column.name)) for column in Test.__mapper__.columns]) + '\n').encode()) for row in rows] - bucket.put_object(Key="tnc_edge/"+Test.__tablename__+"/"+partition+"/"+str(int(now.timestamp()))+".csv", Body=body.getvalue()) + body.write( + (",".join([column.name for column in Test.__mapper__.columns]) + "\n").encode() + ) + [ + body.write( + ( + ",".join( + [str(getattr(row, column.name)) for column in Test.__mapper__.columns] + ) + + "\n" + ).encode() + ) + for row in rows + ] + bucket.put_object( + Key="tnc_edge/" + + Test.__tablename__ + + "/" + + partition + + "/" + + str(int(now.timestamp())) + + ".csv", + Body=body.getvalue(), + ) except Exception as e: print("Error: exception in s3 uploader", e) -def DEPRECATED_s3uploader(cpool: SimpleConnectionPool, boat, ver): +def DEPRECATED_s3uploader(cpool: SimpleConnectionPool, boat, ver): DEPRECATED_tables = [ - 'deckhandevents', - 'gpsdata', - 'internetdata', - 'deckhandevents_mostrecentlonglineevent_jsonextracted', - 'tests', - 'video_files', - 'tracks', - 'ondeckdata', - 'aifishdata', + "deckhandevents", + "gpsdata", + "internetdata", + "deckhandevents_mostrecentlonglineevent_jsonextracted", + "tests", + "video_files", + "tracks", + "ondeckdata", + "aifishdata", ] conn: psycopg2.connection = cpool.getconn() try: with conn.cursor() as cur: for table in DEPRECATED_tables: - # print(table) - cur.execute("SELECT column_name FROM information_schema.columns \ - WHERE table_name = %s order by ordinal_position;", (table,)) + cur.execute( + "SELECT column_name FROM information_schema.columns \ + WHERE table_name = %s order by ordinal_position;", + (table,), + ) columns = cur.fetchall() - - cur.execute("select max(a.max), CURRENT_TIMESTAMP from ( \ + + cur.execute( + "select max(a.max), CURRENT_TIMESTAMP from ( \ select max(s3uploads.datetime), CURRENT_TIMESTAMP \ from s3uploads where tablename = %s group by tablename \ union select timestamp with time zone '1970-01-01' as max, CURRENT_TIMESTAMP \ - ) a;", (table,)) + ) a;", + (table,), + ) dates = cur.fetchone() - - - if table == 'video_files': - cur.execute('select * from video_files where start_datetime > %s and start_datetime <= %s;', (dates[0], dates[1])) + if table == "video_files": + cur.execute( + "select * from video_files where start_datetime > %s and start_datetime <= %s;", + (dates[0], dates[1]), + ) else: - cur.execute('select * from '+table+' where datetime > %s and datetime <= %s;', (dates[0], dates[1])) + cur.execute( + "select * from " + table + " where datetime > %s and datetime <= %s;", + (dates[0], dates[1]), + ) now = datetime.now().astimezone(timezone.utc) partition = str(now.year) + "/" + str(now.month) + "/" + str(now.day) @@ -108,89 +129,138 @@ def DEPRECATED_s3uploader(cpool: SimpleConnectionPool, boat, ver): rows = list(cur.fetchall()) if len(rows) > 0: body = io.BytesIO() - body.write((','.join([column[0] for column in columns]) + '\n').encode()) - [body.write((','.join([csvfilter(str(value)) for value in row]) + '\n').encode()) for row in rows] - - bucket.put_object(Key="tnc_edge/"+boat+"_"+ver+"_"+table+"/"+partition+"/"+str(int(dates[1].timestamp()))+".csv", Body=body.getvalue()) - - cur.execute('insert into s3uploads (datetime, tablename) values (%s, %s)', (dates[1], table,)) + body.write((",".join([column[0] for column in columns]) + "\n").encode()) + [ + body.write( + (",".join([csvfilter(str(value)) 
for value in row]) + "\n").encode() + ) + for row in rows + ] + + bucket.put_object( + Key="tnc_edge/" + + boat + + "_" + + ver + + "_" + + table + + "/" + + partition + + "/" + + str(int(dates[1].timestamp())) + + ".csv", + Body=body.getvalue(), + ) + + cur.execute( + "insert into s3uploads (datetime, tablename) values (%s, %s)", + ( + dates[1], + table, + ), + ) conn.commit() finally: cpool.putconn(conn) -def s3psqlcopyer(cpool: SimpleConnectionPool, boat, ver): +def s3psqlcopyer(cpool: SimpleConnectionPool, boat, ver): tables = [ - 'deckhandevents', - 'gpsdata', - 'internetdata', - 'deckhandevents_mostrecentlonglineevent_jsonextracted', - 'tests', - 'video_files', - 'tracks', - 'ondeckdata', - 'aifishdata', + "deckhandevents", + "gpsdata", + "internetdata", + "deckhandevents_mostrecentlonglineevent_jsonextracted", + "tests", + "video_files", + "tracks", + "ondeckdata", + "aifishdata", ] conn: psycopg2.connection = cpool.getconn() - + try: with conn.cursor() as cur: for table in tables: - # print(table) - cur.execute("SELECT column_name FROM information_schema.columns \ - WHERE table_name = %s order by ordinal_position;", (table,)) + cur.execute( + "SELECT column_name FROM information_schema.columns \ + WHERE table_name = %s order by ordinal_position;", + (table,), + ) columns = cur.fetchall() - - cur.execute("select max(a.max), CURRENT_TIMESTAMP from ( \ + + cur.execute( + "select max(a.max), CURRENT_TIMESTAMP from ( \ select max(s3uploads.datetime), CURRENT_TIMESTAMP \ from s3uploads where tablename = %s group by tablename \ union select timestamp with time zone '1970-01-01' as max, CURRENT_TIMESTAMP \ - ) a;", (table,)) + ) a;", + (table,), + ) dates = cur.fetchone() cur.execute(f"CREATE TEMP TABLE t as SELECT * from {table} where false;") - if table == 'video_files': - cur.execute(f"insert into t (select * from video_files where start_datetime > '{dates[0]}' and start_datetime <= '{dates[1]}');") + if table == "video_files": + cur.execute( + f"insert into t (select * from video_files where start_datetime > '{dates[0]}' and start_datetime <= '{dates[1]}');" + ) else: - cur.execute(f"insert into t (select * from {table} where datetime > '{dates[0]}' and datetime <= '{dates[1]}');") - copy_sql = f'COPY t TO STDOUT WITH CSV HEADER;' + cur.execute( + f"insert into t (select * from {table} where datetime > '{dates[0]}' and datetime <= '{dates[1]}');" + ) + copy_sql = f"COPY t TO STDOUT WITH CSV HEADER;" now = datetime.now().astimezone(timezone.utc) partition = str(now.year) + "/" + str(now.month) + "/" + str(now.day) f = io.BytesIO() cur.copy_expert(copy_sql, f) f.seek(0) - f.readline() # csv header line - if len(f.readline()) > 0: # first line of data. If it exists, write to bucket + f.readline() # csv header line + if len(f.readline()) > 0: # first line of data. 
If it exists, write to bucket f.seek(0) - key = "tnc_edge/"+boat+"_"+ver+"_"+table+"/"+partition+"/"+str(int(dates[1].timestamp()))+".csv" - click.echo(f'uploading {key}') + key = ( + "tnc_edge/" + + boat + + "_" + + ver + + "_" + + table + + "/" + + partition + + "/" + + str(int(dates[1].timestamp())) + + ".csv" + ) + click.echo(f"uploading {key}") bucket.put_object(Key=key, Body=f.getvalue()) - cur.execute('insert into s3uploads (datetime, tablename) values (%s, %s)', (dates[1], table,)) - cur.execute('drop table t;') + cur.execute( + "insert into s3uploads (datetime, tablename) values (%s, %s)", + ( + dates[1], + table, + ), + ) + cur.execute("drop table t;") conn.commit() finally: cpool.putconn(conn) + @click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) -@click.option('--boatname', default=flaskconfig.get('BOAT_NAME')) -@click.option('--dbtablesversion', default=flaskconfig.get('DB_TABLES_VERSION')) -@click.option('--test', is_flag=True) +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) +@click.option("--boatname", default=flaskconfig.get("BOAT_NAME")) +@click.option("--dbtablesversion", default=flaskconfig.get("DB_TABLES_VERSION")) +@click.option("--test", is_flag=True) def main(dbname, dbuser, boatname, dbtablesversion, test): - # engine = create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) # SessionMaker = sessionmaker(engine) - # ModelBase.metadata.create_all(engine) - cpool = SimpleConnectionPool(1, 1, database=dbname, user=dbuser) - + if test: # s3psqlcopyer(cpool, boatname, dbtablesversion) @@ -210,9 +280,10 @@ def runonce(cpool, boatname, dbtablesversion): break elif n > 0: # sleep exactly the right amount of time - click.echo(f'sleeping for: {n}') + click.echo(f"sleeping for: {n}") time.sleep(n) schedule.run_pending() -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/scripts/adduser_aifish.sh b/scripts/adduser_aifish.sh index 598a9ff..da284b9 100644 --- a/scripts/adduser_aifish.sh +++ b/scripts/adduser_aifish.sh @@ -1,14 +1,13 @@ #!/bin/bash +scriptdir="$(dirname -- "$(readlink -f -- "$0")")" -scriptdir="$(dirname -- "$( readlink -f -- "$0")")" - -if [ "$UID" -lt 1000 ] ; then +if [ "$UID" -lt 1000 ]; then echo "This script should be run as a non-root user with 'sudo' access" exit 1 fi -if ! [ -e "$scriptdir/secret_adduser_aifish.txt" ] ; then +if ! [ -e "$scriptdir/secret_adduser_aifish.txt" ]; then echo "Cannot adduser without secrets file containing password" exit 1 fi @@ -46,4 +45,3 @@ EOF # on prod machines, user can only run docker commands # aifish ALL=NOPASSWD: /usr/bin/docker * - diff --git a/scripts/adduser_ondeck.sh b/scripts/adduser_ondeck.sh index da48305..3d5c47d 100644 --- a/scripts/adduser_ondeck.sh +++ b/scripts/adduser_ondeck.sh @@ -1,14 +1,13 @@ #!/bin/bash +scriptdir="$(dirname -- "$(readlink -f -- "$0")")" -scriptdir="$(dirname -- "$( readlink -f -- "$0")")" - -if [ "$UID" -lt 1000 ] ; then +if [ "$UID" -lt 1000 ]; then echo "This script should be run as a non-root user with 'sudo' access" exit 1 fi -if ! [ -e "$scriptdir/secret_adduser_ondeck.txt" ] ; then +if ! 
[ -e "$scriptdir/secret_adduser_ondeck.txt" ]; then echo "Cannot adduser without secrets file containing password" exit 1 fi @@ -43,10 +42,9 @@ EOF # ondeck ALL=NOPASSWD: /usr/bin/docker * - gapp_creds_config_line=$(sudo grep -E '^export GOOGLE_APPLICATION_CREDENTIALS=' "$USERHOME/.bashrc") -if [ $? -eq 0 ] && [ "x$gapp_creds_config_line" != "x" ] ; then +if [ $? -eq 0 ] && [ "x$gapp_creds_config_line" != "x" ]; then # eval to make this value available in this script eval "$gapp_creds_config_line" else @@ -60,8 +58,8 @@ EOF GOOGLE_APPLICATION_CREDENTIALS="$USERHOME/google_application_credentials.json" fi -if ! [ -e "$GOOGLE_APPLICATION_CREDENTIALS" ] ; then - if ! [ -e "$scriptdir/secret_ondeck_gcr_token.json" ] ; then +if ! [ -e "$GOOGLE_APPLICATION_CREDENTIALS" ]; then + if ! [ -e "$scriptdir/secret_ondeck_gcr_token.json" ]; then echo "cannot find and cannot install google app creds json file!" echo "make the creds available in this scripts dir and rerun this script" exit 1 @@ -69,5 +67,3 @@ if ! [ -e "$GOOGLE_APPLICATION_CREDENTIALS" ] ; then sudo cp "$scriptdir/secret_ondeck_gcr_token.json" "$GOOGLE_APPLICATION_CREDENTIALS" sudo chown ondeck:ondeck "$GOOGLE_APPLICATION_CREDENTIALS" fi - - diff --git a/scripts/app-install.sh b/scripts/app-install.sh index b1c62e3..12818bd 100644 --- a/scripts/app-install.sh +++ b/scripts/app-install.sh @@ -1,6 +1,6 @@ #!/bin/bash -scriptdir="$(dirname -- "$( readlink -f -- "$0")")" +scriptdir="$(dirname -- "$(readlink -f -- "$0")")" cd "$scriptdir/.." || exit @@ -10,14 +10,12 @@ WORKINGDIR="$USERHOME/tnc-edge-service" cd "$WORKINGDIR" || exit - -if ! [ -e ./venv/bin/activate ] ; then +if ! [ -e ./venv/bin/activate ]; then python3 -m venv venv fi - -if [ "$VIRTUAL_ENV" != "$(pwd)/venv" ] ; then - if [ "x$VIRTUAL_ENV" != "x" ] ; then +if [ "$VIRTUAL_ENV" != "$(pwd)/venv" ]; then + if [ "x$VIRTUAL_ENV" != "x" ]; then deactivate fi source ./venv/bin/activate @@ -25,15 +23,13 @@ fi pip install -r requirements.txt - PROD_CONF_FILE="$WORKINGDIR/config/prod.py" -if ! [ -e "$PROD_CONF_FILE" ] ; then - echo "DEBUG=False" >> "$PROD_CONF_FILE" +if ! [ -e "$PROD_CONF_FILE" ]; then + echo "DEBUG=False" >>"$PROD_CONF_FILE" fi -if ! grep -q -E "^SECRET_KEY=" "$PROD_CONF_FILE" ; then +if ! 
grep -q -E "^SECRET_KEY=" "$PROD_CONF_FILE"; then echo "creating secret_key in prod config" - echo "SECRET_KEY='$(dd if=/dev/urandom count=1 | base64 | tr -d '+/Il10O' | fold -w 32 | head -n 1)'" >> "$PROD_CONF_FILE" + echo "SECRET_KEY='$(dd if=/dev/urandom count=1 | base64 | tr -d '+/Il10O' | fold -w 32 | head -n 1)'" >>"$PROD_CONF_FILE" fi - diff --git a/scripts/box_dot_com/box_reupload.py b/scripts/box_dot_com/box_reupload.py index 94e3e4e..97666ca 100644 --- a/scripts/box_dot_com/box_reupload.py +++ b/scripts/box_dot_com/box_reupload.py @@ -1,139 +1,121 @@ -import requests - -from datetime import datetime, timezone, timedelta - -import boto3 -import click -import sys +import concurrent.futures import json import re -import concurrent.futures +import sys +from datetime import datetime, timedelta, timezone -from dateutil.parser import parse as datetimeparse, isoparse as iso8601parse +import boto3 +import click +import requests +(client_id, client_secret) = json.load(open("secret_box_creds.json")) -(client_id, client_secret) = json.load(open('secret_box_creds.json')) class Token: def __init__(self): self.token_str = None self.token_exp = datetime.now() - + def __call__(self): if self.token_str and self.token_exp > datetime.now(): return self.token_str - + resp = requests.post( - 'https://api.box.com/oauth2/token', + "https://api.box.com/oauth2/token", data={ - 'client_id': client_id, - 'client_secret': client_secret, - 'grant_type': "client_credentials", - 'box_subject_type': "enterprise", - 'box_subject_id': "994495604", - } + "client_id": client_id, + "client_secret": client_secret, + "grant_type": "client_credentials", + "box_subject_type": "enterprise", + "box_subject_id": "994495604", + }, ) if resp.status_code > 299: - click.echo(f'{resp.status_code} {resp.headers} {resp.content}') + click.echo(f"{resp.status_code} {resp.headers} {resp.content}") j = resp.json() - self.token_str = j['access_token'] - self.token_exp = datetime.now() + timedelta(seconds=j['expires_in']-200) + self.token_str = j["access_token"] + self.token_exp = datetime.now() + timedelta(seconds=j["expires_in"] - 200) return self.token_str + token = Token() -s = boto3.Session(profile_name='AWSAdministratorAccess-867800856651') +s = boto3.Session(profile_name="AWSAdministratorAccess-867800856651") try: from secret_aws_s3_creds import AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY + s = boto3.Session( - aws_access_key_id=AWS_ACCESS_KEY_ID, - aws_secret_access_key=AWS_SECRET_ACCESS_KEY, - region_name='us-east-1') + aws_access_key_id=AWS_ACCESS_KEY_ID, + aws_secret_access_key=AWS_SECRET_ACCESS_KEY, + region_name="us-east-1", + ) except: pass -s3 = s.resource('s3') +s3 = s.resource("s3") + def box_folder_get_items(folder_id, offset): - url='https://api.box.com/2.0/folders/{}/items'.format(folder_id) + url = "https://api.box.com/2.0/folders/{}/items".format(folder_id) params = {} if offset: - params['offset'] = offset - headers={"Authorization": "Bearer "+token()} + params["offset"] = offset + headers = {"Authorization": "Bearer " + token()} # print(url, params, headers) - resp = requests.get( - url, - params, - headers=headers - ) + resp = requests.get(url, params, headers=headers) if resp.status_code > 299: - click.echo(f'{resp.status_code} {resp.headers} {resp.content}') + click.echo(f"{resp.status_code} {resp.headers} {resp.content}") return resp.json() + def box_folder_upload_item(folder_id, fname, f): - url = 'https://upload.box.com/api/2.0/files/content' - - headers={"Authorization": "Bearer "+token()} - attrs = { - 
'name': fname, - 'parent': { - 'id': folder_id - } - } - fdict= { - 'attributes': (None, json.dumps(attrs)), - 'file': (fname, f), + url = "https://upload.box.com/api/2.0/files/content" + + headers = {"Authorization": "Bearer " + token()} + attrs = {"name": fname, "parent": {"id": folder_id}} + fdict = { + "attributes": (None, json.dumps(attrs)), + "file": (fname, f), } # click.echo(f'{url} {attrs} {headers}') - resp = requests.post( - url, - files=fdict, - headers=headers - ) + resp = requests.post(url, files=fdict, headers=headers) if resp.status_code > 299: - click.echo(f'{resp.status_code} {resp.headers} {resp.content}') + click.echo(f"{resp.status_code} {resp.headers} {resp.content}") # click.echo(f'{resp.status_code} {resp.headers} {resp.content}') return resp -def box_create_folder(folder_id, fname ): - url = 'https://api.box.com/2.0/folders' + +def box_create_folder(folder_id, fname): + url = "https://api.box.com/2.0/folders" # url = 'http://localhost:50001/2.0/folders' - - headers={"Authorization": "Bearer "+token(), - 'User-Agent':'python-requests/2.32.3', - } - data = { - 'name': fname, - 'parent': { - 'id': folder_id - } + + headers = { + "Authorization": "Bearer " + token(), + "User-Agent": "python-requests/2.32.3", } + data = {"name": fname, "parent": {"id": folder_id}} # click.echo(f'{url} {attrs} {headers}') - resp = requests.post( - url, - json=data, - headers=headers - ) + resp = requests.post(url, json=data, headers=headers) if resp.status_code > 299: - click.echo(f'{resp.status_code} {resp.headers} {resp.content}') + click.echo(f"{resp.status_code} {resp.headers} {resp.content}") # click.echo(f'{resp.status_code} {resp.headers} {resp.content}') return resp.json() + @click.group() def main(): pass + @main.command() def iter_box_folder_copy_to_s3(): - files_done = [] - with open('box_reupload.done', 'r') as f: + with open("box_reupload.done", "r") as f: for line in f.readlines(): r = line.strip() if len(r) > 0: files_done.append(r) - folder_ids = [ # ('220398416437', 'TNC EDGE Trip Video Files', 0), # ("220396095950", "Brancol", 0), @@ -159,42 +141,48 @@ def iter_box_folder_copy_to_s3(): files = [] - root = box_folder_get_items('0', 0) - for f in filter(lambda x: x['name'] == 'TNC EDGE Trip Video Files', root['entries']): - folder_ids.append((f['id'], f['name'], 0,)) - - with open('box_reupload.done', 'a') as new_done: + root = box_folder_get_items("0", 0) + for f in filter(lambda x: x["name"] == "TNC EDGE Trip Video Files", root["entries"]): + folder_ids.append( + ( + f["id"], + f["name"], + 0, + ) + ) + with open("box_reupload.done", "a") as new_done: while len(folder_ids) > 0: (folder_id, folder_name, offset) = folder_ids.pop(0) - if folder_name.endswith('/gps'): + if folder_name.endswith("/gps"): continue - j = box_folder_get_items(folder_id, offset) - if j['total_count'] - offset > j['limit']: + if j["total_count"] - offset > j["limit"]: print("Warning: folder {} has too many items".format(folder_name)) - folder_ids.append((folder_id, folder_name , offset+100)) - for f in filter(lambda x: x['type'] == 'folder', j['entries']): - folder_ids.append((f['id'], folder_name + "/" + f['name'], 0)) - for f in filter(lambda x: x['type'] == 'file', j['entries']): - if f['id'] in files_done: + folder_ids.append((folder_id, folder_name, offset + 100)) + for f in filter(lambda x: x["type"] == "folder", j["entries"]): + folder_ids.append((f["id"], folder_name + "/" + f["name"], 0)) + for f in filter(lambda x: x["type"] == "file", j["entries"]): + if f["id"] in files_done: continue 
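+                # files not yet listed in box_reupload.done are downloaded
+                # from box and mirrored into the dp.riskedge.fish s3 bucket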
- files.append((f['id'], folder_name + "/" + f['name'],)) - url='https://api.box.com/2.0/files/{}/content'.format(f['id']) - print(url) - resp = requests.get( - url, - headers={"Authorization": "Bearer "+token()} + files.append( + ( + f["id"], + folder_name + "/" + f["name"], + ) ) - with open('tmpfile', 'wb') as tmp: + url = "https://api.box.com/2.0/files/{}/content".format(f["id"]) + print(url) + resp = requests.get(url, headers={"Authorization": "Bearer " + token()}) + with open("tmpfile", "wb") as tmp: tmp.write(resp.content) - s3.Object('dp.riskedge.fish', folder_name + "/" + f['name']).put(Body=open('tmpfile', 'rb')) - new_done.write(f['id'] + '\n') - - + s3.Object("dp.riskedge.fish", folder_name + "/" + f["name"]).put( + Body=open("tmpfile", "rb") + ) + new_done.write(f["id"] + "\n") for i in files: print(files) @@ -203,232 +191,243 @@ def iter_box_folder_copy_to_s3(): def box_navigate_path(path, all_box_folders): """ Use the box.com api to find the folder associated with the path - + :param path: the path to find - + :param all_box_folders: a dict of box api return values. Key is box folder id. Dict is modified by method - + :return: box folder object. """ - - if not re.match('^/([\w]([\w \.-]*\w)?/)*$', path): - click.echo(f'bad path: {path}') + + if not re.match("^/([\w]([\w \.-]*\w)?/)*$", path): + click.echo(f"bad path: {path}") sys.exit(1) - - fnames = path.split('/')[1:-1] - + + fnames = path.split("/")[1:-1] + def box_get_all(folder_id): offset = 0 ret = box_folder_get_items(folder_id, offset) last_req = ret - while last_req.get('total_count') > last_req.get('limit') + offset: - offset += last_req.get('limit') + while last_req.get("total_count") > last_req.get("limit") + offset: + offset += last_req.get("limit") last_req = box_folder_get_items(folder_id, offset) - ret.get('entries').append(last_req.get('entries')) + ret.get("entries").append(last_req.get("entries")) return ret - + if 0 not in all_box_folders.keys(): - root = box_get_all('0') - root.update({'name': "", 'id': "0"}) + root = box_get_all("0") + root.update({"name": "", "id": "0"}) all_box_folders[0] = root - + curr = all_box_folders[0] for fname in fnames: - if fname in map(lambda x: x.get('name'), curr.get('entries')): - f = next(filter(lambda x: x.get('name') == fname, curr.get('entries'))) - f_id = f.get('id') + if fname in map(lambda x: x.get("name"), curr.get("entries")): + f = next(filter(lambda x: x.get("name") == fname, curr.get("entries"))) + f_id = f.get("id") else: # have to create the folder in box - resp = box_create_folder(curr.get('id'), fname) - f_id = resp.get('id') - curr.get('entries').append({'type': 'folder', 'id': f_id, 'name': fname}) + resp = box_create_folder(curr.get("id"), fname) + f_id = resp.get("id") + curr.get("entries").append({"type": "folder", "id": f_id, "name": fname}) if f_id in all_box_folders.keys(): curr = all_box_folders[f_id] else: # have to populate the folder curr = box_get_all(f_id) - curr.update({'name': fname, 'id':f_id}) + curr.update({"name": fname, "id": f_id}) all_box_folders[f_id] = curr - - return curr + return curr # python3 box_reupload.py s3-uri-to-box dp.riskedge.fish 'TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/' # python3 box_reupload.py s3-uri-to-box dp.riskedge.fish 'TNC EDGE Trip Video Files/Brancol/alt_hd_upload/' + def byte_range_gen(total_bytes, chunksize): - offset=0 + offset = 0 while offset + chunksize < total_bytes: r = str(offset) + "-" + str(offset + chunksize - 1) # print(f'chunk {r}') - yield "bytes="+r + yield "bytes=" + r offset 
+= chunksize - r = str(offset) + '-' + str(total_bytes - 1) + r = str(offset) + "-" + str(total_bytes - 1) # print(f'chunk {r} (last)') - yield "bytes="+r + yield "bytes=" + r + + +S3_CHUNK_S = int(2 * 1024 * 1024) -S3_CHUNK_S = int(2*1024*1024) @main.command() -@click.option('--dry-run', is_flag=True) -@click.option('--done-filename', default='s3_to_box.done') -@click.option('--max-workers', default=25) -@click.argument('s3_bucket') -@click.argument('s3_path_prefix') -@click.argument('box_name', default='/uncompressed-video/') +@click.option("--dry-run", is_flag=True) +@click.option("--done-filename", default="s3_to_box.done") +@click.option("--max-workers", default=25) +@click.argument("s3_bucket") +@click.argument("s3_path_prefix") +@click.argument("box_name", default="/uncompressed-video/") def s3_uri_to_box(s3_bucket, s3_path_prefix, box_name, dry_run, done_filename, max_workers): print(s3_bucket, s3_path_prefix) all_box_folders = dict() box_folder = box_navigate_path(box_name, all_box_folders) - + # box_boat_folders_req = box_folder_get_items(m[0].get('id'), 0) # box_boat_folders = list(filter(lambda f: f.get('type') == 'folder', box_boat_folders_req.get('entries'))) - already_done=[] + already_done = [] with open(done_filename) as f: already_done.extend(map(lambda s: s.strip(), f.readlines())) # print(already_done) - s3c = s.client('s3') + s3c = s.client("s3") def iter_s3(): - paginator = s3c.get_paginator('list_objects_v2') + paginator = s3c.get_paginator("list_objects_v2") for page in paginator.paginate( - Bucket=s3_bucket, - Prefix=s3_path_prefix, + Bucket=s3_bucket, + Prefix=s3_path_prefix, ): - for c in page.get('Contents'): - yield c.get('Key') - - with open(done_filename, 'a') as f: + for c in page.get("Contents"): + yield c.get("Key") + + with open(done_filename, "a") as f: with concurrent.futures.ThreadPoolExecutor(max_workers=10) as exe1: with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as exe2: + def doit(k): if k in already_done: # print('skipping') return - - k_split = k.split('/') + + k_split = k.split("/") k_path = k_split[:-1] k_fname = k_split[-1] - box_target_path = box_name + '/'.join(k_path) + "/" + box_target_path = box_name + "/".join(k_path) + "/" # print("box_target_path", box_target_path) - + if not dry_run: o = s3.Object(s3_bucket, k) - + def dl_chunk(range): # print(f"dl chunk {range} start") resp = o.get(Range=range) - body = resp.get('Body') + body = resp.get("Body") b = body.read() # print(f"dl chunk {range} done") return b - + bs = exe2.map(dl_chunk, byte_range_gen(o.content_length, S3_CHUNK_S)) - b = b''.join(bs) + b = b"".join(bs) # resp = o.get() # with resp.get('Body') as streamingBytes: - # b = streamingBytes.read() - click.echo(f'downloaded {k}') + # b = streamingBytes.read() + click.echo(f"downloaded {k}") box_target_f = box_navigate_path(box_target_path, all_box_folders) - resp = box_folder_upload_item(box_target_f.get('id'), k_fname, b) + resp = box_folder_upload_item(box_target_f.get("id"), k_fname, b) if resp.status_code < 400: - click.echo(f'uploaded {k}') + click.echo(f"uploaded {k}") f.write(k + "\n") else: - click.echo('failed to upload') + click.echo("failed to upload") # print("all s3") # print(list(iter_s3())) results = list(exe1.map(doit, iter_s3())) - print('done') + print("done") @main.command() # @click.argument() def list_box(): - root = box_folder_get_items('0', 0) + root = box_folder_get_items("0", 0) print(root) @main.command() -@click.option('--dry-run', is_flag=True) -@click.option('--max-workers', 
default=25) -@click.option('--done-filename', default='box_reupload2.done') -@click.argument('s3_bucket', default='dp.riskedge.fish') -@click.argument('s3_path_prefix', default='TNC EDGE Trip Video Files/Brancol/alt_hd_upload/') -@click.argument('box_name', default='uncompressed-video') +@click.option("--dry-run", is_flag=True) +@click.option("--max-workers", default=25) +@click.option("--done-filename", default="box_reupload2.done") +@click.argument("s3_bucket", default="dp.riskedge.fish") +@click.argument("s3_path_prefix", default="TNC EDGE Trip Video Files/Brancol/alt_hd_upload/") +@click.argument("box_name", default="uncompressed-video") def hq_s3_to_box(dry_run, s3_bucket, s3_path_prefix, box_name, max_workers, done_filename): - root = box_folder_get_items('0', 0) - m = list(filter(lambda f: f.get('name') == box_name and f.get('type') == 'folder', root.get('entries'))) + root = box_folder_get_items("0", 0) + m = list( + filter( + lambda f: f.get("name") == box_name and f.get("type") == "folder", root.get("entries") + ) + ) if len(m) < 1: - click.echo('box folder not found') + click.echo("box folder not found") sys.exit(1) if len(m) > 1: - click.echo('too many box folders with name') + click.echo("too many box folders with name") sys.exit(1) - - box_boat_folders_req = box_folder_get_items(m[0].get('id'), 0) - box_boat_folders = list(filter(lambda f: f.get('type') == 'folder', box_boat_folders_req.get('entries'))) - already_done=[] + box_boat_folders_req = box_folder_get_items(m[0].get("id"), 0) + box_boat_folders = list( + filter(lambda f: f.get("type") == "folder", box_boat_folders_req.get("entries")) + ) + + already_done = [] with open(done_filename) as f: already_done.extend(map(lambda s: s.strip(), f.readlines())) # print(already_done) - s3c = s.client('s3') + s3c = s.client("s3") def iter_s3(): - paginator = s3c.get_paginator('list_objects_v2') + paginator = s3c.get_paginator("list_objects_v2") for page in paginator.paginate( - Bucket=s3_bucket, - Prefix=s3_path_prefix, + Bucket=s3_bucket, + Prefix=s3_path_prefix, ): - for c in page.get('Contents'): - yield c.get('Key') - - with open(done_filename, 'a') as f: + for c in page.get("Contents"): + yield c.get("Key") + + with open(done_filename, "a") as f: + def doit(k): if k in already_done: # print('skipping') return - ksplit= k.split('/') + ksplit = k.split("/") boat = ksplit[1] fname = ksplit[3] - m = list(filter(lambda f: f.get('name') == boat , box_boat_folders)) + m = list(filter(lambda f: f.get("name") == boat, box_boat_folders)) if len(m) < 1: click.echo(f"boat folder not found {boat}") return - box_boat_id = m[0].get('id') + box_boat_id = m[0].get("id") - m = re.match('^(\d+T\d+Z_cam[12].avi)(.done)?$', fname) + m = re.match("^(\d+T\d+Z_cam[12].avi)(.done)?$", fname) if m: fname = m[1] else: - m = re.match('^(\d+T\d+Z_cam[12])_reenc.mkv$', fname) + m = re.match("^(\d+T\d+Z_cam[12])_reenc.mkv$", fname) if m: - fname = m[1] + '.mkv' + fname = m[1] + ".mkv" else: - m = re.match('^(cam[12])_(\d\d-\d\d-\d\d\d\d-\d\d-\d\d).avi(.done)?$', fname) + m = re.match("^(cam[12])_(\d\d-\d\d-\d\d\d\d-\d\d-\d\d).avi(.done)?$", fname) if m: - dt = datetime.strptime(m[2], '%d-%m-%Y-%H-%M') + dt = datetime.strptime(m[2], "%d-%m-%Y-%H-%M") dt = dt.replace(tzinfo=timezone.utc) - dt_str = dt.isoformat().replace('-', '').replace(':', '').replace('+0000', 'Z') - fname = dt_str + "_" + m[1] + '.avi' + dt_str = ( + dt.isoformat().replace("-", "").replace(":", "").replace("+0000", "Z") + ) + fname = dt_str + "_" + m[1] + ".avi" print(k + "\n" + fname) 
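+                        # legacy cam[12]_DD-MM-YYYY-HH-MM.avi names are
+                        # rewritten to the <date>T<time>Z_cam[12].avi
+                        # convention matched above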
else: - - click.echo(f'no match for fname {fname}') + click.echo(f"no match for fname {fname}") sys.exit(1) if fname in already_done: @@ -436,204 +435,190 @@ def doit(k): return if not dry_run: resp = s3.Object(s3_bucket, k).get() - streamingBytes = resp.get('Body') + streamingBytes = resp.get("Body") b = streamingBytes.read() - click.echo(f'downloaded {fname}') + click.echo(f"downloaded {fname}") resp = box_folder_upload_item(box_boat_id, fname, b) if resp.status_code < 400: - click.echo(f'uploaded {fname}') + click.echo(f"uploaded {fname}") f.write(k + "\n") else: - click.echo('failed to upload') + click.echo("failed to upload") with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor: results = list(executor.map(doit, iter_s3())) - print('done') - - + print("done") + @main.command() -@click.argument('s3_bucket', default='dp.riskedge.fish') -@click.argument('s3_path_prefix', default='TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/') -def list_s3(s3_bucket, s3_path_prefix ): - - s3c = s.client('s3') +@click.argument("s3_bucket", default="dp.riskedge.fish") +@click.argument("s3_path_prefix", default="TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/") +def list_s3(s3_bucket, s3_path_prefix): + s3c = s.client("s3") def iter_s3(): - paginator = s3c.get_paginator('list_objects_v2') + paginator = s3c.get_paginator("list_objects_v2") for page in paginator.paginate( - Bucket=s3_bucket, - Prefix=s3_path_prefix, + Bucket=s3_bucket, + Prefix=s3_path_prefix, ): - for c in page.get('Contents'): - yield c.get('Key') - + for c in page.get("Contents"): + yield c.get("Key") + for k in iter_s3(): - ksplit= k.split('/') + ksplit = k.split("/") fname = ksplit[-1] - m = re.match('^(\d+T\d+Z_cam[12].avi)(.done)?$', fname) + m = re.match("^(\d+T\d+Z_cam[12].avi)(.done)?$", fname) if m: fname = m[1] else: - m = re.match('^(\d+T\d+Z_cam[12])_reenc.mkv$', fname) + m = re.match("^(\d+T\d+Z_cam[12])_reenc.mkv$", fname) if m: - fname = m[1] + '.mkv' + fname = m[1] + ".mkv" else: - m = re.match('^(cam[12])_(\d\d-\d\d-\d\d\d\d-\d\d-\d\d).avi(.done)?$', fname) + m = re.match("^(cam[12])_(\d\d-\d\d-\d\d\d\d-\d\d-\d\d).avi(.done)?$", fname) if m: - dt = datetime.strptime(m[2], '%d-%m-%Y-%H-%M') + dt = datetime.strptime(m[2], "%d-%m-%Y-%H-%M") dt = dt.replace(tzinfo=timezone.utc) - dt_str = dt.isoformat().replace('-', '').replace(':', '').replace('+0000', 'Z') - fname = dt_str + "_" + m[1] + '.avi' + dt_str = dt.isoformat().replace("-", "").replace(":", "").replace("+0000", "Z") + fname = dt_str + "_" + m[1] + ".avi" # print(k + "\n" + fname) else: - - click.echo(f'no match for fname {fname}') + click.echo(f"no match for fname {fname}") sys.exit(1) print(fname) + def list_box_fid(box_folder_id, recurse): - all_box=[] - + all_box = [] + offset = 0 res = box_folder_get_items(box_folder_id, offset) - all_box.extend(res.get('entries')) + all_box.extend(res.get("entries")) # print(res) - - while res.get('total_count') > len(res.get('entries')) + res.get('offset'): + + while res.get("total_count") > len(res.get("entries")) + res.get("offset"): offset += 100 res = box_folder_get_items(box_folder_id, offset) - all_box.extend(res.get('entries')) + all_box.extend(res.get("entries")) if recurse: - for f in filter(lambda x: x.get('type') == 'folder', all_box): - all_box.extend(list_box_fid(f.get('id'), recurse)) - + for f in filter(lambda x: x.get("type") == "folder", all_box): + all_box.extend(list_box_fid(f.get("id"), recurse)) + return all_box - + @main.command() -@click.argument('box_name', 
default='uncompressed-video/Saint Patrick') -@click.option('-r', 'recurse', is_flag=True) +@click.argument("box_name", default="uncompressed-video/Saint Patrick") +@click.option("-r", "recurse", is_flag=True) def list_box(box_name, recurse): # box_name = 'uncompressed-video' - box_folder_id = '0' - for fname in box_name.split('/'): + box_folder_id = "0" + for fname in box_name.split("/"): res = box_folder_get_items(box_folder_id, 0) - m = list(filter(lambda f: f.get('name') == fname and f.get('type') == 'folder', res.get('entries'))) + m = list( + filter( + lambda f: f.get("name") == fname and f.get("type") == "folder", res.get("entries") + ) + ) if len(m) < 1: - click.echo(f'box folder not found {fname}') + click.echo(f"box folder not found {fname}") sys.exit(1) if len(m) > 1: - click.echo(f'too many box folders with name {fname}') + click.echo(f"too many box folders with name {fname}") sys.exit(1) - box_folder_id = m[0].get('id') - + box_folder_id = m[0].get("id") + all_box = list_box_fid(box_folder_id, recurse) for f in all_box: - print(f.get('name')) - + print(f.get("name")) + @main.command() -@click.argument('box_name', default='uncompressed-video/Saint Patrick') +@click.argument("box_name", default="uncompressed-video/Saint Patrick") def move_box(box_name): # box_name = 'uncompressed-video' - box_folder_id = '0' - for fname in box_name.split('/'): + box_folder_id = "0" + for fname in box_name.split("/"): res = box_folder_get_items(box_folder_id, 0) - m = list(filter(lambda f: f.get('name') == fname and f.get('type') == 'folder', res.get('entries'))) + m = list( + filter( + lambda f: f.get("name") == fname and f.get("type") == "folder", res.get("entries") + ) + ) if len(m) < 1: - click.echo(f'box folder not found {fname}') + click.echo(f"box folder not found {fname}") sys.exit(1) if len(m) > 1: - click.echo(f'too many box folders with name {fname}') + click.echo(f"too many box folders with name {fname}") sys.exit(1) - box_folder_id = m[0].get('id') - + box_folder_id = m[0].get("id") + def iter_files(): offset = 0 res = box_folder_get_items(box_folder_id, offset) - for entry in res.get('entries'): + for entry in res.get("entries"): yield entry - - while res.get('total_count') > len(res.get('entries')) + res.get('offset'): + + while res.get("total_count") > len(res.get("entries")) + res.get("offset"): offset += 100 res = box_folder_get_items(box_folder_id, offset) - for entry in res.get('entries'): + for entry in res.get("entries"): yield entry dayfolders = {} filestomove = [] - - for f in iter_files(): - fname = f.get('name') + fname = f.get("name") ftype = f.get("type") - if ftype == 'file': + if ftype == "file": filestomove.append(f) - elif ftype == 'folder': + elif ftype == "folder": try: - day_str = fname.split('T')[0] + day_str = fname.split("T")[0] if day_str not in dayfolders.keys(): dayfolders.update({day_str: f}) except ValueError as e: - click.echo(f'unknownfolder {fname}') + click.echo(f"unknownfolder {fname}") else: - click.echo(f'unknowntype {ftype} on {fname}') + click.echo(f"unknowntype {ftype} on {fname}") # print(dayfolders) # print(filestomove[0:2]) def add_day_folder(parent_id, foldername): - url= 'https://api.box.com/2.0/folders' - j = { - "name": foldername, - "parent": { - "id": parent_id - } - } + url = "https://api.box.com/2.0/folders" + j = {"name": foldername, "parent": {"id": parent_id}} print("new folder", url, j) - resp = requests.post( - url, - headers={"Authorization": "Bearer "+token()}, - json=j - ) + resp = requests.post(url, headers={"Authorization": 
"Bearer " + token()}, json=j) return resp.json() - + def move_file_to_folder(box_file_id, box_parent_id): - url= f'https://api.box.com/2.0/files/{box_file_id}' - j = { - "parent": { - "id": box_parent_id - } - } + url = f"https://api.box.com/2.0/files/{box_file_id}" + j = {"parent": {"id": box_parent_id}} print("moving file", url, j) # sys.exit(1) - resp = requests.put( - url, - headers={"Authorization": "Bearer "+token()}, - json=j - ) + resp = requests.put(url, headers={"Authorization": "Bearer " + token()}, json=j) return resp.json() for f in filestomove: - fname = f.get('name') + fname = f.get("name") try: - day_str = fname.split('T')[0] + day_str = fname.split("T")[0] if day_str not in dayfolders.keys(): resp = add_day_folder(box_folder_id, day_str) dayfolders.update({day_str: resp}) - + parent = dayfolders.get(day_str) - move_file_to_folder(f.get('id'), parent.get('id')) + move_file_to_folder(f.get("id"), parent.get("id")) except ValueError as e: - click.echo(f'unparsable filedate, cannot move {fname}') - + click.echo(f"unparsable filedate, cannot move {fname}") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/scripts/box_dot_com/box_reupload.sh b/scripts/box_dot_com/box_reupload.sh index 978e5cf..b6cf780 100644 --- a/scripts/box_dot_com/box_reupload.sh +++ b/scripts/box_dot_com/box_reupload.sh @@ -2,7 +2,7 @@ LEN="$(wc -l box_reupload2.done | awk '{print $1}')" -while [ "$LEN" -lt 3497 ] ; do +while [ "$LEN" -lt 3497 ]; do python3 box_reupload.py hq-s3-to-box sleep 1 LEN="$(wc -l box_reupload2.done | awk '{print $1}')" diff --git a/scripts/box_dot_com/box_reupload2.sh b/scripts/box_dot_com/box_reupload2.sh index 906d63f..05df132 100644 --- a/scripts/box_dot_com/box_reupload2.sh +++ b/scripts/box_dot_com/box_reupload2.sh @@ -2,8 +2,8 @@ LEN="$(wc -l box_reupload3.done | awk '{print $1}')" -while [ "$LEN" -lt 4837 ] ; do - python3 box_reupload.py hq-s3-to-box --done-filename box_reupload3.done dp.riskedge.fish 'TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/' +while [ "$LEN" -lt 4837 ]; do + python3 box_reupload.py hq-s3-to-box --done-filename box_reupload3.done dp.riskedge.fish 'TNC EDGE Trip Video Files/Saint Patrick/alt_hd_upload/' sleep 1 LEN="$(wc -l box_reupload3.done | awk '{print $1}')" echo "restarting, $LEN" diff --git a/scripts/box_dot_com/boxapiexamples.sh b/scripts/box_dot_com/boxapiexamples.sh index 7a83ac0..100c524 100644 --- a/scripts/box_dot_com/boxapiexamples.sh +++ b/scripts/box_dot_com/boxapiexamples.sh @@ -15,7 +15,7 @@ curl -i -X POST "https://api.box.com/oauth2/token" \ -d "client_id=$CLIENT_ID" \ -d "client_secret=$CLIENT_SECRET" \ -d "grant_type=client_credentials" \ - -d "box_subject_type=enterprise" \ + -d "box_subject_type=enterprise" \ -d "box_subject_id=15290560022" curl -i -X POST "https://api.box.com/oauth2/token" \ @@ -23,7 +23,7 @@ curl -i -X POST "https://api.box.com/oauth2/token" \ -d "client_id=$CLIENT_ID" \ -d "client_secret=$CLIENT_SECRET" \ -d "grant_type=client_credentials" \ - -d "box_subject_type=enterprise" \ + -d "box_subject_type=enterprise" \ -d "box_subject_id=994495604" curl -i -X POST "https://api.box.com/oauth2/token" \ @@ -31,8 +31,7 @@ curl -i -X POST "https://api.box.com/oauth2/token" \ -d "client_id=$CLIENT_ID" \ -d "client_secret=$CLIENT_SECRET" \ -d "grant_type=client_credentials" \ - -d "box_subject_type=user" \ + -d "box_subject_type=user" \ -d "box_subject_id=15290560022" - curl -v -H 'Authorization: Bearer wYjFjTpWbjNVpnQmADaMnjU9vNJECoXF' 'https://api.box.com/2.0/folders/231635673007/items' 
diff --git a/scripts/gh_setup.sh b/scripts/gh_setup.sh index a9f9bfe..ce6ffa0 100644 --- a/scripts/gh_setup.sh +++ b/scripts/gh_setup.sh @@ -1,4 +1,4 @@ - +#!/bin/bash # visit https://github.com/productOps/tnc-edge-service/settings/actions/runners/new?arch=arm64&os=linux # replace this entire script with the contents of that webpage! @@ -6,16 +6,19 @@ # don't commit the secret token to this file! - # Create a folder - mkdir actions-runner - cd actions-runner +# Create a folder +mkdir actions-runner +cd actions-runner + # Download the latest runner package - curl -o actions-runner-linux-arm64-2.304.0.tar.gz -L https://github.com/actions/runner/releases/download/v2.304.0/actions-runner-linux-arm64-2.304.0.tar.gz +curl -o actions-runner-linux-arm64-2.304.0.tar.gz -L https://github.com/actions/runner/releases/download/v2.304.0/actions-runner-linux-arm64-2.304.0.tar.gz + # Optional: Validate the hash - echo "34c49bd0e294abce6e4a073627ed60dc2f31eee970c13d389b704697724b31c6 actions-runner-linux-arm64-2.304.0.tar.gz" | shasum -a 256 -c +echo "34c49bd0e294abce6e4a073627ed60dc2f31eee970c13d389b704697724b31c6 actions-runner-linux-arm64-2.304.0.tar.gz" | shasum -a 256 -c + # Extract the installer - tar xzf ./actions-runner-linux-arm64-2.304.0.tar.gz -#Configure -# Create the runner and start the configuration experience - ./config.sh --url https://github.com/productOps/tnc-edge-service --token XXXXXXXX +tar xzf ./actions-runner-linux-arm64-2.304.0.tar.gz +# Configure +# Create the runner and start the configuration experience +./config.sh --url https://github.com/productOps/tnc-edge-service --token XXXXXXXX diff --git a/scripts/netplan-autoswitcher.sh b/scripts/netplan-autoswitcher.sh index e47d2ff..99b8814 100644 --- a/scripts/netplan-autoswitcher.sh +++ b/scripts/netplan-autoswitcher.sh @@ -2,37 +2,33 @@ # netplan auto switcher! - -if [ "$UID" -gt 0 ] ; then +if [ "$UID" -gt 0 ]; then echo "this script must be run as root" exit 1 fi - - FOUND="" -for file in /etc/netplan/01_eth0_dhcp.yaml* ; do - if [ -e "$file" ] ; then +for file in /etc/netplan/01_eth0_dhcp.yaml*; do + if [ -e "$file" ]; then FOUND="y" fi done -if ! [ "$FOUND" ] ; then +if ! [ "$FOUND" ]; then echo "could not find /etc/netplan/01_eth0_dhcp.yaml*" exit 1 fi FOUND="" -for file in /etc/netplan/01_eth0_static.yaml* ; do - if [ -e "$file" ] ; then +for file in /etc/netplan/01_eth0_static.yaml*; do + if [ -e "$file" ]; then FOUND="y" fi done -if ! [ "$FOUND" ] ; then +if ! [ "$FOUND" ]; then echo "could not find /etc/netplan/01_eth0_static.yaml*" exit 1 fi - function switch_to_dhcp { echo "switching netplan to dhcp" mv /etc/netplan/01_eth0_dhcp.yaml* /etc/netplan/01_eth0_dhcp.yaml @@ -49,23 +45,22 @@ function switch_to_static { systemctl try-restart openvpn-client@tnc-edge.service github-actions-runner.service } - -if grep -q "method=manual" /run/NetworkManager/system-connections/netplan-eth0.nmconnection ; then - ROUTE="$(grep -e "route.*=0.0.0.0/0," /run/NetworkManager/system-connections/netplan-eth0.nmconnection )" +if grep -q "method=manual" /run/NetworkManager/system-connections/netplan-eth0.nmconnection; then + ROUTE="$(grep -e "route.*=0.0.0.0/0," /run/NetworkManager/system-connections/netplan-eth0.nmconnection)" GATEWAYIP="${ROUTE##*,}" - if ! ping "$GATEWAYIP" -c 3 >/dev/null 2>&1 ; then + if ! 
ping "$GATEWAYIP" -c 3 >/dev/null 2>&1; then switch_to_dhcp exit 0 fi - -elif grep -q "method=auto" /run/NetworkManager/system-connections/netplan-eth0.nmconnection ; then + +elif grep -q "method=auto" /run/NetworkManager/system-connections/netplan-eth0.nmconnection; then GATEWAYIP="$(nmcli d show eth0 | grep IP4.GATEWAY | awk '{print $2;}')" STATICGWIP="$(grep "via:" /etc/netplan/01_eth0_static.yaml* | awk '{print $2;}')" - - if [ "$GATEWAYIP" == "$STATICGWIP" ] ; then - if ping "$STATICGWIP" -c 3 >/dev/null 2>&1 ; then - if ping "api.oceanbox2.com" -c 3 >/dev/null 2>&1 ; then + + if [ "$GATEWAYIP" == "$STATICGWIP" ]; then + if ping "$STATICGWIP" -c 3 >/dev/null 2>&1; then + if ping "api.oceanbox2.com" -c 3 >/dev/null 2>&1; then switch_to_static exit 0 fi @@ -76,4 +71,3 @@ else echo "something is wrong with the NetworkManager config that netplan generated" exit 1 fi - diff --git a/scripts/purge-video.sh b/scripts/purge-video.sh index fc18710..71e8ec8 100644 --- a/scripts/purge-video.sh +++ b/scripts/purge-video.sh @@ -1,9 +1,9 @@ #!/bin/bash -while [ "$(du -s /videos/*.avi | awk '{total += $1 }; END { print total}')" -gt 50000000 ] ; do +while [ "$(du -s /videos/*.avi | awk '{total += $1 }; END { print total}')" -gt 50000000 ]; do ls -tr /videos/*.avi | head | xargs rm done -while [ "$(du -s /videos/*_reenc.mkv | awk '{total += $1 }; END {print total}')" -gt 150000000 ] ; do +while [ "$(du -s /videos/*_reenc.mkv | awk '{total += $1 }; END {print total}')" -gt 150000000 ]; do ls -tr /videos/*_reenc.mkv | head | xargs rm done diff --git a/scripts/system-install.sh b/scripts/system-install.sh index 208e011..0fce722 100644 --- a/scripts/system-install.sh +++ b/scripts/system-install.sh @@ -1,19 +1,19 @@ #!/bin/bash SCRIPTNAME="$0" -scriptdir="$(dirname -- "$( readlink -f -- "$0")")" +scriptdir="$(dirname -- "$(readlink -f -- "$0")")" USERNAME="$(whoami)" USERHOME="/home/$USERNAME" cd "$USERHOME" || exit -if [ "$UID" -lt 1000 ] ; then +if [ "$UID" -lt 1000 ]; then echo "This script should be run as a non-root user with 'sudo' access" exit 1 fi -if [ "$ENVIRONMENT" == "" ] || ! [ -e "$ENVIRONMENT" ] ; then +if [ "$ENVIRONMENT" == "" ] || ! [ -e "$ENVIRONMENT" ]; then echo "No ENVIRONMENT specified. Please add an export ENVIRONMENT line to .bashrc and restart" exit 1 fi @@ -23,42 +23,42 @@ function help { exit 1 } -while (( "$#" )); do - case $1 in - --do-github) - DO_GITHUB="y" - ;; - --do-copy-numpy) - DO_COPY_PY_PANDAS_TO_VENV="y" - ;; - --do-ondeck) - DO_ONDECK="y" - ;; - --do-aifish) - DO_AIFISH="y" - ;; - *) - help - ;; - esac - shift +while (("$#")); do + case $1 in + --do-github) + DO_GITHUB="y" + ;; + --do-copy-numpy) + DO_COPY_PY_PANDAS_TO_VENV="y" + ;; + --do-ondeck) + DO_ONDECK="y" + ;; + --do-aifish) + DO_AIFISH="y" + ;; + *) + help + ;; + esac + shift done -if ! which iftop ; then sudo apt -y install iftop ; fi -if ! which traceroute ; then sudo apt -y install traceroute ; fi -if ! which jq ; then sudo apt -y install jq ; fi -if ! which curl ; then sudo apt -y install curl ; fi -if ! which mount.cifs ; then sudo apt -y install cifs-utils ; fi -if ! dpkg -s python3-pip | grep "Status: install ok installed" ; then sudo apt -y install python3-pip ; fi -if ! dpkg -s python3-venv | grep "Status: install ok installed" ; then sudo apt -y install python3-venv ; fi -if ! dpkg -s python3-dev | grep "Status: install ok installed" ; then sudo apt -y install python3-dev ; fi -if ! which netplan ; then sudo apt -y install netplan.io ; fi -if ! 
which rsync ; then sudo apt -y install rsync ; fi -if ! which tmux ; then sudo apt -y install tmux ; fi -if ! which parallel ; then sudo apt -y install parallel ; fi -if ! which par2 ; then sudo apt -y install par2 ; fi -if ! which nmap ; then sudo apt -y install nmap ; fi -if ! which at ; then sudo apt -y install at ; fi +if ! which iftop; then sudo apt -y install iftop; fi +if ! which traceroute; then sudo apt -y install traceroute; fi +if ! which jq; then sudo apt -y install jq; fi +if ! which curl; then sudo apt -y install curl; fi +if ! which mount.cifs; then sudo apt -y install cifs-utils; fi +if ! dpkg -s python3-pip | grep "Status: install ok installed"; then sudo apt -y install python3-pip; fi +if ! dpkg -s python3-venv | grep "Status: install ok installed"; then sudo apt -y install python3-venv; fi +if ! dpkg -s python3-dev | grep "Status: install ok installed"; then sudo apt -y install python3-dev; fi +if ! which netplan; then sudo apt -y install netplan.io; fi +if ! which rsync; then sudo apt -y install rsync; fi +if ! which tmux; then sudo apt -y install tmux; fi +if ! which parallel; then sudo apt -y install parallel; fi +if ! which par2; then sudo apt -y install par2; fi +if ! which nmap; then sudo apt -y install nmap; fi +if ! which at; then sudo apt -y install at; fi WRITE_RTC_UDEV_RULE=0 @@ -66,72 +66,67 @@ RTC_UDEV_RULE_FILE="/etc/udev/rules.d/60-rtc-custom.rules" # RTC_UDEV_RULE_FILE="arst.txt" RTC_UDEV_RULE_STR='ACTION=="add", SUBSYSTEM=="rtc", ATTRS{hctosys}=="0", RUN+="/usr/sbin/hwclock -s --utc"' -if [ -e "$RTC_UDEV_RULE_FILE" ] ; then - if grep -q "$RTC_UDEV_RULE_STR" "$RTC_UDEV_RULE_FILE" ; then +if [ -e "$RTC_UDEV_RULE_FILE" ]; then + if grep -q "$RTC_UDEV_RULE_STR" "$RTC_UDEV_RULE_FILE"; then # no need to write udev rule WRITE_RTC_UDEV_RULE=1 fi fi - -if [ $WRITE_RTC_UDEV_RULE ] ; then - sudo /bin/bash < $RTC_UDEV_RULE_FILE echo '$RTC_UDEV_RULE_STR' >> $RTC_UDEV_RULE_FILE EOF fi - -if journalctl -u systemd-timesyncd.service | tail -n 1 | grep -q -E 'synchroniz.*ntp\.org' ; then - echo "synchronization with online ntp server looks good." - echo "Running hwclock to set hw time and update drift" - sudo /bin/bash <' /etc/nvpmodel.conf ; then - echo "setting new default power level" +if ! grep -e '^< PM_CONFIG DEFAULT='"$NEW_PM_ID"' >' /etc/nvpmodel.conf; then + echo "setting new default power level" sudo sed -i"" 's/^< PM_CONFIG DEFAULT=.* >/< PM_CONFIG DEFAULT='"$NEW_PM_ID"' >/' /etc/nvpmodel.conf fi -if ! ( sudo nvpmodel -q | grep -e '^'"$NEW_PM_ID"'$' ) ; then - echo "setting new power level" +if ! (sudo nvpmodel -q | grep -e '^'"$NEW_PM_ID"'$'); then + echo "setting new power level" sudo nvpmodel -m "$NEW_PM_ID" fi - -if ! (hostname | grep -e '^edge[a-z0-9][a-z0-9]*$' ) ; then +if ! (hostname | grep -e '^edge[a-z0-9][a-z0-9]*$'); then echo "set the hostname to 'edgeX'!" echo "be sure to use the command 'sudo hostnamectl set-hostname '" exit 1 fi -if ! grep -E "^127\.[0-9\.]*\s*$(hostname)" /etc/hosts ; then - if ! grep -E "^127\.[0-9\.]*\s*ubuntu$" /etc/hosts ; then +if ! grep -E "^127\.[0-9\.]*\s*$(hostname)" /etc/hosts; then + if ! grep -E "^127\.[0-9\.]*\s*ubuntu$" /etc/hosts; then echo "aah I assumed the old hostname was 'ubuntu', but it's not in /etc/hosts! exiting!" exit 1 fi sudo sed -i"" 's/^127\.\([0-9\.\t ]*\)ubuntu.*$/127.\1'"$(hostname)"'/' /etc/hosts fi - NVFANCONTROL_FILE=/etc/nvfancontrol.conf # NVFANCONTROL_FILE=arst.txt -if [ -e "$NVFANCONTROL_FILE" ] ; then - if ! 
grep -q -E "FAN_DEFAULT_PROFILE\s*cool" "$NVFANCONTROL_FILE" ; then +if [ -e "$NVFANCONTROL_FILE" ]; then + if ! grep -q -E "FAN_DEFAULT_PROFILE\s*cool" "$NVFANCONTROL_FILE"; then sudo /bin/bash < ./github-actions-runner.service << EOF + if ! [ -e "/etc/systemd/system/github-actions-runner.service" ]; then + cat >./github-actions-runner.service < "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/tnc-edge-http.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "tnc-edge-http.service" fi rm "$TMP_FILE" - -if ! systemctl status postgresql ; then +if ! systemctl status postgresql; then sudo apt -y install postgresql fi -if [ -z "$(find /usr/include/ -name libpq-fe.h)" ] ; then +if [ -z "$(find /usr/include/ -name libpq-fe.h)" ]; then sudo apt -y install libpq-dev fi -if ! systemctl is-enabled postgresql ; then - sudo systemctl daemon-reload +if ! systemctl is-enabled postgresql; then + sudo systemctl daemon-reload sudo systemctl enable postgresql fi -if ! systemctl is-active postgresql ; then +if ! systemctl is-active postgresql; then sudo systemctl start postgresql sleep 2 - if ! systemctl is-active postgresql ; then - echo "fatal error with postgresql server" - echo "fix and rerun this script" - exit 1 + if ! systemctl is-active postgresql; then + echo "fatal error with postgresql server" + echo "fix and rerun this script" + exit 1 fi fi -if ! ( echo "select 1;" | psql postgres ) ; then +if ! (echo "select 1;" | psql postgres); then sudo -u postgres psql <> "$USERHOME/.ssh/authorized_keys" + if ! grep -q "$k" "$USERHOME/.ssh/authorized_keys"; then + echo "$k" >>"$USERHOME/.ssh/authorized_keys" fi done <"$scriptdir"/edge_authorized_keys.txt - - # turn off Ubuntu screen off events gsettings set org.gnome.desktop.session idle-delay 0 gsettings set org.gnome.desktop.screensaver lock-enabled false gsettings set org.gnome.desktop.screensaver ubuntu-lock-on-suspend false - # turn off Ubuntu auto apt updates sudo sed -i"" -e 's/^APT::Periodic::Update-Package-Lists "\?1"\?;/APT::Periodic::Update-Package-Lists "0";/' /etc/apt/apt.conf.d/10periodic sudo sed -i"" -e 's/^APT::Periodic::Download-Upgradeable-Packages "\?1"\?;/APT::Periodic::Download-Upgradeable-Packages "0";/' /etc/apt/apt.conf.d/10periodic @@ -325,14 +312,14 @@ sudo systemctl stop fwupd sudo systemctl disable fwupd # disable internet-connectivity polls -if ! [ -e /etc/NetworkManager/conf.d/20-connectivity-ubuntu.conf ] ; then - # writing to this file overwrites default internet checking behavior. +if ! [ -e /etc/NetworkManager/conf.d/20-connectivity-ubuntu.conf ]; then + # writing to this file overwrites default internet checking behavior. # Empty file means no internet polling # see https://askubuntu.com/a/1094558 sudo touch /etc/NetworkManager/conf.d/20-connectivity-ubuntu.conf fi -if ! which docker-credential-gcr ; then +if ! which docker-credential-gcr; then # rm ./docker-credential-gcr ./docker-credential-gcr.tar.gz # curl -L 'https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v2.1.8/docker-credential-gcr_linux_arm64-2.1.8.tar.gz' -o docker-credential-gcr.tar.gz # tar xzf docker-credential-gcr.tar.gz @@ -341,39 +328,38 @@ if ! which docker-credential-gcr ; then # actually, I'm going to copy the script from google's docs: VERSION=2.1.8 - OS=linux # or "darwin" for OSX, "windows" for Windows. + OS=linux # or "darwin" for OSX, "windows" for Windows. 
# shellcheck disable=SC2268 - if [ "x$(uname -p)" == 'xaarch64' ] ; then - ARCH="arm64" # or "386" for 32-bit OSs - elif [ "x$(uname -p)" == 'xx86_64' ] ; then - ARCH="amd64" # or "386" for 32-bit OSs + if [ "x$(uname -p)" == 'xaarch64' ]; then + ARCH="arm64" # or "386" for 32-bit OSs + elif [ "x$(uname -p)" == 'xx86_64' ]; then + ARCH="amd64" # or "386" for 32-bit OSs else echo "unknown system architecture" exit 1 fi - curl -fsSL "https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v${VERSION}/docker-credential-gcr_${OS}_${ARCH}-${VERSION}.tar.gz" \ - | tar xz docker-credential-gcr \ - && chmod +x docker-credential-gcr \ - && sudo mv docker-credential-gcr /usr/local/bin/ + curl -fsSL "https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v${VERSION}/docker-credential-gcr_${OS}_${ARCH}-${VERSION}.tar.gz" | + tar xz docker-credential-gcr && + chmod +x docker-credential-gcr && + sudo mv docker-credential-gcr /usr/local/bin/ fi -if ! [ -e "$USERHOME/.config/gcloud/docker_credential_gcr_config.json" ] ; then +if ! [ -e "$USERHOME/.config/gcloud/docker_credential_gcr_config.json" ]; then docker-credential-gcr config --token-source="env, store" fi -if ! grep -E '^export GOOGLE_APPLICATION_CREDENTIALS=' "$USERHOME/.bashrc" ; then - echo "export GOOGLE_APPLICATION_CREDENTIALS=$scriptdir/secret_ondeck_gcr_token.json" >> "$USERHOME/.bashrc" +if ! grep -E '^export GOOGLE_APPLICATION_CREDENTIALS=' "$USERHOME/.bashrc"; then + echo "export GOOGLE_APPLICATION_CREDENTIALS=$scriptdir/secret_ondeck_gcr_token.json" >>"$USERHOME/.bashrc" fi gsettings set org.gnome.Vino require-encryption false - -if ! [ -d "$USERHOME/.aws" ] ; then +if ! [ -d "$USERHOME/.aws" ]; then mkdir "$USERHOME/.aws" fi -if ! [ -e "$USERHOME/.aws/credentials" ] ; then - if ! [ -e "$scriptdir/secret_aws_creds.txt" ] ; then +if ! [ -e "$USERHOME/.aws/credentials" ]; then + if ! [ -e "$scriptdir/secret_aws_creds.txt" ]; then echo "aws secret keys file not found! please add the secret and rerun this script" exit 1 fi @@ -383,19 +369,18 @@ if ! [ -e "$USERHOME/.aws/credentials" ] ; then chmod go-rwx "$USERHOME/.aws/credentials" fi - -if [ -e "$USERHOME/.gnupg/pubring.kbx" ] && [ "x$USERNAME:$USERNAME" != "x$(stat --format '%U:%G' "$USERHOME/.gnupg/pubring.kbx")" ] ; then +if [ -e "$USERHOME/.gnupg/pubring.kbx" ] && [ "x$USERNAME:$USERNAME" != "x$(stat --format '%U:%G' "$USERHOME/.gnupg/pubring.kbx")" ]; then sudo chown "$USERNAME":"$USERNAME" "$USERHOME/.gnupg/pubring.kbx" fi FOUND="" -for file in /etc/netplan/01_eth0_dhcp.yaml* ; do - if [ -e "$file" ] ; then +for file in /etc/netplan/01_eth0_dhcp.yaml*; do + if [ -e "$file" ]; then FOUND="y" fi done -if ! [ "$FOUND" ] ; then - cat > ./01_eth0_dhcp.yaml <./01_eth0_dhcp.yaml < ./01_eth0_static.yaml <./01_eth0_static.yaml < ./netplan-autoswitcher.service << EOF + cat >./netplan-autoswitcher.service < "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/thalos-video-autodecrypt.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "thalos-video-autodecrypt.service" fi rm "$TMP_FILE" -if ! [ -d "/thalos" ] ; then +if ! [ -d "/thalos" ]; then sudo mkdir /thalos sudo chmod go+rwx /thalos fi -if ! [ -d "/videos" ] ; then +if ! [ -d "/videos" ]; then sudo mkdir /videos sudo chmod go+rwx /videos fi -if ! [ -d "/videos/processing" ] ; then +if ! [ -d "/videos/processing" ]; then sudo mkdir /videos/processing sudo chmod go+rwx /videos/processing fi -if ! 
[ -d "/videos/output" ] ; then +if ! [ -d "/videos/output" ]; then sudo mkdir /videos/output sudo chmod go+rwx /videos/output fi -if ! [ -d "/usbdrive" ] ; then +if ! [ -d "/usbdrive" ]; then sudo mkdir /usbdrive sudo chmod go+rwx /usbdrive fi -if ! [ -e "/etc/systemd/system/thalos.mount" ] ; then - cat > ./thalos.mount << EOF +if ! [ -e "/etc/systemd/system/thalos.mount" ]; then + cat >./thalos.mount < ./thalos.automount << EOF +if ! [ -e "/etc/systemd/system/thalos.automount" ]; then + cat >./thalos.automount </dev/null ; then +if ! sudo test -e "/root/purge-video.sh" || ! sudo diff "$scriptdir/purge-video.sh" /root/purge-video.sh >/dev/null; then sudo cp "$scriptdir/purge-video.sh" /root/purge-video.sh fi +if ! [ -e "/etc/systemd/system/purge-video.service" ]; then -if ! [ -e "/etc/systemd/system/purge-video.service" ] ; then - - cat > ./purge-video.service << EOF + cat >./purge-video.service < "$TMP_FILE" << EOF + cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/ondeck-runner.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "ondeck-runner.service" fi rm "$TMP_FILE" fi - TMP_FILE="$(mktemp)" -cat > "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/gps_fetch.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "gps_fetch.service" fi rm "$TMP_FILE" -if [ "$DO_COPY_PY_PANDAS_TO_VENV" ] ; then +if [ "$DO_COPY_PY_PANDAS_TO_VENV" ]; then cp -r /usr/lib/python3/dist-packages/pytz* "$USERHOME"/tnc-edge-service/venv/lib/python3.8/site-packages/ cp -r /usr/lib/python3/dist-packages/tzdata* "$USERHOME"/tnc-edge-service/venv/lib/python3.8/site-packages/ cp -r /usr/lib/python3/dist-packages/numpy* "$USERHOME"/tnc-edge-service/venv/lib/python3.8/site-packages/ cp -r /usr/lib/python3/dist-packages/pandas* "$USERHOME"/tnc-edge-service/venv/lib/python3.8/site-packages/ fi - - TMP_FILE="$(mktemp)" -cat > "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/s3_uploader.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "s3_uploader.service" fi rm "$TMP_FILE" - - - TMP_FILE="$(mktemp)" -cat > "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/reencode_video_tnc.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "reencode_video_tnc.service" fi rm "$TMP_FILE" +if [ "$DO_ONDECK" ]; then - -if [ "$DO_ONDECK" ] ; then - - - if sudo jetson_clocks --show | grep -q "Xavier NX" ; then + if sudo jetson_clocks --show | grep -q "Xavier NX"; then DEVICE_STR="xavier" fi - if sudo jetson_clocks --show | grep -q "Orin NX" ; then + if sudo jetson_clocks --show | grep -q "Orin NX"; then DEVICE_STR="orin" fi - if [ -z "$DEVICE_STR" ] ; then + if [ -z "$DEVICE_STR" ]; then echo "cannot determine jetson device. Please check the output of 'sudo jetson_clocks --show'" exit 1 fi - if ! [ -s "$ENVIRONMENT" ] ; then + if ! 
[ -s "$ENVIRONMENT" ]; then echo "this script needs access to the ENVIRONMENT file" exit 1 - fi + fi export "$(grep ONDECK_MODEL_ENGINE "$ENVIRONMENT")" export "$(grep ONDECK_POLYGON_STR "$ENVIRONMENT")" - if [ -z "$ONDECK_MODEL_ENGINE" ] ; then + if [ -z "$ONDECK_MODEL_ENGINE" ]; then ONDECK_ENGINE_OVERRIDE="" else ONDECK_ENGINE_OVERRIDE="-e ENGINE_OVERRIDE=$ONDECK_MODEL_ENGINE" fi - if [ -z "$ONDECK_POLYGON_STR" ] ; then + if [ -z "$ONDECK_POLYGON_STR" ]; then echo "please set ONDECK_POLYGON_STR in ENVIRONMENT file" exit 1 fi TMP_FILE="$(mktemp)" - cat > "$TMP_FILE" << EOF + cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/ondeck_model.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "ondeck_model.service" fi rm "$TMP_FILE" fi - -if [ "$DO_AIFISH" ] ; then - +if [ "$DO_AIFISH" ]; then TMP_FILE="$(mktemp)" - cat > "$TMP_FILE" << EOF + cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/aifish-runner.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "aifish-runner.service" fi rm "$TMP_FILE" - TMP_FILE="$(mktemp)" - cat > "$TMP_FILE" << EOF + cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/aifish_model.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "aifish_model.service" fi rm "$TMP_FILE" - fi - - - TMP_FILE="$(mktemp)" -cat > "$TMP_FILE" << EOF +cat >"$TMP_FILE" </dev/null; then sudo cp "$TMP_FILE" /etc/systemd/system/vector_schedule.service - sudo systemctl daemon-reload + sudo systemctl daemon-reload sudo systemctl restart "vector_schedule.service" fi rm "$TMP_FILE" - - diff --git a/scripts/video_bulk_copy/local_to_s3_upload.sh b/scripts/video_bulk_copy/local_to_s3_upload.sh index e5fbf57..1d8b828 100644 --- a/scripts/video_bulk_copy/local_to_s3_upload.sh +++ b/scripts/video_bulk_copy/local_to_s3_upload.sh @@ -1,39 +1,39 @@ #!/bin/bash # SCRIPTNAME="$0" -SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")" +SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")" EXTHDPATH="/Volumes/Expansion 1" cd "$EXTHDPATH" || exit 1 -for i in *.avi.enc *.avi.done.enc ; do - if grep -q "$i" "$SCRIPTDIR/done.txt" ; then +for i in *.avi.enc *.avi.done.enc; do + if grep -q "$i" "$SCRIPTDIR/done.txt"; then continue fi bname="$(basename "$i")" cname="${bname%*.enc}" boatname="${cname%%_*}" dtname="${cname#*_}" - if [ 'saintpatrick' == "$boatname" ] ; then + if [ 'saintpatrick' == "$boatname" ]; then echo "$cname,Saint Patrick,$dtname" else echo "$cname,${boatname^},$dtname" fi -done | parallel -v -r --eta --jobs 6 --colsep "," 'flock '$SCRIPTDIR'/usb2.lock cp {1}.enc '$SCRIPTDIR'/{1}.enc && gpg -d --batch -o '$SCRIPTDIR'/{1} '$SCRIPTDIR'/{1}.enc && aws s3 --profile AWSAdministratorAccess-867800856651 cp '$SCRIPTDIR'/{1} "s3://dp.riskedge.fish/TNC EDGE Trip Video Files/"{2}"/alt_hd_upload/{3}" && flock '$SCRIPTDIR'/done.lock echo {1}.enc >> '$SCRIPTDIR'/done.txt && rm '$SCRIPTDIR'/{1} '$SCRIPTDIR'/{1}.enc' 2>&1 +done | parallel -v -r --eta --jobs 6 --colsep "," 'flock '$SCRIPTDIR'/usb2.lock cp {1}.enc '$SCRIPTDIR'/{1}.enc && gpg -d --batch -o '$SCRIPTDIR'/{1} '$SCRIPTDIR'/{1}.enc && aws s3 --profile AWSAdministratorAccess-867800856651 cp '$SCRIPTDIR'/{1} "s3://dp.riskedge.fish/TNC EDGE Trip Video Files/"{2}"/alt_hd_upload/{3}" && flock '$SCRIPTDIR'/done.lock echo {1}.enc >> '$SCRIPTDIR'/done.txt && rm '$SCRIPTDIR'/{1} '$SCRIPTDIR'/{1}.enc' 2>&1 -for i in *.avi *.avi.done *.mkv ; do - if grep -q "$i" 
"$SCRIPTDIR/done.txt" ; then +for i in *.avi *.avi.done *.mkv; do + if grep -q "$i" "$SCRIPTDIR/done.txt"; then continue fi bname="$(basename "$i")" boatname="${bname%%_*}" dtname="${bname#*_}" - if [ 'saintpatrick' == "$boatname" ] ; then + if [ 'saintpatrick' == "$boatname" ]; then echo "$bname,Saint Patrick,$dtname" else echo "$bname,${boatname^},$dtname" fi -done | parallel -v -r --eta --jobs 6 --colsep "," 'flock '$SCRIPTDIR'/usb2.lock cp {1} '$SCRIPTDIR'/{1} && aws s3 --profile AWSAdministratorAccess-867800856651 cp '$SCRIPTDIR'/{1} "s3://dp.riskedge.fish/TNC EDGE Trip Video Files/"{2}"/alt_hd_upload/{3}" && flock '$SCRIPTDIR'/done.lock echo {1} >> '$SCRIPTDIR'/done.txt && rm '$SCRIPTDIR'/{1}' 2>&1 +done | parallel -v -r --eta --jobs 6 --colsep "," 'flock '$SCRIPTDIR'/usb2.lock cp {1} '$SCRIPTDIR'/{1} && aws s3 --profile AWSAdministratorAccess-867800856651 cp '$SCRIPTDIR'/{1} "s3://dp.riskedge.fish/TNC EDGE Trip Video Files/"{2}"/alt_hd_upload/{3}" && flock '$SCRIPTDIR'/done.lock echo {1} >> '$SCRIPTDIR'/done.txt && rm '$SCRIPTDIR'/{1}' 2>&1 exit 0 diff --git a/scripts/video_bulk_copy/local_to_usbstick.sh b/scripts/video_bulk_copy/local_to_usbstick.sh index bb87b2d..67e2dde 100644 --- a/scripts/video_bulk_copy/local_to_usbstick.sh +++ b/scripts/video_bulk_copy/local_to_usbstick.sh @@ -1,15 +1,12 @@ #!/bin/bash # SCRIPTNAME="$0" -SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")" +SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")" EXTHDPATH="/Volumes/Expansion 1" cd "$EXTHDPATH" || exit 1 - ls *.avi *.mkv | parallel -v -r --eta --jobs 2 ' flock '$SCRIPTDIR'/usb.lock cp {} '$SCRIPTDIR'/{} && && rm '$SCRIPTDIR'/{}' 2>&1 | tee mainsh.stdout_and_stderr.txt ls *.enc | parallel -v -r --eta --jobs 2 ' flock '$SCRIPTDIR'/usb.lock cp {} '$SCRIPTDIR'/{} && gpg -e -z 0 --batch -r edgedevice -o /tmp/{}.enc /tmp/{} && flock /tmp/usb.lock cp /tmp/{}.enc /usbdrive/{}.enc && rm /tmp/{} /tmp/{}.enc ; fi ' 2>&1 | tee /home/edge/enc_from_usb_to_usb.stdout_and_stderr.txt - - diff --git a/scripts/video_bulk_copy/thalos_all_files_psql_to_usbstick.sh b/scripts/video_bulk_copy/thalos_all_files_psql_to_usbstick.sh index 1b3a1a8..c313556 100644 --- a/scripts/video_bulk_copy/thalos_all_files_psql_to_usbstick.sh +++ b/scripts/video_bulk_copy/thalos_all_files_psql_to_usbstick.sh @@ -1,10 +1,8 @@ #!/bin/bash SCRIPTNAME="$0" -SCRIPTDIR="$(dirname -- "$( readlink -f -- "$0")")" +SCRIPTDIR="$(dirname -- "$(readlink -f -- "$0")")" touch /tmp/usb.lock /tmp/network.lock - echo "select original_path from video_files where start_datetime > '2023-10-16 16:45:00Z' order by start_datetime asc;" | psql -t | awk 'NF' | python3 -c 'from datetime import datetime; import sys; from pathlib import Path; [print( line.strip(), Path(line.strip()).parents[4].name+"_"+datetime.strptime(Path(line.strip()).name[0:16], "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M00Z")+"_"+Path(line.strip()).parents[2].name+".avi") for line in sys.stdin.readlines() ]' | parallel -v -r --eta --jobs 4 --colsep " " 'if [ ! 
-e "/usbdrive/{2}.enc" ] ; then flock /tmp/network.lock cp {1} /tmp/{2} && gpg -e -z 0 --batch -r edgedevice -o /tmp/{2}.enc /tmp/{2} && flock /tmp/usb.lock cp /tmp/{2}.enc /usbdrive/{2}.enc && rm /tmp/{2} /tmp/{2}.enc ; fi ' - diff --git a/scripts/video_bulk_copy/thalos_specific_files_to_usbstick.sh b/scripts/video_bulk_copy/thalos_specific_files_to_usbstick.sh index a1def5f..c77967b 100644 --- a/scripts/video_bulk_copy/thalos_specific_files_to_usbstick.sh +++ b/scripts/video_bulk_copy/thalos_specific_files_to_usbstick.sh @@ -5,6 +5,4 @@ touch /tmp/usb.lock /tmp/network.lock - find /thalos/brancol/videos/cam{1,2}/{15,16,17,18,19,20,21,22,23,24}-10-2023 -name '*.avi.done' | python3 -c 'from datetime import datetime; import sys; from pathlib import Path; [print( line.strip(), Path(line.strip()).parents[4].name+"_"+datetime.strptime(Path(line.strip()).name[0:16], "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M00Z")+"_"+Path(line.strip()).parents[2].name+".avi") for line in sys.stdin.readlines() ]' | parallel -v -r --eta --jobs 4 --colsep " " 'if [ ! -e "/usbdrive/{2}.enc" ] ; then flock /tmp/network.lock cp {1} /tmp/{2} && gpg -e -z 0 --batch -r edgedevice -o /tmp/{2}.enc /tmp/{2} && flock /tmp/usb.lock cp /tmp/{2}.enc /usbdrive/{2}.enc && rm /tmp/{2} /tmp/{2}.enc ; fi ' - diff --git a/scripts/video_bulk_copy/usbdrive_ensure_encrypted.sh b/scripts/video_bulk_copy/usbdrive_ensure_encrypted.sh index b250c9e..aecc569 100644 --- a/scripts/video_bulk_copy/usbdrive_ensure_encrypted.sh +++ b/scripts/video_bulk_copy/usbdrive_ensure_encrypted.sh @@ -5,7 +5,6 @@ touch /tmp/usb.lock - cd /usbdrive || exit 1 ls | grep -e 'avi' -e 'mkv' | grep -v '.enc' | parallel -v -r --eta --jobs 2 'if [ ! -e "/usbdrive/{}.enc" ] ; then flock /tmp/usb.lock cp {} /tmp/{} && gpg -e -z 0 --batch -r edgedevice -o /tmp/{}.enc /tmp/{} && flock /tmp/usb.lock cp /tmp/{}.enc /usbdrive/{}.enc && rm /tmp/{} /tmp/{}.enc ; fi ' 2>&1 | tee /home/edge/enc_from_usb_to_usb.stdout_and_stderr.txt diff --git a/scripts/vpn-install.sh b/scripts/vpn-install.sh index da16d29..3fbc8d7 100644 --- a/scripts/vpn-install.sh +++ b/scripts/vpn-install.sh @@ -7,8 +7,8 @@ echo "Before running:" echo " 1. ssh into vpn.riskedge.fish" echo " 2. stay in the home dir" -echo " 3. run `./easyrsa/easyrsa --pki-dir=tnc-edge-vpn-pki build-client-full nopass`" -echo " 4. cat the files `tnc-edge-vpn-pki/private/edgeX.key` and `tnc-edge-vpn-pki/issued/edgeX.crt `" +echo " 3. run $(./easyrsa/easyrsa --pki-dir=tnc-edge-vpn-pki build-client-full nopass)" +echo " 4. cat the files $(tnc-edge-vpn-pki/private/edgeX.key) and $(tnc-edge-vpn-pki/issued/edgeX.crt)" echo " 5. edit an existing edgeX.ovpn and paste the key+cert" function usage { @@ -16,20 +16,18 @@ function usage { echo " vpn-install.sh " } -if [ "$1" == "" ] || ! [ -e "$1" ] ; then +if [ "$1" == "" ] || ! [ -e "$1" ]; then echo "no OpenVPN config file" echo "" usage exit 1 fi - -if ! which openvpn ; then +if ! 
which openvpn; then echo "installing openvpn" - sudo apt -y install openvpn + sudo apt -y install openvpn fi sudo cp "$1" /etc/openvpn/client/tnc-edge.conf sudo systemctl enable openvpn-client@tnc-edge sudo systemctl restart openvpn-client@tnc-edge - diff --git a/tests/ondeck_json_to_tracks.py b/tests/ondeck_json_to_tracks.py index 163a906..0f07ef8 100644 --- a/tests/ondeck_json_to_tracks.py +++ b/tests/ondeck_json_to_tracks.py @@ -1,92 +1,95 @@ - -from model import Base, Track, OndeckData - import os + +import click +import sqlalchemy as sa from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +from sqlalchemy.orm import Session +from sqlalchemy.orm import sessionmaker as SessionMaker -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') +from model import Base, Track +flaskconfig = FlaskConfig(root_path="") -import sqlalchemy as sa -from sqlalchemy.orm import sessionmaker as SessionMaker, Session +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -import click @click.group(invoke_without_command=True) @click.pass_context -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) def main(ctx, dbname, dbuser): - - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) sessionmaker = SessionMaker(sa_engine) Base.metadata.create_all(sa_engine) ctx.ensure_object(dict) - ctx.obj['sessionmaker'] = sessionmaker + ctx.obj["sessionmaker"] = sessionmaker if ctx.invoked_subcommand is None: - click.echo('I was invoked without subcommand') + click.echo("I was invoked without subcommand") with sessionmaker() as session: active_tracks = {} done_tracks = [] - fname = '/Users/ericfultz/Documents/pops/TNC/tnc-edge-service/tests/data/20231205T212500Z_cam1_ondeck.json' + fname = "/Users/ericfultz/Documents/pops/TNC/tnc-edge-service/tests/data/20231205T212500Z_cam1_ondeck.json" import json - with open(fname) as f: + with open(fname) as f: j = json.load(f) - for frame in j['frames']: - if 'allActiveTrackingIds' not in frame: + for frame in j["frames"]: + if "allActiveTrackingIds" not in frame: continue - for activeTrackingId_str in frame['allActiveTrackingIds']: + for activeTrackingId_str in frame["allActiveTrackingIds"]: activeTrackingId = int(activeTrackingId_str) if activeTrackingId not in active_tracks.keys(): active_tracks[activeTrackingId] = Track() active_tracks[activeTrackingId].cocoannotations_uri = fname active_tracks[activeTrackingId].track_id = activeTrackingId - active_tracks[activeTrackingId].first_framenum = frame['frameNum'] + active_tracks[activeTrackingId].first_framenum = frame["frameNum"] active_tracks[activeTrackingId].confidences = [] t = active_tracks[activeTrackingId] - try: - idx = frame['trackingIds'].index(activeTrackingId_str) - t.confidences.append(frame['confidence'][idx]) + try: + idx = frame["trackingIds"].index(activeTrackingId_str) + t.confidences.append(frame["confidence"][idx]) except: t.confidences.append(0.0) for track_id in list(active_tracks.keys()): track = active_tracks[track_id] - if str(track_id) not in frame['allActiveTrackingIds']: - track.last_framenum = frame['frameNum'] + if str(track_id) 
not in frame["allActiveTrackingIds"]: + track.last_framenum = frame["frameNum"] done_tracks.append(track) active_tracks.pop(track_id) - - - session.add_all(done_tracks) + session.add_all(done_tracks) session.commit() - + @main.command() @click.pass_context def archive(ctx): - import run_ondeck from pathlib import Path - sessionmaker = ctx.obj['sessionmaker'] + import run_ondeck + + sessionmaker = ctx.obj["sessionmaker"] session: Session = sessionmaker() with session: - res = session.execute(sa.text("select ondeckdata.video_uri, ondeckdata.cocoannotations_uri from ondeckdata \ + res = session.execute( + sa.text( + "select ondeckdata.video_uri, ondeckdata.cocoannotations_uri from ondeckdata \ left join tracks on ondeckdata.cocoannotations_uri = tracks.cocoannotations_uri \ - where tracks.id is null and ondeckdata.cocoannotations_uri like '/videos/%ondeck.json';")) - for (video_uri, json_uri) in res: + where tracks.id is null and ondeckdata.cocoannotations_uri like '/videos/%ondeck.json';" + ) + ) + for video_uri, json_uri in res: json_path = Path(json_uri) if json_path.is_file(): run_ondeck.parse_json(session, Path(video_uri), json_path, only_tracks=True) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/tests/onetimetests/test.py b/tests/onetimetests/test.py index c5d17b9..c64bbfa 100644 --- a/tests/onetimetests/test.py +++ b/tests/onetimetests/test.py @@ -1,15 +1,19 @@ import os + +import sqlalchemy as sa from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +from sqlalchemy.orm import sessionmaker as SessionMaker -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') +from model import Base as ModelBase +from model import DeckhandEventView + +flaskconfig = FlaskConfig(root_path="") + +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -import sqlalchemy as sa -from model import Base as ModelBase, DeckhandEventView -from sqlalchemy.orm import sessionmaker as SessionMaker sa_engine = sa.create_engine("postgresql+psycopg2://ericfultz@/edge", echo=True) sessionmaker = SessionMaker(sa_engine) ModelBase.metadata.create_all(sa_engine) diff --git a/tests/onetimetests/test2.py b/tests/onetimetests/test2.py index e768e56..a327a82 100644 --- a/tests/onetimetests/test2.py +++ b/tests/onetimetests/test2.py @@ -1,18 +1,16 @@ from datetime import datetime -with open('brancol_usb_files') as f: +with open("brancol_usb_files") as f: for l in f.readlines(): - l = l.strip() - if l.find(' ') < 0: - print(l[:-4]) - continue - n = l.split(' ')[-1] - # print(n) - # continue - try: - (_, c, r) = l.split("_") - fd = r.split(".")[0] - d = datetime.strptime(fd, "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M%SZ") - print(d+"_"+c) - except: - continue \ No newline at end of file + l = l.strip() + if l.find(" ") < 0: + print(l[:-4]) + continue + n = l.split(" ")[-1] + try: + (_, c, r) = l.split("_") + fd = r.split(".")[0] + d = datetime.strptime(fd, "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M%SZ") + print(d + "_" + c) + except: + continue diff --git a/tests/onetimetests/test3.py b/tests/onetimetests/test3.py index a185608..e1e4951 100644 --- a/tests/onetimetests/test3.py +++ b/tests/onetimetests/test3.py @@ -1,1607 +1,1618 @@ -a = ['/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-25.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-30.avi.done', 
-'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-35.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-40.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-45.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-50.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-55.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-00.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-05.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-10.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-15.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-20.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-25.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-30.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-35.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-40.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-45.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-50.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-55.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-00.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-05.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-10.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-15.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-20.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-25.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-30.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-35.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-40.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-45.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-50.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-55.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-00.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-05.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-10.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-15.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-20.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-25.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-30.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-35.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-40.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-45.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-50.avi.done', -'/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-20.avi.done', 
-'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-25.avi.done', 
-'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-15.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-05.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-10.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-15.avi.done', 
-'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-20.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-25.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-30.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-35.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-40.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-45.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-50.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-55.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/20/04-10-2023-20-00.avi.done', -'/thalos/saintpatrick/videos/cam1/04-10-2023/20/04-10-2023-20-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-10.avi.done', 
-'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-00.avi.done', 
-'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-35.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-55.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-00.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-05.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-10.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-15.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-20.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-25.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-30.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-40.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-45.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-50.avi.done', -'/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-10.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-35.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/11/06-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/11/06-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-05.avi.done', 
-'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-15.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-20.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/20/06-10-2023-20-25.avi.done', -'/thalos/saintpatrick/videos/cam1/06-10-2023/20/06-10-2023-20-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-05.avi.done', 
-'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-55.avi.done', 
-'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/05/08-10-2023-05-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/05/08-10-2023-05-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-05.avi.done', 
-'/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-50.avi.done', 
-'/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-00.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-05.avi.done', -'/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-45.avi.done', 
-'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-35.avi.done', 
-'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-00.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-05.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-25.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-30.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-40.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-45.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-50.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-55.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-00.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-05.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-25.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-30.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-40.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-40.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-45.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-50.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-55.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-00.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-05.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-40.avi.done', 
-'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-20.avi.done', 
-'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-20.avi.done', 
-'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-50.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-55.avi.done', -'/thalos/saintpatrick/videos/cam1/12-10-2023/18/12-10-2023-18-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/10/13-10-2023-10-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/10/13-10-2023-10-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-40.avi.done', 
-'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-30.avi.done', 
-'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-25.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-30.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-35.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-40.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-50.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-55.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-00.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-05.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-10.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-15.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-20.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-25.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-30.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-35.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-40.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-50.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-55.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-00.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-05.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-10.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-15.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-20.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-25.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-30.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-35.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-40.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-50.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-55.avi.done', 
-'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-00.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-05.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-10.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-15.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-20.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-25.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-30.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-35.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-40.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-45.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-50.avi.done', -'/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-00.avi.done', 
-'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-50.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-55.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-00.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-05.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-10.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-15.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-20.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-25.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-30.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-35.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-40.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-45.avi.done', -'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-50.avi.done', 
-'/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/20/04-10-2023-20-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/04-10-2023/20/04-10-2023-20-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/11/06-10-2023-11-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/11/06-10-2023-11-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/20/06-10-2023-20-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/06-10-2023/20/06-10-2023-20-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/05/08-10-2023-05-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/05/08-10-2023-05-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/12-10-2023/18/12-10-2023-18-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/10/13-10-2023-10-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/10/13-10-2023-10-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-45.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-50.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-55.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-00.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-05.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-10.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-15.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-20.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-25.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-30.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-35.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-40.avi.done',
-'/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-45.avi.done',
+from datetime import datetime
+
+video_paths = [
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/20/03-10-2023-20-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/21/03-10-2023-21-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/22/03-10-2023-22-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/03-10-2023/23/03-10-2023-23-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/00/04-10-2023-00-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/11/04-10-2023-11-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/12/04-10-2023-12-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/13/04-10-2023-13-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/14/04-10-2023-14-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/15/04-10-2023-15-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/16/04-10-2023-16-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/17/04-10-2023-17-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/18/04-10-2023-18-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/19/04-10-2023-19-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/20/04-10-2023-20-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/04-10-2023/20/04-10-2023-20-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/11/05-10-2023-11-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/12/05-10-2023-12-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/13/05-10-2023-13-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/14/05-10-2023-14-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/15/05-10-2023-15-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/16/05-10-2023-16-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/17/05-10-2023-17-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/18/05-10-2023-18-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/19/05-10-2023-19-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/20/05-10-2023-20-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/05-10-2023/23/05-10-2023-23-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-35.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-40.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-45.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/00/06-10-2023-00-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/11/06-10-2023-11-50.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/11/06-10-2023-11-55.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-00.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-05.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-10.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-15.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-20.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-25.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-30.avi.done",
+    "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-35.avi.done",
+
"/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/12/06-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/13/06-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/14/06-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/15/06-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/16/06-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/18/06-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/20/06-10-2023-20-25.avi.done", + "/thalos/saintpatrick/videos/cam1/06-10-2023/20/06-10-2023-20-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/11/07-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-30.avi.done", + 
"/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/12/07-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/13/07-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/14/07-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/15/07-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-15.avi.done", + 
"/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/16/07-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/07-10-2023/17/07-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/05/08-10-2023-05-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/05/08-10-2023-05-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/06/08-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/07/08-10-2023-07-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/08/08-10-2023-08-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-05.avi.done", + 
"/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/11/08-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/12/08-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/13/08-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/14/08-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-00.avi.done", + 
"/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/15/08-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/16/08-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/17/08-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam1/08-10-2023/18/08-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-50.avi.done", + 
"/thalos/saintpatrick/videos/cam1/09-10-2023/11/09-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/12/09-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/13/09-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/14/09-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-35.avi.done", + 
"/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/15/09-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/09-10-2023/16/09-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-00.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-05.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/06/10-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-30.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-35.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/07/10-10-2023-07-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-45.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-50.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/10/10-10-2023-10-55.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-35.avi.done", + 
"/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/11/10-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/12/10-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/10-10-2023/13/10-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/11-10-2023/12/11-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/11/12-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/12/12-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-10.avi.done", + 
"/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/13/12-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/14/12-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/15/12-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/16/12-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-05.avi.done", + 
"/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/17/12-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam1/12-10-2023/18/12-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/06/13-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/07/13-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/10/13-10-2023-10-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/10/13-10-2023-10-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/11/13-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-20.avi.done", + 
"/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/12/13-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/13/13-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/14/13-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/15/13-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-05.avi.done", + 
"/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/16/13-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam1/13-10-2023/17/13-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-25.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-30.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-35.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-40.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-50.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/20/03-10-2023-20-55.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-00.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-05.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-10.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-15.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-20.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-25.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-30.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-35.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-40.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-50.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/21/03-10-2023-21-55.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-00.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-05.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-10.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-15.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-20.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-25.avi.done", + 
"/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-30.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-35.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-40.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-50.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/22/03-10-2023-22-55.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-00.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-05.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-10.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-15.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-20.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-25.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-30.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-35.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-40.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-45.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-50.avi.done", + "/thalos/saintpatrick/videos/cam2/03-10-2023/23/03-10-2023-23-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/00/04-10-2023-00-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/11/04-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/12/04-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-25.avi.done", + 
"/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/13/04-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/14/04-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/15/04-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/16/04-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-10.avi.done", + 
"/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/17/04-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/18/04-10-2023-18-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-05.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-10.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-15.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-20.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-25.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-30.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-35.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-40.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-45.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-50.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/19/04-10-2023-19-55.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/20/04-10-2023-20-00.avi.done", + "/thalos/saintpatrick/videos/cam2/04-10-2023/20/04-10-2023-20-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/11/05-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-00.avi.done", + 
"/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/12/05-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/13/05-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/14/05-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-50.avi.done", + 
"/thalos/saintpatrick/videos/cam2/05-10-2023/15/05-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/16/05-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/17/05-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-35.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/18/05-10-2023-18-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-35.avi.done", + 
"/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/19/05-10-2023-19-55.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-00.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-05.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-10.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-15.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-20.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-25.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/20/05-10-2023-20-30.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-40.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-45.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-50.avi.done", + "/thalos/saintpatrick/videos/cam2/05-10-2023/23/05-10-2023-23-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-10.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-35.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/00/06-10-2023-00-50.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/11/06-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/11/06-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/12/06-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/13/06-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-50.avi.done", + 
"/thalos/saintpatrick/videos/cam2/06-10-2023/14/06-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/15/06-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/16/06-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-15.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-20.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/18/06-10-2023-18-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/20/06-10-2023-20-25.avi.done", + "/thalos/saintpatrick/videos/cam2/06-10-2023/20/06-10-2023-20-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/11/07-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/12/07-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-30.avi.done", + 
"/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/13/07-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/14/07-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/15/07-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/16/07-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-15.avi.done", + 
"/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/07-10-2023/17/07-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/05/08-10-2023-05-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/05/08-10-2023-05-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/06/08-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/07/08-10-2023-07-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/08/08-10-2023-08-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/11/08-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-15.avi.done", + 
"/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/12/08-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/13/08-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/14/08-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/15/08-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-00.avi.done", + 
"/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/16/08-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/17/08-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-05.avi.done", + "/thalos/saintpatrick/videos/cam2/08-10-2023/18/08-10-2023-18-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/11/09-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-50.avi.done", + 
"/thalos/saintpatrick/videos/cam2/09-10-2023/12/09-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/13/09-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/14/09-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/15/09-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/09-10-2023/16/09-10-2023-16-35.avi.done", + 
"/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-00.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-05.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/06/10-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-30.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-35.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/07/10-10-2023-07-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-45.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-50.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/10/10-10-2023-10-55.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/11/10-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-35.avi.done", + 
"/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/12/10-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/10-10-2023/13/10-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/11-10-2023/12/11-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/11/12-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/12/12-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/13/12-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-10.avi.done", + 
"/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/14/12-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/15/12-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/16/12-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-05.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-45.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-50.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/17/12-10-2023-17-55.avi.done", + "/thalos/saintpatrick/videos/cam2/12-10-2023/18/12-10-2023-18-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-15.avi.done", + 
"/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/06/13-10-2023-06-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/07/13-10-2023-07-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/10/13-10-2023-10-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/10/13-10-2023-10-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/11/13-10-2023-11-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/12/13-10-2023-12-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-20.avi.done", + 
"/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/13/13-10-2023-13-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/14/13-10-2023-14-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/15/13-10-2023-15-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-05.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-45.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-50.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/16/13-10-2023-16-55.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-00.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-05.avi.done", + 
"/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-10.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-15.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-20.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-25.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-30.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-35.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-40.avi.done", + "/thalos/saintpatrick/videos/cam2/13-10-2023/17/13-10-2023-17-45.avi.done", ] -from datetime import datetime -a.sort(key=lambda x: datetime.strptime(x.split('/')[-1].split('.')[0], "%d-%m-%Y-%H-%M").strftime("%Y%m%dT%H%M%SZ")) -for i in a: - (_, _, b, _, c, _, _, f) = i.split('/') +video_paths.sort( + key=lambda x: datetime.strptime(x.split("/")[-1].split(".")[0], "%d-%m-%Y-%H-%M").strftime( + "%Y%m%dT%H%M%SZ" + ) +) + +for i in video_paths: + (_, _, b, _, c, _, _, f) = i.split("/") t = "{}_{}_{}".format(b, c, f) e = t + ".enc" - print("cp {} /tmp/{} ; gpg -e --batch -z 0 --trust-model always -r edgedevice --output /tmp/{} /tmp/{} ; flock /tmp/lock cp /tmp/{} /usbdrive/{} ; rm /tmp/{} /tmp/{}".format(i, t, e, t, e, e, t, e)) + print( + "cp {} /tmp/{} ; gpg -e --batch -z 0 --trust-model always -r edgedevice --output /tmp/{} /tmp/{} ; flock /tmp/lock cp /tmp/{} /usbdrive/{} ; rm /tmp/{} /tmp/{}".format( + i, t, e, t, e, e, t, e + ) + ) diff --git a/tests/onetimetests/test4.py b/tests/onetimetests/test4.py index 5966fda..96d77ac 100644 --- a/tests/onetimetests/test4.py +++ b/tests/onetimetests/test4.py @@ -1,1813 +1,1814 @@ -a=['18-10-2023-00-00.ndx', -'18-10-2023-00-00.pcm', -'18-10-2023-00-01.ndx', -'18-10-2023-00-01.pcm', -'18-10-2023-00-02.ndx', -'18-10-2023-00-02.pcm', -'18-10-2023-00-03.ndx', -'18-10-2023-00-03.pcm', -'18-10-2023-00-05.ndx', -'18-10-2023-00-05.pcm', -'18-10-2023-00-06.ndx', -'18-10-2023-00-06.pcm', -'18-10-2023-00-07.ndx', -'18-10-2023-00-07.pcm', -'18-10-2023-00-08.ndx', -'18-10-2023-00-08.pcm', -'18-10-2023-00-09.ndx', -'18-10-2023-00-09.pcm', -'18-10-2023-00-10.ndx', -'18-10-2023-00-10.pcm', -'18-10-2023-00-11.ndx', -'18-10-2023-00-11.pcm', -'18-10-2023-00-12.ndx', -'18-10-2023-00-12.pcm', -'18-10-2023-00-13.ndx', -'18-10-2023-00-13.pcm', -'18-10-2023-00-14.ndx', -'18-10-2023-00-14.pcm', -'18-10-2023-00-15.ndx', -'18-10-2023-00-15.pcm', -'18-10-2023-00-16.ndx', -'18-10-2023-00-16.pcm', -'18-10-2023-00-17.ndx', -'18-10-2023-00-17.pcm', -'18-10-2023-00-18.ndx', -'18-10-2023-00-18.pcm', -'18-10-2023-00-19.ndx', -'18-10-2023-00-19.pcm', -'18-10-2023-00-20.ndx', -'18-10-2023-00-20.pcm', -'18-10-2023-00-21.ndx', -'18-10-2023-00-21.pcm', -'18-10-2023-00-22.ndx', -'18-10-2023-00-22.pcm', -'18-10-2023-00-23.ndx', -'18-10-2023-00-23.pcm', -'18-10-2023-00-24.ndx', -'18-10-2023-00-24.pcm', -'18-10-2023-00-25.ndx', -'18-10-2023-00-25.pcm', -'18-10-2023-00-26.ndx', -'18-10-2023-00-26.pcm', -'18-10-2023-00-27.ndx', -'18-10-2023-00-27.pcm', -'18-10-2023-00-28.ndx', -'18-10-2023-00-28.pcm', -'18-10-2023-00-29.ndx', -'18-10-2023-00-29.pcm', -'18-10-2023-00-30.ndx', -'18-10-2023-00-30.pcm', -'18-10-2023-00-31.ndx', -'18-10-2023-00-31.pcm', -'18-10-2023-00-32.ndx', -'18-10-2023-00-32.pcm', -'18-10-2023-00-33.ndx', -'18-10-2023-00-33.pcm', -'18-10-2023-00-34.ndx', -'18-10-2023-00-34.pcm', -'18-10-2023-00-35.ndx', -'18-10-2023-00-35.pcm', -'18-10-2023-00-36.ndx', -'18-10-2023-00-36.pcm', -'18-10-2023-00-37.ndx', -'18-10-2023-00-37.pcm', 
-'18-10-2023-00-38.ndx', -'18-10-2023-00-38.pcm', -'18-10-2023-00-39.ndx', -'18-10-2023-00-39.pcm', -'18-10-2023-00-40.ndx', -'18-10-2023-00-40.pcm', -'18-10-2023-00-41.ndx', -'18-10-2023-00-41.pcm', -'18-10-2023-00-42.ndx', -'18-10-2023-00-42.pcm', -'18-10-2023-00-43.ndx', -'18-10-2023-00-43.pcm', -'18-10-2023-00-45.ndx', -'18-10-2023-00-45.pcm', -'18-10-2023-00-46.ndx', -'18-10-2023-00-46.pcm', -'18-10-2023-00-47.ndx', -'18-10-2023-00-47.pcm', -'18-10-2023-00-48.ndx', -'18-10-2023-00-48.pcm', -'18-10-2023-00-49.ndx', -'18-10-2023-00-49.pcm', -'18-10-2023-00-50.ndx', -'18-10-2023-00-50.pcm', -'18-10-2023-00-51.ndx', -'18-10-2023-00-51.pcm', -'18-10-2023-00-52.ndx', -'18-10-2023-00-52.pcm', -'18-10-2023-00-53.ndx', -'18-10-2023-00-53.pcm', -'18-10-2023-00-54.ndx', -'18-10-2023-00-54.pcm', -'18-10-2023-00-55.ndx', -'18-10-2023-00-55.pcm', -'18-10-2023-00-56.ndx', -'18-10-2023-00-56.pcm', -'18-10-2023-00-57.ndx', -'18-10-2023-00-57.pcm', -'18-10-2023-00-58.ndx', -'18-10-2023-00-58.pcm', -'18-10-2023-00-59.ndx', -'18-10-2023-00-59.pcm', -'18-10-2023-01-00.ndx', -'18-10-2023-01-00.pcm', -'18-10-2023-01-01.ndx', -'18-10-2023-01-01.pcm', -'18-10-2023-01-02.ndx', -'18-10-2023-01-02.pcm', -'18-10-2023-01-03.ndx', -'18-10-2023-01-03.pcm', -'18-10-2023-01-05.ndx', -'18-10-2023-01-05.pcm', -'18-10-2023-01-06.ndx', -'18-10-2023-01-06.pcm', -'18-10-2023-01-07.ndx', -'18-10-2023-01-07.pcm', -'18-10-2023-01-08.ndx', -'18-10-2023-01-08.pcm', -'18-10-2023-01-09.ndx', -'18-10-2023-01-09.pcm', -'18-10-2023-01-10.ndx', -'18-10-2023-01-10.pcm', -'18-10-2023-01-11.ndx', -'18-10-2023-01-11.pcm', -'18-10-2023-01-12.ndx', -'18-10-2023-01-12.pcm', -'18-10-2023-01-13.ndx', -'18-10-2023-01-13.pcm', -'18-10-2023-01-14.ndx', -'18-10-2023-01-14.pcm', -'18-10-2023-01-15.ndx', -'18-10-2023-01-15.pcm', -'18-10-2023-01-16.ndx', -'18-10-2023-01-16.pcm', -'18-10-2023-01-17.ndx', -'18-10-2023-01-17.pcm', -'18-10-2023-01-18.ndx', -'18-10-2023-01-18.pcm', -'18-10-2023-01-19.mjp', -'18-10-2023-01-19.ndx', -'18-10-2023-01-19.pcm', -'18-10-2023-11-04.ndx', -'18-10-2023-11-04.pcm', -'18-10-2023-11-05.avi.done', -'18-10-2023-11-05.gpg', -'18-10-2023-11-05.mp4.done', -'18-10-2023-11-10.avi.done', -'18-10-2023-11-10.gpg', -'18-10-2023-11-10.mp4.done', -'18-10-2023-11-15.avi.done', -'18-10-2023-11-15.gpg', -'18-10-2023-11-15.mp4.done', -'18-10-2023-11-20.avi.done', -'18-10-2023-11-20.gpg', -'18-10-2023-11-20.mp4.done', -'18-10-2023-11-25.avi.done', -'18-10-2023-11-25.gpg', -'18-10-2023-11-25.mp4.done', -'18-10-2023-11-30.avi.done', -'18-10-2023-11-30.gpg', -'18-10-2023-11-30.mp4.done', -'18-10-2023-11-35.avi.done', -'18-10-2023-11-35.gpg', -'18-10-2023-11-35.mp4.done', -'18-10-2023-11-40.mjp', -'18-10-2023-11-40.ndx', -'18-10-2023-11-40.pcm', -'18-10-2023-11-41.mjp', -'18-10-2023-11-41.ndx', -'18-10-2023-11-41.pcm', -'18-10-2023-11-42.mjp', -'18-10-2023-11-42.ndx', -'18-10-2023-11-42.pcm', -'18-10-2023-11-43.mjp', -'18-10-2023-11-43.ndx', -'18-10-2023-11-43.pcm', -'18-10-2023-11-45.avi.done', -'18-10-2023-11-45.gpg', -'18-10-2023-11-45.mp4.done', -'18-10-2023-11-46.ndx', -'18-10-2023-11-46.pcm', -'18-10-2023-11-47.ndx', -'18-10-2023-11-47.pcm', -'18-10-2023-11-50.avi.done', -'18-10-2023-11-50.gpg', -'18-10-2023-11-50.mp4.done', -'18-10-2023-11-55.avi.done', -'18-10-2023-11-55.gpg', -'18-10-2023-11-55.mp4.done', -'18-10-2023-12-00.avi.done', -'18-10-2023-12-00.gpg', -'18-10-2023-12-00.mp4.done', -'18-10-2023-12-05.avi.done', -'18-10-2023-12-05.gpg', -'18-10-2023-12-05.mp4.done', -'18-10-2023-12-10.avi.done', -'18-10-2023-12-10.gpg', 
-'18-10-2023-12-10.mp4.done',
-'18-10-2023-12-15.avi.done',
-'18-10-2023-12-15.gpg',
-'18-10-2023-12-15.mp4.done',
[... removed entries continue in five-minute steps ('.avi.done', '.gpg', '.mp4.done' per timestamp) through '18-10-2023-14-40' ...]
-'18-10-2023-14-41.ndx',
-'18-10-2023-14-41.pcm',
[... removed '.ndx'/'.pcm' pairs continue minute by minute (occasional minutes absent) through '18-10-2023-17-57', then ...]
-'18-10-2023-17-58.mjp',
-'18-10-2023-17-58.ndx',
-'18-10-2023-17-58.pcm',
-'18-10-2023-20-01.ndx',
-'18-10-2023-20-01.pcm',
[... removed '.ndx'/'.pcm' pairs continue minute by minute through '18-10-2023-20-41' ...]
-'18-10-2023-20-42.ndx',
-'18-10-2023-20-42.pcm',
[... removed '.ndx'/'.pcm' pairs continue minute by minute (occasional minutes absent) through '18-10-2023-21-43', then ...]
-'18-10-2023-21-44.mjp',
-'18-10-2023-21-44.ndx',
-'18-10-2023-21-44.pcm',
-'18-10-2023-00-00.ndx',
-'18-10-2023-00-00.pcm',
[... a second removed run for the same date: '.ndx'/'.pcm' pairs minute by minute (occasional minutes absent) through '18-10-2023-01-18', then ...]
-'18-10-2023-01-19.mjp',
-'18-10-2023-01-19.ndx',
-'18-10-2023-01-19.pcm',
-'18-10-2023-11-04.ndx',
-'18-10-2023-11-04.pcm',
-'18-10-2023-11-05.avi.done',
-'18-10-2023-11-05.gpg',
-'18-10-2023-11-05.mp4.done',
[... removed five-minute '.avi.done'/'.gpg'/'.mp4.done' groups continue through '18-10-2023-14-40', with '.mjp'/'.ndx'/'.pcm' entries interleaved around '18-10-2023-11-40' to '18-10-2023-11-47' ...]
-'18-10-2023-14-41.ndx',
-'18-10-2023-14-41.pcm',
[... removed '.ndx'/'.pcm' pairs continue minute by minute (occasional minutes absent) through '18-10-2023-17-57', then ...]
-'18-10-2023-17-58.mjp',
-'18-10-2023-17-58.ndx',
-'18-10-2023-17-58.pcm',
-'18-10-2023-20-01.ndx',
-'18-10-2023-20-01.pcm',
[... removed '.ndx'/'.pcm' pairs continue minute by minute through '18-10-2023-21-16' ...]
-'18-10-2023-21-17.ndx',
-'18-10-2023-21-17.pcm',
[... removed '.ndx'/'.pcm' pairs continue minute by minute through '18-10-2023-21-43', then ...]
-'18-10-2023-21-44.mjp',
-'18-10-2023-21-44.ndx',
-'18-10-2023-21-44.pcm',
+from datetime import datetime
+
+file_paths = [
+    "18-10-2023-00-00.ndx",
+    "18-10-2023-00-00.pcm",
+    "18-10-2023-00-01.ndx",
+    "18-10-2023-00-01.pcm",
+    "18-10-2023-00-02.ndx",
+    "18-10-2023-00-02.pcm",
+    "18-10-2023-00-03.ndx",
+    "18-10-2023-00-03.pcm",
+    "18-10-2023-00-05.ndx",
+    "18-10-2023-00-05.pcm",
[... the same file names are re-added double-quoted at four-space indentation: ".ndx"/".pcm" pairs minute by minute (occasional minutes absent) through "18-10-2023-00-40" ...]
+ "18-10-2023-00-41.ndx", + "18-10-2023-00-41.pcm", + "18-10-2023-00-42.ndx", + "18-10-2023-00-42.pcm", + "18-10-2023-00-43.ndx", + "18-10-2023-00-43.pcm", + "18-10-2023-00-45.ndx", + "18-10-2023-00-45.pcm", + "18-10-2023-00-46.ndx", + "18-10-2023-00-46.pcm", + "18-10-2023-00-47.ndx", + "18-10-2023-00-47.pcm", + "18-10-2023-00-48.ndx", + "18-10-2023-00-48.pcm", + "18-10-2023-00-49.ndx", + "18-10-2023-00-49.pcm", + "18-10-2023-00-50.ndx", + "18-10-2023-00-50.pcm", + "18-10-2023-00-51.ndx", + "18-10-2023-00-51.pcm", + "18-10-2023-00-52.ndx", + "18-10-2023-00-52.pcm", + "18-10-2023-00-53.ndx", + "18-10-2023-00-53.pcm", + "18-10-2023-00-54.ndx", + "18-10-2023-00-54.pcm", + "18-10-2023-00-55.ndx", + "18-10-2023-00-55.pcm", + "18-10-2023-00-56.ndx", + "18-10-2023-00-56.pcm", + "18-10-2023-00-57.ndx", + "18-10-2023-00-57.pcm", + "18-10-2023-00-58.ndx", + "18-10-2023-00-58.pcm", + "18-10-2023-00-59.ndx", + "18-10-2023-00-59.pcm", + "18-10-2023-01-00.ndx", + "18-10-2023-01-00.pcm", + "18-10-2023-01-01.ndx", + "18-10-2023-01-01.pcm", + "18-10-2023-01-02.ndx", + "18-10-2023-01-02.pcm", + "18-10-2023-01-03.ndx", + "18-10-2023-01-03.pcm", + "18-10-2023-01-05.ndx", + "18-10-2023-01-05.pcm", + "18-10-2023-01-06.ndx", + "18-10-2023-01-06.pcm", + "18-10-2023-01-07.ndx", + "18-10-2023-01-07.pcm", + "18-10-2023-01-08.ndx", + "18-10-2023-01-08.pcm", + "18-10-2023-01-09.ndx", + "18-10-2023-01-09.pcm", + "18-10-2023-01-10.ndx", + "18-10-2023-01-10.pcm", + "18-10-2023-01-11.ndx", + "18-10-2023-01-11.pcm", + "18-10-2023-01-12.ndx", + "18-10-2023-01-12.pcm", + "18-10-2023-01-13.ndx", + "18-10-2023-01-13.pcm", + "18-10-2023-01-14.ndx", + "18-10-2023-01-14.pcm", + "18-10-2023-01-15.ndx", + "18-10-2023-01-15.pcm", + "18-10-2023-01-16.ndx", + "18-10-2023-01-16.pcm", + "18-10-2023-01-17.ndx", + "18-10-2023-01-17.pcm", + "18-10-2023-01-18.ndx", + "18-10-2023-01-18.pcm", + "18-10-2023-01-19.mjp", + "18-10-2023-01-19.ndx", + "18-10-2023-01-19.pcm", + "18-10-2023-11-04.ndx", + "18-10-2023-11-04.pcm", + "18-10-2023-11-05.avi.done", + "18-10-2023-11-05.gpg", + "18-10-2023-11-05.mp4.done", + "18-10-2023-11-10.avi.done", + "18-10-2023-11-10.gpg", + "18-10-2023-11-10.mp4.done", + "18-10-2023-11-15.avi.done", + "18-10-2023-11-15.gpg", + "18-10-2023-11-15.mp4.done", + "18-10-2023-11-20.avi.done", + "18-10-2023-11-20.gpg", + "18-10-2023-11-20.mp4.done", + "18-10-2023-11-25.avi.done", + "18-10-2023-11-25.gpg", + "18-10-2023-11-25.mp4.done", + "18-10-2023-11-30.avi.done", + "18-10-2023-11-30.gpg", + "18-10-2023-11-30.mp4.done", + "18-10-2023-11-35.avi.done", + "18-10-2023-11-35.gpg", + "18-10-2023-11-35.mp4.done", + "18-10-2023-11-40.mjp", + "18-10-2023-11-40.ndx", + "18-10-2023-11-40.pcm", + "18-10-2023-11-41.mjp", + "18-10-2023-11-41.ndx", + "18-10-2023-11-41.pcm", + "18-10-2023-11-42.mjp", + "18-10-2023-11-42.ndx", + "18-10-2023-11-42.pcm", + "18-10-2023-11-43.mjp", + "18-10-2023-11-43.ndx", + "18-10-2023-11-43.pcm", + "18-10-2023-11-45.avi.done", + "18-10-2023-11-45.gpg", + "18-10-2023-11-45.mp4.done", + "18-10-2023-11-46.ndx", + "18-10-2023-11-46.pcm", + "18-10-2023-11-47.ndx", + "18-10-2023-11-47.pcm", + "18-10-2023-11-50.avi.done", + "18-10-2023-11-50.gpg", + "18-10-2023-11-50.mp4.done", + "18-10-2023-11-55.avi.done", + "18-10-2023-11-55.gpg", + "18-10-2023-11-55.mp4.done", + "18-10-2023-12-00.avi.done", + "18-10-2023-12-00.gpg", + "18-10-2023-12-00.mp4.done", + "18-10-2023-12-05.avi.done", + "18-10-2023-12-05.gpg", + "18-10-2023-12-05.mp4.done", + "18-10-2023-12-10.avi.done", + "18-10-2023-12-10.gpg", + 
"18-10-2023-12-10.mp4.done", + "18-10-2023-12-15.avi.done", + "18-10-2023-12-15.gpg", + "18-10-2023-12-15.mp4.done", + "18-10-2023-12-20.avi.done", + "18-10-2023-12-20.gpg", + "18-10-2023-12-20.mp4.done", + "18-10-2023-12-25.avi.done", + "18-10-2023-12-25.gpg", + "18-10-2023-12-25.mp4.done", + "18-10-2023-12-30.avi.done", + "18-10-2023-12-30.gpg", + "18-10-2023-12-30.mp4.done", + "18-10-2023-12-35.avi.done", + "18-10-2023-12-35.gpg", + "18-10-2023-12-35.mp4.done", + "18-10-2023-12-40.avi.done", + "18-10-2023-12-40.gpg", + "18-10-2023-12-40.mp4.done", + "18-10-2023-12-45.avi.done", + "18-10-2023-12-45.gpg", + "18-10-2023-12-45.mp4.done", + "18-10-2023-12-50.avi.done", + "18-10-2023-12-50.gpg", + "18-10-2023-12-50.mp4.done", + "18-10-2023-12-55.avi.done", + "18-10-2023-12-55.gpg", + "18-10-2023-12-55.mp4.done", + "18-10-2023-13-00.avi.done", + "18-10-2023-13-00.gpg", + "18-10-2023-13-00.mp4.done", + "18-10-2023-13-05.avi.done", + "18-10-2023-13-05.gpg", + "18-10-2023-13-05.mp4.done", + "18-10-2023-13-10.avi.done", + "18-10-2023-13-10.gpg", + "18-10-2023-13-10.mp4.done", + "18-10-2023-13-15.avi.done", + "18-10-2023-13-15.gpg", + "18-10-2023-13-15.mp4.done", + "18-10-2023-13-20.avi.done", + "18-10-2023-13-20.gpg", + "18-10-2023-13-20.mp4.done", + "18-10-2023-13-25.avi.done", + "18-10-2023-13-25.gpg", + "18-10-2023-13-25.mp4.done", + "18-10-2023-13-30.avi.done", + "18-10-2023-13-30.gpg", + "18-10-2023-13-30.mp4.done", + "18-10-2023-13-35.avi.done", + "18-10-2023-13-35.gpg", + "18-10-2023-13-35.mp4.done", + "18-10-2023-13-40.avi.done", + "18-10-2023-13-40.gpg", + "18-10-2023-13-40.mp4.done", + "18-10-2023-13-45.avi.done", + "18-10-2023-13-45.gpg", + "18-10-2023-13-45.mp4.done", + "18-10-2023-13-50.avi.done", + "18-10-2023-13-50.gpg", + "18-10-2023-13-50.mp4.done", + "18-10-2023-13-55.avi.done", + "18-10-2023-13-55.gpg", + "18-10-2023-13-55.mp4.done", + "18-10-2023-14-00.avi.done", + "18-10-2023-14-00.gpg", + "18-10-2023-14-00.mp4.done", + "18-10-2023-14-05.avi.done", + "18-10-2023-14-05.gpg", + "18-10-2023-14-05.mp4.done", + "18-10-2023-14-10.avi.done", + "18-10-2023-14-10.gpg", + "18-10-2023-14-10.mp4.done", + "18-10-2023-14-15.avi.done", + "18-10-2023-14-15.gpg", + "18-10-2023-14-15.mp4.done", + "18-10-2023-14-20.avi.done", + "18-10-2023-14-20.gpg", + "18-10-2023-14-20.mp4.done", + "18-10-2023-14-25.avi.done", + "18-10-2023-14-25.gpg", + "18-10-2023-14-25.mp4.done", + "18-10-2023-14-30.avi.done", + "18-10-2023-14-30.gpg", + "18-10-2023-14-30.mp4.done", + "18-10-2023-14-35.avi.done", + "18-10-2023-14-35.gpg", + "18-10-2023-14-35.mp4.done", + "18-10-2023-14-40.avi.done", + "18-10-2023-14-40.gpg", + "18-10-2023-14-40.mp4.done", + "18-10-2023-14-41.ndx", + "18-10-2023-14-41.pcm", + "18-10-2023-14-42.ndx", + "18-10-2023-14-42.pcm", + "18-10-2023-14-43.ndx", + "18-10-2023-14-43.pcm", + "18-10-2023-14-44.ndx", + "18-10-2023-14-44.pcm", + "18-10-2023-14-45.ndx", + "18-10-2023-14-45.pcm", + "18-10-2023-14-46.ndx", + "18-10-2023-14-46.pcm", + "18-10-2023-14-47.ndx", + "18-10-2023-14-47.pcm", + "18-10-2023-14-48.ndx", + "18-10-2023-14-48.pcm", + "18-10-2023-14-49.ndx", + "18-10-2023-14-49.pcm", + "18-10-2023-14-50.ndx", + "18-10-2023-14-50.pcm", + "18-10-2023-14-51.ndx", + "18-10-2023-14-51.pcm", + "18-10-2023-14-52.ndx", + "18-10-2023-14-52.pcm", + "18-10-2023-14-53.ndx", + "18-10-2023-14-53.pcm", + "18-10-2023-14-54.ndx", + "18-10-2023-14-54.pcm", + "18-10-2023-14-55.ndx", + "18-10-2023-14-55.pcm", + "18-10-2023-14-56.ndx", + "18-10-2023-14-56.pcm", + "18-10-2023-14-57.ndx", + "18-10-2023-14-57.pcm", + 
"18-10-2023-14-58.ndx", + "18-10-2023-14-58.pcm", + "18-10-2023-14-59.ndx", + "18-10-2023-14-59.pcm", + "18-10-2023-15-00.ndx", + "18-10-2023-15-00.pcm", + "18-10-2023-15-01.ndx", + "18-10-2023-15-01.pcm", + "18-10-2023-15-02.ndx", + "18-10-2023-15-02.pcm", + "18-10-2023-15-03.ndx", + "18-10-2023-15-03.pcm", + "18-10-2023-15-04.ndx", + "18-10-2023-15-04.pcm", + "18-10-2023-15-05.ndx", + "18-10-2023-15-05.pcm", + "18-10-2023-15-06.ndx", + "18-10-2023-15-06.pcm", + "18-10-2023-15-07.ndx", + "18-10-2023-15-07.pcm", + "18-10-2023-15-08.ndx", + "18-10-2023-15-08.pcm", + "18-10-2023-15-09.ndx", + "18-10-2023-15-09.pcm", + "18-10-2023-15-10.ndx", + "18-10-2023-15-10.pcm", + "18-10-2023-15-11.ndx", + "18-10-2023-15-11.pcm", + "18-10-2023-15-12.ndx", + "18-10-2023-15-12.pcm", + "18-10-2023-15-13.ndx", + "18-10-2023-15-13.pcm", + "18-10-2023-15-14.ndx", + "18-10-2023-15-14.pcm", + "18-10-2023-15-15.ndx", + "18-10-2023-15-15.pcm", + "18-10-2023-15-16.ndx", + "18-10-2023-15-16.pcm", + "18-10-2023-15-17.ndx", + "18-10-2023-15-17.pcm", + "18-10-2023-15-18.ndx", + "18-10-2023-15-18.pcm", + "18-10-2023-15-19.ndx", + "18-10-2023-15-19.pcm", + "18-10-2023-15-20.ndx", + "18-10-2023-15-20.pcm", + "18-10-2023-15-21.ndx", + "18-10-2023-15-21.pcm", + "18-10-2023-15-22.ndx", + "18-10-2023-15-22.pcm", + "18-10-2023-15-23.ndx", + "18-10-2023-15-23.pcm", + "18-10-2023-15-24.ndx", + "18-10-2023-15-24.pcm", + "18-10-2023-15-25.ndx", + "18-10-2023-15-25.pcm", + "18-10-2023-15-26.ndx", + "18-10-2023-15-26.pcm", + "18-10-2023-15-27.ndx", + "18-10-2023-15-27.pcm", + "18-10-2023-15-28.ndx", + "18-10-2023-15-28.pcm", + "18-10-2023-15-30.ndx", + "18-10-2023-15-30.pcm", + "18-10-2023-15-31.ndx", + "18-10-2023-15-31.pcm", + "18-10-2023-15-32.ndx", + "18-10-2023-15-32.pcm", + "18-10-2023-15-33.ndx", + "18-10-2023-15-33.pcm", + "18-10-2023-15-35.ndx", + "18-10-2023-15-35.pcm", + "18-10-2023-15-36.ndx", + "18-10-2023-15-36.pcm", + "18-10-2023-15-37.ndx", + "18-10-2023-15-37.pcm", + "18-10-2023-15-38.ndx", + "18-10-2023-15-38.pcm", + "18-10-2023-15-39.ndx", + "18-10-2023-15-39.pcm", + "18-10-2023-15-40.ndx", + "18-10-2023-15-40.pcm", + "18-10-2023-15-41.ndx", + "18-10-2023-15-41.pcm", + "18-10-2023-15-42.ndx", + "18-10-2023-15-42.pcm", + "18-10-2023-15-43.ndx", + "18-10-2023-15-43.pcm", + "18-10-2023-15-44.ndx", + "18-10-2023-15-44.pcm", + "18-10-2023-15-45.ndx", + "18-10-2023-15-45.pcm", + "18-10-2023-15-46.ndx", + "18-10-2023-15-46.pcm", + "18-10-2023-15-47.ndx", + "18-10-2023-15-47.pcm", + "18-10-2023-15-48.ndx", + "18-10-2023-15-48.pcm", + "18-10-2023-15-50.ndx", + "18-10-2023-15-50.pcm", + "18-10-2023-15-51.ndx", + "18-10-2023-15-51.pcm", + "18-10-2023-15-52.ndx", + "18-10-2023-15-52.pcm", + "18-10-2023-15-53.ndx", + "18-10-2023-15-53.pcm", + "18-10-2023-15-54.ndx", + "18-10-2023-15-54.pcm", + "18-10-2023-15-55.ndx", + "18-10-2023-15-55.pcm", + "18-10-2023-15-56.ndx", + "18-10-2023-15-56.pcm", + "18-10-2023-15-57.ndx", + "18-10-2023-15-57.pcm", + "18-10-2023-15-58.ndx", + "18-10-2023-15-58.pcm", + "18-10-2023-15-59.ndx", + "18-10-2023-15-59.pcm", + "18-10-2023-16-00.ndx", + "18-10-2023-16-00.pcm", + "18-10-2023-16-01.ndx", + "18-10-2023-16-01.pcm", + "18-10-2023-16-02.ndx", + "18-10-2023-16-02.pcm", + "18-10-2023-16-03.ndx", + "18-10-2023-16-03.pcm", + "18-10-2023-16-04.ndx", + "18-10-2023-16-04.pcm", + "18-10-2023-16-05.ndx", + "18-10-2023-16-05.pcm", + "18-10-2023-16-06.ndx", + "18-10-2023-16-06.pcm", + "18-10-2023-16-07.ndx", + "18-10-2023-16-07.pcm", + "18-10-2023-16-08.ndx", + "18-10-2023-16-08.pcm", + 
"18-10-2023-16-09.ndx", + "18-10-2023-16-09.pcm", + "18-10-2023-16-10.ndx", + "18-10-2023-16-10.pcm", + "18-10-2023-16-11.ndx", + "18-10-2023-16-11.pcm", + "18-10-2023-16-12.ndx", + "18-10-2023-16-12.pcm", + "18-10-2023-16-13.ndx", + "18-10-2023-16-13.pcm", + "18-10-2023-16-14.ndx", + "18-10-2023-16-14.pcm", + "18-10-2023-16-15.ndx", + "18-10-2023-16-15.pcm", + "18-10-2023-16-16.ndx", + "18-10-2023-16-16.pcm", + "18-10-2023-16-17.ndx", + "18-10-2023-16-17.pcm", + "18-10-2023-16-18.ndx", + "18-10-2023-16-18.pcm", + "18-10-2023-16-19.ndx", + "18-10-2023-16-19.pcm", + "18-10-2023-16-20.ndx", + "18-10-2023-16-20.pcm", + "18-10-2023-16-21.ndx", + "18-10-2023-16-21.pcm", + "18-10-2023-16-22.ndx", + "18-10-2023-16-22.pcm", + "18-10-2023-16-23.ndx", + "18-10-2023-16-23.pcm", + "18-10-2023-16-24.ndx", + "18-10-2023-16-24.pcm", + "18-10-2023-16-25.ndx", + "18-10-2023-16-25.pcm", + "18-10-2023-16-26.ndx", + "18-10-2023-16-26.pcm", + "18-10-2023-16-27.ndx", + "18-10-2023-16-27.pcm", + "18-10-2023-16-28.ndx", + "18-10-2023-16-28.pcm", + "18-10-2023-16-29.ndx", + "18-10-2023-16-29.pcm", + "18-10-2023-16-30.ndx", + "18-10-2023-16-30.pcm", + "18-10-2023-16-31.ndx", + "18-10-2023-16-31.pcm", + "18-10-2023-16-32.ndx", + "18-10-2023-16-32.pcm", + "18-10-2023-16-33.ndx", + "18-10-2023-16-33.pcm", + "18-10-2023-16-34.ndx", + "18-10-2023-16-34.pcm", + "18-10-2023-16-35.ndx", + "18-10-2023-16-35.pcm", + "18-10-2023-16-36.ndx", + "18-10-2023-16-36.pcm", + "18-10-2023-16-37.ndx", + "18-10-2023-16-37.pcm", + "18-10-2023-16-38.ndx", + "18-10-2023-16-38.pcm", + "18-10-2023-16-39.ndx", + "18-10-2023-16-39.pcm", + "18-10-2023-16-40.ndx", + "18-10-2023-16-40.pcm", + "18-10-2023-16-41.ndx", + "18-10-2023-16-41.pcm", + "18-10-2023-16-42.ndx", + "18-10-2023-16-42.pcm", + "18-10-2023-16-43.ndx", + "18-10-2023-16-43.pcm", + "18-10-2023-16-45.ndx", + "18-10-2023-16-45.pcm", + "18-10-2023-16-46.ndx", + "18-10-2023-16-46.pcm", + "18-10-2023-16-47.ndx", + "18-10-2023-16-47.pcm", + "18-10-2023-16-48.ndx", + "18-10-2023-16-48.pcm", + "18-10-2023-16-49.ndx", + "18-10-2023-16-49.pcm", + "18-10-2023-16-50.ndx", + "18-10-2023-16-50.pcm", + "18-10-2023-16-51.ndx", + "18-10-2023-16-51.pcm", + "18-10-2023-16-52.ndx", + "18-10-2023-16-52.pcm", + "18-10-2023-16-53.ndx", + "18-10-2023-16-53.pcm", + "18-10-2023-16-55.ndx", + "18-10-2023-16-55.pcm", + "18-10-2023-16-56.ndx", + "18-10-2023-16-56.pcm", + "18-10-2023-16-57.ndx", + "18-10-2023-16-57.pcm", + "18-10-2023-16-58.ndx", + "18-10-2023-16-58.pcm", + "18-10-2023-16-59.ndx", + "18-10-2023-16-59.pcm", + "18-10-2023-17-00.ndx", + "18-10-2023-17-00.pcm", + "18-10-2023-17-01.ndx", + "18-10-2023-17-01.pcm", + "18-10-2023-17-02.ndx", + "18-10-2023-17-02.pcm", + "18-10-2023-17-03.ndx", + "18-10-2023-17-03.pcm", + "18-10-2023-17-04.ndx", + "18-10-2023-17-04.pcm", + "18-10-2023-17-05.ndx", + "18-10-2023-17-05.pcm", + "18-10-2023-17-06.ndx", + "18-10-2023-17-06.pcm", + "18-10-2023-17-07.ndx", + "18-10-2023-17-07.pcm", + "18-10-2023-17-08.ndx", + "18-10-2023-17-08.pcm", + "18-10-2023-17-09.ndx", + "18-10-2023-17-09.pcm", + "18-10-2023-17-10.ndx", + "18-10-2023-17-10.pcm", + "18-10-2023-17-11.ndx", + "18-10-2023-17-11.pcm", + "18-10-2023-17-12.ndx", + "18-10-2023-17-12.pcm", + "18-10-2023-17-13.ndx", + "18-10-2023-17-13.pcm", + "18-10-2023-17-14.ndx", + "18-10-2023-17-14.pcm", + "18-10-2023-17-15.ndx", + "18-10-2023-17-15.pcm", + "18-10-2023-17-16.ndx", + "18-10-2023-17-16.pcm", + "18-10-2023-17-17.ndx", + "18-10-2023-17-17.pcm", + "18-10-2023-17-18.ndx", + "18-10-2023-17-18.pcm", + 
"18-10-2023-17-19.ndx", + "18-10-2023-17-19.pcm", + "18-10-2023-17-20.ndx", + "18-10-2023-17-20.pcm", + "18-10-2023-17-21.ndx", + "18-10-2023-17-21.pcm", + "18-10-2023-17-22.ndx", + "18-10-2023-17-22.pcm", + "18-10-2023-17-23.ndx", + "18-10-2023-17-23.pcm", + "18-10-2023-17-25.ndx", + "18-10-2023-17-25.pcm", + "18-10-2023-17-26.ndx", + "18-10-2023-17-26.pcm", + "18-10-2023-17-27.ndx", + "18-10-2023-17-27.pcm", + "18-10-2023-17-28.ndx", + "18-10-2023-17-28.pcm", + "18-10-2023-17-29.ndx", + "18-10-2023-17-29.pcm", + "18-10-2023-17-30.ndx", + "18-10-2023-17-30.pcm", + "18-10-2023-17-31.ndx", + "18-10-2023-17-31.pcm", + "18-10-2023-17-32.ndx", + "18-10-2023-17-32.pcm", + "18-10-2023-17-33.ndx", + "18-10-2023-17-33.pcm", + "18-10-2023-17-34.ndx", + "18-10-2023-17-34.pcm", + "18-10-2023-17-35.ndx", + "18-10-2023-17-35.pcm", + "18-10-2023-17-36.ndx", + "18-10-2023-17-36.pcm", + "18-10-2023-17-37.ndx", + "18-10-2023-17-37.pcm", + "18-10-2023-17-38.ndx", + "18-10-2023-17-38.pcm", + "18-10-2023-17-39.ndx", + "18-10-2023-17-39.pcm", + "18-10-2023-17-40.ndx", + "18-10-2023-17-40.pcm", + "18-10-2023-17-41.ndx", + "18-10-2023-17-41.pcm", + "18-10-2023-17-42.ndx", + "18-10-2023-17-42.pcm", + "18-10-2023-17-43.ndx", + "18-10-2023-17-43.pcm", + "18-10-2023-17-44.ndx", + "18-10-2023-17-44.pcm", + "18-10-2023-17-45.ndx", + "18-10-2023-17-45.pcm", + "18-10-2023-17-46.ndx", + "18-10-2023-17-46.pcm", + "18-10-2023-17-47.ndx", + "18-10-2023-17-47.pcm", + "18-10-2023-17-48.ndx", + "18-10-2023-17-48.pcm", + "18-10-2023-17-50.ndx", + "18-10-2023-17-50.pcm", + "18-10-2023-17-51.ndx", + "18-10-2023-17-51.pcm", + "18-10-2023-17-52.ndx", + "18-10-2023-17-52.pcm", + "18-10-2023-17-53.ndx", + "18-10-2023-17-53.pcm", + "18-10-2023-17-54.ndx", + "18-10-2023-17-54.pcm", + "18-10-2023-17-55.ndx", + "18-10-2023-17-55.pcm", + "18-10-2023-17-56.ndx", + "18-10-2023-17-56.pcm", + "18-10-2023-17-57.ndx", + "18-10-2023-17-57.pcm", + "18-10-2023-17-58.mjp", + "18-10-2023-17-58.ndx", + "18-10-2023-17-58.pcm", + "18-10-2023-20-01.ndx", + "18-10-2023-20-01.pcm", + "18-10-2023-20-02.ndx", + "18-10-2023-20-02.pcm", + "18-10-2023-20-03.ndx", + "18-10-2023-20-03.pcm", + "18-10-2023-20-04.ndx", + "18-10-2023-20-04.pcm", + "18-10-2023-20-05.ndx", + "18-10-2023-20-05.pcm", + "18-10-2023-20-06.ndx", + "18-10-2023-20-06.pcm", + "18-10-2023-20-07.ndx", + "18-10-2023-20-07.pcm", + "18-10-2023-20-08.ndx", + "18-10-2023-20-08.pcm", + "18-10-2023-20-09.ndx", + "18-10-2023-20-09.pcm", + "18-10-2023-20-10.ndx", + "18-10-2023-20-10.pcm", + "18-10-2023-20-11.ndx", + "18-10-2023-20-11.pcm", + "18-10-2023-20-12.ndx", + "18-10-2023-20-12.pcm", + "18-10-2023-20-13.ndx", + "18-10-2023-20-13.pcm", + "18-10-2023-20-14.ndx", + "18-10-2023-20-14.pcm", + "18-10-2023-20-15.ndx", + "18-10-2023-20-15.pcm", + "18-10-2023-20-16.ndx", + "18-10-2023-20-16.pcm", + "18-10-2023-20-17.ndx", + "18-10-2023-20-17.pcm", + "18-10-2023-20-18.ndx", + "18-10-2023-20-18.pcm", + "18-10-2023-20-19.ndx", + "18-10-2023-20-19.pcm", + "18-10-2023-20-20.ndx", + "18-10-2023-20-20.pcm", + "18-10-2023-20-21.ndx", + "18-10-2023-20-21.pcm", + "18-10-2023-20-22.ndx", + "18-10-2023-20-22.pcm", + "18-10-2023-20-23.ndx", + "18-10-2023-20-23.pcm", + "18-10-2023-20-24.ndx", + "18-10-2023-20-24.pcm", + "18-10-2023-20-25.ndx", + "18-10-2023-20-25.pcm", + "18-10-2023-20-26.ndx", + "18-10-2023-20-26.pcm", + "18-10-2023-20-27.ndx", + "18-10-2023-20-27.pcm", + "18-10-2023-20-28.ndx", + "18-10-2023-20-28.pcm", + "18-10-2023-20-29.ndx", + "18-10-2023-20-29.pcm", + "18-10-2023-20-30.ndx", + 
"18-10-2023-20-30.pcm", + "18-10-2023-20-31.ndx", + "18-10-2023-20-31.pcm", + "18-10-2023-20-32.ndx", + "18-10-2023-20-32.pcm", + "18-10-2023-20-33.ndx", + "18-10-2023-20-33.pcm", + "18-10-2023-20-34.ndx", + "18-10-2023-20-34.pcm", + "18-10-2023-20-35.ndx", + "18-10-2023-20-35.pcm", + "18-10-2023-20-36.ndx", + "18-10-2023-20-36.pcm", + "18-10-2023-20-37.ndx", + "18-10-2023-20-37.pcm", + "18-10-2023-20-38.ndx", + "18-10-2023-20-38.pcm", + "18-10-2023-20-39.ndx", + "18-10-2023-20-39.pcm", + "18-10-2023-20-40.ndx", + "18-10-2023-20-40.pcm", + "18-10-2023-20-41.ndx", + "18-10-2023-20-41.pcm", + "18-10-2023-20-42.ndx", + "18-10-2023-20-42.pcm", + "18-10-2023-20-43.ndx", + "18-10-2023-20-43.pcm", + "18-10-2023-20-44.ndx", + "18-10-2023-20-44.pcm", + "18-10-2023-20-45.ndx", + "18-10-2023-20-45.pcm", + "18-10-2023-20-46.ndx", + "18-10-2023-20-46.pcm", + "18-10-2023-20-47.ndx", + "18-10-2023-20-47.pcm", + "18-10-2023-20-48.ndx", + "18-10-2023-20-48.pcm", + "18-10-2023-20-49.ndx", + "18-10-2023-20-49.pcm", + "18-10-2023-20-50.ndx", + "18-10-2023-20-50.pcm", + "18-10-2023-20-51.ndx", + "18-10-2023-20-51.pcm", + "18-10-2023-20-52.ndx", + "18-10-2023-20-52.pcm", + "18-10-2023-20-53.ndx", + "18-10-2023-20-53.pcm", + "18-10-2023-20-54.ndx", + "18-10-2023-20-54.pcm", + "18-10-2023-20-55.ndx", + "18-10-2023-20-55.pcm", + "18-10-2023-20-56.ndx", + "18-10-2023-20-56.pcm", + "18-10-2023-20-57.ndx", + "18-10-2023-20-57.pcm", + "18-10-2023-20-58.ndx", + "18-10-2023-20-58.pcm", + "18-10-2023-20-59.ndx", + "18-10-2023-20-59.pcm", + "18-10-2023-21-00.ndx", + "18-10-2023-21-00.pcm", + "18-10-2023-21-01.ndx", + "18-10-2023-21-01.pcm", + "18-10-2023-21-02.ndx", + "18-10-2023-21-02.pcm", + "18-10-2023-21-03.ndx", + "18-10-2023-21-03.pcm", + "18-10-2023-21-04.ndx", + "18-10-2023-21-04.pcm", + "18-10-2023-21-05.ndx", + "18-10-2023-21-05.pcm", + "18-10-2023-21-06.ndx", + "18-10-2023-21-06.pcm", + "18-10-2023-21-07.ndx", + "18-10-2023-21-07.pcm", + "18-10-2023-21-08.ndx", + "18-10-2023-21-08.pcm", + "18-10-2023-21-10.ndx", + "18-10-2023-21-10.pcm", + "18-10-2023-21-11.ndx", + "18-10-2023-21-11.pcm", + "18-10-2023-21-12.ndx", + "18-10-2023-21-12.pcm", + "18-10-2023-21-13.ndx", + "18-10-2023-21-13.pcm", + "18-10-2023-21-14.ndx", + "18-10-2023-21-14.pcm", + "18-10-2023-21-15.ndx", + "18-10-2023-21-15.pcm", + "18-10-2023-21-16.ndx", + "18-10-2023-21-16.pcm", + "18-10-2023-21-17.ndx", + "18-10-2023-21-17.pcm", + "18-10-2023-21-18.ndx", + "18-10-2023-21-18.pcm", + "18-10-2023-21-19.ndx", + "18-10-2023-21-19.pcm", + "18-10-2023-21-20.ndx", + "18-10-2023-21-20.pcm", + "18-10-2023-21-21.ndx", + "18-10-2023-21-21.pcm", + "18-10-2023-21-22.ndx", + "18-10-2023-21-22.pcm", + "18-10-2023-21-23.ndx", + "18-10-2023-21-23.pcm", + "18-10-2023-21-24.ndx", + "18-10-2023-21-24.pcm", + "18-10-2023-21-25.ndx", + "18-10-2023-21-25.pcm", + "18-10-2023-21-26.ndx", + "18-10-2023-21-26.pcm", + "18-10-2023-21-27.ndx", + "18-10-2023-21-27.pcm", + "18-10-2023-21-28.ndx", + "18-10-2023-21-28.pcm", + "18-10-2023-21-30.ndx", + "18-10-2023-21-30.pcm", + "18-10-2023-21-31.ndx", + "18-10-2023-21-31.pcm", + "18-10-2023-21-32.ndx", + "18-10-2023-21-32.pcm", + "18-10-2023-21-33.ndx", + "18-10-2023-21-33.pcm", + "18-10-2023-21-34.ndx", + "18-10-2023-21-34.pcm", + "18-10-2023-21-35.ndx", + "18-10-2023-21-35.pcm", + "18-10-2023-21-36.ndx", + "18-10-2023-21-36.pcm", + "18-10-2023-21-37.ndx", + "18-10-2023-21-37.pcm", + "18-10-2023-21-38.ndx", + "18-10-2023-21-38.pcm", + "18-10-2023-21-39.ndx", + "18-10-2023-21-39.pcm", + "18-10-2023-21-40.ndx", + 
"18-10-2023-21-40.pcm", + "18-10-2023-21-41.ndx", + "18-10-2023-21-41.pcm", + "18-10-2023-21-42.ndx", + "18-10-2023-21-42.pcm", + "18-10-2023-21-43.ndx", + "18-10-2023-21-43.pcm", + "18-10-2023-21-44.mjp", + "18-10-2023-21-44.ndx", + "18-10-2023-21-44.pcm", + "18-10-2023-00-00.ndx", + "18-10-2023-00-00.pcm", + "18-10-2023-00-01.ndx", + "18-10-2023-00-01.pcm", + "18-10-2023-00-02.ndx", + "18-10-2023-00-02.pcm", + "18-10-2023-00-03.ndx", + "18-10-2023-00-03.pcm", + "18-10-2023-00-04.ndx", + "18-10-2023-00-04.pcm", + "18-10-2023-00-05.ndx", + "18-10-2023-00-05.pcm", + "18-10-2023-00-06.ndx", + "18-10-2023-00-06.pcm", + "18-10-2023-00-07.ndx", + "18-10-2023-00-07.pcm", + "18-10-2023-00-08.ndx", + "18-10-2023-00-08.pcm", + "18-10-2023-00-09.ndx", + "18-10-2023-00-09.pcm", + "18-10-2023-00-10.ndx", + "18-10-2023-00-10.pcm", + "18-10-2023-00-11.ndx", + "18-10-2023-00-11.pcm", + "18-10-2023-00-12.ndx", + "18-10-2023-00-12.pcm", + "18-10-2023-00-13.ndx", + "18-10-2023-00-13.pcm", + "18-10-2023-00-14.ndx", + "18-10-2023-00-14.pcm", + "18-10-2023-00-15.ndx", + "18-10-2023-00-15.pcm", + "18-10-2023-00-16.ndx", + "18-10-2023-00-16.pcm", + "18-10-2023-00-17.ndx", + "18-10-2023-00-17.pcm", + "18-10-2023-00-18.ndx", + "18-10-2023-00-18.pcm", + "18-10-2023-00-19.ndx", + "18-10-2023-00-19.pcm", + "18-10-2023-00-20.ndx", + "18-10-2023-00-20.pcm", + "18-10-2023-00-21.ndx", + "18-10-2023-00-21.pcm", + "18-10-2023-00-22.ndx", + "18-10-2023-00-22.pcm", + "18-10-2023-00-23.ndx", + "18-10-2023-00-23.pcm", + "18-10-2023-00-24.ndx", + "18-10-2023-00-24.pcm", + "18-10-2023-00-25.ndx", + "18-10-2023-00-25.pcm", + "18-10-2023-00-26.ndx", + "18-10-2023-00-26.pcm", + "18-10-2023-00-27.ndx", + "18-10-2023-00-27.pcm", + "18-10-2023-00-28.ndx", + "18-10-2023-00-28.pcm", + "18-10-2023-00-29.ndx", + "18-10-2023-00-29.pcm", + "18-10-2023-00-30.ndx", + "18-10-2023-00-30.pcm", + "18-10-2023-00-31.ndx", + "18-10-2023-00-31.pcm", + "18-10-2023-00-32.ndx", + "18-10-2023-00-32.pcm", + "18-10-2023-00-33.ndx", + "18-10-2023-00-33.pcm", + "18-10-2023-00-34.ndx", + "18-10-2023-00-34.pcm", + "18-10-2023-00-35.ndx", + "18-10-2023-00-35.pcm", + "18-10-2023-00-36.ndx", + "18-10-2023-00-36.pcm", + "18-10-2023-00-37.ndx", + "18-10-2023-00-37.pcm", + "18-10-2023-00-38.ndx", + "18-10-2023-00-38.pcm", + "18-10-2023-00-39.ndx", + "18-10-2023-00-39.pcm", + "18-10-2023-00-40.ndx", + "18-10-2023-00-40.pcm", + "18-10-2023-00-41.ndx", + "18-10-2023-00-41.pcm", + "18-10-2023-00-42.ndx", + "18-10-2023-00-42.pcm", + "18-10-2023-00-43.ndx", + "18-10-2023-00-43.pcm", + "18-10-2023-00-44.ndx", + "18-10-2023-00-44.pcm", + "18-10-2023-00-45.ndx", + "18-10-2023-00-45.pcm", + "18-10-2023-00-46.ndx", + "18-10-2023-00-46.pcm", + "18-10-2023-00-47.ndx", + "18-10-2023-00-47.pcm", + "18-10-2023-00-48.ndx", + "18-10-2023-00-48.pcm", + "18-10-2023-00-49.ndx", + "18-10-2023-00-49.pcm", + "18-10-2023-00-50.ndx", + "18-10-2023-00-50.pcm", + "18-10-2023-00-51.ndx", + "18-10-2023-00-51.pcm", + "18-10-2023-00-52.ndx", + "18-10-2023-00-52.pcm", + "18-10-2023-00-53.ndx", + "18-10-2023-00-53.pcm", + "18-10-2023-00-54.ndx", + "18-10-2023-00-54.pcm", + "18-10-2023-00-55.ndx", + "18-10-2023-00-55.pcm", + "18-10-2023-00-56.ndx", + "18-10-2023-00-56.pcm", + "18-10-2023-00-57.ndx", + "18-10-2023-00-57.pcm", + "18-10-2023-00-58.ndx", + "18-10-2023-00-58.pcm", + "18-10-2023-01-00.ndx", + "18-10-2023-01-00.pcm", + "18-10-2023-01-01.ndx", + "18-10-2023-01-01.pcm", + "18-10-2023-01-02.ndx", + "18-10-2023-01-02.pcm", + "18-10-2023-01-03.ndx", + "18-10-2023-01-03.pcm", + 
"18-10-2023-01-04.ndx", + "18-10-2023-01-04.pcm", + "18-10-2023-01-05.ndx", + "18-10-2023-01-05.pcm", + "18-10-2023-01-06.ndx", + "18-10-2023-01-06.pcm", + "18-10-2023-01-07.ndx", + "18-10-2023-01-07.pcm", + "18-10-2023-01-08.ndx", + "18-10-2023-01-08.pcm", + "18-10-2023-01-09.ndx", + "18-10-2023-01-09.pcm", + "18-10-2023-01-10.ndx", + "18-10-2023-01-10.pcm", + "18-10-2023-01-11.ndx", + "18-10-2023-01-11.pcm", + "18-10-2023-01-12.ndx", + "18-10-2023-01-12.pcm", + "18-10-2023-01-13.ndx", + "18-10-2023-01-13.pcm", + "18-10-2023-01-14.ndx", + "18-10-2023-01-14.pcm", + "18-10-2023-01-15.ndx", + "18-10-2023-01-15.pcm", + "18-10-2023-01-16.ndx", + "18-10-2023-01-16.pcm", + "18-10-2023-01-17.ndx", + "18-10-2023-01-17.pcm", + "18-10-2023-01-18.ndx", + "18-10-2023-01-18.pcm", + "18-10-2023-01-19.mjp", + "18-10-2023-01-19.ndx", + "18-10-2023-01-19.pcm", + "18-10-2023-11-04.ndx", + "18-10-2023-11-04.pcm", + "18-10-2023-11-05.avi.done", + "18-10-2023-11-05.gpg", + "18-10-2023-11-05.mp4.done", + "18-10-2023-11-10.avi.done", + "18-10-2023-11-10.gpg", + "18-10-2023-11-10.mp4.done", + "18-10-2023-11-15.avi.done", + "18-10-2023-11-15.gpg", + "18-10-2023-11-15.mp4.done", + "18-10-2023-11-20.avi.done", + "18-10-2023-11-20.gpg", + "18-10-2023-11-20.mp4.done", + "18-10-2023-11-25.avi.done", + "18-10-2023-11-25.gpg", + "18-10-2023-11-25.mp4.done", + "18-10-2023-11-30.avi.done", + "18-10-2023-11-30.gpg", + "18-10-2023-11-30.mp4.done", + "18-10-2023-11-35.avi.done", + "18-10-2023-11-35.gpg", + "18-10-2023-11-35.mp4.done", + "18-10-2023-11-40.mjp", + "18-10-2023-11-40.ndx", + "18-10-2023-11-40.pcm", + "18-10-2023-11-41.mjp", + "18-10-2023-11-41.ndx", + "18-10-2023-11-41.pcm", + "18-10-2023-11-42.mjp", + "18-10-2023-11-42.ndx", + "18-10-2023-11-42.pcm", + "18-10-2023-11-43.mjp", + "18-10-2023-11-43.ndx", + "18-10-2023-11-43.pcm", + "18-10-2023-11-45.avi.done", + "18-10-2023-11-45.gpg", + "18-10-2023-11-45.mp4.done", + "18-10-2023-11-46.ndx", + "18-10-2023-11-46.pcm", + "18-10-2023-11-47.ndx", + "18-10-2023-11-47.pcm", + "18-10-2023-11-50.avi.done", + "18-10-2023-11-50.gpg", + "18-10-2023-11-50.mp4.done", + "18-10-2023-11-55.avi.done", + "18-10-2023-11-55.gpg", + "18-10-2023-11-55.mp4.done", + "18-10-2023-12-00.avi.done", + "18-10-2023-12-00.gpg", + "18-10-2023-12-00.mp4.done", + "18-10-2023-12-05.avi.done", + "18-10-2023-12-05.gpg", + "18-10-2023-12-05.mp4.done", + "18-10-2023-12-10.avi.done", + "18-10-2023-12-10.gpg", + "18-10-2023-12-10.mp4.done", + "18-10-2023-12-15.avi.done", + "18-10-2023-12-15.gpg", + "18-10-2023-12-15.mp4.done", + "18-10-2023-12-20.avi.done", + "18-10-2023-12-20.gpg", + "18-10-2023-12-20.mp4.done", + "18-10-2023-12-25.avi.done", + "18-10-2023-12-25.gpg", + "18-10-2023-12-25.mp4.done", + "18-10-2023-12-30.avi.done", + "18-10-2023-12-30.gpg", + "18-10-2023-12-30.mp4.done", + "18-10-2023-12-35.avi.done", + "18-10-2023-12-35.gpg", + "18-10-2023-12-35.mp4.done", + "18-10-2023-12-40.avi.done", + "18-10-2023-12-40.gpg", + "18-10-2023-12-40.mp4.done", + "18-10-2023-12-45.avi.done", + "18-10-2023-12-45.gpg", + "18-10-2023-12-45.mp4.done", + "18-10-2023-12-50.avi.done", + "18-10-2023-12-50.gpg", + "18-10-2023-12-50.mp4.done", + "18-10-2023-12-55.avi.done", + "18-10-2023-12-55.gpg", + "18-10-2023-12-55.mp4.done", + "18-10-2023-13-00.avi.done", + "18-10-2023-13-00.gpg", + "18-10-2023-13-00.mp4.done", + "18-10-2023-13-05.avi.done", + "18-10-2023-13-05.gpg", + "18-10-2023-13-05.mp4.done", + "18-10-2023-13-10.avi.done", + "18-10-2023-13-10.gpg", + "18-10-2023-13-10.mp4.done", + "18-10-2023-13-15.avi.done", 
+ "18-10-2023-13-15.gpg", + "18-10-2023-13-15.mp4.done", + "18-10-2023-13-20.avi.done", + "18-10-2023-13-20.gpg", + "18-10-2023-13-20.mp4.done", + "18-10-2023-13-25.avi.done", + "18-10-2023-13-25.gpg", + "18-10-2023-13-25.mp4.done", + "18-10-2023-13-30.avi.done", + "18-10-2023-13-30.gpg", + "18-10-2023-13-30.mp4.done", + "18-10-2023-13-35.avi.done", + "18-10-2023-13-35.gpg", + "18-10-2023-13-35.mp4.done", + "18-10-2023-13-40.avi.done", + "18-10-2023-13-40.gpg", + "18-10-2023-13-40.mp4.done", + "18-10-2023-13-45.avi.done", + "18-10-2023-13-45.gpg", + "18-10-2023-13-45.mp4.done", + "18-10-2023-13-50.avi.done", + "18-10-2023-13-50.gpg", + "18-10-2023-13-50.mp4.done", + "18-10-2023-13-55.avi.done", + "18-10-2023-13-55.gpg", + "18-10-2023-13-55.mp4.done", + "18-10-2023-14-00.avi.done", + "18-10-2023-14-00.gpg", + "18-10-2023-14-00.mp4.done", + "18-10-2023-14-05.avi.done", + "18-10-2023-14-05.gpg", + "18-10-2023-14-05.mp4.done", + "18-10-2023-14-10.avi.done", + "18-10-2023-14-10.gpg", + "18-10-2023-14-10.mp4.done", + "18-10-2023-14-15.avi.done", + "18-10-2023-14-15.gpg", + "18-10-2023-14-15.mp4.done", + "18-10-2023-14-20.avi.done", + "18-10-2023-14-20.gpg", + "18-10-2023-14-20.mp4.done", + "18-10-2023-14-25.avi.done", + "18-10-2023-14-25.gpg", + "18-10-2023-14-25.mp4.done", + "18-10-2023-14-30.avi.done", + "18-10-2023-14-30.gpg", + "18-10-2023-14-30.mp4.done", + "18-10-2023-14-35.avi.done", + "18-10-2023-14-35.gpg", + "18-10-2023-14-35.mp4.done", + "18-10-2023-14-40.avi.done", + "18-10-2023-14-40.gpg", + "18-10-2023-14-40.mp4.done", + "18-10-2023-14-41.ndx", + "18-10-2023-14-41.pcm", + "18-10-2023-14-42.ndx", + "18-10-2023-14-42.pcm", + "18-10-2023-14-43.ndx", + "18-10-2023-14-43.pcm", + "18-10-2023-14-44.ndx", + "18-10-2023-14-44.pcm", + "18-10-2023-14-45.ndx", + "18-10-2023-14-45.pcm", + "18-10-2023-14-46.ndx", + "18-10-2023-14-46.pcm", + "18-10-2023-14-47.ndx", + "18-10-2023-14-47.pcm", + "18-10-2023-14-48.ndx", + "18-10-2023-14-48.pcm", + "18-10-2023-14-49.ndx", + "18-10-2023-14-49.pcm", + "18-10-2023-14-50.ndx", + "18-10-2023-14-50.pcm", + "18-10-2023-14-51.ndx", + "18-10-2023-14-51.pcm", + "18-10-2023-14-52.ndx", + "18-10-2023-14-52.pcm", + "18-10-2023-14-53.ndx", + "18-10-2023-14-53.pcm", + "18-10-2023-14-54.ndx", + "18-10-2023-14-54.pcm", + "18-10-2023-14-55.ndx", + "18-10-2023-14-55.pcm", + "18-10-2023-14-56.ndx", + "18-10-2023-14-56.pcm", + "18-10-2023-14-57.ndx", + "18-10-2023-14-57.pcm", + "18-10-2023-14-58.ndx", + "18-10-2023-14-58.pcm", + "18-10-2023-14-59.ndx", + "18-10-2023-14-59.pcm", + "18-10-2023-15-00.ndx", + "18-10-2023-15-00.pcm", + "18-10-2023-15-01.ndx", + "18-10-2023-15-01.pcm", + "18-10-2023-15-02.ndx", + "18-10-2023-15-02.pcm", + "18-10-2023-15-03.ndx", + "18-10-2023-15-03.pcm", + "18-10-2023-15-04.ndx", + "18-10-2023-15-04.pcm", + "18-10-2023-15-05.ndx", + "18-10-2023-15-05.pcm", + "18-10-2023-15-06.ndx", + "18-10-2023-15-06.pcm", + "18-10-2023-15-07.ndx", + "18-10-2023-15-07.pcm", + "18-10-2023-15-08.ndx", + "18-10-2023-15-08.pcm", + "18-10-2023-15-09.ndx", + "18-10-2023-15-09.pcm", + "18-10-2023-15-10.ndx", + "18-10-2023-15-10.pcm", + "18-10-2023-15-11.ndx", + "18-10-2023-15-11.pcm", + "18-10-2023-15-12.ndx", + "18-10-2023-15-12.pcm", + "18-10-2023-15-13.ndx", + "18-10-2023-15-13.pcm", + "18-10-2023-15-14.ndx", + "18-10-2023-15-14.pcm", + "18-10-2023-15-15.ndx", + "18-10-2023-15-15.pcm", + "18-10-2023-15-16.ndx", + "18-10-2023-15-16.pcm", + "18-10-2023-15-17.ndx", + "18-10-2023-15-17.pcm", + "18-10-2023-15-18.ndx", + "18-10-2023-15-18.pcm", + "18-10-2023-15-19.ndx", 
+ "18-10-2023-15-19.pcm", + "18-10-2023-15-20.ndx", + "18-10-2023-15-20.pcm", + "18-10-2023-15-21.ndx", + "18-10-2023-15-21.pcm", + "18-10-2023-15-22.ndx", + "18-10-2023-15-22.pcm", + "18-10-2023-15-23.ndx", + "18-10-2023-15-23.pcm", + "18-10-2023-15-24.ndx", + "18-10-2023-15-24.pcm", + "18-10-2023-15-25.ndx", + "18-10-2023-15-25.pcm", + "18-10-2023-15-26.ndx", + "18-10-2023-15-26.pcm", + "18-10-2023-15-27.ndx", + "18-10-2023-15-27.pcm", + "18-10-2023-15-28.ndx", + "18-10-2023-15-28.pcm", + "18-10-2023-15-29.ndx", + "18-10-2023-15-29.pcm", + "18-10-2023-15-30.ndx", + "18-10-2023-15-30.pcm", + "18-10-2023-15-31.ndx", + "18-10-2023-15-31.pcm", + "18-10-2023-15-32.ndx", + "18-10-2023-15-32.pcm", + "18-10-2023-15-33.ndx", + "18-10-2023-15-33.pcm", + "18-10-2023-15-34.ndx", + "18-10-2023-15-34.pcm", + "18-10-2023-15-35.ndx", + "18-10-2023-15-35.pcm", + "18-10-2023-15-36.ndx", + "18-10-2023-15-36.pcm", + "18-10-2023-15-37.ndx", + "18-10-2023-15-37.pcm", + "18-10-2023-15-38.ndx", + "18-10-2023-15-38.pcm", + "18-10-2023-15-39.ndx", + "18-10-2023-15-39.pcm", + "18-10-2023-15-40.ndx", + "18-10-2023-15-40.pcm", + "18-10-2023-15-41.ndx", + "18-10-2023-15-41.pcm", + "18-10-2023-15-42.ndx", + "18-10-2023-15-42.pcm", + "18-10-2023-15-43.ndx", + "18-10-2023-15-43.pcm", + "18-10-2023-15-44.ndx", + "18-10-2023-15-44.pcm", + "18-10-2023-15-45.ndx", + "18-10-2023-15-45.pcm", + "18-10-2023-15-46.ndx", + "18-10-2023-15-46.pcm", + "18-10-2023-15-47.ndx", + "18-10-2023-15-47.pcm", + "18-10-2023-15-48.ndx", + "18-10-2023-15-48.pcm", + "18-10-2023-15-49.ndx", + "18-10-2023-15-49.pcm", + "18-10-2023-15-50.ndx", + "18-10-2023-15-50.pcm", + "18-10-2023-15-51.ndx", + "18-10-2023-15-51.pcm", + "18-10-2023-15-52.ndx", + "18-10-2023-15-52.pcm", + "18-10-2023-15-53.ndx", + "18-10-2023-15-53.pcm", + "18-10-2023-15-54.ndx", + "18-10-2023-15-54.pcm", + "18-10-2023-15-55.ndx", + "18-10-2023-15-55.pcm", + "18-10-2023-15-56.ndx", + "18-10-2023-15-56.pcm", + "18-10-2023-15-57.ndx", + "18-10-2023-15-57.pcm", + "18-10-2023-15-58.ndx", + "18-10-2023-15-58.pcm", + "18-10-2023-15-59.ndx", + "18-10-2023-15-59.pcm", + "18-10-2023-16-00.ndx", + "18-10-2023-16-00.pcm", + "18-10-2023-16-01.ndx", + "18-10-2023-16-01.pcm", + "18-10-2023-16-02.ndx", + "18-10-2023-16-02.pcm", + "18-10-2023-16-03.ndx", + "18-10-2023-16-03.pcm", + "18-10-2023-16-04.ndx", + "18-10-2023-16-04.pcm", + "18-10-2023-16-05.ndx", + "18-10-2023-16-05.pcm", + "18-10-2023-16-06.ndx", + "18-10-2023-16-06.pcm", + "18-10-2023-16-07.ndx", + "18-10-2023-16-07.pcm", + "18-10-2023-16-08.ndx", + "18-10-2023-16-08.pcm", + "18-10-2023-16-09.ndx", + "18-10-2023-16-09.pcm", + "18-10-2023-16-10.ndx", + "18-10-2023-16-10.pcm", + "18-10-2023-16-11.ndx", + "18-10-2023-16-11.pcm", + "18-10-2023-16-12.ndx", + "18-10-2023-16-12.pcm", + "18-10-2023-16-13.ndx", + "18-10-2023-16-13.pcm", + "18-10-2023-16-14.ndx", + "18-10-2023-16-14.pcm", + "18-10-2023-16-15.ndx", + "18-10-2023-16-15.pcm", + "18-10-2023-16-16.ndx", + "18-10-2023-16-16.pcm", + "18-10-2023-16-17.ndx", + "18-10-2023-16-17.pcm", + "18-10-2023-16-18.ndx", + "18-10-2023-16-18.pcm", + "18-10-2023-16-19.ndx", + "18-10-2023-16-19.pcm", + "18-10-2023-16-20.ndx", + "18-10-2023-16-20.pcm", + "18-10-2023-16-21.ndx", + "18-10-2023-16-21.pcm", + "18-10-2023-16-22.ndx", + "18-10-2023-16-22.pcm", + "18-10-2023-16-23.ndx", + "18-10-2023-16-23.pcm", + "18-10-2023-16-24.ndx", + "18-10-2023-16-24.pcm", + "18-10-2023-16-25.ndx", + "18-10-2023-16-25.pcm", + "18-10-2023-16-26.ndx", + "18-10-2023-16-26.pcm", + "18-10-2023-16-27.ndx", + 
"18-10-2023-16-27.pcm", + "18-10-2023-16-28.ndx", + "18-10-2023-16-28.pcm", + "18-10-2023-16-29.ndx", + "18-10-2023-16-29.pcm", + "18-10-2023-16-30.ndx", + "18-10-2023-16-30.pcm", + "18-10-2023-16-31.ndx", + "18-10-2023-16-31.pcm", + "18-10-2023-16-32.ndx", + "18-10-2023-16-32.pcm", + "18-10-2023-16-33.ndx", + "18-10-2023-16-33.pcm", + "18-10-2023-16-34.ndx", + "18-10-2023-16-34.pcm", + "18-10-2023-16-35.ndx", + "18-10-2023-16-35.pcm", + "18-10-2023-16-36.ndx", + "18-10-2023-16-36.pcm", + "18-10-2023-16-37.ndx", + "18-10-2023-16-37.pcm", + "18-10-2023-16-38.ndx", + "18-10-2023-16-38.pcm", + "18-10-2023-16-40.ndx", + "18-10-2023-16-40.pcm", + "18-10-2023-16-41.ndx", + "18-10-2023-16-41.pcm", + "18-10-2023-16-42.ndx", + "18-10-2023-16-42.pcm", + "18-10-2023-16-43.ndx", + "18-10-2023-16-43.pcm", + "18-10-2023-16-44.ndx", + "18-10-2023-16-44.pcm", + "18-10-2023-16-45.ndx", + "18-10-2023-16-45.pcm", + "18-10-2023-16-46.ndx", + "18-10-2023-16-46.pcm", + "18-10-2023-16-47.ndx", + "18-10-2023-16-47.pcm", + "18-10-2023-16-48.ndx", + "18-10-2023-16-48.pcm", + "18-10-2023-16-49.ndx", + "18-10-2023-16-49.pcm", + "18-10-2023-16-50.ndx", + "18-10-2023-16-50.pcm", + "18-10-2023-16-51.ndx", + "18-10-2023-16-51.pcm", + "18-10-2023-16-52.ndx", + "18-10-2023-16-52.pcm", + "18-10-2023-16-53.ndx", + "18-10-2023-16-53.pcm", + "18-10-2023-16-54.ndx", + "18-10-2023-16-54.pcm", + "18-10-2023-16-55.ndx", + "18-10-2023-16-55.pcm", + "18-10-2023-16-56.ndx", + "18-10-2023-16-56.pcm", + "18-10-2023-16-57.ndx", + "18-10-2023-16-57.pcm", + "18-10-2023-16-58.ndx", + "18-10-2023-16-58.pcm", + "18-10-2023-16-59.ndx", + "18-10-2023-16-59.pcm", + "18-10-2023-17-00.ndx", + "18-10-2023-17-00.pcm", + "18-10-2023-17-01.ndx", + "18-10-2023-17-01.pcm", + "18-10-2023-17-02.ndx", + "18-10-2023-17-02.pcm", + "18-10-2023-17-03.ndx", + "18-10-2023-17-03.pcm", + "18-10-2023-17-04.ndx", + "18-10-2023-17-04.pcm", + "18-10-2023-17-05.ndx", + "18-10-2023-17-05.pcm", + "18-10-2023-17-06.ndx", + "18-10-2023-17-06.pcm", + "18-10-2023-17-07.ndx", + "18-10-2023-17-07.pcm", + "18-10-2023-17-08.ndx", + "18-10-2023-17-08.pcm", + "18-10-2023-17-09.ndx", + "18-10-2023-17-09.pcm", + "18-10-2023-17-10.ndx", + "18-10-2023-17-10.pcm", + "18-10-2023-17-11.ndx", + "18-10-2023-17-11.pcm", + "18-10-2023-17-12.ndx", + "18-10-2023-17-12.pcm", + "18-10-2023-17-13.ndx", + "18-10-2023-17-13.pcm", + "18-10-2023-17-14.ndx", + "18-10-2023-17-14.pcm", + "18-10-2023-17-15.ndx", + "18-10-2023-17-15.pcm", + "18-10-2023-17-16.ndx", + "18-10-2023-17-16.pcm", + "18-10-2023-17-17.ndx", + "18-10-2023-17-17.pcm", + "18-10-2023-17-18.ndx", + "18-10-2023-17-18.pcm", + "18-10-2023-17-19.ndx", + "18-10-2023-17-19.pcm", + "18-10-2023-17-20.ndx", + "18-10-2023-17-20.pcm", + "18-10-2023-17-21.ndx", + "18-10-2023-17-21.pcm", + "18-10-2023-17-22.ndx", + "18-10-2023-17-22.pcm", + "18-10-2023-17-23.ndx", + "18-10-2023-17-23.pcm", + "18-10-2023-17-24.ndx", + "18-10-2023-17-24.pcm", + "18-10-2023-17-25.ndx", + "18-10-2023-17-25.pcm", + "18-10-2023-17-26.ndx", + "18-10-2023-17-26.pcm", + "18-10-2023-17-27.ndx", + "18-10-2023-17-27.pcm", + "18-10-2023-17-28.ndx", + "18-10-2023-17-28.pcm", + "18-10-2023-17-29.ndx", + "18-10-2023-17-29.pcm", + "18-10-2023-17-30.ndx", + "18-10-2023-17-30.pcm", + "18-10-2023-17-31.ndx", + "18-10-2023-17-31.pcm", + "18-10-2023-17-32.ndx", + "18-10-2023-17-32.pcm", + "18-10-2023-17-33.ndx", + "18-10-2023-17-33.pcm", + "18-10-2023-17-34.ndx", + "18-10-2023-17-34.pcm", + "18-10-2023-17-35.ndx", + "18-10-2023-17-35.pcm", + "18-10-2023-17-36.ndx", + 
"18-10-2023-17-36.pcm", + "18-10-2023-17-37.ndx", + "18-10-2023-17-37.pcm", + "18-10-2023-17-38.ndx", + "18-10-2023-17-38.pcm", + "18-10-2023-17-39.ndx", + "18-10-2023-17-39.pcm", + "18-10-2023-17-40.ndx", + "18-10-2023-17-40.pcm", + "18-10-2023-17-41.ndx", + "18-10-2023-17-41.pcm", + "18-10-2023-17-42.ndx", + "18-10-2023-17-42.pcm", + "18-10-2023-17-43.ndx", + "18-10-2023-17-43.pcm", + "18-10-2023-17-44.ndx", + "18-10-2023-17-44.pcm", + "18-10-2023-17-45.ndx", + "18-10-2023-17-45.pcm", + "18-10-2023-17-46.ndx", + "18-10-2023-17-46.pcm", + "18-10-2023-17-47.ndx", + "18-10-2023-17-47.pcm", + "18-10-2023-17-48.ndx", + "18-10-2023-17-48.pcm", + "18-10-2023-17-49.ndx", + "18-10-2023-17-49.pcm", + "18-10-2023-17-50.ndx", + "18-10-2023-17-50.pcm", + "18-10-2023-17-51.ndx", + "18-10-2023-17-51.pcm", + "18-10-2023-17-52.ndx", + "18-10-2023-17-52.pcm", + "18-10-2023-17-53.ndx", + "18-10-2023-17-53.pcm", + "18-10-2023-17-54.ndx", + "18-10-2023-17-54.pcm", + "18-10-2023-17-55.ndx", + "18-10-2023-17-55.pcm", + "18-10-2023-17-56.ndx", + "18-10-2023-17-56.pcm", + "18-10-2023-17-57.ndx", + "18-10-2023-17-57.pcm", + "18-10-2023-17-58.mjp", + "18-10-2023-17-58.ndx", + "18-10-2023-17-58.pcm", + "18-10-2023-20-01.ndx", + "18-10-2023-20-01.pcm", + "18-10-2023-20-02.ndx", + "18-10-2023-20-02.pcm", + "18-10-2023-20-03.ndx", + "18-10-2023-20-03.pcm", + "18-10-2023-20-04.ndx", + "18-10-2023-20-04.pcm", + "18-10-2023-20-05.ndx", + "18-10-2023-20-05.pcm", + "18-10-2023-20-06.ndx", + "18-10-2023-20-06.pcm", + "18-10-2023-20-07.ndx", + "18-10-2023-20-07.pcm", + "18-10-2023-20-08.ndx", + "18-10-2023-20-08.pcm", + "18-10-2023-20-09.ndx", + "18-10-2023-20-09.pcm", + "18-10-2023-20-10.ndx", + "18-10-2023-20-10.pcm", + "18-10-2023-20-11.ndx", + "18-10-2023-20-11.pcm", + "18-10-2023-20-12.ndx", + "18-10-2023-20-12.pcm", + "18-10-2023-20-13.ndx", + "18-10-2023-20-13.pcm", + "18-10-2023-20-14.ndx", + "18-10-2023-20-14.pcm", + "18-10-2023-20-15.ndx", + "18-10-2023-20-15.pcm", + "18-10-2023-20-16.ndx", + "18-10-2023-20-16.pcm", + "18-10-2023-20-17.ndx", + "18-10-2023-20-17.pcm", + "18-10-2023-20-18.ndx", + "18-10-2023-20-18.pcm", + "18-10-2023-20-19.ndx", + "18-10-2023-20-19.pcm", + "18-10-2023-20-20.ndx", + "18-10-2023-20-20.pcm", + "18-10-2023-20-21.ndx", + "18-10-2023-20-21.pcm", + "18-10-2023-20-22.ndx", + "18-10-2023-20-22.pcm", + "18-10-2023-20-23.ndx", + "18-10-2023-20-23.pcm", + "18-10-2023-20-24.ndx", + "18-10-2023-20-24.pcm", + "18-10-2023-20-25.ndx", + "18-10-2023-20-25.pcm", + "18-10-2023-20-26.ndx", + "18-10-2023-20-26.pcm", + "18-10-2023-20-27.ndx", + "18-10-2023-20-27.pcm", + "18-10-2023-20-28.ndx", + "18-10-2023-20-28.pcm", + "18-10-2023-20-29.ndx", + "18-10-2023-20-29.pcm", + "18-10-2023-20-30.ndx", + "18-10-2023-20-30.pcm", + "18-10-2023-20-31.ndx", + "18-10-2023-20-31.pcm", + "18-10-2023-20-32.ndx", + "18-10-2023-20-32.pcm", + "18-10-2023-20-33.ndx", + "18-10-2023-20-33.pcm", + "18-10-2023-20-34.ndx", + "18-10-2023-20-34.pcm", + "18-10-2023-20-35.ndx", + "18-10-2023-20-35.pcm", + "18-10-2023-20-36.ndx", + "18-10-2023-20-36.pcm", + "18-10-2023-20-37.ndx", + "18-10-2023-20-37.pcm", + "18-10-2023-20-38.ndx", + "18-10-2023-20-38.pcm", + "18-10-2023-20-39.ndx", + "18-10-2023-20-39.pcm", + "18-10-2023-20-40.ndx", + "18-10-2023-20-40.pcm", + "18-10-2023-20-41.ndx", + "18-10-2023-20-41.pcm", + "18-10-2023-20-42.ndx", + "18-10-2023-20-42.pcm", + "18-10-2023-20-43.ndx", + "18-10-2023-20-43.pcm", + "18-10-2023-20-44.ndx", + "18-10-2023-20-44.pcm", + "18-10-2023-20-45.ndx", + "18-10-2023-20-45.pcm", + 
"18-10-2023-20-46.ndx", + "18-10-2023-20-46.pcm", + "18-10-2023-20-47.ndx", + "18-10-2023-20-47.pcm", + "18-10-2023-20-48.ndx", + "18-10-2023-20-48.pcm", + "18-10-2023-20-49.ndx", + "18-10-2023-20-49.pcm", + "18-10-2023-20-50.ndx", + "18-10-2023-20-50.pcm", + "18-10-2023-20-51.ndx", + "18-10-2023-20-51.pcm", + "18-10-2023-20-52.ndx", + "18-10-2023-20-52.pcm", + "18-10-2023-20-53.ndx", + "18-10-2023-20-53.pcm", + "18-10-2023-20-54.ndx", + "18-10-2023-20-54.pcm", + "18-10-2023-20-55.ndx", + "18-10-2023-20-55.pcm", + "18-10-2023-20-56.ndx", + "18-10-2023-20-56.pcm", + "18-10-2023-20-57.ndx", + "18-10-2023-20-57.pcm", + "18-10-2023-20-58.ndx", + "18-10-2023-20-58.pcm", + "18-10-2023-20-59.ndx", + "18-10-2023-20-59.pcm", + "18-10-2023-21-00.ndx", + "18-10-2023-21-00.pcm", + "18-10-2023-21-01.ndx", + "18-10-2023-21-01.pcm", + "18-10-2023-21-02.ndx", + "18-10-2023-21-02.pcm", + "18-10-2023-21-03.ndx", + "18-10-2023-21-03.pcm", + "18-10-2023-21-04.ndx", + "18-10-2023-21-04.pcm", + "18-10-2023-21-05.ndx", + "18-10-2023-21-05.pcm", + "18-10-2023-21-06.ndx", + "18-10-2023-21-06.pcm", + "18-10-2023-21-07.ndx", + "18-10-2023-21-07.pcm", + "18-10-2023-21-08.ndx", + "18-10-2023-21-08.pcm", + "18-10-2023-21-09.ndx", + "18-10-2023-21-09.pcm", + "18-10-2023-21-10.ndx", + "18-10-2023-21-10.pcm", + "18-10-2023-21-11.ndx", + "18-10-2023-21-11.pcm", + "18-10-2023-21-12.ndx", + "18-10-2023-21-12.pcm", + "18-10-2023-21-13.ndx", + "18-10-2023-21-13.pcm", + "18-10-2023-21-14.ndx", + "18-10-2023-21-14.pcm", + "18-10-2023-21-15.ndx", + "18-10-2023-21-15.pcm", + "18-10-2023-21-16.ndx", + "18-10-2023-21-16.pcm", + "18-10-2023-21-17.ndx", + "18-10-2023-21-17.pcm", + "18-10-2023-21-18.ndx", + "18-10-2023-21-18.pcm", + "18-10-2023-21-19.ndx", + "18-10-2023-21-19.pcm", + "18-10-2023-21-20.ndx", + "18-10-2023-21-20.pcm", + "18-10-2023-21-21.ndx", + "18-10-2023-21-21.pcm", + "18-10-2023-21-22.ndx", + "18-10-2023-21-22.pcm", + "18-10-2023-21-23.ndx", + "18-10-2023-21-23.pcm", + "18-10-2023-21-24.ndx", + "18-10-2023-21-24.pcm", + "18-10-2023-21-25.ndx", + "18-10-2023-21-25.pcm", + "18-10-2023-21-26.ndx", + "18-10-2023-21-26.pcm", + "18-10-2023-21-27.ndx", + "18-10-2023-21-27.pcm", + "18-10-2023-21-28.ndx", + "18-10-2023-21-28.pcm", + "18-10-2023-21-29.ndx", + "18-10-2023-21-29.pcm", + "18-10-2023-21-30.ndx", + "18-10-2023-21-30.pcm", + "18-10-2023-21-31.ndx", + "18-10-2023-21-31.pcm", + "18-10-2023-21-32.ndx", + "18-10-2023-21-32.pcm", + "18-10-2023-21-33.ndx", + "18-10-2023-21-33.pcm", + "18-10-2023-21-34.ndx", + "18-10-2023-21-34.pcm", + "18-10-2023-21-35.ndx", + "18-10-2023-21-35.pcm", + "18-10-2023-21-36.ndx", + "18-10-2023-21-36.pcm", + "18-10-2023-21-37.ndx", + "18-10-2023-21-37.pcm", + "18-10-2023-21-38.ndx", + "18-10-2023-21-38.pcm", + "18-10-2023-21-39.ndx", + "18-10-2023-21-39.pcm", + "18-10-2023-21-40.ndx", + "18-10-2023-21-40.pcm", + "18-10-2023-21-41.ndx", + "18-10-2023-21-41.pcm", + "18-10-2023-21-42.ndx", + "18-10-2023-21-42.pcm", + "18-10-2023-21-43.ndx", + "18-10-2023-21-43.pcm", + "18-10-2023-21-44.mjp", + "18-10-2023-21-44.ndx", + "18-10-2023-21-44.pcm", ] -from datetime import datetime, timezone -for s in a: - i = s.split('.')[0] - dt = datetime.strptime(i, '%d-%m-%Y-%H-%M') - # print(dt) -a.sort(key=lambda x: datetime.strptime(x.split('.')[0], '%d-%m-%Y-%H-%M')) +for s in file_paths: + i = s.split(".")[0] + dt = datetime.strptime(i, "%d-%m-%Y-%H-%M") -for i in a: - print(i) +file_paths.sort(key=lambda x: datetime.strptime(x.split(".")[0], "%d-%m-%Y-%H-%M")) +for i in file_paths: + print(i) diff --git 
a/vector/__init__.py b/vector/__init__.py index f8cafd9..1f1982a 100644 --- a/vector/__init__.py +++ b/vector/__init__.py @@ -1,17 +1,9 @@ - - -from .gps import GpsVector - +from .catchcountA import CatchCountA +from .elogtimegaps import ElogTimeGapsVector +from .equipment_outage_agg import EquipmentOutageAggVector from .fish_ai import FishAiEventsComeInFourHourBurstsVector - +from .gps import GpsVector from .internet import InternetVector - -from .equipment_outage_agg import EquipmentOutageAggVector - +from .tegrastats import TegrastatsVector from .thalos_mount import ThalosMountVector from .thalos_vids_exist import ThalosVideosExistVector - -from .elogtimegaps import ElogTimeGapsVector - -from .catchcountA import CatchCountA -from .tegrastats import TegrastatsVector diff --git a/vector/catchcountA.py b/vector/catchcountA.py index 3d29717..4c6d9dd 100644 --- a/vector/catchcountA.py +++ b/vector/catchcountA.py @@ -1,74 +1,82 @@ -from datetime import datetime, timezone, timedelta -from model import Base, OndeckData, VideoFile, DeckhandEventView, Track -from model import RiskVector, Test -from pathlib import Path -from sqlalchemy.orm import Session -from statistics import mean, fmean - import json import math -import numpy as np import os -import pandas as pa -import re +from datetime import datetime, timedelta, timezone +from pathlib import Path +from statistics import fmean + +import numpy as np +import pandas as pd import sqlalchemy as sa -import subprocess +from sqlalchemy.orm import Session + +from model import Base, DeckhandEventView, OndeckData, RiskVector, Test, Track, VideoFile -class CatchCountA(): +class CatchCountA: # tests = relationship("Test") def __init__(self, session: Session, rv) -> None: self.session: Session = session self.config(rv) - + def config(self, rv): self.rv = rv config = json.loads(rv.configblob) - self.window_minutes = config['window_minutes'] - self.ai_table = config['ai_table'] + self.window_minutes = config["window_minutes"] + self.ai_table = config["ai_table"] self.confidence_filter = None - if 'confidence_filter' in config.keys(): - self.confidence_filter = config['confidence_filter'] + if "confidence_filter" in config.keys(): + self.confidence_filter = config["confidence_filter"] self.ok_p_coeff = 0.2 - if 'ok_p_coeff' in config.keys(): - self.ok_p_coeff = config['ok_p_coeff'] - + if "ok_p_coeff" in config.keys(): + self.ok_p_coeff = config["ok_p_coeff"] + print(self.rv) print(self.ai_table) - - def execute(self, expected_timedelta): datetime_to = datetime.now(tz=timezone.utc) - datetime_from = datetime_to - timedelta(minutes = self.window_minutes) + datetime_from = datetime_to - timedelta(minutes=self.window_minutes) - - result = Test(name=f"catch count A test from {datetime_from:%Y-%m-%d %H:%M} to {datetime_to:%Y-%m-%d %H:%M}", vector=self.rv) + result = Test( + name=f"catch count A test from {datetime_from:%Y-%m-%d %H:%M} to {datetime_to:%Y-%m-%d %H:%M}", + vector=self.rv, + ) self.session.add(result) self.session.commit() # print(result) - - datas = [] - recent_elogs: list[DeckhandEventView] = self.session.query(DeckhandEventView).where( - DeckhandEventView.datetime > datetime_to - 2*timedelta(minutes = self.window_minutes)).all() + datas = [] - if self.ai_table == 'ondeckdata': - ondeck_datas = self.session.query(OndeckData) \ - .join(OndeckData.video_file) \ - .options(sa.orm.joinedload(OndeckData.video_file)) \ + recent_elogs: list[DeckhandEventView] = ( + self.session.query(DeckhandEventView) + .where( + DeckhandEventView.datetime + > datetime_to 
- 2 * timedelta(minutes=self.window_minutes) + ) + .all() + ) + + if self.ai_table == "ondeckdata": + ondeck_datas = ( + self.session.query(OndeckData) + .join(OndeckData.video_file) + .options(sa.orm.joinedload(OndeckData.video_file)) .where( VideoFile.start_datetime < datetime_to, - VideoFile.start_datetime >= datetime_from) \ - .order_by(OndeckData.datetime).all() - + VideoFile.start_datetime >= datetime_from, + ) + .order_by(OndeckData.datetime) + .all() + ) + ondeck_datas: list[OndeckData] = list(ondeck_datas) expected_videos = self.window_minutes / 5.0 - errored = len(list(filter(lambda x: x.status != 'done', ondeck_datas))) + errored = len(list(filter(lambda x: x.status != "done", ondeck_datas))) # print(f"ondeck errored: {errored}") @@ -80,10 +88,10 @@ def execute(self, expected_timedelta): s = file.stat() if s.st_size <= 0: print("empty file", file) - + except Exception as e: print("exception", e) - + fish_counts = [] is_fishings = [] dts = [] @@ -93,32 +101,39 @@ def execute(self, expected_timedelta): continue fish_counts.append(row.overallcatches) dts.append(row.video_file.start_datetime) - is_fishing = any(map(lambda elog: \ - elog.systemstarthauldatetime < row.video_file.start_datetime and \ - row.video_file.start_datetime < elog.systemendhauldatetime, recent_elogs)) + is_fishing = any( + map( + lambda elog: elog.systemstarthauldatetime < row.video_file.start_datetime + and row.video_file.start_datetime < elog.systemendhauldatetime, + recent_elogs, + ) + ) is_fishings.append(int(is_fishing)) - a = pa.DataFrame({ - 'datetime': dts, - "fish_counts": fish_counts, - "is_fishings": is_fishings, - }) - + a = pd.DataFrame( + { + "datetime": dts, + "fish_counts": fish_counts, + "is_fishings": is_fishings, + } + ) + # from matplotlib import pyplot # pyplot.axis() # pyplot.plot(a['datetime'],a['fish_counts']) # pyplot.plot(a['datetime'],a['is_fishings']) # pyplot.show() - + if not np.any(np.diff(is_fishings)): # this means there is no overlap with the elogs, so the is_fishings data is a flat line # running a p_coeff when one input is a flat line is meaningless # so this test can't continue - result.detail = "elog reports a flat is_fishing variable over time. p_coeff can't work" + result.detail = ( + "elog reports a flat is_fishing variable over time. p_coeff can't work" + ) self.session.commit() return - if not np.any(np.diff(fish_counts)): result.detail = "ondeck reports a flat fish count over time. 
p_coeff can't work" self.session.commit() @@ -128,20 +143,24 @@ def execute(self, expected_timedelta): print(p_coeffs) p_coeff = p_coeffs[0][1] - - + result.score = math.sqrt(self.ok_p_coeff - p_coeff) if p_coeff <= self.ok_p_coeff else 0 - + self.session.commit() - elif self.ai_table == 'tracks': - tracks_rows = self.session.execute(sa.text('select t.*, v.start_datetime from tracks t \ + elif self.ai_table == "tracks": + tracks_rows = self.session.execute( + sa.text( + "select t.*, v.start_datetime from tracks t \ join video_files v on t.video_uri = v.decrypted_path \ where v.start_datetime > :datetime_from \ and v.start_datetime <= :datetime_to \ - order by t.datetime asc;'), { - 'datetime_from': datetime_from, - 'datetime_to': datetime_to, - }) + order by t.datetime asc;" + ), + { + "datetime_from": datetime_from, + "datetime_to": datetime_to, + }, + ) tracks: list[Track] = list(tracks_rows) expected_videos = self.window_minutes / 5.0 @@ -164,24 +183,25 @@ def execute(self, expected_timedelta): fish_counts[row.start_datetime] = 0 fish_counts[row.start_datetime] += 1 - fishCountS =pa.Series(fish_counts) + fishCountS = pd.Series(fish_counts) fishCountS.sort_index(inplace=True) is_fishings = {} for start_datetime in fish_counts.keys(): - is_fishing = any(map(lambda elog: \ - elog.systemstarthauldatetime < start_datetime and \ - start_datetime < elog.systemendhauldatetime, recent_elogs)) + is_fishing = any( + map( + lambda elog: elog.systemstarthauldatetime < start_datetime + and start_datetime < elog.systemendhauldatetime, + recent_elogs, + ) + ) is_fishings[start_datetime] = int(is_fishing) - isFishingS = pa.Series(is_fishings) + isFishingS = pd.Series(is_fishings) isFishingS.sort_index(inplace=True) - a = pa.DataFrame({ - "fish_counts": fishCountS, - "is_fishings": isFishingS - }) + a = pd.DataFrame({"fish_counts": fishCountS, "is_fishings": isFishingS}) # print(a) # from matplotlib import pyplot @@ -189,16 +209,17 @@ def execute(self, expected_timedelta): # pyplot.plot(isFishingS) # pyplot.plot(fishCountS) # pyplot.show() - + if not np.any(np.diff(isFishingS.values)): # this means there is no overlap with the elogs, so the is_fishings data is a flat line # running a p_coeff when one input is a flat line is meaningless # so this test can't continue - result.detail = "elog reports a flat is_fishing variable over time. p_coeff can't work" + result.detail = ( + "elog reports a flat is_fishing variable over time. p_coeff can't work" + ) self.session.commit() return - if not np.any(np.diff(fishCountS.values)): result.detail = "ondeck reports a flat fish count over time. 
p_coeff can't work" self.session.commit() @@ -208,54 +229,49 @@ def execute(self, expected_timedelta): print("p_coeffs:", p_coeffs) p_coeff = p_coeffs[0][1] - - + result.score = math.sqrt(self.ok_p_coeff - p_coeff) if p_coeff <= self.ok_p_coeff else 0 - + self.session.commit() return - - # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': - +if __name__ == "__main__": from flask.config import Config as FlaskConfig - flaskconfig = FlaskConfig(root_path='') - flaskconfig.from_object('config.defaults') - if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') + flaskconfig = FlaskConfig(root_path="") + + flaskconfig.from_object("config.defaults") + if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") import click @click.command() - @click.option('--dbname', default=flaskconfig.get('DBNAME')) - @click.option('--dbuser', default=flaskconfig.get('DBUSER')) + @click.option("--dbname", default=flaskconfig.get("DBNAME")) + @click.option("--dbuser", default=flaskconfig.get("DBUSER")) def main(dbname, dbuser): - import sqlalchemy as sa from sqlalchemy.orm import sessionmaker as SessionMaker - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) sessionmaker = SessionMaker(sa_engine) Base.metadata.create_all(sa_engine) session = sessionmaker() # results = list(session.query(RiskVector).filter(RiskVector.name == ThalosVideosExistVector.__name__)) - session.execute('delete from tests where vector_id = -1;') - session.execute('delete from vectors where id = -1;') + session.execute("delete from tests where vector_id = -1;") + session.execute("delete from vectors where id = -1;") rv = RiskVector() rv.id = -1 rv.name = CatchCountA.__name__ - rv.schedule_string = 'every 1 minutes' + rv.schedule_string = "every 1 minutes" rv.configblob = '{"window_minutes": 60000, "ai_table":"ondeckdata"}' rv.tests = [] tmv = CatchCountA(session, rv=rv) tmv.execute(timedelta(minutes=5)) - - main() \ No newline at end of file + main() diff --git a/vector/elogtimegaps.py b/vector/elogtimegaps.py index ccd29bf..8e29b51 100644 --- a/vector/elogtimegaps.py +++ b/vector/elogtimegaps.py @@ -1,99 +1,94 @@ - -from model import Base, RiskVector, Test +import json +import os +from datetime import datetime, timedelta, timezone import sqlalchemy as sa from sqlalchemy.orm.session import Session -# from sqlalchemy.orm import session -from model.internetdata import InternetData - -import json -import subprocess - -import re -import codecs -import os +from model import Base, RiskVector, Test -from datetime import datetime, timedelta, timezone +# from sqlalchemy.orm import session -class ElogTimeGapsVector(): +class ElogTimeGapsVector: # tests = relationship("Test") def __init__(self, session: Session, rv) -> None: self.session: Session = session self.config(rv) - + def config(self, rv): self.rv = rv config = json.loads(rv.configblob) print(self.rv) - - def execute(self, expected_timedelta: timedelta): datetime_to = datetime.now(tz=timezone.utc) datetime_from = datetime_to - expected_timedelta - last_departure_res = self.session.execute(sa.text("""select max(datetime) last_departure from port_departures;""")) + last_departure_res = self.session.execute( + sa.text("""select max(datetime) last_departure from port_departures;""") + ) last_departure: datetime = last_departure_res.first()[0] - - result = Test(name="elog time gap vector at 
%s"%(datetime_to.strftime('%Y-%m-%d %H:%M:%SZ')), vector=self.rv) + + result = Test( + name="elog time gap vector at %s" % (datetime_to.strftime("%Y-%m-%d %H:%M:%SZ")), + vector=self.rv, + ) self.session.add(result) self.session.commit() # print(result) - - res = self.session.execute(sa.text(""" - select coalesce(max(score), 0) - from elog_time_gap_score - where ts_prev >= :recent_departure ;"""), { - "recent_departure": last_departure - }) + + res = self.session.execute( + sa.text(""" + select coalesce(max(score), 0) + from elog_time_gap_score + where ts_prev >= :recent_departure ;"""), + {"recent_departure": last_departure}, + ) result.score = res.first()[0] - + self.session.commit() return result # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': - +if __name__ == "__main__": from flask.config import Config as FlaskConfig - flaskconfig = FlaskConfig(root_path='') - flaskconfig.from_object('config.defaults') - if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') + flaskconfig = FlaskConfig(root_path="") + + flaskconfig.from_object("config.defaults") + if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") import click @click.command() - @click.option('--dbname', default=flaskconfig.get('DBNAME')) - @click.option('--dbuser', default=flaskconfig.get('DBUSER')) + @click.option("--dbname", default=flaskconfig.get("DBNAME")) + @click.option("--dbuser", default=flaskconfig.get("DBUSER")) def main(dbname, dbuser): - import sqlalchemy as sa from sqlalchemy.orm import sessionmaker as SessionMaker - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) sessionmaker = SessionMaker(sa_engine) Base.metadata.create_all(sa_engine) session = sessionmaker() # results = list(session.query(RiskVector).filter(RiskVector.name == ThalosVideosExistVector.__name__)) - session.execute(sa.text('delete from tests where vector_id = -1;')) - session.execute(sa.text('delete from vectors where id = -1;')) + session.execute(sa.text("delete from tests where vector_id = -1;")) + session.execute(sa.text("delete from vectors where id = -1;")) rv = RiskVector() rv.id = -1 rv.name = ElogTimeGapsVector.__name__ - rv.schedule_string = 'every 1 minutes' - rv.configblob = '{}' + rv.schedule_string = "every 1 minutes" + rv.configblob = "{}" rv.tests = [] tmv = ElogTimeGapsVector(session, rv=rv) tmv.execute(timedelta(minutes=5)) - main() diff --git a/vector/equipment_outage_agg.py b/vector/equipment_outage_agg.py index b5e62c8..63019d0 100644 --- a/vector/equipment_outage_agg.py +++ b/vector/equipment_outage_agg.py @@ -1,70 +1,68 @@ - -from model import RiskVector, Test - -from sqlalchemy.orm.session import Session -from sqlalchemy.orm.query import Query -from model import Test -from vector import InternetVector - import json - from datetime import datetime, timedelta, timezone +from sqlalchemy.orm.query import Query +from sqlalchemy.orm.session import Session + +from model import RiskVector, Test -class EquipmentOutageAggVector(): +class EquipmentOutageAggVector: rv: RiskVector sessin: Session def __init__(self, session: Session, rv) -> None: self.session = session self.config(rv) - + def config(self, rv): self.rv = rv config = json.loads(rv.configblob) - self.observed_riskvector_ids = config['observed_riskvector_ids'] + self.observed_riskvector_ids = config["observed_riskvector_ids"] print(self.rv) - def execute(self, 
expected_timedelta: timedelta): - datetime_to = datetime.now(tz=timezone.utc) datetime_from = datetime_to - expected_timedelta - result = Test(name="equipment outage aggregator from %s to %s"%(datetime_from, datetime_to), vector=self.rv) + result = Test( + name="equipment outage aggregator from %s to %s" % (datetime_from, datetime_to), + vector=self.rv, + ) self.session.add(result) self.session.commit() # print(result) - - q: Query[Test] = self.session.query(Test).filter( - Test.datetime >= datetime_from, - Test.datetime <= datetime_to, - Test.vector_id.in_(self.observed_riskvector_ids), - ).order_by(Test.datetime) - - tests: list[Test] = list(q.all()) + q: Query[Test] = ( + self.session.query(Test) + .filter( + Test.datetime >= datetime_from, + Test.datetime <= datetime_to, + Test.vector_id.in_(self.observed_riskvector_ids), + ) + .order_by(Test.datetime) + ) - groups : dict[int, list[Test]] = {} - group_scores : dict[int, int] = {} + tests: list[Test] = list(q.all()) + + groups: dict[int, list[Test]] = {} + group_scores: dict[int, int] = {} for test in tests: if test.vector_id not in groups.keys(): groups[test.vector_id] = [] groups[test.vector_id].append(test) - for vector_id in groups.keys(): group: list[Test] = groups[vector_id] expweighted = 0.0 outage = 0.0 - # find all sequential outages. + # find all sequential outages. # Determine a score based on how many and how long the outages are. # longer sequences have a much higher weight by cubing its length # divide by a constant scaling factor, then equalize to 0<x<1 for test in group: if test.score > 0.0: outage += 1 else: @@ -72,22 +70,23 @@ def execute(self, expected_timedelta: timedelta): # this is the end of a sequence, cube its length expweighted += outage * outage * outage / 200.0 outage = 0 - + if outage > 0: expweighted += outage * outage * outage / 200.0 - - print("expweighted: %s outage: %d "%(expweighted,outage)) - - group_scores[vector_id] = 1.0 - 1.0/(expweighted+1.0) - - result.detail = "vector_id=score: " + ", ".join([ "{}={}".format(k, i) for (k,i) in group_scores.items()]) + + print("expweighted: %s outage: %d " % (expweighted, outage)) + + group_scores[vector_id] = 1.0 - 1.0 / (expweighted + 1.0) + + result.detail = "vector_id=score: " + ", ".join( + ["{}={}".format(k, i) for (k, i) in group_scores.items()] + ) result.score = max(group_scores.values()) self.session.commit() return result -if __name__ == '__main__': +if __name__ == "__main__": pass - diff --git a/vector/fish_ai.py index fc8ce26..ebdf810 100644 --- a/vector/fish_ai.py +++ b/vector/fish_ai.py @@ -1,46 +1,45 @@ - -from functools import reduce -from model import RiskVector, Test - -from sqlalchemy.orm import session -from model.aifishdata import AifishData - -from model.test import T import json -from datetime import datetime, timezone, timedelta +import os +from datetime import datetime, timedelta, timezone + from dateutil.parser import isoparse +from sqlalchemy.orm import session -import os -import math +from model import RiskVector, Test +from model.aifishdata import AifishData -class FishAiEventsComeInFourHourBurstsVector(): +class FishAiEventsComeInFourHourBurstsVector: rv: RiskVector - # tests = relationship("Test") def __init__(self, s: session, rv) -> None: self.session = s self.config(rv) - + def config(self, rv): self.rv = rv confblob = json.loads(rv.configblob) - self.target_category_id = confblob['target_category_id'] - self.video_fps = confblob['video_fps'] - self.event_grouping_timedelta = timedelta(seconds=confblob['event_grouping_timedelta_seconds']) -
self.expected_gap_between_groups = timedelta(seconds=confblob['expected_gap_between_groups_seconds']) + self.target_category_id = confblob["target_category_id"] + self.video_fps = confblob["video_fps"] + self.event_grouping_timedelta = timedelta( + seconds=confblob["event_grouping_timedelta_seconds"] + ) + self.expected_gap_between_groups = timedelta( + seconds=confblob["expected_gap_between_groups_seconds"] + ) print(self.rv) print(confblob) - - def execute(self, expected_timedelta): - datetime_to = datetime.now(tz=timezone.utc) datetime_from = datetime_to - expected_timedelta - - fishAiDatas = self.session.query(AifishData).filter(AifishData.datetime > datetime_from).filter(AifishData.datetime < datetime_to) + + fishAiDatas = ( + self.session.query(AifishData) + .filter(AifishData.datetime > datetime_from) + .filter(AifishData.datetime < datetime_to) + ) scores_per_file = [] @@ -49,76 +48,92 @@ def execute(self, expected_timedelta): cocofilestat = os.stat(cocofilename) with open(cocofilename) as f: raw = json.load(f) - starttime = isoparse(raw['info']['date_created']) + starttime = isoparse(raw["info"]["date_created"]) endtime = fishAiData.datetime - annos = raw['annotations'] + annos = raw["annotations"] tracks_set = {} for anno in annos: - if not anno['category_id'] == self.target_category_id: + if not anno["category_id"] == self.target_category_id: continue - track_id = anno['attributes']['track_id'] + track_id = anno["attributes"]["track_id"] if track_id not in tracks_set: tracks_set[track_id] = { - 'track_id': track_id, + "track_id": track_id, "mintime": datetime(9999, 12, 31, 23, 59, 59), - "maxtime": datetime.fromtimestamp(0) + "maxtime": datetime.fromtimestamp(0), } track = tracks_set[track_id] - frameTime = frameToTime(starttime, self.video_fps, anno['image_id']) - if frameTime < track['mintime']: - track['mintime'] = frameTime - if frameTime > track['maxtime']: - track['maxtime'] = frameTime + frameTime = frameToTime(starttime, self.video_fps, anno["image_id"]) + if frameTime < track["mintime"]: + track["mintime"] = frameTime + if frameTime > track["maxtime"]: + track["maxtime"] = frameTime tracksByStartTime = list(tracks_set.values()) - tracksByStartTime.sort(key=lambda x: x['mintime']) - + tracksByStartTime.sort(key=lambda x: x["mintime"]) + # greedy left-to-right grouping algorithm: - groups=[] + groups = [] for t in tracksByStartTime: if len(groups) == 0: groups.append(dict(t)) continue - if groups[-1]['maxtime'] + self.event_grouping_timedelta >= t['mintime']: - latertime = t['maxtime'] if t['maxtime'] > groups[-1]['maxtime'] else groups[-1]['maxtime'] - groups[-1]['maxtime'] = latertime + if groups[-1]["maxtime"] + self.event_grouping_timedelta >= t["mintime"]: + latertime = ( + t["maxtime"] + if t["maxtime"] > groups[-1]["maxtime"] + else groups[-1]["maxtime"] + ) + groups[-1]["maxtime"] = latertime else: groups.append(dict(t)) - + score = 0 if len(groups) > 1: - mingap=None - for i in range(len(groups) - 1 ): - curr_gap = groups[i+1]['mintime'] - groups[i]['maxtime'] + mingap = None + for i in range(len(groups) - 1): + curr_gap = groups[i + 1]["mintime"] - groups[i]["maxtime"] mingap = curr_gap if mingap == None or curr_gap < mingap else mingap - + if mingap < self.expected_gap_between_groups: - score = 1.0 / ( ( 10.0 / self.expected_gap_between_groups.seconds ) * ( mingap.seconds - self.expected_gap_between_groups.seconds ) - 1.0) + 1.0 + score = ( + 1.0 + / ( + (10.0 / self.expected_gap_between_groups.seconds) + * (mingap.seconds - 
self.expected_gap_between_groups.seconds) + - 1.0 + ) + + 1.0 + ) scores_per_file.append(score) - if len(scores_per_file) > 0: - t = Test(name="Higher score from a short gap between ai detection events. Test bounds from %s to %s"%(datetime_from,datetime_to), vector=self.rv, score=sum(scores_per_file)) + t = Test( + name="Higher score from a short gap between ai detection events. Test bounds from %s to %s" + % (datetime_from, datetime_to), + vector=self.rv, + score=sum(scores_per_file), + ) self.session.add(t) - + self.session.commit() -def frameToTime(starttime, video_fps, frameno ): - return starttime + timedelta(seconds=float(frameno)/video_fps) +def frameToTime(starttime, video_fps, frameno): + return starttime + timedelta(seconds=float(frameno) / video_fps) # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': +if __name__ == "__main__": """ Test """ - - + + import sqlite3 + from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from model import Base as ModelBase - import sqlite3 engine = create_engine("sqlite:///db.db", echo=True) SessionMaker = sessionmaker(engine) @@ -128,9 +143,10 @@ def frameToTime(starttime, video_fps, frameno ): with sqlite3.connect("db.db") as conn: with SessionMaker() as s: print("start of cron") - q = s.query(RiskVector).filter(RiskVector.name == FishAiEventsComeInFourHourBurstsVector.__name__) + q = s.query(RiskVector).filter( + RiskVector.name == FishAiEventsComeInFourHourBurstsVector.__name__ + ) for rv in q.all(): f = FishAiEventsComeInFourHourBurstsVector(s, rv) f.execute((datetime.now() - timedelta(weeks=500), datetime.now())) - diff --git a/vector/gps.py b/vector/gps.py index 111ce5c..b881ea8 100644 --- a/vector/gps.py +++ b/vector/gps.py @@ -1,16 +1,14 @@ +import json +from datetime import datetime, timezone + from pynmeagps import NMEAReader +from sqlalchemy.orm import session from model import RiskVector, Test - -from sqlalchemy.orm import session from model.gpsdata import GpsData -from model.test import T -import json -from datetime import datetime, timezone - -class GpsVector(): +class GpsVector: rv: RiskVector boundarysegments = [] @@ -19,25 +17,27 @@ class GpsVector(): def __init__(self, session: session, rv) -> None: self.session = session self.config(rv) - + def config(self, rv): self.rv = rv config = json.loads(rv.configblob) - self.boundarysegments = boundingSegmentsFromVertices(config['boundary_vertices']) + self.boundarysegments = boundingSegmentsFromVertices(config["boundary_vertices"]) print(self.rv) print(self.boundarysegments) - def execute(self, expected_timedelta, gpsDataSelect): datetime_to = datetime.now(tz=timezone.utc) datetime_from = datetime_to - expected_timedelta - - last = self.session.query(Test)\ - .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to)\ - .order_by(Test.datetime_to.desc())\ - .limit(1).all() - - if len(list(last)) : + + last = ( + self.session.query(Test) + .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to) + .order_by(Test.datetime_to.desc()) + .limit(1) + .all() + ) + + if len(list(last)): last_datetime = last[0].datetime_to if datetime_to - last_datetime < expected_timedelta * 2: datetime_from = last_datetime @@ -47,30 +47,37 @@ def execute(self, expected_timedelta, gpsDataSelect): else: print("no previous run found, using expected_timedelta") - result = Test(name="gps test from %s to %s"%(datetime_from,datetime_to), - vector=self.rv, datetime_from=datetime_from, datetime_to=datetime_to) + result = Test( + name="gps 
test from %s to %s" % (datetime_from, datetime_to), + vector=self.rv, + datetime_from=datetime_from, + datetime_to=datetime_to, + ) self.session.add(result) self.session.commit() # print(result) - - gpsDatas = self.session.query(GpsData).filter(GpsData.datetime > datetime_from).filter(GpsData.datetime < datetime_to) + gpsDatas = ( + self.session.query(GpsData) + .filter(GpsData.datetime > datetime_from) + .filter(GpsData.datetime < datetime_to) + ) gpsPointsOutOfBounds = 0 for gpsData in gpsDatas: - if hasattr(gpsData, 'sentence'): - nmea = NMEAReader.parse(gpsData.sentence) + if hasattr(gpsData, "sentence"): + nmea = NMEAReader.parse(gpsData.sentence) point = (nmea.lat, nmea.lon) - elif hasattr(gpsData, 'lat'): + elif hasattr(gpsData, "lat"): point = (gpsData.lat, gpsData.lon) else: continue if not pointInBoundingBox(point, self.boundarysegments): gpsPointsOutOfBounds += 1 - - result.score = -1.0/(gpsPointsOutOfBounds+1.0) + 1.0 - + + result.score = -1.0 / (gpsPointsOutOfBounds + 1.0) + 1.0 + self.session.commit() return result @@ -80,7 +87,7 @@ def boundingSegmentsFromVertices(vertices): ret = [] first_bs = None last_bs = None - for (x, y) in vertices: + for x, y in vertices: if first_bs is None: first_bs = (x, y) else: @@ -90,6 +97,7 @@ def boundingSegmentsFromVertices(vertices): ret.append((last_bs, first_bs)) return ret + def pointInBoundingBox(point, boundarysegments): """ https://en.wikipedia.org/wiki/Point_in_polygon#Ray_casting_algorithm @@ -100,24 +108,24 @@ def pointInBoundingBox(point, boundarysegments): """ cnt = 0 for seg in boundarysegments: - if intersects( ((361.0,361.0), point), seg): + if intersects(((361.0, 361.0), point), seg): cnt += 1 - + return cnt % 2 == 1 def intersects(seg1, seg2): # slope = rise/run # slope = (y2-y1)/(x2-x1) - slope1 = (seg1[1][1]-seg1[0][1])/(seg1[1][0]-seg1[0][0]) - slope2 = (seg2[1][1]-seg2[0][1])/(seg2[1][0]-seg2[0][0]) + slope1 = (seg1[1][1] - seg1[0][1]) / (seg1[1][0] - seg1[0][0]) + slope2 = (seg2[1][1] - seg2[0][1]) / (seg2[1][0] - seg2[0][0]) # print("slope1", slope1, "slope2", slope2) - if abs( slope1 - slope2 ) < 0.0001: + if abs(slope1 - slope2) < 0.0001: # these lines are nearly parallel. parallel lines don't intersect. # also, this algorithm does 1/(s1-s2) later, so let's not divide by 0 return False - + # run mx+b math to find the intersecting x coordinate # m1 * (x-x1) + y1 = m2*(x-x2) + y2 # slope1 * (isectx - seg1[0][0]) + seg1[0][1] == slope2 * (isectx - seg2[0][0]) + seg2[0][1] @@ -127,64 +135,69 @@ def intersects(seg1, seg2): # slope1 * isectx - slope2 * isectx == slope1 * seg1[0][0] - seg1[0][1] - slope2 * seg2[0][0] + seg2[0][1] # / (slope1 - slope2) on both sides - isectx = (slope1 * seg1[0][0] - seg1[0][1] - slope2 * seg2[0][0] + seg2[0][1]) / (slope1 - slope2) + isectx = (slope1 * seg1[0][0] - seg1[0][1] - slope2 * seg2[0][0] + seg2[0][1]) / ( + slope1 - slope2 + ) # print("isectx", isectx) - - + # I don't actually care what the y coordinate is. I only care if the intersection is on both lines. 
# I can find that just by comparing x boundaries - between1 = (seg1[0][0] >= isectx and isectx >= seg1[1][0]) or \ - (seg1[0][0] <= isectx and isectx <= seg1[1][0]) - - between2 = (seg2[0][0] >= isectx and isectx >= seg2[1][0]) or \ - (seg2[0][0] <= isectx and isectx <= seg2[1][0]) + between1 = (seg1[0][0] >= isectx and isectx >= seg1[1][0]) or ( + seg1[0][0] <= isectx and isectx <= seg1[1][0] + ) + + between2 = (seg2[0][0] >= isectx and isectx >= seg2[1][0]) or ( + seg2[0][0] <= isectx and isectx <= seg2[1][0] + ) return between1 and between2 - + + # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': +if __name__ == "__main__": a = [ - NMEAReader.parse('$GPGGA,210230,3855.4487,N,09446.0071,W,1,07,1.1,370.5,M,-29.5,M,,*7A'), - NMEAReader.parse('$GPGSV,2,1,08,02,74,042,45,04,18,190,36,07,67,279,42,12,29,323,36*77'), - NMEAReader.parse('$GPGSV,2,2,08,15,30,050,47,19,09,158,,26,12,281,40,27,38,173,41*7B'), - NMEAReader.parse('$GPRMC,210230,A,3855.4487,N,09446.0071,W,0.0,076.2,130495,003.8,E*69'), + NMEAReader.parse("$GPGGA,210230,3855.4487,N,09446.0071,W,1,07,1.1,370.5,M,-29.5,M,,*7A"), + NMEAReader.parse("$GPGSV,2,1,08,02,74,042,45,04,18,190,36,07,67,279,42,12,29,323,36*77"), + NMEAReader.parse("$GPGSV,2,2,08,15,30,050,47,19,09,158,,26,12,281,40,27,38,173,41*7B"), + NMEAReader.parse("$GPRMC,210230,A,3855.4487,N,09446.0071,W,0.0,076.2,130495,003.8,E*69"), ] for i in a: print(i) - print(intersects(((0,0), (10, 20)), ((0,0), (20, 10)))) - print(intersects(((0,1), (10, 20)), ((1,0), (20, 10)))) - print(intersects(((1,0), (10, 20)), ((0,1), (20, 10)))) - print(intersects(((0,0), (10, 20)), ((0,0), (-20, 10)))) - print(intersects(((0,0), (10, 20)), ((-20, 10), (0,0)))) - print(intersects(((0,1), (10, 20)), ((0,0), (-20, 10)))) - print(intersects(((0,1), (10, 20)), ((-20, 10), (0,0)))) - - bv = [[36.9756611, -122.0273566], - [36.9758839, -122.0255113], - [36.9736554, -122.0240521], - [36.9694039, -122.0231509], - [36.9686324, -122.0227218], - [36.9683924, -122.0248246], - [36.9690267, -122.0263481], - [36.9734497, -122.0270348]] + print(intersects(((0, 0), (10, 20)), ((0, 0), (20, 10)))) + print(intersects(((0, 1), (10, 20)), ((1, 0), (20, 10)))) + print(intersects(((1, 0), (10, 20)), ((0, 1), (20, 10)))) + print(intersects(((0, 0), (10, 20)), ((0, 0), (-20, 10)))) + print(intersects(((0, 0), (10, 20)), ((-20, 10), (0, 0)))) + print(intersects(((0, 1), (10, 20)), ((0, 0), (-20, 10)))) + print(intersects(((0, 1), (10, 20)), ((-20, 10), (0, 0)))) + + bv = [ + [36.9756611, -122.0273566], + [36.9758839, -122.0255113], + [36.9736554, -122.0240521], + [36.9694039, -122.0231509], + [36.9686324, -122.0227218], + [36.9683924, -122.0248246], + [36.9690267, -122.0263481], + [36.9734497, -122.0270348], + ] bs = boundingSegmentsFromVertices(bv) print("segments", bs) # see visualization_of_gps_test.jpg to look at what this test is doing - print("in box", pointInBoundingBox((36.970,-122.022), bs)) - print("in box", pointInBoundingBox((36.972,-122.022), bs)) - print("in box", pointInBoundingBox((36.975,-122.022), bs)) - print("in box", pointInBoundingBox((36.976,-122.022), bs)) - print("in box", pointInBoundingBox((36.970,-122.024), bs)) - print("in box", pointInBoundingBox((36.972,-122.024), bs)) - print("in box", pointInBoundingBox((36.975,-122.024), bs)) - print("in box", pointInBoundingBox((36.976,-122.024), bs)) - print("in box", pointInBoundingBox((36.970,-122.026), bs)) - print("in box", pointInBoundingBox((36.972,-122.026), bs)) - print("in box", 
pointInBoundingBox((36.975,-122.026), bs)) - print("in box", pointInBoundingBox((36.976,-122.026), bs)) - print("in box", pointInBoundingBox((36.970,-122.028), bs)) - print("in box", pointInBoundingBox((36.972,-122.028), bs)) - print("in box", pointInBoundingBox((36.975,-122.028), bs)) - print("in box", pointInBoundingBox((36.976,-122.028), bs)) - + print("in box", pointInBoundingBox((36.970, -122.022), bs)) + print("in box", pointInBoundingBox((36.972, -122.022), bs)) + print("in box", pointInBoundingBox((36.975, -122.022), bs)) + print("in box", pointInBoundingBox((36.976, -122.022), bs)) + print("in box", pointInBoundingBox((36.970, -122.024), bs)) + print("in box", pointInBoundingBox((36.972, -122.024), bs)) + print("in box", pointInBoundingBox((36.975, -122.024), bs)) + print("in box", pointInBoundingBox((36.976, -122.024), bs)) + print("in box", pointInBoundingBox((36.970, -122.026), bs)) + print("in box", pointInBoundingBox((36.972, -122.026), bs)) + print("in box", pointInBoundingBox((36.975, -122.026), bs)) + print("in box", pointInBoundingBox((36.976, -122.026), bs)) + print("in box", pointInBoundingBox((36.970, -122.028), bs)) + print("in box", pointInBoundingBox((36.972, -122.028), bs)) + print("in box", pointInBoundingBox((36.975, -122.028), bs)) + print("in box", pointInBoundingBox((36.976, -122.028), bs)) diff --git a/vector/internet.py b/vector/internet.py index 20b7d61..a051d7f 100644 --- a/vector/internet.py +++ b/vector/internet.py @@ -1,49 +1,50 @@ - -from model import RiskVector, Test - -from sqlalchemy.orm import session -from model.internetdata import InternetData - import json +import re import subprocess +from datetime import datetime, timedelta, timezone -import re -import codecs +from sqlalchemy.orm import session + +from model import Test +from model.internetdata import InternetData -from datetime import datetime, timedelta, timezone -class InternetVector(): +class InternetVector: target_ips = [] # tests = relationship("Test") def __init__(self, session: session, rv) -> None: self.session = session self.config(rv) - + def config(self, rv): self.rv = rv config = json.loads(rv.configblob) - self.target_ips: list[str] = config['target_ips'] - self.run_traceroute: bool = config['run_traceroute'] + self.target_ips: list[str] = config["target_ips"] + self.run_traceroute: bool = config["run_traceroute"] print(self.rv) print(self.target_ips) - - def execute(self, expected_timedelta: timedelta): datetime_to = datetime.now(tz=timezone.utc) datetime_from = datetime_to - expected_timedelta - last = self.session.query(Test)\ - .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to)\ - .order_by(Test.datetime_to.desc())\ - .limit(1).all() - - result = Test(name="internet test at %s"%(datetime_to.strftime('%Y-%m-%d %H:%M:%SZ')), vector=self.rv) + last = ( + self.session.query(Test) + .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to) + .order_by(Test.datetime_to.desc()) + .limit(1) + .all() + ) + + result = Test( + name="internet test at %s" % (datetime_to.strftime("%Y-%m-%d %H:%M:%SZ")), + vector=self.rv, + ) self.session.add(result) self.session.commit() # print(result) - + datas = [] for ip in self.target_ips: @@ -52,53 +53,55 @@ def execute(self, expected_timedelta: timedelta): datas.append(data) self.session.add(data) self.session.commit() - + data = ping(ip) datas.append(data) self.session.add(data) self.session.commit() - - f = filter(lambda data: data.returncode != 0 or data.packetloss and data.packetloss > 33.4 , datas) - result.score = 
-1.0/(len(list(f))+1.0) + 1.0
-
+
+        f = filter(
+            lambda data: data.returncode != 0 or (data.packetloss and data.packetloss > 33.4), datas
+        )
+        result.score = -1.0 / (len(list(f)) + 1.0) + 1.0
+
         self.session.commit()
 
         return result
 
+
 def traceroute(ip):
-    cmd = "traceroute -m 12 %s | grep -E '^\s*[0-9][0-9]*\s*' | grep -v '\* \* \*' | awk '{{print $2 \" \" $3}}' "%(ip)
+    cmd = (
+        "traceroute -m 12 %s | grep -E '^\\s*[0-9][0-9]*\\s*' | grep -v '\\* \\* \\*' | awk '{{print $2 \" \" $3}}' "
+        % (ip)
+    )
     p = subprocess.run(cmd, shell=True, capture_output=True, text=True)
     lines = json.dumps(p.stdout.strip().split("\n"))
-
-
+
     return InternetData(returncode=p.returncode, traceroute=lines)
-
+
 
 def ping(ip):
-    cmd = "ping -c 3 -W 5 -q %s "%(ip)
+    cmd = "ping -c 3 -W 5 -q %s " % (ip)
     p = subprocess.run(cmd, shell=True, capture_output=True, text=True)
-
+
     data = InternetData(returncode=p.returncode)
     for line in p.stdout.strip().split("\n"):
        # print(line)
-        m = re.search('([\d\.]+)\% packet loss', line)
+        m = re.search(r"([\d\.]+)% packet loss", line)
         if m:
             # print("loss", m[1])
             data.packetloss = float(m[1])
             continue
-        m = re.search('min/avg/.*= [\d\.]+/([\d\.]+)/', line)
+        m = re.search(r"min/avg/.*= [\d\.]+/([\d\.]+)/", line)
         if m:
             # print("ping", m[1])
             data.ping = float(m[1])
             continue
     return data
 
-
-# test by running directly with `python3 -m vector.fname`
-if __name__ == '__main__':
-
-
-    print(traceroute('1.1.1.1'))
-    print(ping('1.1.1.1'))
 
+# test by running directly with `python3 -m vector.fname`
+if __name__ == "__main__":
+    print(traceroute("1.1.1.1"))
+    print(ping("1.1.1.1"))
diff --git a/vector/tegrastats.py b/vector/tegrastats.py
index 95f9d09..9046f02 100644
--- a/vector/tegrastats.py
+++ b/vector/tegrastats.py
@@ -1,83 +1,81 @@
-
-from model import RiskVector, Test
+import json
+from datetime import datetime, timedelta, timezone
 
 from sqlalchemy.orm import session
-from model.internetdata import InternetData
-
-import json
-import subprocess
-import re
-import codecs
 
+from model import Test
 
-from datetime import datetime, timedelta, timezone
 
-class TegrastatsVector():
+class TegrastatsVector:
 
     namedpipe = ""
 
     # tests = relationship("Test")
 
     def __init__(self, session: session, rv) -> None:
         self.session = session
         self.config(rv)
-
+
     def config(self, rv):
         self.rv = rv
         config = json.loads(rv.configblob)
-        self.namedpipe: list[str] = config['namedpipe']
+        self.namedpipe: list[str] = config["namedpipe"]
         print(self.namedpipe)
 
-
-
     def execute(self, expected_timedelta: timedelta):
 
         datetime_to = datetime.now(tz=timezone.utc)
         datetime_from = datetime_to - expected_timedelta
 
-        last = self.session.query(Test)\
-            .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to)\
-            .order_by(Test.datetime_to.desc())\
-            .limit(1).all()
-
-        result = Test(name="tegrastats test at %s"%(datetime_to.strftime('%Y-%m-%d %H:%M:%SZ')), vector=self.rv)
+        last = (
+            self.session.query(Test)
+            .where(Test.vector_id == self.rv.id, Test.datetime_to < datetime_to)
+            .order_by(Test.datetime_to.desc())
+            .limit(1)
+            .all()
+        )
+
+        result = Test(
+            name="tegrastats test at %s" % (datetime_to.strftime("%Y-%m-%d %H:%M:%SZ")),
+            vector=self.rv,
+        )
 
         self.session.add(result)
         self.session.commit()
         # print(result)
-
+
         datas = []
 
         for statsline in tegrastats(self.namedpipe):
             print(statsline)
-
+
         result.score = 0.0
-
+
         self.session.commit()
 
         return result
 
+
 def tegrastats(namedpipe):
     with open(namedpipe) as f:
         for l in f.readlines():
-            yield(l)
+            yield l
 
 
-if __name__ == '__main__':
-
+if __name__ == "__main__":
     import os
+
     from 
flask.config import Config as FlaskConfig - flaskconfig = FlaskConfig(root_path='') - flaskconfig.from_object('config.defaults') - if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') + flaskconfig = FlaskConfig(root_path="") + + flaskconfig.from_object("config.defaults") + if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") import click @click.command() - @click.option('--dbname', default=flaskconfig.get('DBNAME')) - @click.option('--dbuser', default=flaskconfig.get('DBUSER')) - @click.option('--namedpipe') + @click.option("--dbname", default=flaskconfig.get("DBNAME")) + @click.option("--dbuser", default=flaskconfig.get("DBUSER")) + @click.option("--namedpipe") def main(dbname, dbuser, namedpipe): - for i in tegrastats(namedpipe): print(i) main() - diff --git a/vector/thalos_mount.py b/vector/thalos_mount.py index b7de8aa..3cbefb3 100644 --- a/vector/thalos_mount.py +++ b/vector/thalos_mount.py @@ -1,96 +1,84 @@ - -from model import Base, RiskVector, Test - -from sqlalchemy.orm.session import Session - +import os from datetime import datetime, timedelta, timezone - from pathlib import Path -import os -import json +from sqlalchemy.orm.session import Session +from model import Base, RiskVector, Test -class ThalosMountVector(): +class ThalosMountVector: rv: RiskVector session: Session - schedule_string: str = 'every 10 minutes' + schedule_string: str = "every 10 minutes" def __init__(self, session: Session, rv) -> None: self.session = session self.config(rv) - - + def config(self, rv): self.rv = rv # config = json.loads(rv.configblob) print(self.rv) - def execute(self, expected_timedelta: timedelta): - - now = datetime.now(); + now = datetime.now() datetime_from = now - expected_timedelta - nowstr = now.astimezone(timezone.utc).strftime('%Y-%m-%d %H:%M:%SZ') - result = Test(name="thalos mounted network dir, run at %s "%(nowstr), vector=self.rv) + nowstr = now.astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M:%SZ") + result = Test(name="thalos mounted network dir, run at %s " % (nowstr), vector=self.rv) - self.session.add(result) self.session.commit() result.score = 1.0 try: - thalosdir = Path('/thalos/') + thalosdir = Path("/thalos/") for boatpath in thalosdir.iterdir(): for camdirs in (boatpath / "videos").iterdir(): - datedirs = [ datedir.name for datedir in camdirs.iterdir() ] + datedirs = [datedir.name for datedir in camdirs.iterdir()] if len(datedirs) > 0: result.score -= 0.125 - if now.astimezone(timezone.utc).strftime('%d-%m-%Y') in datedirs: + if now.astimezone(timezone.utc).strftime("%d-%m-%Y") in datedirs: result.score -= 0.125 if result.score < 1.0: result.score -= 0.5 except Exception as e: print("error", type(e), e) result.detail = str(e) - finally: + finally: self.session.commit() return result - - - # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': - +if __name__ == "__main__": from flask.config import Config as FlaskConfig - flaskconfig = FlaskConfig(root_path='') - flaskconfig.from_object('config.defaults') - if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') + flaskconfig = FlaskConfig(root_path="") + + flaskconfig.from_object("config.defaults") + if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") import click @click.command() - @click.option('--dbname', default=flaskconfig.get('DBNAME')) - @click.option('--dbuser', default=flaskconfig.get('DBUSER')) + @click.option("--dbname", default=flaskconfig.get("DBNAME")) + @click.option("--dbuser", 
default=flaskconfig.get("DBUSER")) def main(dbname, dbuser): - import sqlalchemy as sa from sqlalchemy.orm import sessionmaker as SessionMaker - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) sessionmaker = SessionMaker(sa_engine) Base.metadata.create_all(sa_engine) session = sessionmaker() - results = list(session.query(RiskVector).filter(RiskVector.name == ThalosMountVector.__name__)) + results = list( + session.query(RiskVector).filter(RiskVector.name == ThalosMountVector.__name__) + ) tmv = ThalosMountVector(session, rv=results[0]) tmv.execute(None, None) - main() diff --git a/vector/thalos_vids_exist.py b/vector/thalos_vids_exist.py index 66a2949..4743e30 100644 --- a/vector/thalos_vids_exist.py +++ b/vector/thalos_vids_exist.py @@ -1,86 +1,99 @@ - -from model import Base, RiskVector, Test - -from sqlalchemy.orm.session import Session - -from datetime import datetime, timedelta, timezone +import os import time - +from datetime import datetime, timedelta, timezone from pathlib import Path -import os -import json +from flask.config import Config as FlaskConfig +from sqlalchemy.orm.session import Session -MAGIC_20_MINUTES_IN_SECONDS = 20*60.0 +from model import Base, RiskVector, Test -from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +MAGIC_20_MINUTES_IN_SECONDS = 20 * 60.0 +flaskconfig = FlaskConfig(root_path="") -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -thalosviddir = flaskconfig.get('THALOS_VIDEO_DIR') +thalosviddir = flaskconfig.get("THALOS_VIDEO_DIR") -class ThalosVideosExistVector(): + +class ThalosVideosExistVector: rv: RiskVector session: Session - schedule_string: str = 'every 5 minutes' + schedule_string: str = "every 5 minutes" def __init__(self, session: Session, rv) -> None: self.session = session self.config(rv) - - + def config(self, rv): self.rv = rv # config = json.loads(rv.configblob) print(self.rv) - def execute(self, expected_timedelta: timedelta): - if time.monotonic() < MAGIC_20_MINUTES_IN_SECONDS: # to recent from system boot time. don't run. 
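+            # time.monotonic() counts seconds from an unspecified start point
+            # (effectively boot on Linux), so this skips the check for roughly
+            # the first 20 minutes of uptime, presumably while the thalos
+            # network mount is still coming up.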
return - now = datetime.now().astimezone(timezone.utc); + now = datetime.now().astimezone(timezone.utc) datetime_from = now - expected_timedelta - nowfloor5min = now.replace(minute=(now.minute//5)*5, second=0, microsecond=0) + nowfloor5min = now.replace(minute=(now.minute // 5) * 5, second=0, microsecond=0) nowfloorminus10min = nowfloor5min - timedelta(minutes=5) nowfloorminus15min = nowfloor5min - timedelta(minutes=10) - - nowstr = nowfloorminus15min.strftime('%d-%m-%Y-%H-%M') - result = Test(name="thalos video files check, looking for %s "%(nowstr), vector=self.rv) + nowstr = nowfloorminus15min.strftime("%d-%m-%Y-%H-%M") + result = Test(name="thalos video files check, looking for %s " % (nowstr), vector=self.rv) - self.session.add(result) self.session.commit() result.score = 1.0 errors = [] - for cam in ['cam1', 'cam2']: + for cam in ["cam1", "cam2"]: try: - mp4vid = Path(thalosviddir + '/' + cam + '/' + nowfloorminus15min.strftime('%d-%m-%Y') + '/' + nowfloorminus15min.strftime('%H') + '/' + nowstr + ".mp4.done") + mp4vid = Path( + thalosviddir + + "/" + + cam + + "/" + + nowfloorminus15min.strftime("%d-%m-%Y") + + "/" + + nowfloorminus15min.strftime("%H") + + "/" + + nowstr + + ".mp4.done" + ) st = mp4vid.stat() # score based on size? I guess? larger than 1MiB is like 65% confident that the file is ok if st.st_size > 0: - result.score -= 0.25 * ( 1.0 - (1.0 / (1.0 + st.st_size / 500000.0 )) ) + result.score -= 0.25 * (1.0 - (1.0 / (1.0 + st.st_size / 500000.0))) except Exception as e: print("error", type(e), e) errors.append(str(e)) try: - avivid = Path(thalosviddir + '/' + cam + '/' + nowfloorminus15min.strftime('%d-%m-%Y') + '/' + nowfloorminus15min.strftime('%H') + '/' + nowstr + ".avi.done") + avivid = Path( + thalosviddir + + "/" + + cam + + "/" + + nowfloorminus15min.strftime("%d-%m-%Y") + + "/" + + nowfloorminus15min.strftime("%H") + + "/" + + nowstr + + ".avi.done" + ) st = avivid.stat() - + # score based on size? I guess? 
larger than 1MiB is like 65% confident that the file is ok if st.st_size > 0: - result.score -= 0.25 * ( 1.0 - (1.0 / (1.0 + st.st_size / 500000.0 )) ) + result.score -= 0.25 * (1.0 - (1.0 / (1.0 + st.st_size / 500000.0))) except Exception as e: print("error", type(e), e) errors.append(str(e)) - if len(errors)> 0: + if len(errors) > 0: result.detail = "\n".join(errors) self.session.commit() @@ -88,26 +101,25 @@ def execute(self, expected_timedelta: timedelta): # test by running directly with `python3 -m vector.fname` -if __name__ == '__main__': - +if __name__ == "__main__": from flask.config import Config as FlaskConfig - flaskconfig = FlaskConfig(root_path='') - flaskconfig.from_object('config.defaults') - if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') + flaskconfig = FlaskConfig(root_path="") + + flaskconfig.from_object("config.defaults") + if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") import click @click.command() - @click.option('--dbname', default=flaskconfig.get('DBNAME')) - @click.option('--dbuser', default=flaskconfig.get('DBUSER')) + @click.option("--dbname", default=flaskconfig.get("DBNAME")) + @click.option("--dbuser", default=flaskconfig.get("DBUSER")) def main(dbname, dbuser): - import sqlalchemy as sa from sqlalchemy.orm import sessionmaker as SessionMaker - sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + sa_engine = sa.create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) sessionmaker = SessionMaker(sa_engine) Base.metadata.create_all(sa_engine) @@ -116,12 +128,11 @@ def main(dbname, dbuser): rv = RiskVector() rv.id = -1 rv.name = ThalosVideosExistVector.__name__ - rv.schedule_string = 'every 1 minutes' - rv.configblob = '{}' + rv.schedule_string = "every 1 minutes" + rv.configblob = "{}" rv.tests = [] tmv = ThalosVideosExistVector(session, rv=rv) tmv.execute(timedelta(minutes=5)) - main() diff --git a/vector_schedule.py b/vector_schedule.py index 6510a6e..5b628e8 100644 --- a/vector_schedule.py +++ b/vector_schedule.py @@ -1,77 +1,70 @@ -import json -import io - -from flask import Flask -from flask_admin import Admin - -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker, Session import os - -from model import Base as ModelBase, RiskVector, RiskVectorModelView, Test, TestModelView -from vector import GpsVector, FishAiEventsComeInFourHourBurstsVector, InternetVector, EquipmentOutageAggVector, ThalosMountVector, ThalosVideosExistVector, ElogTimeGapsVector,CatchCountA - -import sqlite3 -from datetime import datetime, timedelta, timezone - -import click - -import schedule import re import time +from datetime import timedelta - +import boto3 +import click +import schedule from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker -flaskconfig.from_object('config.defaults') -if 'ENVIRONMENT' in os.environ: - flaskconfig.from_envvar('ENVIRONMENT') +from model import Base as ModelBase +from model import RiskVector +from vector import ( + CatchCountA, + ElogTimeGapsVector, + EquipmentOutageAggVector, + FishAiEventsComeInFourHourBurstsVector, + GpsVector, + InternetVector, + ThalosMountVector, + ThalosVideosExistVector, +) +flaskconfig = FlaskConfig(root_path="") -import boto3 +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") -s3 = boto3.resource('s3') -bucket = 
s3.Bucket('51-gema-dev-dp-raw') +s3 = boto3.resource("s3") +bucket = s3.Bucket("51-gema-dev-dp-raw") def parse_and_schedule(vector: RiskVector, execute_func, *args): - if not vector.schedule_string: return - if m := re.match('every (\\d+) minutes', vector.schedule_string ): - + if m := re.match("every (\\d+) minutes", vector.schedule_string): d = timedelta(minutes=int(m.group(1))) schedule.every(int(m.group(1))).minutes.do(execute_func, d, *args) - elif m := re.match('every (\\d+) hours', vector.schedule_string ): - + elif m := re.match("every (\\d+) hours", vector.schedule_string): d = timedelta(hours=int(m.group(1))) schedule.every(int(m.group(1))).hours.do(execute_func, d, *args) else: click.echo("VECTOR NOT SCHEDULED: {}".format(vector.name)) - @click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) def main(dbname, dbuser): # engine = create_engine("sqlite:///db.db", echo=True) - # print(os.environ, dbuser, dbname) - engine = create_engine("postgresql+psycopg2://%s@/%s"%(dbuser, dbname), echo=True) + engine = create_engine("postgresql+psycopg2://%s@/%s" % (dbuser, dbname), echo=True) SessionMaker = sessionmaker(engine) ModelBase.metadata.create_all(engine) with SessionMaker() as session: print("start of cron") - + q = session.query(RiskVector) - + all_vectors = [] gps_vectors = [] @@ -86,7 +79,6 @@ def main(dbname, dbuser): for v in q.all(): print("start of vector", v) all_vectors.append(v) - if v.name == GpsVector.__name__: g = GpsVector(session, v) @@ -100,7 +92,6 @@ def main(dbname, dbuser): # res = f.execute(daterange) # print("end of vector", res) - if v.name == InternetVector.__name__: f = InternetVector(session, v) inet_vectors.append(f) @@ -121,14 +112,14 @@ def main(dbname, dbuser): parse_and_schedule(v, tmv.execute) # res = eov.execute(daterange) # print("end of vector", res) - + if v.name == ThalosVideosExistVector.__name__: tve = ThalosVideosExistVector(session, v) tve_vectors.append(tve) parse_and_schedule(v, tve.execute) # res = eov.execute(daterange) # print("end of vector", res) - + if v.name == ElogTimeGapsVector.__name__: eltg = ElogTimeGapsVector(session, v) eltg_vectors.append(eltg) @@ -139,11 +130,9 @@ def main(dbname, dbuser): cca_vectors.append(cca) parse_and_schedule(v, cca.execute) - for v in all_vectors: pass - while 1: n = schedule.idle_seconds() if n is None: @@ -155,5 +144,6 @@ def main(dbname, dbuser): time.sleep(n) schedule.run_pending() -if __name__ == '__main__': - main() \ No newline at end of file + +if __name__ == "__main__": + main() diff --git a/video_fetch.py b/video_fetch.py index eb8dbc5..960248e 100644 --- a/video_fetch.py +++ b/video_fetch.py @@ -1,53 +1,60 @@ - -from datetime import datetime,timezone,timedelta -import click -import codecs import os -from pathlib import Path -import psycopg2 -from psycopg2.pool import SimpleConnectionPool import re -import schedule import subprocess import time +from datetime import datetime, timedelta, timezone +from pathlib import Path - +import click +import psycopg2 +import schedule from flask.config import Config as FlaskConfig -flaskconfig = FlaskConfig(root_path='') +from psycopg2.pool import SimpleConnectionPool + +flaskconfig = FlaskConfig(root_path="") + +flaskconfig.from_object("config.defaults") +if "ENVIRONMENT" in os.environ: + flaskconfig.from_envvar("ENVIRONMENT") 
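+# Config layering: config/defaults.py supplies the defaults; the file named by
+# $ENVIRONMENT (if set) overrides them. The vector modules and vector_schedule.py
+# use the same pattern.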
-flaskconfig.from_object('config.defaults')
-if 'ENVIRONMENT' in os.environ:
-    flaskconfig.from_envvar('ENVIRONMENT')
 
 
 def depth_first_video_files(cameradir: Path):
     try:
         date_dirs = [x for x in cameradir.iterdir() if x.is_dir()]
-        date_dirs.sort(key=lambda x: datetime.strptime(x.name, '%d-%m-%Y'), reverse=True)
+        date_dirs.sort(key=lambda x: datetime.strptime(x.name, "%d-%m-%Y"), reverse=True)
         for date_dir in date_dirs:
             hour_dirs = [x for x in date_dir.iterdir() if x.is_dir()]
             hour_dirs.sort(key=lambda x: int(x.name), reverse=True)
             for hour_dir in hour_dirs:
-                vid_files = [x for x in hour_dir.iterdir() if x.is_file() and re.match('.*-(\d+)\.', x.name)]
-                vid_files.sort(key=lambda x: re.match('.*-(\d+)\.', x.name)[1], reverse=True)
+                vid_files = [
+                    x for x in hour_dir.iterdir() if x.is_file() and re.match(r".*-(\d+)\.", x.name)
+                ]
+                vid_files.sort(key=lambda x: re.match(r".*-(\d+)\.", x.name)[1], reverse=True)
                 for v in vid_files:
                     if v.name.endswith(".avi.done") or v.name.endswith(".avi"):
                         yield v
     except GeneratorExit:
         return
 
+
 def is_gpg(f: Path, passphrase_file: str):
-    cmd = "cat %s | gpg --pinentry-mode loopback --passphrase-fd 0 \
-    --list-packets %s "%(
-        passphrase_file,
-        str(f.absolute())
-    )
+    cmd = (
+        "cat %s | gpg --pinentry-mode loopback --passphrase-fd 0 \
+        --list-packets %s "
+        % (passphrase_file, str(f.absolute()))
+    )
     p = subprocess.run(cmd, shell=True, capture_output=True, text=True)
     return p.returncode == 0
 
-def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path, passphrase_file: str, thalos_video_suffix: str):
-
+
+def video_fetch(
+    cpool: SimpleConnectionPool,
+    thalos_dir: Path,
+    output_dir: Path,
+    passphrase_file: str,
+    thalos_video_suffix: str,
+):
     for cameradir in filter(lambda x: x.is_dir(), thalos_dir.iterdir()):
-
         new_vids: list[Path] = []
         discovered_matching_last_modified = 0
@@ -57,39 +64,51 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path,
         try:
             with conn.cursor() as cur:
                 for vid_file in depth_first_video_files(cameradir):
-
                     vid_file_absolute_str = str(vid_file.absolute())
-                    vid_file_done_alt_str = vid_file_absolute_str[0:-len('.done')] if vid_file_absolute_str.endswith('.done') else vid_file_absolute_str+".done"
+                    vid_file_done_alt_str = (
+                        vid_file_absolute_str[0 : -len(".done")]
+                        if vid_file_absolute_str.endswith(".done")
+                        else vid_file_absolute_str + ".done"
+                    )
 
                     start_datetime: datetime = datetime.strptime(
-                        vid_file.name[0:len('20-07-2023-22-20')],
-                        '%d-%m-%Y-%H-%M')
+                        vid_file.name[0 : len("20-07-2023-22-20")], "%d-%m-%Y-%H-%M"
+                    )
                     start_datetime = start_datetime.replace(tzinfo=timezone.utc)
 
                     if last_start_datetime is None:
                         last_start_datetime = start_datetime
 
-                    if start_datetime + timedelta(days=2) < last_start_datetime :
+                    if start_datetime + timedelta(days=2) < last_start_datetime:
                         # ok, we're too far back in time now. No reason to keep going back
                         # I'm done searching.
                         break
 
                     s = vid_file.stat()
                     last_modified: datetime = datetime.fromtimestamp(s.st_mtime, tz=timezone.utc)
 
-                    cur.execute("select original_path, last_modified from video_files where original_path in (%s, %s);",
-                        (vid_file_absolute_str,vid_file_done_alt_str,))
+                    cur.execute(
+                        "select original_path, last_modified from video_files where original_path in (%s, %s);",
+                        (
+                            vid_file_absolute_str,
+                            vid_file_done_alt_str,
+                        ),
+                    )
                     rows = list(cur)
                     if len(rows) == 0:
                         # we have never seen this file before! 
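+                        # record it in video_files and queue it for the decryption pass below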
new_vids.append(vid_file) - cur.execute("insert into video_files \ + cur.execute( + "insert into video_files \ (original_path, last_modified, start_datetime, cam_name) \ - values (%s, %s, %s, %s);", ( - vid_file_absolute_str, last_modified, start_datetime, cameradir.name)) + values (%s, %s, %s, %s);", + (vid_file_absolute_str, last_modified, start_datetime, cameradir.name), + ) conn.commit() elif rows[0][1] != last_modified: # found it, update the lastmodified - cur.execute("update video_files set last_modified = %s where original_path in (%s, %s);", - (last_modified, vid_file_absolute_str, vid_file_done_alt_str)) + cur.execute( + "update video_files set last_modified = %s where original_path in (%s, %s);", + (last_modified, vid_file_absolute_str, vid_file_done_alt_str), + ) conn.commit() elif discovered_matching_last_modified > 3: # I found files 4 where the lastmodified matches @@ -98,7 +117,7 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path, else: # I can only be here if a row was found and the last_modified matches discovered_matching_last_modified += 1 - + finally: cpool.putconn(conn) @@ -110,9 +129,12 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path, new_vids.reverse() for new_vid in new_vids: - new_vid_absolute_str = str(new_vid.absolute()) - new_vid_done_alt_str = new_vid_absolute_str[0:-len('.done')] if new_vid_absolute_str.endswith('.done') else new_vid_absolute_str+".done" + new_vid_done_alt_str = ( + new_vid_absolute_str[0 : -len(".done")] + if new_vid_absolute_str.endswith(".done") + else new_vid_absolute_str + ".done" + ) s = new_vid.stat() last_modified = datetime.fromtimestamp(s.st_mtime, tz=timezone.utc) @@ -120,10 +142,16 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path, conn: psycopg2.connection = cpool.getconn() try: with conn.cursor() as cur: - cur.execute("select original_path, last_modified, start_datetime, \ + cur.execute( + "select original_path, last_modified, start_datetime, \ decrypted_path, decrypted_datetime, stdout, stderr \ - from video_files where original_path in ( %s, %s );", (new_vid_absolute_str, new_vid_done_alt_str,)) - #schema: (original_path, last_modified, start_datetime, decrypted_path, decrypted_datetime, stdout, stderr) + from video_files where original_path in ( %s, %s );", + ( + new_vid_absolute_str, + new_vid_done_alt_str, + ), + ) + # schema: (original_path, last_modified, start_datetime, decrypted_path, decrypted_datetime, stdout, stderr) rows = list(cur) if len(rows) == 1 and rows[0][3] is not None: # this script has already decrypted this video @@ -134,7 +162,12 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path, start_time: datetime = rows[0][2] start_time = start_time.astimezone(timezone.utc) # print(start_time) - str_start_time = start_time.isoformat().replace('-', '').replace(':', '').replace('+0000', 'Z') + str_start_time = ( + start_time.isoformat() + .replace("-", "") + .replace(":", "") + .replace("+0000", "Z") + ) output_filename = str_start_time + "_" + cameradir.name + ".avi" # if output_filename.endswith('.done'): # output_filename = output_filename[0:-5] @@ -143,56 +176,66 @@ def video_fetch(cpool: SimpleConnectionPool, thalos_dir: Path, output_dir: Path, # gpg decrypt the video cmd = None if is_gpg(new_vid, passphrase_file): - cmd = "cat %s | gpg --batch --yes \ + cmd = ( + "cat %s | gpg --batch --yes \ --pinentry-mode loopback --passphrase-fd 0 \ - --decrypt --output %s %s "%( - 
passphrase_file, - str(output_file.absolute()), - new_vid_absolute_str - ) + --decrypt --output %s %s " + % (passphrase_file, str(output_file.absolute()), new_vid_absolute_str) + ) else: - cmd = "cp %s %s"%( - new_vid_absolute_str, - str(output_file.absolute()) - ) + cmd = "cp %s %s" % (new_vid_absolute_str, str(output_file.absolute())) p = subprocess.run(cmd, shell=True, capture_output=True, text=True) if p.returncode == 0: - cur.execute("update video_files set decrypted_path = %s, \ + cur.execute( + "update video_files set decrypted_path = %s, \ decrypted_datetime = %s, stdout = %s, stderr = %s \ - where original_path in ( %s, %s );", ( - str(output_file.absolute()), datetime.now(tz=timezone.utc), - p.stdout, p.stderr, - new_vid_absolute_str, new_vid_done_alt_str) + where original_path in ( %s, %s );", + ( + str(output_file.absolute()), + datetime.now(tz=timezone.utc), + p.stdout, + p.stderr, + new_vid_absolute_str, + new_vid_done_alt_str, + ), ) conn.commit() else: - cur.execute("update video_files set decrypted_path = %s, \ + cur.execute( + "update video_files set decrypted_path = %s, \ decrypted_datetime = %s, stdout = %s, stderr = %s \ - where original_path in ( %s, %s );", ( - None, datetime.now(tz=timezone.utc), - p.stdout, p.stderr, - new_vid_absolute_str, new_vid_done_alt_str) + where original_path in ( %s, %s );", + ( + None, + datetime.now(tz=timezone.utc), + p.stdout, + p.stderr, + new_vid_absolute_str, + new_vid_done_alt_str, + ), ) conn.commit() finally: cpool.putconn(conn) -@click.command() -@click.option('--dbname', default=flaskconfig.get('DBNAME')) -@click.option('--dbuser', default=flaskconfig.get('DBUSER')) -@click.option('--thalos_video_dir', default=flaskconfig.get('THALOS_VIDEO_DIR')) -@click.option('--output_dir', default=flaskconfig.get('VIDEO_OUTPUT_DIR')) -@click.option('--passphrase_file', default=flaskconfig.get('VIDEO_PASSPHRASE_FILE')) -@click.option('--thalos_video_suffix', default=flaskconfig.get('THALOS_VIDEO_SUFFIX')) -@click.option('--print_latest', is_flag=True) -def main(dbname, dbuser, thalos_video_dir, output_dir, passphrase_file, thalos_video_suffix, print_latest): +@click.command() +@click.option("--dbname", default=flaskconfig.get("DBNAME")) +@click.option("--dbuser", default=flaskconfig.get("DBUSER")) +@click.option("--thalos_video_dir", default=flaskconfig.get("THALOS_VIDEO_DIR")) +@click.option("--output_dir", default=flaskconfig.get("VIDEO_OUTPUT_DIR")) +@click.option("--passphrase_file", default=flaskconfig.get("VIDEO_PASSPHRASE_FILE")) +@click.option("--thalos_video_suffix", default=flaskconfig.get("THALOS_VIDEO_SUFFIX")) +@click.option("--print_latest", is_flag=True) +def main( + dbname, dbuser, thalos_video_dir, output_dir, passphrase_file, thalos_video_suffix, print_latest +): thalos_dir = Path(thalos_video_dir) output_dir = Path(output_dir) if print_latest: for cameradir in filter(lambda x: x.is_dir(), thalos_dir.iterdir()): - i=0 + i = 0 for vid_file in depth_first_video_files(cameradir): if i > 1: break @@ -200,7 +243,7 @@ def main(dbname, dbuser, thalos_video_dir, output_dir, passphrase_file, thalos_v s = vid_file.stat() last_modified = datetime.fromtimestamp(s.st_mtime, tz=timezone.utc) click.echo("{} ({})".format(str(vid_file.absolute()), str(last_modified))) - i+=1 + i += 1 return cpool = SimpleConnectionPool(1, 1, database=dbname, user=dbuser) @@ -209,10 +252,13 @@ def runonce(cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix) video_fetch(cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix) 
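+        # returning schedule.CancelJob unschedules this one-shot job, so the
+        # fetch runs once immediately at startup and then only on the 5-minute
+        # schedule registered below.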
return schedule.CancelJob - schedule.every(1).seconds.do(runonce, cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix ) - - schedule.every(5).minutes.do(video_fetch, cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix ) + schedule.every(1).seconds.do( + runonce, cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix + ) + schedule.every(5).minutes.do( + video_fetch, cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix + ) while 1: n = schedule.idle_seconds() @@ -226,5 +272,6 @@ def runonce(cpool, thalos_dir, output_dir, passphrase_file, thalos_video_suffix) time.sleep(n) schedule.run_pending() -if __name__ == '__main__': + +if __name__ == "__main__": main()
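
Both vector_schedule.py and video_fetch.py drive their periodic work with the same idle/run loop around the schedule library. A minimal standalone sketch of that pattern follows (illustrative only; the tick() job is a hypothetical placeholder, not a function from this codebase):

import time

import schedule


def tick():
    # hypothetical placeholder for a real job, such as a video fetch or a vector's execute()
    print("tick")


# run the job on a fixed interval, mirroring the loops above
schedule.every(5).minutes.do(tick)

while True:
    n = schedule.idle_seconds()
    if n is None:
        # nothing left on the schedule (every job returned CancelJob)
        break
    if n > 0:
        # sleep until the next job is due instead of busy-polling
        time.sleep(n)
    schedule.run_pending()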