# MIT-Fishery-App / inference.py
# NOTE(review): the lines below were captured from the hosting site's file
# viewer (author: aus10powell, commit 209fb19, size 4.69 kB) and were not
# valid Python; they are preserved here as comments so the module parses.
# Import necessary libraries
import matplotlib
# Use Agg backend for Matplotlib
matplotlib.use("Agg")
# Libraries for the app
import streamlit as st
import time
import io
import argparse
import sys
import os.path
import subprocess
import tempfile
import logging
# Visualization libraries
import altair as alt
import av
# Machine Learning and Image Processing libraries
import numpy as np
import pandas as pd
import cv2 as cv
from PIL import Image, ImageOps
from tqdm import tqdm
# Custom modules
import inference
from app_utils import *
@st.cache_data
def load_video(video_url):
    """Read a local video file and return its raw bytes.

    Cached by Streamlit so repeated app reruns do not re-read the file.

    Args:
        video_url: Filesystem path to the video (despite the name, this is
            a local path, not a URL — see the caller in ``run_app``).

    Returns:
        bytes: The complete binary content of the file.
    """
    # Context manager closes the handle promptly; the original
    # ``open(...).read()`` leaked the descriptor until GC.
    with open(video_url, "rb") as f:
        return f.read()
@st.cache_data
def load_historical(fpath):
    """Load the historical herring-count CSV at *fpath* into a DataFrame.

    Cached by Streamlit so the CSV is parsed once per session/input.
    """
    historical_df = pd.read_csv(fpath)
    return historical_df
# Use the full browser width for the layout. Streamlit requires this to be
# the first Streamlit command executed during a script run.
st.set_page_config(layout="wide")
def process_uploaded_file():
    """Render the upload tab: accept an image or video, run inference on
    videos, and display the annotated output plus a count-vs-time chart.

    Side effects:
        Writes the uploaded video to a named temporary file (``delete=False``)
        so OpenCV can open it by path, and renders widgets via Streamlit.
    """
    st.subheader("Upload your own video...")

    # Accepted file types for upload.
    img_types = ["jpg", "png", "jpeg"]
    video_types = ["mp4", "avi"]

    # Allow user to upload an image or video file.
    uploaded_file = st.file_uploader(
        "Select an image or video file...", type=img_types + video_types
    )

    # Guard clause: nothing uploaded yet.
    if uploaded_file is None:
        st.write("No file uploaded")
        return

    # ``uploaded_file.type`` is a MIME string such as "video/mp4";
    # compare only the subtype against the accepted extensions.
    file_ext = str(uploaded_file.type).split("/")[-1]

    if file_ext in img_types:
        # Display uploaded image.
        image = Image.open(uploaded_file)
        st.image(image, caption="Uploaded image", use_column_width=True)
        # TBD: Inference code to run and display for single image
    elif file_ext in video_types:
        # Display uploaded video.
        st.video(uploaded_file)

        # Write the upload to a temp file so OpenCV can open it by path.
        # Fix: close() flushes buffered bytes to disk BEFORE VideoCapture
        # reads the file; the original never flushed, so OpenCV could see
        # a truncated file.
        tfile = tempfile.NamedTemporaryFile(delete=False)
        try:
            tfile.write(uploaded_file.read())
        finally:
            tfile.close()
        vf = cv.VideoCapture(tfile.name)

        # Run inference on the uploaded video.
        with st.spinner("Running inference..."):
            frames, counts, timestamps = inference.main(vf)
            logging.info("INFO: Completed running inference on frames")
            st.balloons()

        # Convert OpenCV NumPy frames in-memory to IO bytes for Streamlit.
        streamlit_video_file = frames_to_video(frames=frames, fps=11)

        # Show processed video and provide a download button.
        st.video(streamlit_video_file)
        st.download_button(
            label="Download processed video",
            data=streamlit_video_file,
            # Fix: the MIME type for MP4 is "video/mp4", not "mp4".
            mime="video/mp4",
            file_name="processed_video.mp4",
        )

        # Pair counts with timestamps. ``timestamps[1:]`` drops the first
        # timestamp — presumably counts are per-interval so there is one
        # fewer count than timestamps; TODO confirm against
        # ``inference.main``'s return contract.
        df_counts_time = pd.DataFrame(
            data={"fish_count": counts, "timestamps": timestamps[1:]}
        )

        # Display fish count vs. timestamp chart.
        st.altair_chart(
            plot_count_date(dataframe=df_counts_time),
            use_container_width=True,
        )
# Define the main function to run the Streamlit app
# Define the main function to run the Streamlit app
def run_app():
    """Build the two-tab Streamlit UI: historical analysis + video upload."""
    # Streamlit option (NOTE(review): this deprecation option was removed in
    # newer Streamlit releases — verify against the pinned version).
    st.set_option("deprecation.showfileUploaderEncoding", False)

    # App title and description.
    st.title("MIT Count Fish Counter")
    st.text("Upload a video file to detect and count fish")

    # Example assets (replace with an actual video URL or file path).
    video_url = "yolo2_out_py.mp4"
    example_video_bytes = load_video(video_url)

    # Historical herring counts.
    df_historical_herring = load_historical(fpath="herring_count_all.csv")

    main_tab, upload_tab = st.tabs(["Analysis", "Upload video for analysis"])

    with main_tab:
        # Two-column layout: charts/map on the left, example video right.
        left_col, right_col = st.columns(2)

        with left_col:
            # Plot historical data.
            st.altair_chart(
                plot_historical_data(df_historical_herring),
                use_container_width=True,
            )

            # Map of fishery locations (randomly jittered demo points
            # around 42.41 N, -71.38 W).
            st.subheader("Map of Fishery Locations")
            demo_locations = pd.DataFrame(
                np.random.randn(5, 2) / [50, 50] + [42.41, -71.38],
                columns=["lat", "lon"],
            )
            st.map(demo_locations, use_container_width=True)

        with right_col:
            # Display example processed video.
            st.subheader("Example of processed video")
            st.video(example_video_bytes)

    with upload_tab:
        process_uploaded_file()
# Run the app if the script is executed directly. (Streamlit re-executes the
# module top-to-bottom on each interaction; this guard is the entry point.)
if __name__ == "__main__":
    run_app()