Local ML Model Trainer Interface

import streamlit as st

import pandas as pd

import numpy as np

import matplotlib.pyplot as plt

import seaborn as sns


from sklearn.model_selection import train_test_split

from sklearn.metrics import (

    accuracy_score, precision_score, recall_score, f1_score,

    mean_squared_error, confusion_matrix

)


from sklearn.preprocessing import StandardScaler


from sklearn.linear_model import LogisticRegression, LinearRegression

from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor

from sklearn.svm import SVC, SVR

from sklearn.neighbors import KNeighborsClassifier, KNeighborsRegressor



st.set_page_config(page_title="Local ML Model Trainer", layout="wide")

st.title("Local ML Model Trainer Interface")

st.write("Upload a dataset → choose an algorithm → train → view results")



# ────────────────────────────────────────────────

# Upload Dataset

# ────────────────────────────────────────────────

uploaded_file = st.file_uploader("📤 Upload CSV Dataset", type=["csv"])


if uploaded_file:

    df = pd.read_csv(uploaded_file)

    st.success("Dataset Loaded Successfully!")

    st.write("###  Data Preview")

    st.dataframe(df.head())


    st.write("###  Dataset Info")

    st.write(df.describe())


    # Target column selection

    target_col = st.selectbox("Select Target Column (Y)", df.columns)


    # Feature columns

    X = df.drop(columns=[target_col])

    y = df[target_col]


    # Auto detect problem type

    if df[target_col].dtype == object or df[target_col].nunique() < 15:

        problem_type = "classification"

    else:

        problem_type = "regression"


    st.info(f"Detected Problem Type: **{problem_type.upper()}**")


    # Choose model based on problem type

    if problem_type == "classification":

        model_choice = st.selectbox(

            "Choose Model",

            ["Logistic Regression", "Random Forest Classifier", "SVM Classifier", "KNN Classifier"]

        )

    else:

        model_choice = st.selectbox(

            "Choose Model",

            ["Linear Regression", "Random Forest Regressor", "SVM Regressor", "KNN Regressor"]

        )


    test_size = st.slider("Test Size (fraction of data held out)", 0.1, 0.5, 0.2)


    # Train button

    if st.button(" Train Model"):

        # Preprocessing: keep only numeric feature columns and standardize them

        numeric_X = X.select_dtypes(include=np.number)

        scaler = StandardScaler()

        X_scaled = scaler.fit_transform(numeric_X)


        X_train, X_test, y_train, y_test = train_test_split(

            X_scaled, y, test_size=test_size, random_state=42

        )


        # Model Selection

        if model_choice == "Logistic Regression":

            model = LogisticRegression()

        elif model_choice == "Random Forest Classifier":

            model = RandomForestClassifier()

        elif model_choice == "SVM Classifier":

            model = SVC()

        elif model_choice == "KNN Classifier":

            model = KNeighborsClassifier()

        elif model_choice == "Linear Regression":

            model = LinearRegression()

        elif model_choice == "Random Forest Regressor":

            model = RandomForestRegressor()

        elif model_choice == "SVM Regressor":

            model = SVR()

        elif model_choice == "KNN Regressor":

            model = KNeighborsRegressor()


        # Train

        model.fit(X_train, y_train)

        y_pred = model.predict(X_test)


        st.success("Model Trained Successfully!")


        # ────────────────────────────────────────────────

        # Show Metrics

        # ────────────────────────────────────────────────

        st.write("## 📈 Model Performance")


        if problem_type == "classification":

            st.write("### 🔹 Classification Metrics")

            st.write(f"Accuracy: **{accuracy_score(y_test, y_pred):.4f}**")

            st.write(f"Precision: **{precision_score(y_test, y_pred, average='weighted'):.4f}**")

            st.write(f"Recall: **{recall_score(y_test, y_pred, average='weighted'):.4f}**")

            st.write(f"F1 Score: **{f1_score(y_test, y_pred, average='weighted'):.4f}**")


            # Confusion Matrix

            cm = confusion_matrix(y_test, y_pred)

            fig, ax = plt.subplots(figsize=(5, 4))

            sns.heatmap(cm, annot=True, fmt="d", cmap="Blues", ax=ax)

            st.write("###  Confusion Matrix")

            st.pyplot(fig)


        else:

            st.write("### 🔹 Regression Metrics")

            rmse = np.sqrt(mean_squared_error(y_test, y_pred))

            st.write(f"RMSE: **{rmse:.4f}**")


        # ────────────────────────────────────────────────

        # Feature Importance (for tree models)

        # ────────────────────────────────────────────────

        if "Forest" in model_choice:

            st.write("##  Feature Importance")

            importance = model.feature_importances_

            fig, ax = plt.subplots(figsize=(6, 4))

            sns.barplot(x=importance, y=numeric_X.columns, ax=ax)

            st.pyplot(fig)
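
To try the interface locally, save the script (the filename below is just an assumption) and launch it with Streamlit; the app then opens in the browser:

    streamlit run local_ml_trainer.py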


 

Automatic Dataset Cleaner

#!/usr/bin/env python3

"""

Automatic Dataset Cleaner


Usage:

    python automatic_dataset_cleaner.py --file /path/to/data.csv

    python automatic_dataset_cleaner.py --file /path/to/data.csv --missing-strategy mean --outliers cap --encode --scale standard


Outputs:

    - /path/to/data_cleaned.csv           (cleaned dataset)

    - /path/to/data_cleaning_report.json  (summary of cleaning actions)

"""


import argparse

import pandas as pd

import numpy as np

import json

import os

from datetime import datetime

from sklearn.preprocessing import StandardScaler, MinMaxScaler


# ---------- Helpers ----------

def read_csv(path):

    df = pd.read_csv(path)

    return df


def save_csv(df, path):

    df.to_csv(path, index=False)


def save_json(obj, path):

    with open(path, "w", encoding="utf-8") as f:

        json.dump(obj, f, indent=2, default=str)


def summary_stats(df):

    return {

        "rows": int(df.shape[0]),

        "columns": int(df.shape[1]),

        "missing_per_column": df.isnull().sum().to_dict(),

        "dtypes": {c: str(t) for c, t in df.dtypes.items()},

        "sample_head": df.head(3).to_dict(orient="records")

    }


def auto_cast_columns(df):

    """Try to cast columns to numeric/datetime where appropriate."""

    conversions = {}

    for col in df.columns:

        if df[col].dtype == object:

            # try datetime

            try:

                parsed = pd.to_datetime(df[col], errors="coerce")

                non_null = parsed.notnull().sum()

                if non_null / max(1, len(parsed)) > 0.6:

                    df[col] = parsed

                    conversions[col] = "datetime"

                    continue

            except Exception:

                pass

            # try numeric

            coerced = pd.to_numeric(df[col].str.replace(",", "", regex=False).str.replace(" ", "", regex=False), errors="coerce")

            if coerced.notnull().sum() / max(1, len(coerced)) > 0.6:

                df[col] = coerced

                conversions[col] = "numeric"

    return df, conversions


# ---------- Missing values ----------

def handle_missing(df, strategy="mean", fill_value=None, threshold_drop_col=0.5):

    """

    strategy: 'drop-row', 'drop-col', 'mean', 'median', 'mode', 'ffill', 'bfill', 'constant'

    threshold_drop_col: if fraction of missing > threshold, drop column

    """

    report = {"strategy": strategy, "dropped_columns": [], "details": {}}

    # drop columns with too many missing values

    missing_frac = df.isnull().mean()

    cols_to_drop = missing_frac[missing_frac > threshold_drop_col].index.tolist()

    if cols_to_drop:

        df = df.drop(columns=cols_to_drop)

        report["dropped_columns"] = cols_to_drop


    if strategy == "drop-row":

        before = len(df)

        df = df.dropna(axis=0)

        report["rows_dropped"] = before - len(df)

    elif strategy == "drop-col":

        before_cols = df.shape[1]

        df = df.dropna(axis=1)

        report["cols_dropped"] = before_cols - df.shape[1]

    elif strategy in ("mean", "median", "mode", "ffill", "bfill", "constant"):

        for col in df.columns:

            if df[col].isnull().any():

                if strategy == "mean" and pd.api.types.is_numeric_dtype(df[col]):

                    val = df[col].mean()

                    df[col] = df[col].fillna(val)

                    report["details"][col] = f"filled mean={val}"

                elif strategy == "median" and pd.api.types.is_numeric_dtype(df[col]):

                    val = df[col].median()

                    df[col] = df[col].fillna(val)

                    report["details"][col] = f"filled median={val}"

                elif strategy == "mode":

                    mode_val = df[col].mode()

                    if not mode_val.empty:

                        val = mode_val.iloc[0]

                        df[col] = df[col].fillna(val)

                        report["details"][col] = f"filled mode={val}"

                    else:

                        df[col] = df[col].fillna(fill_value)

                        report["details"][col] = f"filled mode_empty used const={fill_value}"

                elif strategy == "ffill":

                    df[col] = df[col].fillna(method="ffill").fillna(method="bfill")

                    report["details"][col] = "filled forward/backward"

                elif strategy == "bfill":

                    df[col] = df[col].fillna(method="bfill").fillna(method="ffill")

                    report["details"][col] = "filled backward/forward"

                else:  # constant

                    df[col] = df[col].fillna(fill_value)

                    report["details"][col] = f"filled const={fill_value}"

    else:

        raise ValueError("Unknown missing strategy")

    return df, report


# ---------- Outlier detection/treatment ----------

def iqr_outlier_bounds(series, k=1.5):

    q1 = series.quantile(0.25)

    q3 = series.quantile(0.75)

    iqr = q3 - q1

    low = q1 - k * iqr

    high = q3 + k * iqr

    return low, high
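
# Illustration: for pd.Series([1, 2, 3, 4, 100]) with k=1.5, q1 = 2.0, q3 = 4.0 and iqr = 2.0,
# so the bounds are (-1.0, 7.0) and the value 100 falls outside.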


def handle_outliers(df, method="remove", k=1.5, numeric_only=True):

    """

    method: 'remove' (drop rows with outlier), 'cap' (clip to bounds), 'mark' (add boolean column)

    returns df, report

    """

    report = {"method": method, "columns": {}}

    numeric_cols = df.select_dtypes(include=[np.number]).columns.tolist() if numeric_only else df.columns.tolist()

    rows_before = df.shape[0]

    for col in numeric_cols:

        series = df[col].dropna()

        if series.empty:

            continue

        low, high = iqr_outlier_bounds(series, k=k)

        is_out = (df[col] < low) | (df[col] > high)

        out_count = int(is_out.sum())

        if out_count == 0:

            continue

        report["columns"][col] = {"outliers": out_count, "bounds": (float(low), float(high))}

        if method == "remove":

            df = df.loc[~is_out]

        elif method == "cap":

            df[col] = df[col].clip(lower=low, upper=high)

        elif method == "mark":

            df[f"{col}_outlier"] = is_out.astype(int)

        else:

            raise ValueError("Unknown outlier method")

    rows_after = df.shape[0]

    report["rows_before"] = int(rows_before)

    report["rows_after"] = int(rows_after)

    return df, report


# ---------- Duplicates ----------

def handle_duplicates(df, subset=None, keep="first"):

    before = df.shape[0]

    df2 = df.drop_duplicates(subset=subset, keep=keep)

    after = df2.shape[0]

    report = {"rows_before": int(before), "rows_after": int(after), "dropped": int(before-after)}

    return df2, report


# ---------- Encoding ----------

def encode_categoricals(df, one_hot=False, max_unique_for_onehot=20):

    report = {"encoded_columns": {}}

    cat_cols = df.select_dtypes(include=["category", "object"]).columns.tolist()

    if not cat_cols:

        return df, report

    for col in cat_cols:

        nunique = df[col].nunique(dropna=False)

        if one_hot and nunique <= max_unique_for_onehot:

            dummies = pd.get_dummies(df[col].astype(str), prefix=col, dummy_na=True)

            df = pd.concat([df.drop(columns=[col]), dummies], axis=1)

            report["encoded_columns"][col] = {"method": "one_hot", "new_cols": list(dummies.columns)}

        else:

            # label encoding (simple mapping)

            mapping = {val: i for i, val in enumerate(df[col].astype(str).unique())}

            df[col] = df[col].astype(str).map(mapping)

            report["encoded_columns"][col] = {"method": "label", "mapping_sample": dict(list(mapping.items())[:10])}

    return df, report


# ---------- Scaling ----------

def scale_numeric(df, method="standard"):

    numeric_cols = df.select_dtypes(include=[np.number]).columns.tolist()

    report = {"method": method, "scaled_columns": numeric_cols}

    if not numeric_cols:

        return df, report

    arr = df[numeric_cols].values.astype(float)

    if method == "standard":

        scaler = StandardScaler()

    elif method == "minmax":

        scaler = MinMaxScaler()

    else:

        raise ValueError("Unknown scaling method")

    scaled = scaler.fit_transform(arr)

    df[numeric_cols] = scaled

    return df, report


# ---------- Main cleaning pipeline ----------

def clean_dataset(

    input_path,

    missing_strategy="mean",

    missing_constant=None,

    missing_drop_threshold=0.5,

    outlier_method="remove",

    outlier_k=1.5,

    outlier_numeric_only=True,

    dedupe_subset=None,

    encode=False,

    one_hot=False,

    scale_method=None

):

    # read

    df = read_csv(input_path)

    report = {"input_path": input_path, "start_time": str(datetime.now()), "initial_summary": summary_stats(df), "steps": {}}


    # auto-cast columns

    df, convs = auto_cast_columns(df)

    report["steps"]["auto_cast"] = convs


    # missing

    df, missing_report = handle_missing(df, strategy=missing_strategy, fill_value=missing_constant, threshold_drop_col=missing_drop_threshold)

    report["steps"]["missing"] = missing_report


    # duplicates

    df, dup_report = handle_duplicates(df, subset=dedupe_subset, keep="first")

    report["steps"]["duplicates"] = dup_report


    # outliers (skipped when outlier_method is None, e.g. --outliers none)

    if outlier_method:

        df, out_report = handle_outliers(df, method=outlier_method, k=outlier_k, numeric_only=outlier_numeric_only)

        report["steps"]["outliers"] = out_report


    # encode

    if encode:

        df, enc_report = encode_categoricals(df, one_hot=one_hot)

        report["steps"]["encoding"] = enc_report


    # scale

    if scale_method:

        df, scale_report = scale_numeric(df, method=scale_method)

        report["steps"]["scaling"] = scale_report


    report["final_summary"] = summary_stats(df)

    report["end_time"] = str(datetime.now())

    # output

    base, ext = os.path.splitext(input_path)

    cleaned_path = f"{base}_cleaned{ext}"

    report_path = f"{base}_cleaning_report.json"

    save_csv(df, cleaned_path)

    save_json(report, report_path)

    return cleaned_path, report_path, report


# ---------- CLI ----------

def parse_args():

    p = argparse.ArgumentParser(description="Automatic Dataset Cleaner")

    p.add_argument("--file", "-f", required=True, help="Path to CSV file")

    p.add_argument("--missing-strategy", default="mean", choices=["drop-row","drop-col","mean","median","mode","ffill","bfill","constant"], help="Missing value strategy")

    p.add_argument("--missing-constant", default=None, help="Constant value to fill missing when strategy=constant")

    p.add_argument("--missing-drop-threshold", type=float, default=0.5, help="Drop columns with missing fraction > threshold")

    p.add_argument("--outliers", default="remove", choices=["remove","cap","mark","none"], help="Outlier handling method")

    p.add_argument("--outlier-k", type=float, default=1.5, help="IQR multiplier for outlier detection")

    p.add_argument("--dedupe-subset", default=None, help="Comma separated columns to consider for duplicates (default=all columns)")

    p.add_argument("--encode", action="store_true", help="Encode categorical columns")

    p.add_argument("--one-hot", action="store_true", help="One-hot encode small cardinality categoricals (used with --encode)")

    p.add_argument("--scale", default=None, choices=["standard","minmax"], help="Scale numeric columns")

    return p.parse_args()


def main_cli():

    args = parse_args()

    dedupe_subset = args.dedupe_subset.split(",") if args.dedupe_subset else None

    outlier_method = args.outliers if args.outliers != "none" else None


    cleaned_path, report_path, report = clean_dataset(

        input_path=args.file,

        missing_strategy=args.missing_strategy,

        missing_constant=args.missing_constant,

        missing_drop_threshold=args.missing_drop_threshold,

        outlier_method=outlier_method,

        outlier_k=args.outlier_k,

        outlier_numeric_only=True,

        dedupe_subset=dedupe_subset,

        encode=args.encode,

        one_hot=args.one_hot,

        scale_method=args.scale

    )


    print("Cleaning complete.")

    print("Cleaned file:", cleaned_path)

    print("Report saved to:", report_path)

    # Also print a short summary

    print(json.dumps({

        "rows_before": report["initial_summary"]["rows"],

        "rows_after": report["final_summary"]["rows"],

        "columns_before": report["initial_summary"]["columns"],

        "columns_after": report["final_summary"]["columns"],

        "missing_info": report["initial_summary"]["missing_per_column"]

    }, indent=2))


if __name__ == "__main__":

    main_cli()
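
The cleaner can also be driven programmatically instead of through the CLI. A minimal sketch, assuming the script above is saved as automatic_dataset_cleaner.py somewhere on the import path:

import json
from automatic_dataset_cleaner import clean_dataset

cleaned_path, report_path, report = clean_dataset(
    "data.csv",                 # hypothetical input file
    missing_strategy="median",
    outlier_method="cap",
    encode=True,
    scale_method="standard",
)
print("Cleaned file:", cleaned_path)
print(json.dumps(report["steps"]["missing"], indent=2, default=str))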

 

Local Chat App Over LAN (Socket-Based)

chat_server.py

import socket

import threading


HOST = "0.0.0.0"     # Accept connections from LAN

PORT = 5000


clients = []

usernames = {}


# Broadcast message to all connected clients

def broadcast(msg, sender_conn=None):

    for client in clients:

        if client != sender_conn:

            try:

                client.send(msg.encode())

            except OSError:

                pass


def handle_client(conn, addr):

    print(f"[NEW CONNECTION] {addr}")


    try:

        username = conn.recv(1024).decode()

        usernames[conn] = username


        broadcast(f" {username} joined the chat!")

        print(f"User '{username}' connected.")


        while True:

            msg = conn.recv(1024).decode()

            if not msg:

                break


            full_msg = f"{username}: {msg}"

            print(full_msg)

            broadcast(full_msg, conn)


    except Exception as e:

        print("Error:", e)


    finally:

        print(f"{addr} disconnected")

        if conn in clients:

            clients.remove(conn)

        left_user = usernames.pop(conn, "Unknown user")

        broadcast(f"{left_user} left the chat.")

        conn.close()


def start_server():

    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    server.bind((HOST, PORT))

    server.listen()


    print(f"Server started on {socket.gethostbyname(socket.gethostname())}:{PORT}")


    while True:

        conn, addr = server.accept()

        clients.append(conn)


        thread = threading.Thread(target=handle_client, args=(conn, addr))

        thread.daemon = True

        thread.start()


if __name__ == "__main__":

    start_server()
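
On some machines socket.gethostbyname(socket.gethostname()) resolves to 127.0.0.1, so the address printed by the server may not be reachable from other devices. A common workaround for finding the LAN address (a sketch; the UDP "connection" below sends no packets, it only asks the OS which local interface would be used):

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))      # any routable address works here
print("LAN IP:", s.getsockname()[0])
s.close()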

chat_client.py

import socket
import threading
import tkinter as tk
from tkinter import scrolledtext, messagebox

class ChatClient:
    def __init__(self, root):
        self.root = root
        root.title("LAN Chat Client")
        root.geometry("400x500")

        tk.Label(root, text="Server IP:").pack()
        self.ip_entry = tk.Entry(root)
        self.ip_entry.pack()

        tk.Label(root, text="Username:").pack()
        self.username_entry = tk.Entry(root)
        self.username_entry.pack()

        tk.Button(root, text="Connect", command=self.connect_server).pack(pady=10)

        self.chat_area = scrolledtext.ScrolledText(root, state="disabled")
        self.chat_area.pack(expand=True, fill="both", pady=10)

        self.msg_entry = tk.Entry(root)
        self.msg_entry.pack(fill="x")

        tk.Button(root, text="Send", command=self.send_message).pack(pady=5)

        self.sock = None
        self.running = False

    def connect_server(self):
        ip = self.ip_entry.get()
        username = self.username_entry.get()

        if not ip or not username:
            messagebox.showerror("Error", "Enter IP and Username!")
            return

        try:
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.sock.connect((ip, 5000))
            self.sock.send(username.encode())
        except OSError:
            messagebox.showerror("Error", "Cannot connect to server!")
            return

        self.running = True
        threading.Thread(target=self.receive_messages, daemon=True).start()
        self.chat_area.config(state="normal")
        self.chat_area.insert("end", "Connected!\n")
        self.chat_area.config(state="disabled")

    def receive_messages(self):
        while self.running:
            try:
                msg = self.sock.recv(1024).decode()
                if not msg:  # empty read means the server closed the connection
                    break
                self.chat_area.config(state="normal")
                self.chat_area.insert("end", msg + "\n")
                self.chat_area.yview("end")
                self.chat_area.config(state="disabled")
            except OSError:
                break

    def send_message(self):
        msg = self.msg_entry.get()
        if msg and self.sock:
            try:
                self.sock.send(msg.encode())
                self.msg_entry.delete(0, tk.END)
            except OSError:
                pass

    def on_close(self):
        self.running = False
        if self.sock:
            self.sock.close()
        self.root.destroy()

if __name__ == "__main__":
    root = tk.Tk()
    client = ChatClient(root)
    root.protocol("WM_DELETE_WINDOW", client.on_close)
    root.mainloop()
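
For a quick smoke test without the GUI, a minimal script can connect to the server, announce a username, and send one message (a sketch; it assumes the server above is running, and SERVER_IP is a placeholder for its actual LAN address):

import socket
import time

SERVER_IP = "192.168.1.10"      # placeholder: replace with the server's LAN IP

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((SERVER_IP, 5000))
sock.send("tester".encode())                    # first payload is treated as the username
time.sleep(0.2)                                 # the protocol has no framing, so avoid coalescing sends
sock.send("hello from the test script".encode())
print(sock.recv(1024).decode())                 # e.g. the "tester joined the chat!" broadcast
sock.close()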

Library Book Borrowing System

 """

Library Book Borrowing System (Tkinter + SQLite)

Features:

- User Login / Register

- Admin Login

- Borrow / Return Books

- Due Date Tracking

- Overdue Alerts

"""


import sqlite3

import tkinter as tk

from tkinter import ttk, messagebox

from datetime import datetime, timedelta


DB = "library.db"


# ----------------- DATABASE SETUP -----------------

def init_db():

    conn = sqlite3.connect(DB)

    c = conn.cursor()


    # Users Table

    c.execute("""

        CREATE TABLE IF NOT EXISTS users (

            id INTEGER PRIMARY KEY AUTOINCREMENT,

            username TEXT UNIQUE,

            password TEXT,

            role TEXT

        )

    """)


    # Books Table

    c.execute("""

        CREATE TABLE IF NOT EXISTS books (

            id INTEGER PRIMARY KEY AUTOINCREMENT,

            title TEXT,

            author TEXT,

            available INTEGER DEFAULT 1

        )

    """)


    # Borrow Table

    c.execute("""

        CREATE TABLE IF NOT EXISTS borrowed (

            id INTEGER PRIMARY KEY AUTOINCREMENT,

            user_id INTEGER,

            book_id INTEGER,

            borrowed_date TEXT,

            due_date TEXT,

            FOREIGN KEY(user_id) REFERENCES users(id),

            FOREIGN KEY(book_id) REFERENCES books(id)

        )

    """)


    # Default admin

    c.execute("SELECT * FROM users WHERE role='admin'")

    if not c.fetchone():

        c.execute("INSERT INTO users(username, password, role) VALUES('admin','admin','admin')")

        print("Default admin created: admin / admin")


    conn.commit()

    conn.close()


# ----------------- LOGIN WINDOW -----------------

class LoginWindow:

    def __init__(self, root):

        self.root = root

        root.title("Library System - Login")

        root.geometry("350x250")


        tk.Label(root, text="Username:", font=("Arial", 12)).pack(pady=5)

        self.username_entry = tk.Entry(root)

        self.username_entry.pack()


        tk.Label(root, text="Password:", font=("Arial", 12)).pack(pady=5)

        self.password_entry = tk.Entry(root, show="*")

        self.password_entry.pack()


        tk.Button(root, text="Login", command=self.login).pack(pady=10)

        tk.Button(root, text="Register", command=self.register).pack()


    def login(self):

        username = self.username_entry.get()

        password = self.password_entry.get()


        conn = sqlite3.connect(DB)

        c = conn.cursor()

        c.execute("SELECT * FROM users WHERE username=? AND password=?", (username, password))

        user = c.fetchone()

        conn.close()


        if user:

            role = user[3]

            if role == "admin":

                AdminDashboard(tk.Toplevel(), user)

            else:

                UserDashboard(tk.Toplevel(), user)

        else:

            messagebox.showerror("Error", "Invalid credentials!")


    def register(self):

        RegisterWindow(tk.Toplevel())


# ----------------- REGISTER WINDOW -----------------

class RegisterWindow:

    def __init__(self, root):

        self.root = root

        root.title("Register")

        root.geometry("300x250")


        tk.Label(root, text="Create Username").pack(pady=5)

        self.user_entry = tk.Entry(root)

        self.user_entry.pack()


        tk.Label(root, text="Create Password").pack(pady=5)

        self.pass_entry = tk.Entry(root, show="*")

        self.pass_entry.pack()


        tk.Button(root, text="Register", command=self.register_user).pack(pady=10)


    def register_user(self):

        username = self.user_entry.get()

        password = self.pass_entry.get()


        conn = sqlite3.connect(DB)

        c = conn.cursor()


        try:

            c.execute("INSERT INTO users(username,password,role) VALUES(?,?,?)",

                      (username, password, "user"))

            conn.commit()

            messagebox.showinfo("Success", "Registration complete!")

            self.root.destroy()

        except sqlite3.IntegrityError:

            messagebox.showerror("Error", "Username already exists.")

        conn.close()


# ----------------- ADMIN DASHBOARD -----------------

class AdminDashboard:

    def __init__(self, root, user):

        self.root = root

        root.title("Admin Dashboard")

        root.geometry("600x500")


        tk.Label(root, text="Admin Dashboard", font=("Arial", 16)).pack(pady=10)


        tk.Button(root, text="Add Book", width=20, command=self.add_book_window).pack(pady=5)

        tk.Button(root, text="Remove Book", width=20, command=self.remove_book_window).pack(pady=5)

        tk.Button(root, text="View All Books", width=20, command=self.view_books).pack(pady=5)

        tk.Button(root, text="View Users", width=20, command=self.view_users).pack(pady=5)


    def add_book_window(self):

        win = tk.Toplevel()

        win.title("Add Book")

        win.geometry("300x200")


        tk.Label(win, text="Title").pack()

        title = tk.Entry(win)

        title.pack()


        tk.Label(win, text="Author").pack()

        author = tk.Entry(win)

        author.pack()


        def save():

            conn = sqlite3.connect(DB)

            c = conn.cursor()

            c.execute("INSERT INTO books(title, author) VALUES(?,?)", (title.get(), author.get()))

            conn.commit()

            conn.close()

            messagebox.showinfo("Success", "Book Added!")

            win.destroy()


        tk.Button(win, text="Save", command=save).pack(pady=10)


    def remove_book_window(self):

        win = tk.Toplevel()

        win.title("Remove Book")

        win.geometry("300x200")


        tk.Label(win, text="Book ID").pack()

        book_id = tk.Entry(win)

        book_id.pack()


        def delete():

            conn = sqlite3.connect(DB)

            c = conn.cursor()

            c.execute("DELETE FROM books WHERE id=?", (book_id.get(),))

            conn.commit()

            conn.close()

            messagebox.showinfo("Removed", "Book deleted!")

            win.destroy()


        tk.Button(win, text="Delete", command=delete).pack(pady=10)


    def view_books(self):

        BookListWindow(tk.Toplevel(), admin=True)


    def view_users(self):

        UsersListWindow(tk.Toplevel())


# ----------------- USER DASHBOARD -----------------

class UserDashboard:

    def __init__(self, root, user):

        self.root = root

        self.user = user

        root.title("User Dashboard")

        root.geometry("600x500")


        tk.Label(root, text=f"Welcome {user[1]}", font=("Arial", 16)).pack(pady=10)


        tk.Button(root, text="Borrow Book", width=20, command=self.borrow_window).pack(pady=10)

        tk.Button(root, text="Return Book", width=20, command=self.return_window).pack(pady=10)

        tk.Button(root, text="My Borrowed Books", width=20, command=self.my_books).pack(pady=10)


        self.check_due_alerts()


    def borrow_window(self):

        BookListWindow(tk.Toplevel(), user=self.user)


    def return_window(self):

        ReturnBookWindow(tk.Toplevel(), self.user)


    def my_books(self):

        UserBorrowedBooks(tk.Toplevel(), self.user)


    def check_due_alerts(self):

        conn = sqlite3.connect(DB)

        c = conn.cursor()

        today = datetime.now()


        c.execute("""SELECT books.title, borrowed.due_date 

                     FROM borrowed 

                     JOIN books ON books.id = borrowed.book_id 

                     WHERE borrowed.user_id=?""",

                  (self.user[0],))

        rows = c.fetchall()

        conn.close()


        alerts = []

        for title, due_date in rows:

            due = datetime.strptime(due_date, "%Y-%m-%d")

            days_left = (due - today).days


            if days_left < 0:

                alerts.append(f"OVERDUE: {title} (Due {due_date})")

            elif days_left <= 2:

                alerts.append(f"Due Soon: {title} (Due {due_date})")


        if alerts:

            messagebox.showwarning("Due Date Alerts", "\n".join(alerts))


# ----------------- BOOK LIST WINDOW -----------------

class BookListWindow:

    def __init__(self, root, admin=False, user=None):

        self.root = root

        self.user = user

        root.title("Books List")

        root.geometry("600x400")


        columns = ("ID","Title","Author","Available")

        tree = ttk.Treeview(root, columns=columns, show="headings")

        for col in columns:

            tree.heading(col, text=col)

        tree.pack(fill="both", expand=True)


        conn = sqlite3.connect(DB)

        c = conn.cursor()

        c.execute("SELECT * FROM books")

        rows = c.fetchall()

        conn.close()


        for r in rows:

            tree.insert("", tk.END, values=r)


        if user:  

            tk.Button(root, text="Borrow Selected", command=lambda: self.borrow(tree)).pack(pady=10)


    def borrow(self, tree):

        selected = tree.focus()

        if not selected:

            messagebox.showwarning("Select", "Select a book first!")

            return


        values = tree.item(selected)["values"]

        book_id, title, author, available = values


        if int(available) == 0:

            messagebox.showerror("Unavailable", "Book already borrowed!")

            return


        due = (datetime.now() + timedelta(days=7)).strftime("%Y-%m-%d")


        conn = sqlite3.connect(DB)

        c = conn.cursor()

        c.execute("UPDATE books SET available=0 WHERE id=?", (book_id,))

        c.execute("INSERT INTO borrowed(user_id, book_id, borrowed_date, due_date) VALUES(?,?,?,?)",

                  (self.user[0], book_id, datetime.now().strftime("%Y-%m-%d"), due))

        conn.commit()

        conn.close()


        messagebox.showinfo("Success", f"Book borrowed! Due on {due}")

        self.root.destroy()


# ----------------- RETURN BOOK WINDOW -----------------

class ReturnBookWindow:

    def __init__(self, root, user):

        self.root = root

        self.user = user

        root.title("Return Book")

        root.geometry("500x350")


        columns = ("Borrow ID","Book Title","Due Date")

        self.tree = ttk.Treeview(root, columns=columns, show="headings")

        for col in columns:

            self.tree.heading(col, text=col)

        self.tree.pack(fill="both", expand=True)


        conn = sqlite3.connect(DB)

        c = conn.cursor()

        c.execute("""SELECT borrowed.id, books.title, borrowed.due_date

                     FROM borrowed 

                     JOIN books ON books.id = borrowed.book_id 

                     WHERE borrowed.user_id=?""",

                  (user[0],))

        rows = c.fetchall()

        conn.close()


        for r in rows:

            self.tree.insert("", tk.END, values=r)


        tk.Button(root, text="Return Selected", command=self.return_book).pack(pady=10)


    def return_book(self):

        selected = self.tree.focus()

        if not selected:

            messagebox.showwarning("Select", "Select a book!")

            return


        borrow_id, title, due = self.tree.item(selected)["values"]


        conn = sqlite3.connect(DB)

        c = conn.cursor()

        c.execute("SELECT book_id FROM borrowed WHERE id=?", (borrow_id,))

        book_id = c.fetchone()[0]


        c.execute("DELETE FROM borrowed WHERE id=?", (borrow_id,))

        c.execute("UPDATE books SET available=1 WHERE id=?", (book_id,))

        conn.commit()

        conn.close()


        messagebox.showinfo("Returned", f"{title} returned successfully!")

        self.root.destroy()


# ----------------- USER BORROWED LIST -----------------

class UserBorrowedBooks:

    def __init__(self, root, user):

        self.root = root

        root.title("My Borrowed Books")

        root.geometry("600x350")


        columns = ("Book Title","Borrowed Date","Due Date")

        tree = ttk.Treeview(root, columns=columns, show="headings")

        for col in columns:

            tree.heading(col, text=col)

        tree.pack(fill="both", expand=True)


        conn = sqlite3.connect(DB)

        c = conn.cursor()

        c.execute("""SELECT books.title, borrowed.borrowed_date, borrowed.due_date

                     FROM borrowed 

                     JOIN books ON books.id = borrowed.book_id 

                     WHERE borrowed.user_id=?""",

                  (user[0],))

        rows = c.fetchall()

        conn.close()


        for r in rows:

            tree.insert("", tk.END, values=r)


# ----------------- USERS LIST WINDOW (ADMIN) -----------------

class UsersListWindow:

    def __init__(self, root):

        root.title("All Users")

        root.geometry("600x300")


        columns = ("ID","Username","Role")

        tree = ttk.Treeview(root, columns=columns, show="headings")

        for col in columns:

            tree.heading(col, text=col)

        tree.pack(fill="both", expand=True)


        conn = sqlite3.connect(DB)

        c = conn.cursor()

        c.execute("SELECT id, username, role FROM users")

        rows = c.fetchall()

        conn.close()


        for r in rows:

            tree.insert("", tk.END, values=r)


# ----------------- MAIN APP -----------------

if __name__ == "__main__":

    init_db()

    root = tk.Tk()

    LoginWindow(root)

    root.mainloop()
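
Outside the GUI, the same schema can be inspected directly. A short sketch that lists overdue loans from library.db (it assumes init_db() has already created the tables):

import sqlite3
from datetime import datetime

conn = sqlite3.connect("library.db")
c = conn.cursor()
c.execute("""SELECT users.username, books.title, borrowed.due_date
             FROM borrowed
             JOIN users ON users.id = borrowed.user_id
             JOIN books ON books.id = borrowed.book_id""")
today = datetime.now()
for username, title, due_date in c.fetchall():
    if datetime.strptime(due_date, "%Y-%m-%d") < today:
        print(f"OVERDUE: {username} - {title} (due {due_date})")
conn.close()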


Motion Detection Security Recorder

"""

Motion Detection Security Recorder

---------------------------------

Usage:

    python motion_recorder.py

    python motion_recorder.py --source 0                      # default webcam

    python motion_recorder.py --source "video.mp4"

    python motion_recorder.py --source "rtsp://...."


Outputs:

    ./recordings/YYYYMMDD_HHMMSS_motion.mp4

    ./recordings/recording_log.csv  (optional)


Notes:

- Adjust MIN_AREA and SENSITIVITY for your camera / scene.

- Pre-buffer keeps recent frames so clip contains seconds before detection.

"""


import cv2

import numpy as np

import argparse

import time

from datetime import datetime

from collections import deque

import os

import csv


# -------------------------

# Configuration (tweakable)

# -------------------------

OUTPUT_DIR = "recordings"

LOG_CSV = True            # write a CSV log of recorded clips

FPS = 20                  # expected framerate for recording (adjust to your camera)

FRAME_WIDTH = 640         # resize frames for faster processing

FRAME_HEIGHT = 480

MIN_AREA = 1200           # minimum contour area to be considered motion (tweak)

SENSITIVITY = 25          # how much difference triggers motion (lower => more sensitive)

PRE_BUFFER_SECONDS = 3    # include 3 seconds before motion started

POST_RECORD_SECONDS = 4   # record N seconds after motion stops

CODEC = "mp4v"            # codec fourcc (try 'XVID' or 'avc1' if 'mp4v' not available)


# -------------------------

# Helpers

# -------------------------

def ensure_dir(path):

    if not os.path.exists(path):

        os.makedirs(path, exist_ok=True)


def timestamp_str():

    return datetime.now().strftime("%Y%m%d_%H%M%S")


def make_output_filename():

    return f"{timestamp_str()}_motion.mp4"


def write_log(csv_path, row):

    header = ["filename","start_time","end_time","duration_s","frames","source"]

    exists = os.path.exists(csv_path)

    with open(csv_path, "a", newline="", encoding="utf-8") as f:

        w = csv.writer(f)

        if not exists:

            w.writerow(header)

        w.writerow(row)


# -------------------------

# Motion Recorder class

# -------------------------

class MotionRecorder:

    def __init__(self, source=0, output_dir=OUTPUT_DIR):

        self.source = source

        self.output_dir = output_dir

        ensure_dir(self.output_dir)

        self.log_path = os.path.join(self.output_dir, "recording_log.csv") if LOG_CSV else None


        self.cap = cv2.VideoCapture(self.source)

        if not self.cap.isOpened():

            raise RuntimeError(f"Cannot open source: {source}")


        # If camera provides FPS, override

        native_fps = self.cap.get(cv2.CAP_PROP_FPS)

        if native_fps and native_fps > 0:

            self.fps = native_fps

        else:

            self.fps = FPS


        # Use resize dims

        self.width = FRAME_WIDTH

        self.height = FRAME_HEIGHT


        # background subtractor (more robust) + simple diff fallback

        self.bg_sub = cv2.createBackgroundSubtractorMOG2(history=500, varThreshold=16, detectShadows=True)

        self.pre_buffer = deque(maxlen=int(self.fps * PRE_BUFFER_SECONDS))

        self.is_recording = False

        self.writer = None

        self.record_start_time = None

        self.frames_recorded = 0

        self.last_motion_time = None


    def release(self):

        if self.cap:

            self.cap.release()

        if self.writer:

            self.writer.release()

        cv2.destroyAllWindows()


    def start(self):

        print("Starting motion detection. Press 'q' to quit.")

        try:

            while True:

                ret, frame = self.cap.read()

                if not ret:

                    print("Stream ended or cannot fetch frame.")

                    break


                # resize for consistent processing

                frame = cv2.resize(frame, (self.width, self.height))

                gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

                gray_blur = cv2.GaussianBlur(gray, (5,5), 0)


                # background subtraction mask

                fgmask = self.bg_sub.apply(gray_blur)

                # threshold to reduce noise (MOG2 marks shadow pixels as 127, so a SENSITIVITY below 127 treats shadows as motion)

                _, thresh = cv2.threshold(fgmask, SENSITIVITY, 255, cv2.THRESH_BINARY)

                # morphological operations to reduce small noise

                kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5,5))

                clean = cv2.morphologyEx(thresh, cv2.MORPH_OPEN, kernel, iterations=1)

                clean = cv2.morphologyEx(clean, cv2.MORPH_DILATE, kernel, iterations=2)


                # find contours

                contours, _ = cv2.findContours(clean, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

                motion_detected = False

                for cnt in contours:

                    if cv2.contourArea(cnt) >= MIN_AREA:

                        motion_detected = True

                        (x,y,w,h) = cv2.boundingRect(cnt)

                        # draw rectangle for preview

                        cv2.rectangle(frame, (x,y), (x+w, y+h), (0,255,0), 2)


                # push frame into pre-buffer (store color frames)

                self.pre_buffer.append(frame.copy())


                # Recording logic

                now = time.time()

                if motion_detected:

                    self.last_motion_time = now

                    if not self.is_recording:

                        # start new recording using pre-buffer

                        fname = make_output_filename()

                        out_path = os.path.join(self.output_dir, fname)

                        fourcc = cv2.VideoWriter_fourcc(*CODEC)

                        self.writer = cv2.VideoWriter(out_path, fourcc, self.fps, (self.width, self.height))

                        if not self.writer.isOpened():

                            print("Warning: VideoWriter failed to open. Check codec availability.")

                        # flush pre-buffer to writer

                        for bf in list(self.pre_buffer):

                            if self.writer:

                                self.writer.write(bf)

                        self.is_recording = True

                        self.record_start_time = datetime.now()

                        self.frames_recorded = len(self.pre_buffer)

                        self.current_out_path = out_path

                        print(f"[{self.record_start_time}] Motion started -> Recording to {out_path}")


                # If recording, write current frame and manage stop condition

                if self.is_recording:

                    if self.writer:

                        self.writer.write(frame)

                    self.frames_recorded += 1

                    # stop if no motion for POST_RECORD_SECONDS

                    if self.last_motion_time and (now - self.last_motion_time) > POST_RECORD_SECONDS:

                        # finalize

                        record_end = datetime.now()

                        duration = (record_end - self.record_start_time).total_seconds()

                        print(f"[{record_end}] Motion ended. Duration: {duration:.2f}s, Frames: {self.frames_recorded}")

                        # close writer

                        if self.writer:

                            self.writer.release()

                            self.writer = None

                        # write log

                        if LOG_CSV and self.log_path:

                            write_log(self.log_path, [

                                os.path.basename(self.current_out_path),

                                self.record_start_time.strftime("%Y-%m-%d %H:%M:%S"),

                                record_end.strftime("%Y-%m-%d %H:%M:%S"),

                                f"{duration:.2f}",

                                str(self.frames_recorded),

                                str(self.source)

                            ])

                        self.is_recording = False

                        self.frames_recorded = 0

                        # clear pre_buffer so next record begins clean

                        self.pre_buffer.clear()


                # Show simple preview window (optional)

                preview = frame.copy()

                status_text = f"REC" if self.is_recording else "Idle"

                cv2.putText(preview, f"Status: {status_text}", (10,20), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0,0,255) if self.is_recording else (0,255,0), 2)

                cv2.imshow("Motion Recorder - Preview", preview)


                # keyboard quit

                key = cv2.waitKey(1) & 0xFF

                if key == ord('q'):

                    print("Quit requested by user.")

                    break


        except KeyboardInterrupt:

            print("Interrupted by user.")

        finally:

            # cleanup

            if self.writer:

                self.writer.release()

                if LOG_CSV and self.log_path:

                    # if we were recording when interrupted, log end

                    end_time = datetime.now()

                    duration = (end_time - self.record_start_time).total_seconds() if self.record_start_time else 0

                    write_log(self.log_path, [

                        os.path.basename(self.current_out_path),

                        self.record_start_time.strftime("%Y-%m-%d %H:%M:%S") if self.record_start_time else "",

                        end_time.strftime("%Y-%m-%d %H:%M:%S"),

                        f"{duration:.2f}",

                        str(self.frames_recorded),

                        str(self.source)

                    ])

            self.release()

            print("Released resources. Exiting.")


# -------------------------

# CLI arg parsing

# -------------------------

def parse_args():

    parser = argparse.ArgumentParser(description="Motion Detection Security Recorder")

    parser.add_argument("--source", type=str, default="0", help="Video source: 0 (webcam), file path, or RTSP URL")

    parser.add_argument("--out", type=str, default=OUTPUT_DIR, help="Output recordings folder")

    parser.add_argument("--fps", type=int, default=FPS, help="Recording FPS fallback")

    parser.add_argument("--w", type=int, default=FRAME_WIDTH, help="Frame width (resize)")

    parser.add_argument("--h", type=int, default=FRAME_HEIGHT, help="Frame height (resize)")

    parser.add_argument("--min-area", type=int, default=MIN_AREA, help="Min contour area to detect motion")

    parser.add_argument("--sensitivity", type=int, default=SENSITIVITY, help="Threshold sensitivity for mask")

    args = parser.parse_args()

    return args


# -------------------------

# Entrypoint

# -------------------------

def main():

    args = parse_args()

    source = args.source

    # convert "0" -> 0 for webcam

    if source.isdigit():

        source = int(source)

    global FPS, FRAME_WIDTH, FRAME_HEIGHT, MIN_AREA, SENSITIVITY, OUTPUT_DIR

    FPS = args.fps

    FRAME_WIDTH = args.w

    FRAME_HEIGHT = args.h

    MIN_AREA = args.min_area

    SENSITIVITY = args.sensitivity

    OUTPUT_DIR = args.out


    ensure_dir(OUTPUT_DIR)

    recorder = MotionRecorder(source=source, output_dir=OUTPUT_DIR)

    recorder.start()


if __name__ == "__main__":

    main()
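
If clips are created but come out empty or unplayable, the chosen fourcc is probably not supported by the local OpenCV/FFmpeg build. A quick check (a sketch; it writes a small throwaway file in the current directory):

import cv2

fourcc = cv2.VideoWriter_fourcc(*"mp4v")        # try "XVID" or "avc1" as alternatives
writer = cv2.VideoWriter("codec_test.mp4", fourcc, 20, (640, 480))
print("mp4v writer opened:", writer.isOpened())
writer.release()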