Virtual Stock Trading Game

 """

Virtual Stock Trading Game (Streamlit)


Features:

- Simple username registration/login

- Buy / Sell simulated orders at current market price (via yfinance)

- Portfolio view, transaction history

- Leaderboard by total portfolio value

- SQLite persistence


Run:

    streamlit run virtual_trading_app.py
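
Dependencies (assumed; package names are my guess at the obvious PyPI ones):

    pip install streamlit yfinance pandas altair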

"""


import streamlit as st

import yfinance as yf

import pandas as pd

import sqlite3

from datetime import datetime

import altair as alt

import os


# -----------------------

# Config

# -----------------------

DB_FILE = "trading.db"

STARTING_CASH = 100000.0  # default starting cash for new users


# -----------------------

# Database helpers

# -----------------------

def get_conn():

    conn = sqlite3.connect(DB_FILE, check_same_thread=False)

    return conn


def init_db():

    conn = get_conn()

    cur = conn.cursor()

    cur.execute("""

    CREATE TABLE IF NOT EXISTS users (

        id INTEGER PRIMARY KEY AUTOINCREMENT,

        username TEXT UNIQUE,

        cash REAL,

        created_at TEXT

    )

    """)

    cur.execute("""

    CREATE TABLE IF NOT EXISTS holdings (

        id INTEGER PRIMARY KEY AUTOINCREMENT,

        user_id INTEGER,

        ticker TEXT,

        quantity REAL,

        avg_price REAL,

        FOREIGN KEY(user_id) REFERENCES users(id)

    )

    """)

    cur.execute("""

    CREATE TABLE IF NOT EXISTS transactions (

        id INTEGER PRIMARY KEY AUTOINCREMENT,

        user_id INTEGER,

        ticker TEXT,

        quantity REAL,

        price REAL,

        side TEXT,              -- 'BUY' or 'SELL'

        timestamp TEXT,

        FOREIGN KEY(user_id) REFERENCES users(id)

    )

    """)

    conn.commit()

    conn.close()


def create_user(username, starting_cash=STARTING_CASH):

    conn = get_conn()

    cur = conn.cursor()

    now = datetime.utcnow().isoformat()

    try:

        cur.execute("INSERT INTO users (username, cash, created_at) VALUES (?, ?, ?)",

                    (username, float(starting_cash), now))

        conn.commit()

    except sqlite3.IntegrityError:

        pass

    conn.close()


def get_user(username):

    conn = get_conn()

    cur = conn.cursor()

    cur.execute("SELECT id, username, cash, created_at FROM users WHERE username=?", (username,))

    row = cur.fetchone()

    conn.close()

    if row:

        return {"id": row[0], "username": row[1], "cash": row[2], "created_at": row[3]}

    return None


def update_cash(user_id, new_cash):

    conn = get_conn()

    cur = conn.cursor()

    cur.execute("UPDATE users SET cash=? WHERE id=?", (new_cash, user_id))

    conn.commit()

    conn.close()


def get_holdings(user_id):

    conn = get_conn()

    cur = conn.cursor()

    cur.execute("SELECT ticker, quantity, avg_price FROM holdings WHERE user_id=?", (user_id,))

    rows = cur.fetchall()

    conn.close()

    df = pd.DataFrame(rows, columns=["ticker", "quantity", "avg_price"])

    if df.empty:

        return pd.DataFrame(columns=["ticker", "quantity", "avg_price"])

    return df


def upsert_holding(user_id, ticker, qty_delta, trade_price):

    """

    Add or update holdings:

    - If buying: qty_delta positive -> update quantity and avg_price

    - If selling: qty_delta negative -> reduce quantity; if qty becomes 0 remove row

    """

    conn = get_conn()

    cur = conn.cursor()

    cur.execute("SELECT id, quantity, avg_price FROM holdings WHERE user_id=? AND ticker=?", (user_id, ticker))

    row = cur.fetchone()

    if row:

        hid, qty, avg = row

        new_qty = qty + qty_delta

        if new_qty <= 0.000001:

            cur.execute("DELETE FROM holdings WHERE id=?", (hid,))

        else:

            if qty_delta > 0:

                # new weighted avg: (qty*avg + qty_delta*trade_price) / (qty+qty_delta)

                new_avg = (qty * avg + qty_delta * trade_price) / (qty + qty_delta)

            else:

                new_avg = avg

            cur.execute("UPDATE holdings SET quantity=?, avg_price=? WHERE id=?", (new_qty, new_avg, hid))

    else:

        if qty_delta > 0:

            cur.execute("INSERT INTO holdings (user_id, ticker, quantity, avg_price) VALUES (?,?,?,?)",

                        (user_id, ticker, qty_delta, trade_price))

    conn.commit()

    conn.close()
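
# Worked example of the weighted-average update in upsert_holding (illustrative numbers):
# holding 10 shares at avg 100.0 and buying 10 more at 120.0 gives
# new_avg = (10*100 + 10*120) / (10 + 10) = 110.0; selling leaves avg_price unchanged.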


def record_transaction(user_id, ticker, quantity, price, side):

    conn = get_conn()

    cur = conn.cursor()

    now = datetime.utcnow().isoformat()

    cur.execute("INSERT INTO transactions (user_id, ticker, quantity, price, side, timestamp) VALUES (?, ?, ?, ?, ?, ?)",

                (user_id, ticker, quantity, price, side, now))

    conn.commit()

    conn.close()


def get_transactions(user_id, limit=200):

    conn = get_conn()

    cur = conn.cursor()

    cur.execute("SELECT ticker, quantity, price, side, timestamp FROM transactions WHERE user_id=? ORDER BY id DESC LIMIT ?",

                (user_id, limit))

    rows = cur.fetchall()

    conn.close()

    df = pd.DataFrame(rows, columns=["ticker", "quantity", "price", "side", "timestamp"])

    if df.empty:

        return pd.DataFrame(columns=["ticker", "quantity", "price", "side", "timestamp"])

    return df


def get_leaderboard(top_n=20):

    conn = get_conn()

    cur = conn.cursor()

    cur.execute("SELECT id, username, cash FROM users")

    users = cur.fetchall()

    leaderboard = []

    for uid, username, cash in users:

        # compute portfolio market value

        holdings = get_holdings(uid)

        total = float(cash)

        if not holdings.empty:

            tickers = list(holdings["ticker"].unique())

            market = fetch_market_prices(tickers)

            for _, row in holdings.iterrows():

                t = row["ticker"]

                q = float(row["quantity"])

                price = market.get(t, 0.0)

                total += q * price

        leaderboard.append({"username": username, "total": total})

    conn.close()

    lb = pd.DataFrame(leaderboard).sort_values("total", ascending=False).reset_index(drop=True)

    return lb.head(top_n)


# -----------------------

# Market helpers (yfinance)

# -----------------------

def fetch_price(ticker):

    """

    Return latest price (use fast yfinance call). If ticker invalid, raises.

    """

    try:

        t = yf.Ticker(ticker)

        price = None

        # try the latest intraday bar first, then fall back to the slower .info lookup

        quote = t.history(period="1d", interval="1m")

        if not quote.empty:

            price = quote["Close"].iloc[-1]

        else:

            info = t.info

            price = info.get("regularMarketPrice")

        if price is None:

            raise ValueError("Price not available")

        return float(price)

    except Exception as e:

        raise ValueError(f"Could not fetch price for {ticker}: {e}")


def fetch_market_prices(tickers):

    """

    Bulk fetch latest closing prices for a list of tickers using yfinance download -> faster.

    Returns dict ticker -> price

    """

    out = {}

    if not tickers:

        return out

    # yfinance can handle list

    try:

        df = yf.download(tickers, period="1d", interval="1m", progress=False)

        # df['Close'] may be single or multi-column

        if isinstance(df.columns, pd.MultiIndex):

            closes = df['Close'].iloc[-1]

            for t in tickers:

                try:

                    out[t] = float(closes[t])

                except Exception:

                    out[t] = 0.0

        else:

            out[tickers[0]] = float(df['Close'].iloc[-1])

    except Exception:

        # fallback to single fetch

        for t in tickers:

            try:

                out[t] = fetch_price(t)

            except Exception:

                out[t] = 0.0

    return out
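
# Note: fetch_price and fetch_market_prices hit yfinance on every Streamlit rerun.
# Wrapping them with @st.cache_data(ttl=60) is an optional way to cut repeated
# network calls (assumes a reasonably recent Streamlit version).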


# -----------------------

# Trading logic

# -----------------------

def attempt_buy(user, ticker, quantity):

    """

    Attempt to buy `quantity` shares at current price. Returns (success, message).

    """

    try:

        qty = float(quantity)

        if qty <= 0:

            return False, "Quantity must be > 0"

    except (TypeError, ValueError):

        return False, "Invalid quantity"


    try:

        price = fetch_price(ticker)

    except Exception as e:

        return False, f"Price fetch error: {e}"


    cost = qty * price

    if cost > user["cash"] + 1e-9:

        return False, f"Insufficient funds: need {cost:.2f}, available {user['cash']:.2f}"


    # perform transaction

    new_cash = float(user["cash"]) - cost

    update_cash(user["id"], new_cash)

    upsert_holding(user["id"], ticker.upper(), qty, price)

    record_transaction(user["id"], ticker.upper(), qty, price, "BUY")

    # refresh user

    return True, f"Bought {qty} shares of {ticker.upper()} at {price:.2f} (cost {cost:.2f})"


def attempt_sell(user, ticker, quantity):

    try:

        qty = float(quantity)

        if qty <= 0:

            return False, "Quantity must be > 0"

    except (TypeError, ValueError):

        return False, "Invalid quantity"


    holdings = get_holdings(user["id"])

    if holdings.empty or ticker.upper() not in list(holdings["ticker"].str.upper()):

        return False, "No holdings for this ticker"


    row = holdings[holdings["ticker"].str.upper() == ticker.upper()].iloc[0]

    owned = float(row["quantity"])

    if qty > owned + 1e-9:

        return False, f"Not enough shares to sell (owned {owned})"


    try:

        price = fetch_price(ticker)

    except Exception as e:

        return False, f"Price fetch error: {e}"


    proceeds = qty * price

    new_cash = float(get_user(user["username"])["cash"]) + proceeds

    update_cash(user["id"], new_cash)

    upsert_holding(user["id"], ticker.upper(), -qty, price)

    record_transaction(user["id"], ticker.upper(), qty, price, "SELL")

    return True, f"Sold {qty} shares of {ticker.upper()} at {price:.2f} (proceeds {proceeds:.2f})"


# -----------------------

# UI

# -----------------------

def login_ui():

    st.sidebar.header("Player Login / Register")

    username = st.sidebar.text_input("Enter username", key="login_username")

    if st.sidebar.button("Login / Register"):

        if not username.strip():

            st.sidebar.error("Please enter a username")

            return None

        create_user(username.strip())

        user = get_user(username.strip())

        st.session_state["user"] = user

        st.sidebar.success(f"Logged in as {user['username']}")

        return user

    return None


def main_app(user):

    st.title("šŸ“ˆ Virtual Stock Trading Game")

    st.write("**Simulation only — not financial advice.**")

    st.markdown("---")


    # show user summary

    col1, col2 = st.columns([2,1])

    with col1:

        st.subheader(f"Hello, {user['username']} šŸ‘‹")

        st.write(f"**Cash:** ${user['cash']:.2f}")

        holdings = get_holdings(user["id"])

        if holdings.empty:

            st.info("You have no holdings yet. Search a ticker and buy to get started.")

        else:

            st.write("Your holdings:")

            # Fetch market prices for tickers

            tickers = list(holdings["ticker"].unique())

            market = fetch_market_prices([t for t in tickers])

            holdings_display = holdings.copy()

            holdings_display["market_price"] = holdings_display["ticker"].apply(lambda t: market.get(t, 0.0))

            holdings_display["market_value"] = holdings_display["quantity"] * holdings_display["market_price"]

            holdings_display["unreal_pnl"] = holdings_display["market_value"] - holdings_display["quantity"] * holdings_display["avg_price"]

            st.dataframe(holdings_display.style.format({"quantity":"{:.3f}", "avg_price":"{:.2f}", "market_price":"{:.2f}", "market_value":"{:.2f}", "unreal_pnl":"{:.2f}"}), use_container_width=True)

            total_market = holdings_display["market_value"].sum()

            st.write(f"Total holdings market value: ${total_market:.2f}")


    with col2:

        st.subheader("Leaderboard")

        lb = get_leaderboard()

        if lb.empty:

            st.write("No players yet.")

        else:

            st.table(lb.head(10).style.format({"total":"${:,.2f}"}))


    st.markdown("---")

    # Trading panel

    st.header("Trade")

    tcol1, tcol2 = st.columns(2)

    with tcol1:

        ticker = st.text_input("Ticker (e.g., AAPL)", key="trade_ticker")

        qty = st.number_input("Quantity", min_value=0.0, value=1.0, step=1.0, key="trade_qty")

    with tcol2:

        if st.button("Fetch Price"):

            try:

                price = fetch_price(ticker)

                st.success(f"Price for {ticker.upper()}: ${price:.2f}")

            except Exception as e:

                st.error(str(e))


        if st.button("Buy"):

            if not ticker:

                st.error("Enter ticker")

            else:

                ok, msg = attempt_buy(user, ticker, qty)

                if ok:

                    st.success(msg)

                    # refresh user object

                    st.session_state["user"] = get_user(user["username"])

                else:

                    st.error(msg)


        if st.button("Sell"):

            if not ticker:

                st.error("Enter ticker")

            else:

                ok, msg = attempt_sell(user, ticker, qty)

                if ok:

                    st.success(msg)

                    st.session_state["user"] = get_user(user["username"])

                else:

                    st.error(msg)


    st.markdown("---")

    # Transaction history and portfolio chart

    st.header("Transaction History & Portfolio Value")

    tx = get_transactions(user["id"], limit=500)

    st.subheader("Recent Transactions")

    if tx.empty:

        st.info("No transactions yet.")

    else:

        st.dataframe(tx, use_container_width=True)


    # Portfolio snapshot: a true value-over-time chart would need historical prices

    # at each transaction timestamp, so to avoid heavy API calls we only chart the

    # current market value of each holding (shown once the user has traded).

    st.subheader("Current Portfolio Value by Holding")

    conn = get_conn()

    q = conn.cursor()

    q.execute("SELECT timestamp, ticker, quantity, price, side FROM transactions WHERE user_id=? ORDER BY id ASC", (user["id"],))

    rows = q.fetchall()

    conn.close()

    if rows:

        holdings_now = get_holdings(user["id"])

        if not holdings_now.empty:

            prices = fetch_market_prices(list(holdings_now["ticker"].unique()))

            holdings_now["market_price"] = holdings_now["ticker"].apply(lambda t: prices.get(t, 0.0))

            holdings_now["market_value"] = holdings_now["quantity"] * holdings_now["market_price"]

            chart_df = holdings_now[["ticker","market_value"]]

            chart_df = chart_df.rename(columns={"market_value":"value"})

            st.write("Current holdings market values:")

            st.dataframe(holdings_now)

            chart = alt.Chart(chart_df).mark_bar().encode(x="ticker", y="value")

            st.altair_chart(chart, use_container_width=True)


    st.markdown("---")

    st.sidebar.markdown("## Player Actions")

    if st.sidebar.button("Refresh Data"):

        st.session_state["user"] = get_user(user["username"])

        st.rerun()  # use st.experimental_rerun() on older Streamlit versions


    if st.sidebar.button("Log out"):

        st.session_state.pop("user", None)

        st.rerun()


# -----------------------

# App entrypoint

# -----------------------

def main():

    st.set_page_config(page_title="Virtual Stock Trading Game", layout="wide")

    init_db()


    st.sidebar.title("Virtual Trading")

    user = st.session_state.get("user", None)

    if not user:

        ui_user = login_ui()

        if ui_user:

            user = ui_user

    else:

        # refresh user data from DB

        user = get_user(user["username"])

        st.session_state["user"] = user


    if user:

        main_app(user)

    else:

        st.title("Welcome to the Virtual Stock Trading Game")

        st.write("Create a username in the left panel to start. You'll receive some starting cash to practice trading.")

        st.info("This app uses real market prices via yfinance but only simulates trades with fake money.")


if __name__ == "__main__":

    main()

AI Meeting Scheduler Bot
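
Assumed dependencies (my best guess at the PyPI package names; install before running):

    pip install flask google-auth google-auth-oauthlib google-api-python-client python-dateutil pytz nltk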

import os

import json

from datetime import datetime, timedelta, time as dtime

from dateutil import parser as dateparse

import pytz

import re


from flask import Flask, redirect, url_for, session, request, render_template_string, flash

from google.oauth2.credentials import Credentials

from google_auth_oauthlib.flow import Flow

from googleapiclient.discovery import build


import nltk

nltk.download("punkt")  # tokenizer data; nltk is not otherwise used in this demo


# ---------- Config ----------

CLIENT_SECRETS_FILE = "credentials.json"   # downloaded from Google Cloud

SCOPES = ["https://www.googleapis.com/auth/calendar.readonly",

          "https://www.googleapis.com/auth/userinfo.email", "openid"]  # identity scopes so /oauth2callback can read the user's email

TOKEN_FOLDER = "tokens"

if not os.path.exists(TOKEN_FOLDER):

    os.makedirs(TOKEN_FOLDER)


# Flask config

app = Flask(__name__)

app.secret_key = os.environ.get("FLASK_SECRET", "dev-secret")  # change in prod

# Make sure redirect URI in Cloud Console matches this

os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"  # only for local dev


# ---------- Helpers ----------

def token_path_for_email(email):

    safe = email.replace("@", "_at_").replace(".", "_dot_")

    return os.path.join(TOKEN_FOLDER, f"token_{safe}.json")


def save_credentials(creds: Credentials, email: str):

    p = token_path_for_email(email)

    with open(p, "w") as f:

        f.write(creds.to_json())


def load_credentials(email: str):

    p = token_path_for_email(email)

    if not os.path.exists(p):

        return None

    with open(p, "r") as f:

        data = json.load(f)

    return Credentials.from_authorized_user_info(data, SCOPES)


def create_flow(state=None):

    return Flow.from_client_secrets_file(

        CLIENT_SECRETS_FILE,

        scopes=SCOPES,

        state=state,

        redirect_uri=url_for("oauth2callback", _external=True)

    )


# ---------- NLP for simple preference parsing ----------

def parse_natural_preferences(text):

    """

    Very lightweight preference extraction:

    - looks for 'morning', 'afternoon', 'evening', 'tomorrow', 'next week', 'this week'

    - returns bias window (start_hour, end_hour) and date-range hints

    """

    text = text.lower()

    prefs = {"hours": None, "date_hint": None}

    if re.search(r"\bmorn(ing)?\b", text):

        prefs["hours"] = (8, 12)

    elif re.search(r"\bafternoon\b", text):

        prefs["hours"] = (13, 17)

    elif re.search(r"\bevening\b", text):

        prefs["hours"] = (17, 21)

    # dates

    if "tomorrow" in text:

        prefs["date_hint"] = ("tomorrow", 1)

    elif "next week" in text:

        prefs["date_hint"] = ("next_week", 7)

    elif "this week" in text:

        prefs["date_hint"] = ("this_week", 0)

    # specific dates (try parsing)

    found_dates = re.findall(r"\b(?:on\s)?([A-Za-z]{3,9}\s+\d{1,2}(?:st|nd|rd|th)?(?:,\s*\d{4})?)\b", text)

    if found_dates:

        # take first parseable date

        try:

            d = dateparse.parse(found_dates[0])

            prefs["explicit_date"] = d.date().isoformat()

        except Exception:

            pass

    return prefs
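
# Illustrative behaviour: parse_natural_preferences("tomorrow morning works for me")
# returns {"hours": (8, 12), "date_hint": ("tomorrow", 1)}.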


# ---------- Availability logic ----------

def query_freebusy(service, calendar_ids, start_dt, end_dt, timezone="UTC"):

    body = {

        "timeMin": start_dt.isoformat(),

        "timeMax": end_dt.isoformat(),

        "items": [{"id": cid} for cid in calendar_ids]

    }

    resp = service.freebusy().query(body=body).execute()

    busy = {}

    for cal_id, cal_data in resp["calendars"].items():

        busy[cal_id] = cal_data.get("busy", [])

    return busy


def invert_busy_to_free(busy_intervals, start_dt, end_dt, min_slot_minutes=30):

    """

    Given busy intervals (list of {"start": iso, "end": iso}), return free intervals between start_dt and end_dt.

    """

    tz = pytz.UTC

    # Merge busy and compute free windows

    intervals = []

    for b in busy_intervals:

        s = dateparse.parse(b["start"]).astimezone(tz)

        e = dateparse.parse(b["end"]).astimezone(tz)

        intervals.append((s, e))

    # sort and merge overlaps

    intervals.sort()

    merged = []

    for s,e in intervals:

        if not merged:

            merged.append([s,e])

        else:

            if s <= merged[-1][1]:

                if e > merged[-1][1]:

                    merged[-1][1] = e

            else:

                merged.append([s,e])

    free_windows = []

    cur = start_dt

    for s,e in merged:

        if s > cur:

            if (s - cur).total_seconds() / 60 >= min_slot_minutes:

                free_windows.append((cur, s))

        if e > cur:

            cur = e

    if end_dt > cur:

        if (end_dt - cur).total_seconds() / 60 >= min_slot_minutes:

            free_windows.append((cur, end_dt))

    return free_windows


def intersect_free_lists(list_of_free_lists, meeting_duration_minutes=30):

    """

    Each free list is a list of (start, end) windows. We want intersections across calendars and then break into slots of meeting_duration_minutes.

    Very simple sweep approach.

    """

    # flatten all interval endpoints with tags

    events = []

    for free_list in list_of_free_lists:

        for s,e in free_list:

            events.append((s, 1))

            events.append((e, -1))

    # sort by time

    events.sort()

    needed = len(list_of_free_lists)

    cur_count = 0

    last_time = None

    intersections = []

    for t, delta in events:

        prev = cur_count

        cur_count += delta

        if prev < needed and cur_count == needed:

            # interval started

            last_time = t

        elif prev == needed and cur_count < needed and last_time is not None:

            # interval ended at t

            intersections.append((last_time, t))

            last_time = None

    # Break intersections into meeting_duration-sized slots

    slots = []

    for s,e in intersections:

        start = s

        while start + timedelta(minutes=meeting_duration_minutes) <= e:

            slot_end = start + timedelta(minutes=meeting_duration_minutes)

            slots.append((start, slot_end))

            start = start + timedelta(minutes=meeting_duration_minutes)  # non-overlapping contiguous slots

    return slots
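
# Illustrative: if every calendar is free 10:00-11:30 UTC and the meeting length is
# 30 minutes, the slots produced are 10:00-10:30, 10:30-11:00, and 11:00-11:30.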


# ---------- Flask routes ----------

INDEX_HTML = """

<!doctype html>

<title>AI Meeting Scheduler Bot</title>

<h2>AI Meeting Scheduler Bot — Demo</h2>

<p>1) Authorize yourself (and any other calendar accounts you own) via Google OAuth.</p>

<p>2) Add participant calendar emails (must have given access or be your own authorized accounts).</p>

<form action="/suggest" method="post">

  <label>Participant emails (comma separated):</label><br>

  <input type="text" name="emails" size="60"><br><br>

  <label>Meeting duration (minutes):</label>

  <input type="number" name="duration" value="30"><br><br>

  <label>Search days ahead (default 7):</label>

  <input type="number" name="days" value="7"><br><br>

  <label>Optional email/preference text (paste):</label><br>

  <textarea name="pref" rows="4" cols="80"></textarea><br><br>

  <button type="submit">Suggest slots</button>

</form>

<hr>

<p>To authorize a calendar, go to <a href="/authorize">/authorize</a>, sign in and allow calendar access. The app will remember your token locally.</p>

"""


@app.route("/")

def index():

    return render_template_string(INDEX_HTML)


@app.route("/authorize")

def authorize():

    # start OAuth flow - will ask user for email after consent

    flow = create_flow()

    auth_url, state = flow.authorization_url(prompt="consent", access_type="offline", include_granted_scopes="true")

    session["flow_state"] = state

    return redirect(auth_url)


@app.route("/oauth2callback")

def oauth2callback():

    state = session.get("flow_state", None)

    flow = create_flow(state=state)

    flow.fetch_token(authorization_response=request.url)

    creds = flow.credentials

    # get email of the authenticated user via token info

    service = build("oauth2", "v2", credentials=creds)

    try:

        info = service.userinfo().get().execute()

        email = info.get("email")

    except Exception:

        # fallback: ask user to input an identifier; but for demo we assume success

        email = creds.token_uri or "unknown"

    # save credentials

    save_credentials(creds, email)

    return f"Authorized for {email}. You can now close this tab and return to the app (Home)."


@app.route("/suggest", methods=["POST"])

def suggest():

    emails_raw = request.form.get("emails", "")

    duration = int(request.form.get("duration", "30"))

    days = int(request.form.get("days", "7"))

    pref_text = request.form.get("pref", "")


    # parse emails

    emails = [e.strip() for e in emails_raw.split(",") if e.strip()]

    if not emails:

        return "Please provide at least one participant email (your authorized account or someone who shared calendar)."


    # load credentials for each email (must have tokens saved)

    creds_for = {}

    for e in emails:

        creds = load_credentials(e)

        if creds is None:

            return f"No token found for {e}. Please authorize that account (visit /authorize and sign in with that email)."

        creds_for[e] = creds


    # timezone & date range (use UTC for simplicity, better: detect user's tz)

    tz = pytz.UTC

    now = datetime.now(tz)

    start_dt = now + timedelta(hours=1)  # start searching from +1 hour

    end_dt = now + timedelta(days=days)


    # parse preferences

    prefs = parse_natural_preferences(pref_text)

    # adjust hours if prefs provided

    if prefs.get("hours"):

        pref_start_hour, pref_end_hour = prefs["hours"]

    else:

        pref_start_hour, pref_end_hour = 9, 17  # default business hours


    # Free/busy strategy: Google's freebusy endpoint can query several calendars in one

    # call, but only if the caller's credentials have access to all of them. For this

    # demo, each account uses its own credentials to query its own 'primary' calendar.

    list_free_lists = []

    for e, creds in creds_for.items():

        service = build("calendar", "v3", credentials=creds)

        # use 'primary' for that account

        cal_id = "primary"

        # Query busy for that calendar

        body = {

            "timeMin": start_dt.isoformat(),

            "timeMax": end_dt.isoformat(),

            "items": [{"id": cal_id}]

        }

        resp = service.freebusy().query(body=body).execute()

        busy = resp["calendars"][cal_id].get("busy", [])

        free = invert_busy_to_free(busy, start_dt, end_dt, min_slot_minutes=duration)

        # apply daily hours restriction: cut free windows to business hours or prefs

        filtered_free = []

        for free_start, free_end in free:  # renamed to avoid shadowing the email loop variable

            # slice the free window into days and keep only time within pref hours

            ptr = free_start

            while ptr < free_end:

                day_end = (ptr.replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(days=1))

                seg_end = min(free_end, day_end)

                # define allowed window for this day in tz

                allowed_start = ptr.replace(hour=pref_start_hour, minute=0, second=0, microsecond=0)

                allowed_end = ptr.replace(hour=pref_end_hour, minute=0, second=0, microsecond=0)

                # clamp allowed_start to ptr/day start/...

                window_start = max(ptr, allowed_start)

                window_end = min(seg_end, allowed_end)

                if window_end > window_start and (window_end - window_start).total_seconds()/60 >= duration:

                    filtered_free.append((window_start, window_end))

                ptr = seg_end

        list_free_lists.append(filtered_free)


    # intersect free lists

    candidate_slots = intersect_free_lists(list_free_lists, meeting_duration_minutes=duration)

    # Format result: show top 20 slots

    candidate_slots = sorted(candidate_slots)[:20]


    # Render simple HTML response

    out = "<h2>Suggested Meeting Slots (UTC)</h2><ol>"

    for s,e in candidate_slots:

        out += f"<li>{s.isoformat()} → {e.isoformat()}</li>"

    out += "</ol>"

    if not candidate_slots:

        out += "<p><b>No common slots found in that range & preferences. Try increasing days or changing hours.</b></p>"

    out += '<p><a href="/">Back</a></p>'

    return out


if __name__ == "__main__":

    app.run(debug=True)


Dynamic QR Code Generator

Project Structure

dynamic_qr/

├── app.py

├── templates/

│   ├── index.html

│   ├── qr_display.html

│   ├── update.html

└── qr_data.db
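
Assumed dependencies (package names are a guess, not given in the original):

    pip install flask qrcode pillow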



 app.py

from flask import Flask, render_template, request, redirect, url_for

import qrcode

import sqlite3

import io, base64, hashlib


app = Flask(__name__)

DB = 'qr_data.db'


# --- Initialize Database ---

def init_db():

    conn = sqlite3.connect(DB)

    c = conn.cursor()

    c.execute('''CREATE TABLE IF NOT EXISTS qr_links (

                    id INTEGER PRIMARY KEY AUTOINCREMENT,

                    code TEXT UNIQUE,

                    target_url TEXT

                )''')

    conn.commit()

    conn.close()


init_db()


# --- Generate Dynamic QR ---

@app.route('/', methods=['GET', 'POST'])

def index():

    if request.method == 'POST':

        link = request.form['link']

        code = hashlib.sha256(link.encode()).hexdigest()[:6]  # short, stable code (str(hash()) differs between runs)

        short_url = request.host_url + "r/" + code


        conn = sqlite3.connect(DB)

        c = conn.cursor()

        c.execute("INSERT OR REPLACE INTO qr_links (code, target_url) VALUES (?,?)", (code, link))

        conn.commit()

        conn.close()


        # Generate QR

        img = qrcode.make(short_url)

        buf = io.BytesIO()

        img.save(buf, format='PNG')

        qr_base64 = base64.b64encode(buf.getvalue()).decode('utf-8')


        return render_template('qr_display.html', qr_img=qr_base64, short_url=short_url)

    return render_template('index.html')



# --- Redirect Handler ---

@app.route('/r/<code>')

def redirect_qr(code):

    conn = sqlite3.connect(DB)

    c = conn.cursor()

    c.execute("SELECT target_url FROM qr_links WHERE code=?", (code,))

    row = c.fetchone()

    conn.close()

    if row:

        return redirect(row[0])

    return "QR link not found!", 404



# --- Update Target URL ---

@app.route('/update', methods=['GET', 'POST'])

def update_qr():

    if request.method == 'POST':

        code = request.form['code']

        new_link = request.form['new_link']

        conn = sqlite3.connect(DB)

        c = conn.cursor()

        c.execute("UPDATE qr_links SET target_url=? WHERE code=?", (new_link, code))

        conn.commit()

        conn.close()

        return "Updated successfully!"

    return render_template('update.html')



if __name__ == "__main__":

    app.run(debug=True)

templates/index.html

<!DOCTYPE html>
<html>
<head>
    <title>Dynamic QR Generator</title>
</head>
<body>
    <h2> Generate Dynamic QR Code</h2>
    <form method="POST">
        <input type="text" name="link" placeholder="Enter destination URL" required>
        <button type="submit">Generate QR</button>
    </form>
    <br>
    <a href="/update">Update existing QR link</a>
</body>
</html>

templates/qr_display.html

<!DOCTYPE html>
<html>
<head><title>QR Generated</title></head>
<body>
    <h2> Your Dynamic QR Code</h2>
    <img src="data:image/png;base64,{{ qr_img }}" alt="QR Code"><br>
    <p>Share this link: <b>{{ short_url }}</b></p>
    <a href="/">Generate another</a>
</body>
</html>
templates/update.html

<!DOCTYPE html>
<html>
<head><title>Update QR Link</title></head>
<body>
    <h2>Update QR Destination</h2>
    <form method="POST">
        <input type="text" name="code" placeholder="Enter QR code ID" required><br><br>
        <input type="text" name="new_link" placeholder="Enter new destination URL" required><br><br>
        <button type="submit">Update</button>
    </form>
</body>
</html>

Smart Resume Gap Detector

pip install spacy pandas dateparser

python -m spacy download en_core_web_sm


import re

import spacy

import pandas as pd

import dateparser

from datetime import datetime

from pathlib import Path


# Optional PDF reader (only if you want to support .pdf)

try:

    import fitz  # PyMuPDF

except ImportError:

    fitz = None


nlp = spacy.load("en_core_web_sm")


# ----------------------------

# Skill upgrade recommendations

# ----------------------------

SKILL_UPGRADE_MAP = {

    "developer": ["AI/ML fundamentals", "Cloud platforms (AWS, Azure, GCP)", "DevOps basics"],

    "data": ["Data visualization (Power BI, Tableau)", "SQL optimization", "Machine learning pipelines"],

    "designer": ["Figma advanced", "UI motion design", "UX research"],

    "manager": ["Agile certification (Scrum)", "People analytics", "Data-driven decision-making"],

    "analyst": ["Data storytelling", "Python for data", "Business intelligence tools"],

    "tester": ["Automation (Selenium, Cypress)", "Performance testing", "API testing"],

    "student": ["Internships", "Portfolio projects", "Personal GitHub projects"],

}



# ----------------------------

# Helper functions

# ----------------------------


def extract_text(file_path):

    """Extracts text from .txt or .pdf resume"""

    p = Path(file_path)

    if not p.exists():

        raise FileNotFoundError(p)

    if p.suffix.lower() == ".pdf" and fitz:

        doc = fitz.open(file_path)

        text = " ".join([page.get_text("text") for page in doc])

        return text

    elif p.suffix.lower() == ".txt":

        return p.read_text(encoding="utf-8")

    else:

        raise ValueError("Please provide a .txt or .pdf file")



def extract_date_ranges(text):

    """Finds date ranges in resume text"""

    # Match patterns like: Jan 2018 - Mar 2020, 2015–2017, July 2019 to Present, etc.

    pattern = r"([A-Za-z]{3,9}\s*\d{4}|\d{4})\s*(?:-|to|–|—)\s*(Present|[A-Za-z]{3,9}\s*\d{4}|\d{4})"

    matches = re.findall(pattern, text, flags=re.IGNORECASE)

    

    date_pairs = []

    for start, end in matches:

        start_date = dateparser.parse(start)

        end_date = datetime.now() if re.search("present", end, re.I) else dateparser.parse(end)

        if start_date and end_date:

            date_pairs.append((start_date.date(), end_date.date()))

    return date_pairs
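
# Illustrative: "Jan 2018 - Mar 2020" is matched and parsed into a (start, end) pair of
# dates in January 2018 and March 2020 (dateparser fills in the missing day of month).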



def detect_jobs(text):

    """Extracts potential job titles using NLP entities + heuristics"""

    doc = nlp(text)

    job_titles = []

    for ent in doc.ents:

        if ent.label_ in ["ORG", "WORK_OF_ART"]:

            continue

        # Common title indicators

        if re.search(r"(developer|engineer|manager|designer|analyst|intern|tester|consultant|officer)", ent.text, re.I):

            job_titles.append(ent.text.strip())

    return list(set(job_titles))



def calculate_gaps(date_pairs):

    """Find time gaps between consecutive jobs"""

    if not date_pairs:

        return []

    date_pairs = sorted(date_pairs, key=lambda x: x[0])

    gaps = []

    for i in range(1, len(date_pairs)):

        prev_end = date_pairs[i - 1][1]

        curr_start = date_pairs[i][0]

        gap_days = (curr_start - prev_end).days

        if gap_days > 60:  # > 2 months considered a gap

            gaps.append({

                "gap_start": prev_end,

                "gap_end": curr_start,

                "gap_months": round(gap_days / 30.4, 1)

            })

    return gaps
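
# Illustrative: a job ending 2019-06-15 followed by one starting 2020-01-10 is a
# 209-day break, reported as a gap of about 6.9 months (209 / 30.4, rounded).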



def suggest_skills(jobs):

    """Suggest skills based on last known job title"""

    if not jobs:

        return SKILL_UPGRADE_MAP["student"]

    last_job = jobs[-1].lower()

    for key, recs in SKILL_UPGRADE_MAP.items():

        if key in last_job:

            return recs

    return ["Explore AI basics", "Cloud fundamentals", "Soft skill enhancement"]



# ----------------------------

# Main pipeline

# ----------------------------


def analyze_resume(file_path):

    text = extract_text(file_path)


    date_pairs = extract_date_ranges(text)

    jobs = detect_jobs(text)

    gaps = calculate_gaps(date_pairs)

    skill_recs = suggest_skills(jobs)


    print("\n Analyzing Resume:", file_path)

    print("=" * 60)

    print(f" Detected job titles: {', '.join(jobs) if jobs else 'None found'}")

    print(f" Work periods found: {len(date_pairs)}")


    if gaps:

        print("\n Career Gaps Detected:")

        for g in gaps:

            print(f"   - {g['gap_start']} → {g['gap_end']} ({g['gap_months']} months)")

    else:

        print("\n No significant gaps detected.")


    print("\n Skill Upgrade Suggestions:")

    for s in skill_recs:

        print("   •", s)


    # Optional: return structured result

    result = {

        "jobs": jobs,

        "dates": date_pairs,

        "gaps": gaps,

        "suggestions": skill_recs

    }

    return result



# ----------------------------

# Run Example

# ----------------------------

if __name__ == "__main__":

    import argparse

    parser = argparse.ArgumentParser(description="Smart Resume Gap Detector")

    parser.add_argument("resume_file", help="Path to resume (.txt or .pdf)")

    args = parser.parse_args()


    analyze_resume(args.resume_file)


AI Whiteboard Digitizer

#!/usr/bin/env python3

"""

AI Whiteboard Digitizer (prototype)


Usage:

    python whiteboard_digitizer.py input_image.jpg


Outputs:

 - ocr_texts.txt          : OCR'd text lines (raw)

 - equations_latex.tex    : LaTeX for parseable math expressions

 - diagram.svg (or diagram.png) : vector-like rendering of detected lines/circles

 - several debug images in ./debug_*.png
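
Assumed dependencies (PyPI names are a guess; the Tesseract OCR engine itself must be installed separately):

    pip install opencv-python numpy pillow pytesseract sympy svgwrite matplotlib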

"""


import sys

import os

import cv2

import numpy as np

from PIL import Image

import pytesseract

from sympy import sympify, latex

from sympy.core.sympify import SympifyError

import svgwrite

import matplotlib.pyplot as plt


# If on Windows and tesseract is not in PATH, set path here (uncomment and adjust)

# pytesseract.pytesseract.tesseract_cmd = r"C:\Program Files\Tesseract-OCR\tesseract.exe"


# ---------- Utilities ----------

def ensure_dir(path):

    if not os.path.exists(path):

        os.makedirs(path)


# Preprocess: grayscale, denoise, adaptive threshold, deskew

def preprocess_image(img_bgr, max_dim=1600):

    # Resize to manageable size, keep aspect

    h, w = img_bgr.shape[:2]

    scale = min(1.0, float(max_dim) / max(h, w))

    if scale != 1.0:

        img_bgr = cv2.resize(img_bgr, (int(w*scale), int(h*scale)), interpolation=cv2.INTER_AREA)

    gray = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2GRAY)

    # Denoise

    gray = cv2.fastNlMeansDenoising(gray, None, 10, 7, 21)

    # Bilateral to preserve edges

    gray = cv2.bilateralFilter(gray, 9, 75, 75)

    # Adaptive threshold (whiteboard: dark text on light background)

    th = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C,

                               cv2.THRESH_BINARY_INV, 25, 12)

    # Deskew based on largest text contours or Hough lines

    coords = np.column_stack(np.where(th > 0))

    if coords.shape[0] > 0:

        angle = cv2.minAreaRect(coords)[-1]

        # Correction of angle

        if angle < -45:

            angle = -(90 + angle)

        else:

            angle = -angle

        # Rotate

        (h2, w2) = gray.shape[:2]

        M = cv2.getRotationMatrix2D((w2//2, h2//2), angle, 1.0)

        gray = cv2.warpAffine(gray, M, (w2, h2), flags=cv2.INTER_CUBIC, borderMode=cv2.BORDER_REPLICATE)

        th = cv2.warpAffine(th, M, (w2, h2), flags=cv2.INTER_CUBIC, borderMode=cv2.BORDER_REPLICATE)

    return gray, th


# Find text boxes using MSER or connected components (we'll use morphological dilation + contours)

def detect_text_regions(thresh_img, min_area=200, debug_out=None):

    # dilate to join letters into words/lines

    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (15, 5))

    dil = cv2.dilate(thresh_img, kernel, iterations=2)

    contours, _ = cv2.findContours(dil, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

    boxes = []

    for cnt in contours:

        x, y, w, h = cv2.boundingRect(cnt)

        if w*h < min_area:

            continue

        # filter very tall or very wide noise

        if h < 10 or w < 20:

            continue

        boxes.append((x, y, w, h))

    # sort top to bottom, left to right

    boxes = sorted(boxes, key=lambda b: (b[1], b[0]))

    if debug_out is not None:

        vis = cv2.cvtColor(thresh_img, cv2.COLOR_GRAY2BGR)

        for (x,y,w,h) in boxes:

            cv2.rectangle(vis, (x,y), (x+w,y+h), (0,255,0), 2)

        cv2.imwrite(debug_out, vis)

    return boxes


# OCR each region (use appropriate psm)

def ocr_regions(gray_img, boxes, ocr_lang='eng'):

    lines = []

    for (x, y, w, h) in boxes:

        pad = 4

        x0 = max(0, x-pad)

        y0 = max(0, y-pad)

        x1 = min(gray_img.shape[1], x+w+pad)

        y1 = min(gray_img.shape[0], y+h+pad)

        crop = gray_img[y0:y1, x0:x1]

        # increase contrast and invert if necessary

        # Convert to PIL for pytesseract

        pil = Image.fromarray(crop)

        # Tesseract config: treat as a single line or single block

        config = "--psm 7"  # treat as a single text line (good for equations on a line)

        text = pytesseract.image_to_string(pil, lang=ocr_lang, config=config)

        text = text.strip()

        if text:

            lines.append({'box': (x0,y0,x1,y1), 'text': text})

    return lines


# Heuristic to check if a line looks like an equation/expression

def looks_like_equation(s):

    # Accept digits, letters, operators and = ^ / * + - parentheses, fractions-like '/', greek? etc.

    import re

    s2 = s.replace(' ', '')

    # Must have at least one operator or equal sign or variable

    if re.search(r'[=\+\-\*/\^]', s2):

        return True

    # Or something like 'lim', 'sin', 'cos' etc.

    if re.search(r'\b(sin|cos|tan|log|ln|lim|sqrt)\b', s.lower()):

        return True

    # Or presence of digits next to letters (like 2x or x2)

    if re.search(r'\d+[a-zA-Z]|[a-zA-Z]\d+', s):

        return True

    return False


# Clean OCR text for sympy: replace common OCR artifacts

def clean_ocr_for_sympy(s):

    # Basic replacements; adapt as needed

    repl = {

        '×': '*',

        'X': 'x',

        '—': '-',

        '−': '-',

        '–': '-',

        '÷': '/',

        '’': "'",

        '‘': "'",

        '“': '"',

        '”': '"',

        'O': '0',  # risky: replaces every 'O', even when it is the letter rather than a zero

    }

    out = s

    # Remove stray non-ascii except math symbols

    for k,v in repl.items():

        out = out.replace(k, v)

    # convert caret exponents to Python/sympy syntax: e.g., x^2 -> x**2

    out = out.replace('^', '**')

    # Remove weird characters

    allowed = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ+-*/=().,** _^"

    # keep letters and mathematical common symbols; but avoid removing letters like greek

    # Simpler: strip only control characters

    out = ''.join(ch for ch in out if (ch.isprintable()))

    out = out.strip()

    return out


# Try to parse with sympy and produce latex

def parse_equation_to_latex(s):

    s_clean = clean_ocr_for_sympy(s)

    # If contains '=' treat as equation; else expression

    try:

        if '=' in s_clean:

            # sympy's Eq expects left and right; split on first '='

            left, right = s_clean.split('=', 1)

            eleft = sympify(left)

            eright = sympify(right)

            eq = eleft - eright  # expression equal zero

            # represent as LaTeX equation

            latex_str = latex(eleft) + " = " + latex(eright)

        else:

            expr = sympify(s_clean)

            latex_str = latex(expr)

        return latex_str, None

    except SympifyError as e:

        return None, f"SympifyError: {e}"

    except Exception as e:

        return None, str(e)
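
# Illustrative: parse_equation_to_latex("x^2 + 2*x = 8") cleans the text to
# "x**2 + 2*x = 8" and returns LaTeX along the lines of ("x^{2} + 2 x = 8", None);
# text that sympy cannot parse comes back as (None, <error message>).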


# Detect simple geometric primitives (lines via Hough, circles via HoughCircles)

def detect_shapes(gray_img, debug_prefix=None):

    # Use edge detection

    edges = cv2.Canny(gray_img, 50, 150, apertureSize=3)

    # Hough lines

    lines = cv2.HoughLinesP(edges, 1, np.pi/180, threshold=80, minLineLength=50, maxLineGap=10)

    line_list = []

    if lines is not None:

        for l in lines:

            x1,y1,x2,y2 = l[0]

            line_list.append((int(x1),int(y1),int(x2),int(y2)))

    # Hough circles

    circles = None

    try:

        circ = cv2.HoughCircles(gray_img, cv2.HOUGH_GRADIENT, dp=1.2, minDist=30,

                                param1=100, param2=30, minRadius=8, maxRadius=200)

        if circ is not None:

            circ = np.round(circ[0, :]).astype("int")

            circles = [(int(x),int(y),int(r)) for (x,y,r) in circ]

    except Exception:

        circles = None

    # Save debug

    if debug_prefix:

        vis = cv2.cvtColor(gray_img, cv2.COLOR_GRAY2BGR)

        for (x1,y1,x2,y2) in line_list:

            cv2.line(vis, (x1,y1), (x2,y2), (0,255,0), 2)

        if circles:

            for (x,y,r) in circles:

                cv2.circle(vis, (x,y), r, (0,0,255), 2)

        cv2.imwrite(f"{debug_prefix}_shapes.png", vis)

    return line_list, circles


# Render vector-style diagram to SVG using svgwrite or Matplotlib

def render_vector_diagram(svg_path, image_size, lines, circles, boxes=None):

    w, h = image_size

    dwg = svgwrite.Drawing(svg_path, size=(w, h))

    # background white

    dwg.add(dwg.rect(insert=(0,0), size=(w,h), fill='white'))

    # optional: draw boxes for text regions (thin gray)

    if boxes:

        for (x0,y0,x1,y1) in boxes:

            dwg.add(dwg.rect(insert=(x0,y0), size=(x1-x0,y1-y0), fill='none', stroke='lightgray', stroke_width=1))

    # draw lines

    for (x1,y1,x2,y2) in lines:

        dwg.add(dwg.line(start=(x1,y1), end=(x2,y2), stroke=svgwrite.rgb(10, 10, 16, '%'), stroke_width=2))

    # draw circles

    if circles:

        for (x,y,r) in circles:

            dwg.add(dwg.circle(center=(x,y), r=r, stroke='black', fill='none', stroke_width=2))

    dwg.save()


# Main pipeline

def process_whiteboard_image(in_path, out_dir="wb_outputs"):

    ensure_dir(out_dir)

    img_bgr = cv2.imread(in_path)

    if img_bgr is None:

        raise FileNotFoundError(in_path)

    gray, th = preprocess_image(img_bgr)

    debug_pre = os.path.join(out_dir, "debug_preprocess.png")

    cv2.imwrite(debug_pre, gray)

    cv2.imwrite(os.path.join(out_dir, "debug_thresh.png"), th)


    # detect regions (text lines)

    boxes = detect_text_regions(th, debug_out=os.path.join(out_dir, "debug_boxes.png"))

    # OCR

    ocr_lines = ocr_regions(gray, boxes)

    # write OCR results

    ocr_txt_file = os.path.join(out_dir, "ocr_texts.txt")

    with open(ocr_txt_file, "w", encoding='utf-8') as f:

        for item in ocr_lines:

            f.write(item['text'] + "\n")

    print(f"[+] OCR lines saved to {ocr_txt_file}")


    # Filter likely equations

    eq_candidates = [it for it in ocr_lines if looks_like_equation(it['text'])]

    latex_results = []

    for it in eq_candidates:

        txt = it['text']

        latex_str, err = parse_equation_to_latex(txt)

        if latex_str:

            latex_results.append((txt, latex_str))

        else:

            latex_results.append((txt, f"UNPARSEABLE: {err}"))

    # write equations latex

    eq_file = os.path.join(out_dir, "equations_latex.tex")

    with open(eq_file, "w", encoding='utf-8') as f:

        f.write("% Generated LaTeX (auto) — review and correct as needed\n")

        for orig, out in latex_results:

            f.write("% OCR: " + orig.replace("\n"," ") + "\n")

            f.write(out + "\n\n")

    print(f"[+] Equation LaTeX saved to {eq_file}")


    # shape detection

    lines, circles = detect_shapes(gray, debug_prefix=os.path.join(out_dir, "debug"))

    svg_path = os.path.join(out_dir, "diagram.svg")

    # convert boxes to x0,y0,x1,y1 format for svg (optional)

    bboxes = [(x,y,x+w,y+h) for (x,y,w,h) in boxes]

    render_vector_diagram(svg_path, (gray.shape[1], gray.shape[0]), lines, circles, boxes=bboxes)

    print(f"[+] Diagram SVG saved to {svg_path}")


    # Also produce a matplotlib PNG overlay visualization

    overlay = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)

    for (x1,y1,x2,y2) in lines:

        cv2.line(overlay, (x1,y1), (x2,y2), (0,255,0), 2)

    if circles:

        for (x,y,r) in circles:

            cv2.circle(overlay, (x,y), r, (0,0,255), 2)

    for (x0,y0,x1,y1) in bboxes:

        cv2.rectangle(overlay, (x0,y0), (x1,y1), (255,0,0), 1)

    cv2.imwrite(os.path.join(out_dir, "overlay_debug.png"), overlay)

    print(f"[+] Debug overlay saved to {os.path.join(out_dir, 'overlay_debug.png')}")

    return {

        "ocr_lines": ocr_lines,

        "equations": latex_results,

        "svg": svg_path,

        "debug": out_dir

    }


# --------- CLI ----------

if __name__ == "__main__":

    if len(sys.argv) < 2:

        print("Usage: python whiteboard_digitizer.py input_image.jpg")

        sys.exit(1)

    inp = sys.argv[1]

    out = "wb_outputs"

    res = process_whiteboard_image(inp, out_dir=out)

    print("Done. Outputs in:", out)


Virtual AI Travel Planner
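
Assumed dependencies (not listed in the original): pip install streamlit requests pandas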

import os

import json

import time

import requests

import pandas as pd

import streamlit as st

from datetime import date, timedelta


# -----------------------------

# Setup: API Keys from env vars

# -----------------------------

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")

GOOGLE_MAPS_API_KEY = os.getenv("GOOGLE_MAPS_API_KEY", "")


# -----------------------------

# Google Places helpers

# -----------------------------

PLACES_TEXT_SEARCH_URL = "https://maps.googleapis.com/maps/api/place/textsearch/json"


def places_text_search(query, key, max_results=20):

    """

    Simple wrapper for Google Places 'Text Search' API.

    We page through results (up to ~60) but cap by max_results.

    """

    if not key:

        return []


    params = {

        "query": query,

        "key": key,

    }

    out = []

    next_page_token = None

    while True:

        if next_page_token:

            params["pagetoken"] = next_page_token

            # Google requires short delay before using next_page_token

            time.sleep(2)


        r = requests.get(PLACES_TEXT_SEARCH_URL, params=params, timeout=30)

        if r.status_code != 200:

            break

        data = r.json()

        out.extend(data.get("results", []))


        next_page_token = data.get("next_page_token")

        if not next_page_token or len(out) >= max_results:

            break


    return out[:max_results]



def fetch_hotels(destination: str, max_results=20):

    """

    Fetch hotels around destination. Uses price_level (0-4) and rating when available.

    """

    results = places_text_search(f"hotels in {destination}", GOOGLE_MAPS_API_KEY, max_results=max_results)

    rows = []

    for r in results:

        rows.append({

            "name": r.get("name"),

            "rating": r.get("rating"),

            "reviews": r.get("user_ratings_total"),

            "price_level(0-4)": r.get("price_level"),

            "address": r.get("formatted_address"),

            "lat": r.get("geometry", {}).get("location", {}).get("lat"),

            "lng": r.get("geometry", {}).get("location", {}).get("lng")

        })

    df = pd.DataFrame(rows)

    # sort: rating desc, then reviews desc

    if not df.empty:

        df = df.sort_values(by=["rating", "reviews"], ascending=[False, False], na_position="last")

    return df



def fetch_attractions(destination: str, max_results=20):

    """

    Fetch attractions/POIs.

    """

    query = f"top attractions in {destination}"

    results = places_text_search(query, GOOGLE_MAPS_API_KEY, max_results=max_results)

    rows = []

    for r in results:

        rows.append({

            "name": r.get("name"),

            "category": ", ".join(r.get("types", [])),

            "rating": r.get("rating"),

            "reviews": r.get("user_ratings_total"),

            "address": r.get("formatted_address"),

            "lat": r.get("geometry", {}).get("location", {}).get("lat"),

            "lng": r.get("geometry", {}).get("location", {}).get("lng")

        })

    df = pd.DataFrame(rows)

    if not df.empty:

        df = df.sort_values(by=["rating", "reviews"], ascending=[False, False], na_position="last")

    return df


# -----------------------------

# Budget helper

# -----------------------------

def simple_budget_breakdown(total_budget, days, travelers=1):

    """

    Very coarse split of a trip budget (adjust as needed).

    Returns per-trip & per-day guide.

    """

    total_budget = float(total_budget)

    days = max(1, int(days))

    travelers = max(1, int(travelers))


    # Example split: 45% stay, 30% food, 15% local transport, 10% activities

    stay = total_budget * 0.45

    food = total_budget * 0.30

    transport = total_budget * 0.15

    activities = total_budget * 0.10


    per_day = {

        "Stay": round(stay / days, 2),

        "Food": round(food / days, 2),

        "Local Transport": round(transport / days, 2),

        "Activities": round(activities / days, 2),

        "Total/day": round(total_budget / days, 2)

    }

    per_person = round(total_budget / travelers, 2)

    return per_day, per_person
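
# Illustrative: simple_budget_breakdown(1000, 5, travelers=2) returns a per-day guide of
# {"Stay": 90.0, "Food": 60.0, "Local Transport": 30.0, "Activities": 20.0, "Total/day": 200.0}
# and a per-person total of 500.0.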


# -----------------------------

# OpenAI itinerary helper

# -----------------------------

def generate_itinerary_with_openai(destination, start_date, days, interests, budget_hint, travelers):

    """

    Calls OpenAI Chat Completions to generate an itinerary.

    Expects OPENAI_API_KEY in env. Uses the Chat Completions HTTP endpoint.

    """

    if not OPENAI_API_KEY:

        return "OpenAI API key not set. Please set OPENAI_API_KEY in your environment."


    # Build system & user prompts

    sys_prompt = (

        "You are a helpful travel planner. Create practical, walkable day-by-day itineraries with morning, afternoon, and evening blocks, "

        "add short logistic hints, and keep it realistic for the location. Keep each day concise and bulleted."

    )

    user_prompt = (

        f"Destination: {destination}\n"

        f"Start date: {start_date}\n"

        f"Trip length: {days} days\n"

        f"Travelers: {travelers}\n"

        f"Interests: {', '.join(interests) if interests else 'general sightseeing'}\n"

        f"Budget guidance: {budget_hint}\n\n"

        "Please produce:\n"

        "1) A short overview paragraph (tone: friendly & practical)\n"

        "2) Day-by-day plan, each day with 3 bullet sections: Morning / Afternoon / Evening\n"

        "3) A compact list of neighborhood/area suggestions for dining\n"

        "4) 6 packing tips relevant to weather & activities\n"

    )


    # Minimal direct HTTP call to OpenAI Chat Completions (compatible with current API).

    url = "https://api.openai.com/v1/chat/completions"

    headers = {

        "Authorization": f"Bearer {OPENAI_API_KEY}",

        "Content-Type": "application/json",

    }

    payload = {

        "model": "gpt-4o-mini",   # pick a chat-capable model available to your account

        "messages": [

            {"role": "system", "content": sys_prompt},

            {"role": "user", "content": user_prompt}

        ],

        "temperature": 0.7,

        "max_tokens": 1200

    }


    try:

        resp = requests.post(url, headers=headers, data=json.dumps(payload), timeout=60)

        resp.raise_for_status()

        data = resp.json()

        return data["choices"][0]["message"]["content"].strip()

    except Exception as e:

        return f"Failed to generate itinerary: {e}"


# -----------------------------

# Streamlit UI

# -----------------------------

st.set_page_config(page_title="Virtual AI Travel Planner", page_icon="🧭", layout="wide")


st.title("🧭 Virtual AI Travel Planner")

st.caption("Enter your destination & budget → get hotels, attractions, and an AI-built itinerary.\n\n*Educational demo. Always verify details before booking.*")


with st.sidebar:

    st.header("šŸ”‘ API Keys")

    st.write("Set these as environment variables before running:\n- `OPENAI_API_KEY`\n- `GOOGLE_MAPS_API_KEY`")

    st.write("Detected:")

    st.code(f"OPENAI_API_KEY set: {bool(OPENAI_API_KEY)}\nGOOGLE_MAPS_API_KEY set: {bool(GOOGLE_MAPS_API_KEY)}")


col1, col2, col3 = st.columns([1.2,1,1])


with col1:

    destination = st.text_input("Destination (city/country)", placeholder="e.g., Tokyo, Japan")

    start = st.date_input("Start date", value=date.today() + timedelta(days=14))

    days = st.number_input("Trip length (days)", min_value=1, max_value=21, value=5, step=1)


with col2:

    budget = st.number_input("Total budget (your currency)", min_value=0.0, value=1000.0, step=100.0, help="Rough trip budget total")

    travelers = st.number_input("Travelers", min_value=1, value=2, step=1)

    interests = st.multiselect("Interests", [

        "Food", "Museums", "Nature", "Architecture", "Shopping", "Nightlife", "Adventure", "History", "Beaches", "Hiking"

    ], default=["Food", "Museums"])


with col3:

    max_hotels = st.slider("Max hotels to fetch", 5, 40, 15)

    max_attractions = st.slider("Max attractions to fetch", 5, 40, 20)

    run = st.button("✨ Plan my trip")


if run:

    if not destination.strip():

        st.error("Please enter a destination.")

        st.stop()


    # Budget breakdown

    per_day, per_person = simple_budget_breakdown(budget, days, travelers)

    st.subheader("šŸ’ø Budget Guide")

    c1, c2 = st.columns(2)

    with c1:

        st.write("**Per-day guide** (rough):")

        st.table(pd.DataFrame([per_day]))

    with c2:

        st.metric("Per-person total", f"{per_person:.2f}")


    # Google results

    st.subheader("šŸØ Hotels (Google Places)")

    hotels_df = fetch_hotels(destination, max_results=max_hotels)

    if hotels_df.empty:

        st.info("No hotel data (check your Google API key & billing).")

    else:

        st.dataframe(hotels_df, use_container_width=True)


    st.subheader("šŸ“ Attractions / Things to do (Google Places)")

    attractions_df = fetch_attractions(destination, max_results=max_attractions)

    if attractions_df.empty:

        st.info("No attractions data (check your Google API key & billing).")

    else:

        st.dataframe(attractions_df, use_container_width=True)


    # AI itinerary

    st.subheader("🧠 AI Itinerary")

    budget_hint = f"Total budget approx {budget} for {travelers} travelers over {days} days. Per-day guide: {per_day}."

    itinerary = generate_itinerary_with_openai(destination, start, days, interests, budget_hint, travelers)

    st.write(itinerary)


    # Optional: CSV downloads

    st.download_button(

        "⬇️ Download hotels CSV",

        data=hotels_df.to_csv(index=False).encode("utf-8"),

        file_name=f"{destination.replace(' ','_').lower()}_hotels.csv",

        mime="text/csv"

    )

    st.download_button(

        "⬇️ Download attractions CSV",

        data=attractions_df.to_csv(index=False).encode("utf-8"),

        file_name=f"{destination.replace(' ','_').lower()}_attractions.csv",

        mime="text/csv"

    )


    st.success("Done! Scroll up to view tables and itinerary. šŸŒ✈️")

else:

    st.info("Fill the form and click **Plan my trip**.")


Fake News Image Detector

import tkinter as tk

from tkinter import filedialog, messagebox

from PIL import Image, ImageTk

import exifread

import requests

import io


# -----------------------

# Metadata Extraction

# -----------------------

def extract_metadata(image_path):

    with open(image_path, 'rb') as f:

        tags = exifread.process_file(f)

    return {tag: str(tags[tag]) for tag in tags.keys()}
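
# Usage sketch: a few EXIF keys that often serve as authenticity clues
# (key names follow exifread's "IFD TagName" convention; whether they are
# present depends entirely on the image):
#
#   meta = extract_metadata("photo.jpg")          # "photo.jpg" is illustrative
#   taken_at = meta.get("EXIF DateTimeOriginal")  # when the photo was captured
#   camera   = meta.get("Image Model")            # camera / phone model
#   editor   = meta.get("Image Software")         # editing software, if any
#   # Missing or inconsistent values can hint that an image was re-saved,
#   # edited, or deliberately stripped of metadata.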


# -----------------------

# Reverse Search (TinEye / Google)

# -----------------------

def reverse_search(image_path):

    # A real implementation would call a reverse-image-search API (e.g. TinEye
    # or Google Custom Search). For this demo we simulate it by uploading the
    # image to the imgbb hosting API and returning the hosted link as a clue.

    try:

        with open(image_path, 'rb') as f:

            files = {"file": f}

            r = requests.post("https://api.imgbb.com/1/upload",

                              files=files,

                              params={"key": "YOUR_IMGBB_API_KEY"})

            if r.status_code == 200:

                return r.json()["data"]["url"]

    except Exception as e:

        return f"Reverse search not available: {e}"

    return "Reverse search failed."


# -----------------------

# GUI Functions

# -----------------------

def open_image():

    file_path = filedialog.askopenfilename(filetypes=[("Image Files", "*.jpg *.jpeg *.png")])

    if not file_path:

        return


    # Show image

    img = Image.open(file_path)

    img.thumbnail((250, 250))

    img_tk = ImageTk.PhotoImage(img)

    lbl_image.config(image=img_tk)

    lbl_image.image = img_tk


    # Metadata

    metadata = extract_metadata(file_path)

    txt_metadata.delete(1.0, tk.END)

    if metadata:

        for k, v in metadata.items():

            txt_metadata.insert(tk.END, f"{k}: {v}\n")

    else:

        txt_metadata.insert(tk.END, "No metadata found.\n")


    # Reverse search (optional)

    link = reverse_search(file_path)

    txt_metadata.insert(tk.END, f"\nšŸ” Reverse Search Hint: {link}\n")


# -----------------------

# Main App

# -----------------------

root = tk.Tk()

root.title("šŸ“° Fake News Image Detector")

root.geometry("600x500")


frame_top = tk.Frame(root)

frame_top.pack(pady=10)


btn_upload = tk.Button(frame_top, text="📤 Upload Image", command=open_image, font=("Arial", 12, "bold"))

btn_upload.pack()


lbl_image = tk.Label(root)

lbl_image.pack(pady=10)


lbl_meta = tk.Label(root, text="Image Metadata & Clues:", font=("Arial", 12, "bold"))

lbl_meta.pack()


txt_metadata = tk.Text(root, wrap=tk.WORD, width=70, height=15)

txt_metadata.pack(pady=10)


root.mainloop()


Smart Parking System Simulator Pro

import tkinter as tk

from tkinter import messagebox

import sqlite3

import random


# -----------------------

# Database Setup

# -----------------------

def init_db():

    conn = sqlite3.connect("parking.db")

    cur = conn.cursor()

    cur.execute(

        """CREATE TABLE IF NOT EXISTS parking_slots (

                id INTEGER PRIMARY KEY,

                slot_number TEXT UNIQUE,

                status TEXT

            )"""

    )


    # Initialize 10 slots if not exist

    cur.execute("SELECT COUNT(*) FROM parking_slots")

    count = cur.fetchone()[0]

    if count == 0:

        for i in range(1, 11):

            cur.execute("INSERT INTO parking_slots (slot_number, status) VALUES (?, ?)",

                        (f"SLOT-{i}", "Free"))

    conn.commit()

    conn.close()


# -----------------------

# Database Functions

# -----------------------

def get_slots():

    conn = sqlite3.connect("parking.db")

    cur = conn.cursor()

    cur.execute("SELECT * FROM parking_slots")

    slots = cur.fetchall()

    conn.close()

    return slots


def update_slot(slot_id, status):

    conn = sqlite3.connect("parking.db")

    cur = conn.cursor()

    cur.execute("UPDATE parking_slots SET status=? WHERE id=?", (status, slot_id))

    conn.commit()

    conn.close()
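
# Note: get_slots() returns raw sqlite3 rows shaped like (id, slot_number,
# status), e.g. (1, "SLOT-1", "Free"), which is why the GUI code below unpacks
# each slot tuple directly.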


# -----------------------

# GUI Functions

# -----------------------

def refresh_slots():

    for widget in frame_slots.winfo_children():

        widget.destroy()


    slots = get_slots()

    for slot in slots:

        slot_id, slot_number, status = slot

        color = "green" if status == "Free" else "red"

        btn = tk.Button(frame_slots, text=f"{slot_number}\n{status}",

                        bg=color, fg="white", width=12, height=3,

                        command=lambda s=slot: toggle_slot(s))

        btn.pack(side=tk.LEFT, padx=5, pady=5)


def toggle_slot(slot):

    slot_id, slot_number, status = slot

    if status == "Free":

        update_slot(slot_id, "Booked")

        messagebox.showinfo("Booked", f"You booked {slot_number}")

    else:

        update_slot(slot_id, "Free")

        messagebox.showinfo("Freed", f"You freed {slot_number}")

    refresh_slots()


def random_update():

    slots = get_slots()

    random_slot = random.choice(slots)

    slot_id, slot_number, status = random_slot

    new_status = "Free" if status == "Booked" else "Booked"

    update_slot(slot_id, new_status)

    refresh_slots()

    root.after(5000, random_update)  # auto change every 5s


# -----------------------

# Main App

# -----------------------

if __name__ == "__main__":

    init_db()


    root = tk.Tk()

    root.title("Smart Parking System Simulator")

    root.geometry("800x400")


    tk.Label(root, text="šŸš— Smart Parking System Simulator", font=("Arial", 16, "bold")).pack(pady=10)


    frame_slots = tk.Frame(root)

    frame_slots.pack(pady=20)


    refresh_slots()


    # Auto slot updates (simulate cars parking)

    root.after(5000, random_update)


    root.mainloop()