# app.py
import json
import re
import tempfile
import logging
from datetime import datetime, timedelta

from dateutil import tz
import gradio as gr
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib import cm
from matplotlib.patches import Wedge, Rectangle, FancyArrowPatch
import folium
import folium.plugins as plugins
import branca.colormap as bcm

# -------- grafanalib (optional; fall back gracefully if it is not installed) --------
try:
    from grafanalib.core import (
        Dashboard, Graph, Row, Target, YAxis, YAxes, Time, BarGauge
    )
    GRAFANA_AVAILABLE = True
except Exception:
    GRAFANA_AVAILABLE = False

# Logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

TAIPEI = tz.gettz("Asia/Taipei")

# -----------------------------
# Google Drive link handling
# -----------------------------
DRIVE_PRESETS = [
    "https://drive.google.com/file/d/15yZ4QicICKZCnX6vjcD9JNXjnJmMFJD4/view?usp=drivesdk",
    "https://drive.google.com/file/d/1dqazYh_YzNNMbkUpgLRKSE9Y3ioPhtFu/view?usp=drivesdk",
    "https://drive.google.com/file/d/1A23f4q8DXHpoRIN5UQsDd6eM8jJ_Ruf8/view?usp=drivesdk",
]


def normalize_drive_url(url: str) -> str:
    """Convert a Google Sheets / Drive share link into a direct CSV download URL."""
    if not isinstance(url, str) or not url.strip():
        raise ValueError("請提供有效的 Google 連結")
    url = url.strip()
    m = re.search(r"https://docs\.google\.com/spreadsheets/d/([a-zA-Z0-9-_]+)", url)
    if m:
        sheet_id = m.group(1)
        return f"https://docs.google.com/spreadsheets/d/{sheet_id}/export?format=csv"
    m = re.search(r"https://drive\.google\.com/file/d/([a-zA-Z0-9-_]+)/", url)
    if m:
        file_id = m.group(1)
        return f"https://drive.google.com/uc?export=download&id={file_id}"
    return url


# -----------------------------
# Demo / data loading with dynamic update
# -----------------------------
def make_demo_dataframe(last_time=None) -> tuple[pd.DataFrame, datetime]:
    if last_time is None:
        last_time = datetime.now(tz=TAIPEI) - timedelta(minutes=60)
    else:
        last_time = last_time + timedelta(minutes=1)
    times = [last_time + timedelta(minutes=i) for i in range(61)]
    amp = np.random.rand(len(times))
    cnt = np.random.randint(0, 11, size=len(times))
    lats = np.random.uniform(21.8, 25.3, size=len(times))
    lons = np.random.uniform(120.0, 122.0, size=len(times))
    df = pd.DataFrame({"time": times, "amplitude": amp, "count": cnt, "lat": lats, "lon": lons})
    df["pid"] = np.arange(len(df))
    logger.debug(f"Generated new data with last_time: {last_time}")
    return df, last_time


def _finalize_time(df: pd.DataFrame) -> pd.DataFrame:
    time_col = next((c for c in ["time", "timestamp", "datetime", "date"] if c in df.columns), None)
    if time_col is None:
        raise ValueError("資料需包含時間欄位(time/timestamp/datetime/date 其一)")
    df[time_col] = pd.to_datetime(df[time_col], errors="coerce")
    if df[time_col].isna().all():
        raise ValueError("時間欄位解析失敗,請確認格式")
    df = df.rename(columns={time_col: "time"})
    try:
        if df["time"].dt.tz is None:
            df["time"] = df["time"].dt.tz_localize(TAIPEI)
        else:
            df["time"] = df["time"].dt.tz_convert(TAIPEI)
    except Exception:
        def _to_tpe(t):
            if t.tzinfo is None:
                return t.tz_localize(TAIPEI)
            return t.tz_convert(TAIPEI)
        df["time"] = df["time"].apply(_to_tpe)
    return df.sort_values("time").reset_index(drop=True)


def load_csv(file: gr.File | None) -> pd.DataFrame:
    try:
        df = pd.read_csv(file.name)
        return _finalize_time(df)
    except Exception as e:
        raise ValueError(f"CSV 載入失敗:{str(e)}")


def load_drive_csv(sheet_or_file_url: str) -> pd.DataFrame:
    try:
        url = normalize_drive_url(sheet_or_file_url)
        df = pd.read_csv(url)
        return _finalize_time(df)
    except Exception as e:
        raise ValueError(f"Google 連結載入失敗:{str(e)}")
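# A quick reference for the loaders above (illustrative, not an exhaustive spec): the CSV
# needs one time column named time/timestamp/datetime/date plus at least one numeric series
# column; lat/lon (and a pid identifier) are used by the map and point-detail views.
# normalize_drive_url() rewrites share links into direct CSV URLs, e.g.
#   https://drive.google.com/file/d/<FILE_ID>/view         -> https://drive.google.com/uc?export=download&id=<FILE_ID>
#   https://docs.google.com/spreadsheets/d/<SHEET_ID>/edit -> https://docs.google.com/spreadsheets/d/<SHEET_ID>/export?format=csv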
def load_data(source: str, file: gr.File | None = None, sheet_url: str = "",
              last_time=None) -> tuple[pd.DataFrame, datetime | None]:
    if source == "drive":
        if not sheet_url:
            raise ValueError("請選擇 Google 連結")
        return load_drive_csv(sheet_url), None
    elif source == "upload":
        if file is None:
            raise ValueError("請上傳 CSV 檔")
        return load_csv(file), None
    else:
        return make_demo_dataframe(last_time)


# -----------------------------
# Data filtering (timezone-safe)
# -----------------------------
def _to_taipei(dt_like):
    ts = pd.to_datetime(dt_like, errors="coerce")
    if pd.isna(ts):
        return None
    if ts.tzinfo is None:
        return ts.tz_localize(TAIPEI)
    return ts.tz_convert(TAIPEI)


def filter_data(df: pd.DataFrame, start_time: str, end_time: str) -> pd.DataFrame:
    if start_time:
        st = _to_taipei(start_time)
        if st is not None:
            df = df[df["time"] >= st]
    if end_time:
        et = _to_taipei(end_time)
        if et is not None:
            df = df[df["time"] <= et]
    return df


# -----------------------------
# grafanalib JSON (with fallback)
# -----------------------------
def build_grafanalib_dashboard(series_columns: list[str], dual_axis: bool, rolling_window: int) -> dict:
    if not GRAFANA_AVAILABLE:
        return {
            "error": "grafanalib 未安裝。如需啟用,請在 requirements.txt 加入:grafanalib",
            "series": series_columns,
            "dual_axis": bool(dual_axis),
            "rolling_window": int(rolling_window),
        }
    panels = [
        Graph(
            title=f"{series_columns[0]}",
            dataSource="(example)",
            targets=[Target(expr=f"{series_columns[0]}", legendFormat=series_columns[0])],
            lines=True, bars=False, points=False,
            yAxes=YAxes(left=YAxis(format="short"), right=YAxis(format="short"))
        ),
    ]
    if len(series_columns) > 1:
        targets = [Target(expr=f"{series_columns[1]}", legendFormat=series_columns[1])]
        lines, bars, title = False, True, f"{series_columns[1]} (bar)"
        if dual_axis:
            targets.append(Target(expr=f"{series_columns[0]}", legendFormat=f"{series_columns[0]} (line)"))
            lines, bars = True, True
            title = f"{series_columns[1]} (bar) + {series_columns[0]} (line)"
        panels.append(
            Graph(
                title=title,
                dataSource="(example)",
                targets=targets,
                lines=lines, bars=bars, points=False,
                yAxes=YAxes(left=YAxis(format="short"), right=YAxis(format="short"))
            )
        )
    panels.extend([
        Graph(
            title=f"{series_columns[0]} rolling({rolling_window})",
            dataSource="(example)",
            targets=[Target(expr=f"{series_columns[0]}_rolling{rolling_window}",
                            legendFormat=f"{series_columns[0]}_rolling{rolling_window}")],
            lines=True, bars=False, points=False,
            yAxes=YAxes(left=YAxis(format="short"), right=YAxis(format="short"))
        ),
        BarGauge(
            title=f"Latest {series_columns[0]}",
            dataSource="(example)",
            targets=[Target(expr=f"last({series_columns[0]})", legendFormat=series_columns[0])]
        ),
    ])
    return Dashboard(
        title="Grafana-like Demo (grafanalib + Gradio)",
        rows=[Row(panels=panels)],
        timezone="browser",
        time=Time("now-1h", "now")
    ).to_json_data()


# -----------------------------
# Matplotlib helpers
# -----------------------------
def _style_time_axis(ax):
    locator = mdates.AutoDateLocator(minticks=3, maxticks=6)
    formatter = mdates.ConciseDateFormatter(locator)
    ax.xaxis.set_major_locator(locator)
    ax.xaxis.set_major_formatter(formatter)
    ax.tick_params(axis="x", labelrotation=20, labelsize=9)
    ax.tick_params(axis="y", labelsize=9)
    ax.grid(True, which="major", alpha=0.25)
    ax.margins(x=0.02, y=0.05)


def _normalize_times(series: pd.Series) -> pd.Series:
    # Strip timezone info (convert to UTC first) before handing timestamps to matplotlib.
    s = series.copy()
    try:
        if s.dt.tz is not None:
            s = s.dt.tz_convert("UTC").dt.tz_localize(None)
    except Exception:
        s = pd.to_datetime(s, errors="coerce")
        if getattr(s.dt, "tz", None) is not None:
            s = s.dt.tz_convert("UTC").dt.tz_localize(None)
    return s
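# Note: _normalize_times() above drops timezone information (after converting to UTC),
# so the matplotlib time axes produced below are labelled in UTC rather than Asia/Taipei.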
_normalize_times(df["time"]) fig, ax = plt.subplots(figsize=(6, 3)) ax.plot(times, df[col], linewidth=1.6) ax.set_title(col, fontsize=12, pad=8) ax.set_xlabel("Time") ax.set_ylabel(col) _style_time_axis(ax) fig.tight_layout() return fig def render_bar_or_dual(df, second_col, first_col, dual_axis): times = _normalize_times(df["time"]) if len(times) >= 2: delta_sec = pd.to_timedelta(times.diff(), errors="coerce").dt.total_seconds().median() if pd.isna(delta_sec) or delta_sec <= 0: delta_sec = 60.0 else: delta_sec = 60.0 width_days = max(10.0, float(delta_sec) * 0.8) / 86400.0 x = mdates.date2num(times.dt.to_pydatetime().tolist()) fig, ax = plt.subplots(figsize=(6, 3)) ax.bar(x, df[second_col], width=width_days, align="center", label=second_col) title = f"{second_col} (bar)" if dual_axis: ax2 = ax.twinx() ax2.plot(times, df[first_col], linewidth=1.6, label=f"{first_col} (line)") title = f"{second_col} (bar) + {first_col} (line)" h1, l1 = ax.get_legend_handles_labels() h2, l2 = ax2.get_legend_handles_labels() ax.legend(h1 + h2, l1 + l2, loc="upper left") else: ax.legend(loc="upper left") ax.set_title(title, fontsize=12, pad=8) _style_time_axis(ax) fig.tight_layout() return fig def render_rolling(df, col, window=5): times = _normalize_times(df["time"]) roll_col = f"{col}_rolling{window}" if roll_col not in df.columns: df[roll_col] = df[col].rolling(window=window, min_periods=1).mean() fig, ax = plt.subplots(figsize=(6, 3)) ax.plot(times, df[roll_col], linewidth=1.6) ax.set_title(f"{col} rolling({window})", fontsize=12, pad=8) ax.set_xlabel("Time") ax.set_ylabel(roll_col) _style_time_axis(ax) fig.tight_layout() return fig, df # ----------------------------- # Gauge # ----------------------------- def degree_range(n): start = np.linspace(0, 180, n + 1, endpoint=True)[0:-1] end = np.linspace(0, 180, n + 1, endpoint=True)[1:] mid_points = start + ((end - start) / 2.) 
def rot_text(ang):
    # Rotate label text so it follows the dial (equivalent to ang - 90 degrees).
    return ang - 90.0


def render_gauge(df, col):
    if df.empty:
        value = 0.0
        min_val, max_val = 0.0, 1.0
    else:
        value = float(df[col].iloc[-1])
        min_val, max_val = float(df[col].min()), float(df[col].max())
    normalized = (value - min_val) / (max_val - min_val + 1e-9) if max_val > min_val else 0.0
    labels = ['LOW', 'MEDIUM', 'HIGH']
    N = len(labels)
    colors = ['#007A00', '#FFCC00', '#ED1C24']
    arrow = 1 if normalized < 0.33 else 2 if normalized < 0.66 else 3
    fig, ax = plt.subplots(figsize=(5, 3.5))
    ang_range, mid_points = degree_range(N)
    labels = labels[::-1]
    patches = [Wedge((0., 0.), .4, *ang, facecolor='w', lw=2) for ang in ang_range] + \
              [Wedge((0., 0.), .4, *ang, width=0.10, facecolor=c, lw=2, alpha=0.5)
               for ang, c in zip(ang_range, colors)]
    for p in patches:
        ax.add_patch(p)
    for mid, lab in zip(mid_points, labels):
        ax.text(0.35 * np.cos(np.radians(mid)), 0.35 * np.sin(np.radians(mid)), lab,
                ha='center', va='center', fontsize=12, fontweight='bold', rotation=rot_text(mid))
    ax.add_patch(Rectangle((-0.4, -0.1), 0.8, 0.1, facecolor='w', lw=2))
    ax.text(0, -0.05, f"Latest {col}: {value:.2f}", ha='center', va='center',
            fontsize=12, fontweight='bold')
    pos = mid_points[abs(arrow - N)]
    ax.arrow(0, 0, 0.225 * np.cos(np.radians(pos)), 0.225 * np.sin(np.radians(pos)),
             width=0.04, head_width=0.09, head_length=0.1, fc='k', ec='k')
    ax.add_patch(FancyArrowPatch((0, 0),
                                 (0.01 * np.cos(np.radians(pos)), 0.01 * np.sin(np.radians(pos))),
                                 mutation_scale=10, fc='k', ec='k'))
    ax.set_frame_on(False)
    ax.set_xticks([])
    ax.set_yticks([])
    ax.axis('equal')
    plt.tight_layout()
    return fig


# -----------------------------
# Folium map
# -----------------------------
def _to_hex_color(value: float, cmap=cm.viridis) -> str:
    rgba = cmap(value)
    return "#{:02x}{:02x}{:02x}".format(int(rgba[0] * 255), int(rgba[1] * 255), int(rgba[2] * 255))


def render_map_folium(df: pd.DataFrame, value_col: str = "amplitude", size_col: str = "count",
                      cmap_name: str = "viridis", tiles: str = "OpenStreetMap",
                      show_heatmap: bool = False) -> str:
    if df.empty:
        return "<div>無資料可顯示地圖</div>"
" center_lat, center_lon = df["lat"].mean(), df["lon"].mean() m = folium.Map(location=[center_lat, center_lon], zoom_start=7, tiles=tiles) vmin, vmax = float(df[value_col].min()), float(df[value_col].max()) cmap = getattr(cm, cmap_name) colormap = bcm.LinearColormap([_to_hex_color(i, cmap) for i in np.linspace(0, 1, 128)], vmin=vmin, vmax=vmax) colormap.caption = f"{value_col} (color scale)" colormap.add_to(m) if show_heatmap: heat_data = [[row["lat"], row["lon"], row[value_col]] for _, row in df.iterrows()] plugins.HeatMap(heat_data, radius=15, blur=10).add_to(m) else: for _, row in df.iterrows(): norm_val = (row[value_col] - vmin) / (vmax - vmin + 1e-9) if vmax > vmin else 0.0 popup_html = ( f"#ID: {int(row['pid'])}" f"錯誤:無資料顯示
", "", None, pd.DataFrame(), None, gr.update(choices=[], value=None, interactive=False), pd.DataFrame(), str(e), last_time, "(無資料)" ) def update_detail(df: pd.DataFrame, choice: str): return pick_detail(df, choice) # ----------------------------- # UI # ----------------------------- with gr.Blocks(theme=gr.themes.Soft()) as demo: gr.Markdown("## 動態時間序列 - Grafana-like Demo + Folium Map") last_time_state = gr.State(value=None) with gr.Row(): with gr.Column(scale=1): source_radio = gr.Radio(["upload", "drive", "demo"], label="資料來源", value="demo") file_in = gr.File(label="上傳 CSV", file_types=[".csv"]) preset_dd = gr.Dropdown(label="Google 預設來源", choices=DRIVE_PRESETS, value=DRIVE_PRESETS[0]) with gr.Row(): start_time_in = gr.Textbox(label="開始時間", placeholder="2023-01-01 00:00:00") end_time_in = gr.Textbox(label="結束時間", placeholder="2023-12-31 23:59:59") with gr.Column(scale=1): series_multiselect = gr.CheckboxGroup(label="數值欄位", choices=[]) dual_axis_chk = gr.Checkbox(label="第二面板雙軸", value=False) rolling_dd = gr.Dropdown(label="Rolling window", choices=["3", "5", "10", "20"], value="5") cmap_dd = gr.Dropdown(label="地圖配色", choices=["viridis", "plasma", "inferno", "magma", "cividis", "coolwarm"], value="viridis") tiles_dd = gr.Dropdown(label="地圖底圖", choices=["OpenStreetMap", "Stamen Terrain", "Stamen Toner", "CartoDB positron", "CartoDB dark_matter"], value="OpenStreetMap") heatmap_chk = gr.Checkbox(label="顯示熱圖", value=False) with gr.Row(): run_btn = gr.Button("產生 Dashboard", scale=1) update_btn = gr.Button("手動更新數據", scale=1, elem_id="update_btn") interval = gr.Slider(5, 60, value=10, step=5, label="自動更新間隔 (秒)", elem_id="interval_slider") error_msg = gr.Markdown(value="", label="錯誤訊息", visible=True) with gr.Tabs(): with gr.Tab("圖表"): with gr.Row(): with gr.Column(scale=1): plot1 = gr.Plot(label="1:Line") with gr.Column(scale=1): plot2 = gr.Plot(label="2:Bar / Dual Axis") with gr.Row(): with gr.Column(scale=1): plot3 = gr.Plot(label="3:Rolling Mean") with gr.Column(scale=1): plot4 = gr.Plot(label="4:Gauge") with gr.Tab("地圖"): map_out = gr.HTML(label="5:Geo Map") with gr.Tab("JSON & 檔案"): json_box = gr.Code(label="grafanalib Dashboard JSON", language="json") json_file = gr.File(label="下載 dashboard.json") demo_csv_file = gr.File(label="下載示範資料 demo.csv") with gr.Tab("資料預覽"): df_view = gr.Dataframe(label="資料預覽") data_info = gr.Markdown("") # 新增:資料概況 with gr.Tab("點位詳情"): gr.Markdown("### 點位詳情") point_selector = gr.Dropdown(label="選擇點位", choices=[], value=None) detail_view = gr.Dataframe(label="選取點詳細資料") # 欄位探測 def probe_columns(source, file, preset_url, start_time, end_time): sheet_url = preset_url if source == "drive" else "" try: df, _ = load_data(source, file, sheet_url) df = filter_data(df, start_time, end_time) numeric_cols = [c for c in df.columns if c not in ["time", "lat", "lon", "pid"] and pd.api.types.is_numeric_dtype(df[c])] return gr.update(choices=numeric_cols, value=numeric_cols[:2]), df, "" except Exception as e: return gr.update(choices=[], value=[]), pd.DataFrame(), str(e) source_radio.change(probe_columns, [source_radio, file_in, preset_dd, start_time_in, end_time_in], [series_multiselect, df_view, error_msg]) file_in.change(probe_columns, [source_radio, file_in, preset_dd, start_time_in, end_time_in], [series_multiselect, df_view, error_msg]) preset_dd.change(probe_columns, [source_radio, file_in, preset_dd, start_time_in, end_time_in], [series_multiselect, df_view, error_msg]) start_time_in.change(probe_columns, [source_radio, file_in, preset_dd, start_time_in, end_time_in], 
                         [series_multiselect, df_view, error_msg])
    end_time_in.change(probe_columns,
                       [source_radio, file_in, preset_dd, start_time_in, end_time_in],
                       [series_multiselect, df_view, error_msg])

    # Initial load: use demo data (avoids "no data" caused by Drive permission issues)
    demo.load(
        fn=lambda: pipeline("demo", None, "", [], False, "5", "viridis", "OpenStreetMap", "", "", False, None),
        outputs=[
            plot1, plot2, plot3, plot4, map_out, json_box, json_file, df_view,
            demo_csv_file, point_selector, detail_view, error_msg, last_time_state, data_info
        ]
    )

    # Generate & update
    run_btn.click(
        fn=pipeline,
        inputs=[source_radio, file_in, preset_dd, series_multiselect, dual_axis_chk, rolling_dd,
                cmap_dd, tiles_dd, start_time_in, end_time_in, heatmap_chk, last_time_state],
        outputs=[plot1, plot2, plot3, plot4, map_out, json_box, json_file, df_view,
                 demo_csv_file, point_selector, detail_view, error_msg, last_time_state, data_info]
    )
    update_btn.click(
        fn=pipeline,
        inputs=[source_radio, file_in, preset_dd, series_multiselect, dual_axis_chk, rolling_dd,
                cmap_dd, tiles_dd, start_time_in, end_time_in, heatmap_chk, last_time_state],
        outputs=[plot1, plot2, plot3, plot4, map_out, json_box, json_file, df_view,
                 demo_csv_file, point_selector, detail_view, error_msg, last_time_state, data_info]
    )

    # Point details
    point_selector.change(fn=update_detail, inputs=[df_view, point_selector], outputs=[detail_view])

    # Auto refresh: periodically click the update button (bound via elem_id so the selector
    # does not depend on aria-labels); the period comes from the interval slider above.
    gr.HTML("""
    <script>
    (function () {
        let timer = null;
        function intervalMs() {
            const slider = document.querySelector("#interval_slider input[type=number], #interval_slider input[type=range]");
            const seconds = slider ? parseFloat(slider.value) : 10;
            return (isNaN(seconds) ? 10 : seconds) * 1000;
        }
        function schedule() {
            if (timer) clearInterval(timer);
            timer = setInterval(function () {
                const el = document.getElementById("update_btn");
                if (!el) return;
                const btn = el.tagName === "BUTTON" ? el : el.querySelector("button");
                if (btn) btn.click();
            }, intervalMs());
        }
        window.addEventListener("load", schedule);
        document.addEventListener("change", function (e) {
            if (e.target && e.target.closest && e.target.closest("#interval_slider")) schedule();
        });
    })();
    </script>
    """)

if __name__ == "__main__":
    demo.launch()