import streamlit as st
import pandas as pd
import numpy as np
import torch
import matplotlib.pyplot as plt
import pickle
import os
import warnings

from lllm_model_all_token import LLMConcreteModel

# Optimize for deployment
warnings.filterwarnings('ignore')
torch.set_num_threads(2)

# Canva-style colors
CANVA_PURPLE = "#8B5CF6"
CANVA_LIGHT_PURPLE = "#A78BFA"
CANVA_DARK_PURPLE = "#7C3AED"
CANVA_BACKGROUND = "#FAFAFA"
CANVA_WHITE = "#FFFFFF"

# Set page config with Canva-style theme
st.set_page_config(
    page_title="Concrete Creep Prediction",
    page_icon="🏗️",
    layout="centered",
    initial_sidebar_state="collapsed"
)

# Custom CSS for Canva-style design (CSS body not recovered)
st.markdown(f"""
""", unsafe_allow_html=True)


# Simple CreepScaler class
class CreepScaler:
    def __init__(self, factor=1000):
        self.factor = factor
        self.mean_ = 0
        self.scale_ = factor
        self.is_standard_scaler = False

    def transform(self, X):
        if self.is_standard_scaler:
            return (X - self.mean_) / self.scale_
        return X / self.factor

    def inverse_transform(self, X):
        if self.is_standard_scaler:
            return (X * self.scale_) + self.mean_
        return X * self.factor


@st.cache_resource
def load_model():
    """Load model and scalers"""
    # Find model file
    model_files = ['best_llm_model-17.pt', 'final_llm_model-5.pt']
    model_path = None
    for file in model_files:
        if os.path.exists(file):
            model_path = file
            break
    if model_path is None:
        st.error("❌ Model file not found")
        st.stop()

    # Load scalers
    try:
        with open('scalers/feature_scaler.pkl', 'rb') as f:
            feature_scaler = pickle.load(f)
        try:
            with open('scalers/creep_scaler.pkl', 'rb') as f:
                creep_scaler = pickle.load(f)
        except Exception:
            creep_scaler = CreepScaler(factor=1000)
        try:
            with open('scalers/time_values.pkl', 'rb') as f:
                time_values = pickle.load(f)
        except Exception:
            time_values = np.arange(1, 1001)  # Default 1000 time points
    except Exception as e:
        st.error(f"❌ Error loading files: {e}")
        st.stop()

    # Load model
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = LLMConcreteModel(
        feature_dim=3,
        d_model=192,
        num_layers=4,
        num_heads=4,
        d_ff=768,
        dropout=0.057,
        target_len=1,
        pooling_method='hybrid'
    )
    try:
        model.load_state_dict(torch.load(model_path, map_location=device))
        model = model.to(device)
        model.eval()
    except Exception as e:
        st.error(f"❌ Error loading model: {e}")
        st.stop()

    return model, feature_scaler, creep_scaler, time_values, device


def predict_creep(model, features, time_values, feature_scaler, creep_scaler, device, max_days=365):
    """Simple prediction function"""
    # Scale features
    scaled_features = feature_scaler.transform(features)
    scaled_features_tensor = torch.FloatTensor(scaled_features).to(device)

    # Limit time values
    pred_time_values = time_values[:max_days] if max_days < len(time_values) else time_values

    predictions = [0.0]  # Start with 0
    scaled_predictions = [0.0]

    with torch.no_grad():
        for i in range(1, len(pred_time_values)):
            history = np.array(scaled_predictions)
            history_tensor = torch.FloatTensor(history).unsqueeze(0).to(device)

            time_history = np.log1p(pred_time_values[:i])
            time_tensor = torch.FloatTensor(time_history).unsqueeze(0).to(device)

            length = torch.tensor([len(history)], device=device)

            next_value = model(
                creep_history=history_tensor,
                features=scaled_features_tensor,
                lengths=length,
                time_history=time_tensor
            ).item()

            scaled_predictions.append(next_value)
            next_creep = creep_scaler.inverse_transform(np.array([[next_value]])).flatten()[0]
            predictions.append(next_creep)

    return np.array(predictions), pred_time_values


# Load model
model, feature_scaler, creep_scaler, time_values, device = load_model()
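
# A minimal convenience sketch (assumption, not in the original file): wrapping the
# autoregressive predict_creep() call with st.cache_data so reruns triggered by widget
# interaction reuse the cached curve instead of repeating the day-by-day loop.
# The three feature arguments are illustrative placeholders; the model only requires
# three numeric inputs. Leading underscores tell Streamlit not to hash the
# model/scaler/device objects, which are not hashable.
@st.cache_data(show_spinner=False)
def cached_predict(feature_1: float, feature_2: float, feature_3: float, max_days: int,
                   _model=model, _feature_scaler=feature_scaler, _creep_scaler=creep_scaler,
                   _time_values=time_values, _device=device):
    # Assemble a single-sample feature matrix and run the autoregressive prediction.
    features = np.array([[feature_1, feature_2, feature_3]])
    return predict_creep(_model, features, _time_values, _feature_scaler,
                         _creep_scaler, _device, max_days=max_days)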
def get_base64_of_image(path):
    """Convert image to base64 string"""
    import base64
    try:
        with open(path, "rb") as img_file:
            return base64.b64encode(img_file.read()).decode()
    except Exception:
        return ""


# App title with logo (original HTML/logo markup not recovered; minimal heading kept)
st.markdown("""
    <div style="text-align: center;">
        <h1>🏗️ Concrete Creep Prediction Tool</h1>
        <p>AI-Powered Concrete Analysis</p>
    </div>
""", unsafe_allow_html=True)

# Recoverable result-page text (original markup not recovered): "Final Creep (µε)",
# "Maximum Creep (µε)", "Developed by CIFIR and AI Research Group KMUTT".
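
# ---------------------------------------------------------------------------
# Hypothetical reconstruction (assumption): the original input form and results
# markup were not recovered. Only the metric labels "Final Creep (µε)" and
# "Maximum Creep (µε)" and the footer text come from the source page; the widget
# labels, feature meanings, and value ranges below are illustrative placeholders.
# ---------------------------------------------------------------------------
col1, col2, col3 = st.columns(3)
with col1:
    feature_1 = st.number_input("Feature 1 (e.g. w/c ratio)", value=0.45)
with col2:
    feature_2 = st.number_input("Feature 2 (e.g. strength, MPa)", value=40.0)
with col3:
    feature_3 = st.number_input("Feature 3 (e.g. relative humidity, %)", value=60.0)

max_days = st.slider("Prediction horizon (days)", min_value=30, max_value=365, value=365)

if st.button("Predict creep"):
    # Run (or reuse) the cached autoregressive prediction defined above.
    predictions, pred_times = cached_predict(feature_1, feature_2, feature_3, max_days)

    # Summary metrics using the labels recovered from the source page.
    m1, m2 = st.columns(2)
    m1.metric("Final Creep (µε)", f"{predictions[-1]:.1f}")
    m2.metric("Maximum Creep (µε)", f"{predictions.max():.1f}")

    # Plot the predicted creep curve over the prediction horizon.
    fig, ax = plt.subplots(figsize=(8, 4))
    ax.plot(pred_times[:len(predictions)], predictions, color=CANVA_PURPLE)
    ax.set_xlabel("Time (days)")
    ax.set_ylabel("Creep (µε)")
    ax.set_title("Predicted creep development")
    st.pyplot(fig)

st.caption("Developed by CIFIR and AI Research Group KMUTT")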