Spaces:
Runtime error
Runtime error
File size: 3,225 Bytes
96408b9 c98f89b b636b92 0e37cb9 c98f89b b636b92 f2a829a 326ee9c f2a829a b636b92 f2a829a b636b92 326ee9c b636b92 326ee9c b636b92 326ee9c b636b92 f2a829a b636b92 326ee9c f2a829a 326ee9c f2a829a b636b92 326ee9c b636b92 f2a829a b636b92 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 |
# ─── Imports & path setup ────────────────────────────────────────────
# NOTE: `sys`/`os` were previously imported twice (L7 and L13); imports are
# now deduplicated and grouped stdlib → third-party → local per PEP 8.
import os
import sys

# Make the sibling "utils" directory importable (two levels up from this file).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
UTILS_DIR = os.path.join(BASE_DIR, "utils")
if UTILS_DIR not in sys.path:
    sys.path.insert(0, UTILS_DIR)

# ─── Ensure omniscientframework package is importable ────────────────
# PACKAGE_PATH is the grandparent of this file's directory, i.e. the folder
# that contains the "omniscientframework" package.
ROOT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
PACKAGE_PATH = os.path.abspath(os.path.join(ROOT_PATH, ".."))
if PACKAGE_PATH not in sys.path:
    sys.path.insert(0, PACKAGE_PATH)

import streamlit as st

# ─── Import project utilities ────────────────────────────────────────
from omniscientframework.utils.backend import run_llm
# ─── Page Setup ─────────────────────────────────────────────────────
st.title("🧪 Example Page with Chatbot")
st.write("This demo chatbot also ingests Omnieye + Omnilog outputs.")

# ─── Initialize Session State ───────────────────────────────────────
# Chat history survives Streamlit reruns only if kept in session state.
if "example_chat" not in st.session_state:
    st.session_state["example_chat"] = []
# ─── Collect context from Omnieye + Omnilog ─────────────────────────
# Gather any tool output that earlier pages stored in session state, so the
# chatbot can ground its answers in that material.
system_context = []

if "omnieye_output" in st.session_state:
    omnieye = st.session_state["omnieye_output"]
    file_preview = omnieye.get("file_preview", "")
    keyword_hits = omnieye.get("matches", [])
    if file_preview:
        system_context.append("Omnieye preview:\n" + file_preview)
    if keyword_hits:
        system_context.append("Keyword matches:\n" + "\n".join(keyword_hits))

if "omnilog_output" in st.session_state:
    omnilog = st.session_state["omnilog_output"]
    normalized_preview = omnilog.get("normalized_preview", "")
    log_hits = omnilog.get("matches", [])
    if normalized_preview:
        system_context.append("Omnilog preview:\n" + normalized_preview)
    if log_hits:
        system_context.append("Log matches:\n" + "\n".join(log_hits))
# ─── Display Chat History ───────────────────────────────────────────
# Re-render the entire conversation on every Streamlit rerun.
for message in st.session_state["example_chat"]:
    role, content = message["role"], message["content"]
    with st.chat_message(role):
        st.markdown(content)
# ─── Chat Input ─────────────────────────────────────────────────────
prompt = st.chat_input("Ask the Example Chatbot about files or logs...")
if prompt:
    # Record and immediately echo the user's turn.
    st.session_state.example_chat.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Prepend the collected tool context so the LLM sees it with the question.
    ai_input = "\n\n".join(system_context + [prompt])

    # Ask the backend for a reply; surface any failure in-chat rather than
    # crashing the page (best-effort by design).
    try:
        ai_reply = run_llm(ai_input)
    except Exception as e:
        ai_reply = f"⚠️ Error running LLM: {e}"

    with st.chat_message("assistant"):
        st.markdown(ai_reply)
    st.session_state.example_chat.append({"role": "assistant", "content": ai_reply})
|