-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain_interactive.py
More file actions
42 lines (36 loc) · 1.53 KB
/
main_interactive.py
File metadata and controls
42 lines (36 loc) · 1.53 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
from agents.ai_scientist import AIScientistAgent
from agents.hybrid_query_manager import HybridQueryManager
import torch
import datetime
import os
def log_message(mode: str, question: str, answer: str, sources=None):
    """Append a timestamped query/answer record to ``logs/session.log``.

    Args:
        mode: Routing mode label (e.g. ``"rag"`` or ``"direct"``);
            upper-cased in the log line.
        question: The user's query text.
        answer: The agent's response text.
        sources: Optional collection of source identifiers; written on a
            separate ``Sources:`` line only when truthy.
    """
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    msg = f"[{timestamp}] [{mode.upper()}] {question} -> {answer}"
    os.makedirs("logs", exist_ok=True)
    # Explicit UTF-8: answers may contain emoji/non-ASCII, and the default
    # locale encoding is platform-dependent (would raise UnicodeEncodeError
    # on narrow-locale systems).
    with open("logs/session.log", "a", encoding="utf-8") as f:
        f.write(msg + "\n")
        if sources:
            f.write(f"Sources: {sources}\n\n")
if __name__ == "__main__":
    # Report visible CUDA devices up front, before any model loading.
    print("Available GPUs:", torch.cuda.device_count())

    agent = AIScientistAgent()
    query_router = HybridQueryManager(agent)

    print("\n=== 🧪 AI Scientist Interactive Chat ===")
    print("Type your question below. Type 'exit' or 'quit' to stop.\n")

    # Read-eval-print loop: one user turn per iteration until exit/quit.
    while True:
        question = input("You: ").strip()
        if question.lower() in {"exit", "quit"}:
            print("👋 Exiting chat. Goodbye!")
            break

        # The router picks between retrieval-augmented and direct reasoning.
        result = query_router.query(question)

        # Present the routed mode and the answer to the user.
        print(f"\n[{result['mode'].upper()} MODE]")
        print(f"AI Scientist: {result['answer']}")
        print()  # spacing

        # Record the full exchange (mode, question, answer, sources).
        log_message(result['mode'], question, result["answer"], result["sources"])