Merged
4 changes: 2 additions & 2 deletions backend/app/agents/__init__.py
@@ -1,6 +1,6 @@
from .devrel.agent import DevRelAgent
-from .shared.base_agent import BaseAgent, AgentState
-from .shared.classification_router import ClassificationRouter
+from .base_agent import BaseAgent, AgentState
+from .classification_router import ClassificationRouter

__all__ = [
"DevRelAgent",
1 change: 1 addition & 0 deletions backend/app/agents/devrel/__init__.py
@@ -0,0 +1 @@

18 changes: 9 additions & 9 deletions backend/app/agents/devrel/agent.py
@@ -4,18 +4,18 @@
from langgraph.graph import StateGraph, END
from langchain_google_genai import ChatGoogleGenerativeAI
from langgraph.checkpoint.memory import InMemorySaver
-from ..shared.base_agent import BaseAgent, AgentState
-from ..shared.classification_router import MessageCategory
+from ..base_agent import BaseAgent, AgentState
+from ..classification_router import MessageCategory
from .tools.search_tool import TavilySearchTool
from .tools.faq_tool import FAQTool
from app.core.config import settings
-from .nodes.gather_context_node import gather_context_node
-from .nodes.handle_faq_node import handle_faq_node
-from .nodes.handle_web_search_node import handle_web_search_node
-from .nodes.handle_technical_support_node import handle_technical_support_node
-from .nodes.handle_onboarding_node import handle_onboarding_node
-from .nodes.generate_response_node import generate_response_node
-from .nodes.summarization_node import check_summarization_needed, summarize_conversation_node, store_summary_to_database
+from .nodes.gather_context import gather_context_node
+from .nodes.handlers.faq import handle_faq_node
+from .nodes.handlers.web_search import handle_web_search_node
+from .nodes.handlers.technical_support import handle_technical_support_node
+from .nodes.handlers.onboarding import handle_onboarding_node
+from .generate_response_node import generate_response_node
+from .nodes.summarization import check_summarization_needed, summarize_conversation_node, store_summary_to_database

logger = logging.getLogger(__name__)
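Illustrative sketch (not the PR's actual graph): DevRelAgent's real graph construction is further down this file and is not fully shown in the diff, so the node names, edges, and routing below are assumptions; only the import paths and node signatures are taken from this PR.

from functools import partial

from langgraph.graph import StateGraph, END
from langgraph.checkpoint.memory import InMemorySaver

from app.agents.state import AgentState
from app.agents.devrel.nodes.handlers.web_search import handle_web_search_node
from app.agents.devrel.generate_response_node import generate_response_node


def build_demo_graph(llm, search_tool):
    """Minimal two-node demo graph; kwargs are bound so LangGraph calls each node with the state alone."""
    graph = StateGraph(AgentState)
    graph.add_node("handle_web_search", partial(handle_web_search_node, search_tool=search_tool, llm=llm))
    graph.add_node("generate_response", partial(generate_response_node, llm=llm))
    graph.set_entry_point("handle_web_search")
    graph.add_edge("handle_web_search", "generate_response")
    graph.add_edge("generate_response", END)
    # InMemorySaver keeps checkpoints in process memory, matching the import above.
    return graph.compile(checkpointer=InMemorySaver())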

@@ -1,28 +1,12 @@
import logging
from typing import Dict, Any
-from app.agents.shared.state import AgentState
+from app.agents.state import AgentState
from langchain_core.messages import HumanMessage
-from ..prompts.base_prompt import GENERAL_LLM_RESPONSE_PROMPT
+from .prompts.base_prompt import GENERAL_LLM_RESPONSE_PROMPT
+from .nodes.handlers.web_search import create_search_response

logger = logging.getLogger(__name__)

-async def _create_search_response(task_result: Dict[str, Any]) -> str:
-    """Create a response string from search results."""
-    query = task_result.get("query")
-    results = task_result.get("results", [])
-    if not results:
-        return f"I couldn't find any information for '{query}'. You might want to try rephrasing your search."
-
-    response_parts = [f"Here's what I found for '{query}':"]
-    for i, result in enumerate(results[:3]):
-        title = result.get('title', 'N/A')
-        snippet = result.get('snippet', 'N/A')
-        url = result.get('url', '#')
-        result_line = f"{i+1}. {title}: {snippet}"
-        response_parts.append(result_line)
-        response_parts.append(f" (Source: {url})")
-    response_parts.append("You can ask me to search again with a different query if these aren't helpful.")
-    return "\n".join(response_parts)

async def _create_llm_response(state: AgentState, task_result: Dict[str, Any], llm) -> str:
"""Generate a response using the LLM based on the current state and task result."""
@@ -89,7 +73,7 @@ async def generate_response_node(state: AgentState, llm) -> dict:
if task_result.get("type") == "faq":
final_response = task_result.get("response", "I don't have a specific answer for that question.")
elif task_result.get("type") == "web_search":
-final_response = await _create_search_response(task_result)
+final_response = create_search_response(task_result)
else:
final_response = await _create_llm_response(state, task_result, llm)

@@ -1,7 +1,6 @@
import logging
from datetime import datetime
-from app.agents.shared.state import AgentState
-from app.agents.shared.classification_router import MessageCategory
+from app.agents.state import AgentState

logger = logging.getLogger(__name__)

@@ -1,5 +1,5 @@
import logging
-from app.agents.shared.state import AgentState
+from app.agents.state import AgentState

logger = logging.getLogger(__name__)

@@ -1,5 +1,5 @@
import logging
-from app.agents.shared.state import AgentState
+from app.agents.state import AgentState

logger = logging.getLogger(__name__)

@@ -1,5 +1,5 @@
import logging
-from app.agents.shared.state import AgentState
+from app.agents.state import AgentState

logger = logging.getLogger(__name__)

@@ -1,12 +1,16 @@
import logging
-from app.agents.shared.state import AgentState
+from typing import Dict, Any
+from app.agents.state import AgentState
from langchain_core.messages import HumanMessage
-from ..prompts.search_prompt import EXTRACT_SEARCH_QUERY_PROMPT
+from app.agents.devrel.prompts.search_prompt import EXTRACT_SEARCH_QUERY_PROMPT


logger = logging.getLogger(__name__)

async def _extract_search_query(message: str, llm) -> str:
"""Extract a concise search query from the user's message."""
"""
Extract a concise search query from the user's message by invoking the LLM.
"""
logger.info(f"Extracting search query from: {message[:100]}")
try:
prompt = EXTRACT_SEARCH_QUERY_PROMPT.format(message=message)
@@ -19,7 +23,9 @@ async def _extract_search_query(message: str, llm) -> str:
return search_query

async def handle_web_search_node(state: AgentState, search_tool, llm) -> dict:
"""Handle web search requests"""
"""
Handle web search requests
"""
logger.info(f"Handling web search for session {state.session_id}")

latest_message = ""
@@ -41,3 +47,25 @@ async def handle_web_search_node(state: AgentState, search_tool, llm) -> dict:
"tools_used": ["tavily_search"],
"current_task": "web_search_handled"
}

+def create_search_response(task_result: Dict[str, Any]) -> str:
+    """
+    Create a user-friendly response string from search results.
+    """
+    query = task_result.get("query")
+    results = task_result.get("results", [])
+
+    if not results:
+        return f"I couldn't find any information for '{query}'. You might want to try rephrasing your search."
+
+    response_parts = [f"Here's what I found for '{query}':"]
+    for i, result in enumerate(results[:5]):
+        title = result.get('title', 'N/A')
+        snippet = result.get('snippet', 'N/A')
+        url = result.get('url', '#')
+        result_line = f"{i+1}. {title}: {snippet}"
+        response_parts.append(result_line)
+        response_parts.append(f" (Source: {url})")
+
+    response_parts.append("You can ask me to search again with a different query if these aren't helpful.")
+    return "\n".join(response_parts)
@@ -1,7 +1,7 @@
import logging
from datetime import datetime, timedelta
from typing import Dict, Any
-from app.agents.shared.state import AgentState
+from app.agents.state import AgentState
from langchain_core.messages import HumanMessage
from app.agents.devrel.prompts.summarization_prompt import CONVERSATION_SUMMARY_PROMPT

@@ -12,7 +12,9 @@
THREAD_TIMEOUT_HOURS = 1

async def check_summarization_needed(state: AgentState) -> Dict[str, Any]:
"""Check if summarization is needed and update interaction count"""
"""
Check if summarization is needed and update interaction count
"""

current_count = getattr(state, 'interaction_count', 0)
new_count = current_count + 1
@@ -46,14 +48,15 @@ async def check_summarization_needed(state: AgentState) -> Dict[str, Any]:
return updates

async def summarize_conversation_node(state: AgentState, llm) -> Dict[str, Any]:
"""Summarize the conversation and update the state"""
"""
Summarize the conversation and update the state
"""
logger.info(f"Summarizing conversation for session {state.session_id}")

try:
current_count = state.interaction_count
logger.info(f"Summarizing at interaction count: {current_count}")

-# Get the recent messages
all_messages = state.messages

if not all_messages:
@@ -66,7 +69,6 @@ async def summarize_conversation_node(state: AgentState, llm) -> Dict[str, Any]:
for msg in all_messages
])

-# Create prompt
existing_summary = state.conversation_summary
if not existing_summary or existing_summary == "This is the beginning of our conversation.":
existing_summary = "No previous summary - this is the start of our conversation tracking."
@@ -85,11 +87,9 @@
logger.info(f"Generating summary with {len(all_messages)} messages, "
f"conversation text length: {len(conversation_text)}")

-# Generate summary
response = await llm.ainvoke([HumanMessage(content=prompt)])
new_summary = response.content.strip()

-# Extract key topics from summary
new_topics = await _extract_key_topics(new_summary, llm)

logger.info(f"Conversation summarized successfully for session {state.session_id}")
@@ -121,7 +121,6 @@ async def _extract_key_topics(summary: str, llm) -> list[str]:
response = await llm.ainvoke([HumanMessage(content=topic_prompt)])
topics_text = response.content.strip()

-# Parse topics from response
topics = [topic.strip() for topic in topics_text.split(',') if topic.strip()]
return topics[:5] # Limiting to 5 topics

File renamed without changes.
11 changes: 11 additions & 0 deletions backend/app/api/__init__.py
@@ -0,0 +1,11 @@
"""
API package for the Devr.AI backend.

This package contains all API-related components:
- router: Main API router with all endpoints
- v1: Version 1 API endpoints
"""

from .router import api_router

__all__ = ["api_router"]
19 changes: 19 additions & 0 deletions backend/app/api/router.py
@@ -0,0 +1,19 @@
from fastapi import APIRouter
from .v1.auth import router as auth_router
from .v1.health import router as health_router

api_router = APIRouter()

api_router.include_router(
auth_router,
prefix="/v1/auth",
tags=["Authentication"]
)

api_router.include_router(
health_router,
prefix="/v1",
tags=["Health"]
)

__all__ = ["api_router"]
1 change: 1 addition & 0 deletions backend/app/api/v1/__init__.py
@@ -0,0 +1 @@

6 changes: 3 additions & 3 deletions backend/app/api/v1/auth.py
@@ -1,8 +1,8 @@
from fastapi import APIRouter, Request, HTTPException, Query
from fastapi.responses import HTMLResponse
-from app.db.supabase.supabase_client import get_supabase_client
-from app.db.supabase.users_service import find_user_by_session_and_verify, get_verification_session_info
-from app.db.weaviate.user_profiling import profile_user_from_github
+from app.database.supabase.client import get_supabase_client
+from app.services.auth.verification import find_user_by_session_and_verify, get_verification_session_info
+from app.services.user.profiling import profile_user_from_github
from typing import Optional
import logging
import asyncio
86 changes: 86 additions & 0 deletions backend/app/api/v1/health.py
@@ -0,0 +1,86 @@
import logging
from fastapi import APIRouter, HTTPException, Depends
from app.database.weaviate.client import get_weaviate_client
from app.core.dependencies import get_app_instance
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from main import DevRAIApplication

router = APIRouter()
logger = logging.getLogger(__name__)


@router.get("/health")
async def health_check(app_instance: "DevRAIApplication" = Depends(get_app_instance)):
"""
General health check endpoint to verify services are running.

Returns:
dict: Status of the application and its services
"""
try:
async with get_weaviate_client() as client:
weaviate_ready = await client.is_ready()

return {
"status": "healthy",
"services": {
"weaviate": "ready" if weaviate_ready else "not_ready",
"discord_bot": "running" if app_instance.discord_bot and not app_instance.discord_bot.is_closed() else "stopped"
}
}
except Exception as e:
logger.error(f"Health check failed: {e}")
raise HTTPException(
status_code=503,
detail={
"status": "unhealthy",
"error": str(e)
}
) from e


@router.get("/health/weaviate")
async def weaviate_health():
"""Check specifically Weaviate service health."""
try:
async with get_weaviate_client() as client:
is_ready = await client.is_ready()

return {
"service": "weaviate",
"status": "ready" if is_ready else "not_ready"
}
except Exception as e:
logger.error(f"Weaviate health check failed: {e}")
raise HTTPException(
status_code=503,
detail={
"service": "weaviate",
"status": "unhealthy",
"error": str(e)
}
) from e


@router.get("/health/discord")
async def discord_health(app_instance: "DevRAIApplication" = Depends(get_app_instance)):
"""Check specifically Discord bot health."""
try:
bot_status = "running" if app_instance.discord_bot and not app_instance.discord_bot.is_closed() else "stopped"

return {
"service": "discord_bot",
"status": bot_status
}
except Exception as e:
logger.error(f"Discord bot health check failed: {e}")
raise HTTPException(
status_code=503,
detail={
"service": "discord_bot",
"status": "unhealthy",
"error": str(e)
}
) from e
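An illustrative way to exercise the health endpoint once the router is mounted. The port and any outer mount prefix are assumptions (router.py contributes the "/v1" segment; how api_router itself is mounted is not shown in this diff).

import asyncio
import httpx


async def check_health() -> None:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.get("/v1/health")
        # Expected shape on success: {"status": "healthy", "services": {"weaviate": ..., "discord_bot": ...}}
        print(resp.status_code, resp.json())


asyncio.run(check_health())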
3 changes: 3 additions & 0 deletions backend/app/core/config/__init__.py
@@ -0,0 +1,3 @@
from .settings import settings

__all__ = ["settings"]
File renamed without changes.
12 changes: 12 additions & 0 deletions backend/app/core/dependencies.py
@@ -0,0 +1,12 @@
from fastapi import Request
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from main import DevRAIApplication

async def get_app_instance(request: Request) -> "DevRAIApplication":
"""
Dependency to get the application instance from FastAPI's state.
This avoids circular imports by using dependency injection.
"""
return request.app.state.app_instance
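The dependency above only resolves if something stores the application object on app.state first. A hedged sketch of that counterpart wiring: the real DevRAIApplication is built in main.py, which is not shown here, so a stand-in object is used below.

from fastapi import FastAPI

app = FastAPI()


class DummyApp:
    """Stand-in for DevRAIApplication; only here to illustrate the wiring."""
    discord_bot = None


# get_app_instance() returns whatever is stored here via request.app.state.app_instance.
app.state.app_instance = DummyApp()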
4 changes: 2 additions & 2 deletions backend/app/core/orchestration/agent_coordinator.py
@@ -5,9 +5,9 @@
from app.agents.devrel.agent import DevRelAgent
# TODO: Implement GitHub agent
# from app.agents.github.agent import GitHubAgent
-from app.agents.shared.state import AgentState
+from app.agents.state import AgentState
from app.core.orchestration.queue_manager import AsyncQueueManager
-from app.agents.devrel.nodes.summarization_node import store_summary_to_database
+from app.agents.devrel.nodes.summarization import store_summary_to_database
from langsmith import traceable

logger = logging.getLogger(__name__)
File renamed without changes.