Skip to content

Commit ce2004e

Browse files
committed
fix lint
1 parent e16a245 commit ce2004e

File tree

3 files changed

+25
-24
lines changed

3 files changed

+25
-24
lines changed

__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
⚡ Transform research papers into working code automatically
66
"""
77

8-
__version__ = "1.0.2"
8+
__version__ = "1.0.3"
99
__author__ = "DeepCode Team"
1010
__url__ = "https://github.com/HKUDS/DeepCode"
1111

tools/bocha_search_server.py

Lines changed: 16 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import asyncio
21
import os
32
import sys
43
import json
@@ -15,13 +14,13 @@
1514
"bocha-search-mcp",
1615
prompt="""
1716
# Bocha Search MCP Server
18-
17+
1918
Bocha is a Chinese search engine for AI, This server provides tools for searching the web using Bocha Search API.
2019
It allows you to get enhanced search details from billions of web documents, including weather, news, wikis, healthcare, train tickets, images, and more.
2120
2221
## Available Tools
23-
24-
### 1. bocha_web_search
22+
23+
### 1. bocha_web_search
2524
Search with Bocha Web Search and get enhanced search details from billions of web documents, including page titles, urls, summaries, site names, site icons, publication dates, image links, and more.
2625
2726
### 2. bocha_ai_search
@@ -36,7 +35,7 @@
3635
- Bocha AI search: Title, URL, Description, Published date, Site name, and structured data card
3736
3837
If the API key is missing or invalid, appropriate error messages will be returned.
39-
"""
38+
""",
4039
)
4140

4241

@@ -69,7 +68,7 @@ async def bocha_web_search(
6968
"query": query,
7069
"summary": True,
7170
"freshness": freshness,
72-
"count": count
71+
"count": count,
7372
}
7473

7574
headers = {
@@ -86,7 +85,7 @@ async def bocha_web_search(
8685
resp = response.json()
8786
if "data" not in resp:
8887
return "Search error."
89-
88+
9089
data = resp["data"]
9190

9291
if "webPages" not in data:
@@ -142,7 +141,7 @@ async def bocha_ai_search(
142141
"freshness": freshness,
143142
"count": count,
144143
"answer": False,
145-
"stream": False
144+
"stream": False,
146145
}
147146

148147
headers = {
@@ -163,9 +162,9 @@ async def bocha_ai_search(
163162
content = {}
164163
try:
165164
content = json.loads(message["content"])
166-
except:
165+
except (json.JSONDecodeError, TypeError):
167166
content = {}
168-
167+
169168
# 网页
170169
if message["content_type"] == "webpage":
171170
if "value" in content:
@@ -177,12 +176,15 @@ async def bocha_ai_search(
177176
f"Published date: {item['datePublished']}\n"
178177
f"Site name: {item['siteName']}"
179178
)
180-
elif message["content_type"] != "image" and message["content"] != "{}":
179+
elif (
180+
message["content_type"] != "image"
181+
and message["content"] != "{}"
182+
):
181183
results.append(message["content"])
182184

183185
if not results:
184186
return "No results found."
185-
187+
186188
return "\n\n".join(results)
187189

188190
except httpx.HTTPStatusError as e:
@@ -203,8 +205,7 @@ def main():
203205
file=sys.stderr,
204206
)
205207
print(
206-
"Get a Bocha API key from: "
207-
"https://open.bochaai.com",
208+
"Get a Bocha API key from: " "https://open.bochaai.com",
208209
file=sys.stderr,
209210
)
210211
sys.exit(1)
@@ -215,4 +216,4 @@ def main():
215216

216217

217218
if __name__ == "__main__":
218-
main()
219+
main()

workflows/agent_orchestration_engine.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@ def get_default_search_server(config_path: str = "mcp_agent.config.yaml"):
113113
if os.path.exists(config_path):
114114
with open(config_path, "r", encoding="utf-8") as f:
115115
config = yaml.safe_load(f)
116-
116+
117117
default_server = config.get("default_search_server", "brave")
118118
print(f"🔍 Using search server: {default_server}")
119119
return default_server
@@ -126,7 +126,9 @@ def get_default_search_server(config_path: str = "mcp_agent.config.yaml"):
126126
return "brave"
127127

128128

129-
def get_search_server_names(additional_servers: Optional[List[str]] = None) -> List[str]:
129+
def get_search_server_names(
130+
additional_servers: Optional[List[str]] = None,
131+
) -> List[str]:
130132
"""
131133
Get server names list with the configured default search server.
132134
@@ -138,13 +140,13 @@ def get_search_server_names(additional_servers: Optional[List[str]] = None) -> L
138140
"""
139141
default_search = get_default_search_server()
140142
server_names = [default_search]
141-
143+
142144
if additional_servers:
143145
# Add additional servers, avoiding duplicates
144146
for server in additional_servers:
145147
if server not in server_names:
146148
server_names.append(server)
147-
149+
148150
return server_names
149151

150152

@@ -486,7 +488,7 @@ async def paper_reference_analyzer(paper_dir: str, logger) -> str:
486488
Focus on:
487489
1. **References section analysis** - Extract all citations from the References/Bibliography part
488490
2. References with high-quality GitHub implementations
489-
3. Papers cited for methodology, algorithms, or core techniques
491+
3. Papers cited for methodology, algorithms, or core techniques
490492
4. Related work that shares similar technical approaches
491493
5. Implementation references that could provide code patterns
492494
@@ -628,9 +630,7 @@ async def orchestrate_reference_intelligence_agent(
628630
return f.read()
629631

630632
# Execute reference analysis
631-
reference_result = await paper_reference_analyzer(
632-
dir_info["paper_dir"], logger
633-
)
633+
reference_result = await paper_reference_analyzer(dir_info["paper_dir"], logger)
634634

635635
# Save reference analysis result
636636
with open(reference_path, "w", encoding="utf-8") as f:

0 commit comments

Comments (0)