Skip to content

Commit

Permalink
fix (#8322 followup): resolve violations of pylint rules (#8391)
Browse files Browse the repository at this point in the history
  • Loading branch information
bowenliang123 authored Sep 13, 2024
1 parent a1104ab commit aad6f34
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
4 changes: 2 additions & 2 deletions api/core/tools/provider/builtin/firecrawl/tools/crawl.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,10 @@ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMe
scrapeOptions["excludeTags"] = get_array_params(tool_parameters, "excludeTags")
scrapeOptions["onlyMainContent"] = tool_parameters.get("onlyMainContent", False)
scrapeOptions["waitFor"] = tool_parameters.get("waitFor", 0)
scrapeOptions = {k: v for k, v in scrapeOptions.items() if v not in (None, "")}
scrapeOptions = {k: v for k, v in scrapeOptions.items() if v not in {None, ""}}
payload["scrapeOptions"] = scrapeOptions or None

payload = {k: v for k, v in payload.items() if v not in (None, "")}
payload = {k: v for k, v in payload.items() if v not in {None, ""}}

crawl_result = app.crawl_url(url=tool_parameters["url"], wait=wait_for_results, **payload)

Expand Down
4 changes: 2 additions & 2 deletions api/core/tools/provider/builtin/firecrawl/tools/scrape.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,10 @@ def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> list[ToolInv
extract["schema"] = get_json_params(tool_parameters, "schema")
extract["systemPrompt"] = tool_parameters.get("systemPrompt")
extract["prompt"] = tool_parameters.get("prompt")
extract = {k: v for k, v in extract.items() if v not in (None, "")}
extract = {k: v for k, v in extract.items() if v not in {None, ""}}
payload["extract"] = extract or None

payload = {k: v for k, v in payload.items() if v not in (None, "")}
payload = {k: v for k, v in payload.items() if v not in {None, ""}}

crawl_result = app.scrape_url(url=tool_parameters["url"], **payload)
markdown_result = crawl_result.get("data", {}).get("markdown", "")
Expand Down

0 comments on commit aad6f34

Please sign in to comment.