fix:python-sdk
commit a4323d8f23
parent 0e027fe430
@@ -13,7 +13,7 @@ import os

 from .firecrawl import FirecrawlApp, JsonConfig, ScrapeOptions  # noqa

-__version__ = "2.1.1"
+__version__ = "2.1.2"

 # Define the logger for the Firecrawl project
 logger: logging.Logger = logging.getLogger("firecrawl")
@@ -1849,24 +1849,33 @@ class FirecrawlApp:
             show_full_text=show_full_text,
             experimental_stream=experimental_stream
         )
-        if not response.get('success') or 'id' not in response:
-            return response
+        if not response.success or not response.id:
+            return GenerateLLMsTextStatusResponse(
+                success=False,
+                error='Failed to start LLMs.txt generation',
+                status='failed',
+                expiresAt=''
+            )

-        job_id = response['id']
+        job_id = response.id
         while True:
             status = self.check_generate_llms_text_status(job_id)

-            if status['status'] == 'completed':
+            if status.status == 'completed':
                 return status
-            elif status['status'] == 'failed':
-                raise Exception(f'LLMs.txt generation failed. Error: {status.get("error")}')
-            elif status['status'] != 'processing':
-                break
+            elif status.status == 'failed':
+                return status
+            elif status.status != 'processing':
+                return GenerateLLMsTextStatusResponse(
+                    success=False,
+                    error='LLMs.txt generation job terminated unexpectedly',
+                    status='failed',
+                    expiresAt=''
+                )

             time.sleep(2)  # Polling interval

-        return {'success': False, 'error': 'LLMs.txt generation job terminated unexpectedly'}

     def async_generate_llms_text(
         self,
         url: str,
@@ -1903,10 +1912,13 @@ class FirecrawlApp:
         json_data['origin'] = f"python-sdk@{version}"

         try:
-            response = self._post_request(f'{self.api_url}/v1/llmstxt', json_data, headers)
-            if response.status_code == 200:
+            req = self._post_request(f'{self.api_url}/v1/llmstxt', json_data, headers)
+            response = req.json()
+            print("json_data", json_data)
+            print("response", response)
+            if response.get('success'):
                 try:
-                    return response.json()
+                    return GenerateLLMsTextResponse(**response)
                 except:
                     raise Exception('Failed to parse Firecrawl response as JSON.')
             else:
@@ -1914,7 +1926,10 @@ class FirecrawlApp:
         except Exception as e:
             raise ValueError(str(e))

-        return {'success': False, 'error': 'Internal server error'}
+        return GenerateLLMsTextResponse(
+            success=False,
+            error='Internal server error'
+        )

     def check_generate_llms_text_status(self, id: str) -> GenerateLLMsTextStatusResponse:
         """
@@ -1941,9 +1956,10 @@ class FirecrawlApp:
             response = self._get_request(f'{self.api_url}/v1/llmstxt/{id}', headers)
             if response.status_code == 200:
                 try:
-                    return response.json()
-                except:
-                    raise Exception('Failed to parse Firecrawl response as JSON.')
+                    json_data = response.json()
+                    return GenerateLLMsTextStatusResponse(**json_data)
+                except Exception as e:
+                    raise Exception(f'Failed to parse Firecrawl response as GenerateLLMsTextStatusResponse: {str(e)}')
             elif response.status_code == 404:
                 raise Exception('LLMs.txt generation job not found')
             else:
@@ -1951,7 +1967,7 @@ class FirecrawlApp:
         except Exception as e:
             raise ValueError(str(e))

-        return {'success': False, 'error': 'Internal server error'}
+        return GenerateLLMsTextStatusResponse(success=False, error='Internal server error', status='failed', expiresAt='')

     def _prepare_headers(
         self,
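
After this change the LLMs.txt helpers return GenerateLLMsTextResponse / GenerateLLMsTextStatusResponse model objects instead of raw dicts, so callers read attributes rather than dictionary keys, and failed jobs are returned rather than raised. Below is a minimal caller-side sketch, not part of the commit; the generate_llms_text(url) wrapper and the api_key constructor argument are assumed from the surrounding SDK code and do not appear in this diff.

# Minimal usage sketch (assumes generate_llms_text and api_key exist in the SDK).
from firecrawl import FirecrawlApp

app = FirecrawlApp(api_key="fc-YOUR-API-KEY")

# Returns a GenerateLLMsTextStatusResponse object rather than a dict.
status = app.generate_llms_text("https://example.com")

if status.success and status.status == "completed":
    print("llms.txt generation finished")
else:
    # Failed jobs now come back with status == 'failed' instead of raising.
    print(f"generation did not complete: {status.error}")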