Update robots.py to address error on line 57

Attempting to work around an error that prevents parsing the Dark Visitors site
Adam Newbold 2025-12-01 20:18:29 -05:00 committed by GitHub
commit 6d75f3c1c9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194


@@ -52,7 +52,16 @@ def updated_robots_json(soup):
         for agent in section.find_all("a", href=True):
             name = agent.find("div", {"class": "agent-name"}).get_text().strip()
             name = clean_robot_name(name)
-            desc = agent.find("p").get_text().strip()
+            # The line below occasionally throws this error: AttributeError: 'NoneType' object has no attribute 'get_text'
+            # desc = agent.find("p").get_text().strip()
+            # Attempting a different way to handle this to avoid errors:
+            p_tag = agent.find("p")
+            if p_tag is not None:
+                desc = p_tag.get_text().strip()
+            else:
+                desc = "Description unavailable from darkvisitors.com"
             default_values = {
                 "Unclear at this time.",