
Commit

run black, remove default locale param
neon-ninja committed Jul 21, 2022
1 parent b69eb53 commit d8b4a9f
Showing 2 changed files with 14 additions and 9 deletions.
18 changes: 11 additions & 7 deletions facebook_scraper/extractors.py
@@ -1106,10 +1106,14 @@ def extract_comment_replies(self, replies_url):
         try:
             # Some users have to use an AJAX POST method to get replies.
             # Check if this is the case by checking for the element that holds the encrypted response token
-            use_ajax_post = self.full_post_html.find("input[name='fb_dtsg']", first=True) is not None
+            use_ajax_post = (
+                self.full_post_html.find("input[name='fb_dtsg']", first=True) is not None
+            )
 
             if use_ajax_post:
-                fb_dtsg = self.full_post_html.find("input[name='fb_dtsg']", first=True).attrs["value"]
+                fb_dtsg = self.full_post_html.find("input[name='fb_dtsg']", first=True).attrs[
+                    "value"
+                ]
                 encryptedAjaxResponseToken = re.search(
                     r'encrypted":"([^"]+)', self.full_post_html.html
                 ).group(1)
@@ -1121,7 +1125,7 @@ def extract_comment_replies(self, replies_url):
             else:
                 use_ajax_post = False
                 response = self.request(replies_url)
 
         except exceptions.TemporarilyBanned:
             raise
         except Exception as e:
@@ -1140,24 +1144,24 @@ def extract_comment_replies(self, replies_url):
                     break
 
         reply_selector = 'div[data-sigil="comment inline-reply"]'
 
         if self.options.get("noscript"):
             reply_selector = '#root div[id]'
             replies = html.find(reply_selector)
 
         else:
             # Skip first element, as it will be this comment itself
             reply_selector = 'div[data-sigil="comment"]'
             replies = response.html.find(reply_selector)[1:]
 
         try:
             for reply in replies:
                 yield self.parse_comment(reply)
         except exceptions.TemporarilyBanned:
             raise
         except Exception as e:
             logger.error(f"Unable to parse comment {replies_url} replies {replies}: {e}")
 
     def extract_comment_with_replies(self, comment):
         try:
             result = self.parse_comment(comment)
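
The reformatting above does not change behaviour: the scraper still decides between a plain GET and an AJAX POST for comment replies by probing for the hidden fb_dtsg input. Below is a minimal sketch of that probe using requests_html directly, with made-up HTML fragments and token values; nothing in it is part of the commit.

from requests_html import HTML

# Two invented page fragments: one carries the hidden token input, one does not.
with_token = HTML(html='<form><input name="fb_dtsg" value="token123"></form>')
without_token = HTML(html='<form><input name="jazoest" value="25431"></form>')

# find(..., first=True) returns the first matching Element, or None when nothing
# matches, which is why the diff checks "is not None" to pick the AJAX POST path.
use_ajax_post = with_token.find("input[name='fb_dtsg']", first=True) is not None
print(use_ajax_post)  # True  -> replies would be fetched with an AJAX POST
print(without_token.find("input[name='fb_dtsg']", first=True) is not None)  # False -> plain GET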
5 changes: 3 additions & 2 deletions facebook_scraper/facebook_scraper.py
Expand Up @@ -64,7 +64,6 @@ def __init__(self, session=None, requests_kwargs=None):
requests_kwargs = {}

self.session = session
requests_kwargs["params"] = {"locale": "en_US"}
self.requests_kwargs = requests_kwargs

def set_user_agent(self, user_agent):
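
With the forced locale line removed, the constructor no longer overwrites whatever params the caller supplies. A hedged usage sketch follows; the class name FacebookScraper and the import path are assumed from the file shown in the diff, while the session/requests_kwargs parameters and the en_US params dict come from the hunk above.

from facebook_scraper.facebook_scraper import FacebookScraper  # class name assumed

# Before this commit, requests_kwargs["params"] was always overwritten with
# {"locale": "en_US"}; after it, a caller who still wants English pages has to
# ask for them explicitly:
scraper = FacebookScraper(requests_kwargs={"params": {"locale": "en_US"}})

# Callers who pass nothing now get whatever locale Facebook serves the session.
default_scraper = FacebookScraper()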
@@ -654,7 +653,9 @@ def get_page_info(self, page, **kwargs) -> Profile:
                 ).text
             except:
                 logger.error("No ld+json element")
-            likes_and_follows = community_resp.html.find("#page_suggestions_on_liking+div", first=True).text.split("\n")
+            likes_and_follows = community_resp.html.find(
+                "#page_suggestions_on_liking+div", first=True
+            ).text.split("\n")
             result["followers"] = utils.convert_numeric_abbr(likes_and_follows[2])
         except:
             pass
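
The re-wrapped get_page_info line still assumes that the element after #page_suggestions_on_liking holds likes and follower information on separate lines, with the follower line at index 2 after the split. A small illustration with invented text; only the selector, the split, and the index come from the diff, while the sample strings and the exact behaviour of utils.convert_numeric_abbr are assumptions.

# Invented example of the text block the selector is expected to return.
sample_text = "12,345 people like this\nIncluding 3 of your friends\n13K people follow this"
likes_and_follows = sample_text.split("\n")

followers_line = likes_and_follows[2]  # "13K people follow this"
print(followers_line)
# The diff passes this line to utils.convert_numeric_abbr to turn the abbreviated
# count into a number; if the page layout shifts, index 2 no longer points at the
# follower line, which is the fragility this parsing accepts.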
