Diffstat (limited to 'bin/fb-scrape/get-fb-content.py')
-rw-r--r--  bin/fb-scrape/get-fb-content.py  42
1 files changed, 42 insertions, 0 deletions
diff --git a/bin/fb-scrape/get-fb-content.py b/bin/fb-scrape/get-fb-content.py
new file mode 100644
index 0000000..5b22f70
--- /dev/null
+++ b/bin/fb-scrape/get-fb-content.py
@@ -0,0 +1,42 @@
+import json
+import logging
+import sys
+import time
+import warnings
+from os import path
+from pytz_deprecation_shim import PytzUsageWarning
+from random import randint
+
+from facebook_scraper import get_posts, enable_logging
+from requests.exceptions import RequestException
+
+debug = len(sys.argv) > 2 and sys.argv[2] == 'debug'
+
+if debug:
+ enable_logging(logging.DEBUG)
+
+warnings.filterwarnings(
+ action='ignore',
+ message=r'A low page limit'
+)
+warnings.filterwarnings(
+ action='ignore',
+ category=PytzUsageWarning
+)
+
+BASEDIR = path.dirname(__file__)
+
+posts = []
+try:
+ for post in get_posts(sys.argv[1], pages=2, cookies=path.join(BASEDIR, '../../config/facebook.com_cookies.txt')):
+ posts.append({
+ 'id': post['post_id'],
+ 'time': str(post['time']),
+ 'texts': [t.strip() for t in post['text'].split('\n') if t] if post['text'] else [],
+ 'images': post['images']
+ })
+ time.sleep(randint(10, 15))
+except RequestException:
+ pass
+
+print(json.dumps(posts))
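
The script takes the Facebook page or profile name as its first argument, an optional 'debug' flag as its second, and writes the scraped posts to stdout as a JSON array. A minimal sketch of how a caller could invoke it and consume that output, assuming it is run from the repository root; the page name 'some.page' is a placeholder, not part of this commit:

import json
import subprocess
import sys

# Run the scraper for a hypothetical page name and capture its stdout.
result = subprocess.run(
    [sys.executable, 'bin/fb-scrape/get-fb-content.py', 'some.page'],
    capture_output=True,
    text=True,
    check=True,
)

# get-fb-content.py prints a JSON array of objects with the fields
# collected above: 'id', 'time', 'texts', and 'images'.
posts = json.loads(result.stdout)
for post in posts:
    print(post['id'], post['time'], len(post['images']))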