"""Scrape recent posts from a Facebook page or profile and print them as JSON.

Usage: pass the page/profile name as the first argument; pass 'debug' as an
optional second argument to enable verbose facebook_scraper logging.
"""

import json
import logging
import sys
import time
import warnings
from os import path
from random import randint

from facebook_scraper import enable_logging, get_posts
from pytz_deprecation_shim import PytzUsageWarning
from requests.exceptions import RequestException

debug = len(sys.argv) > 2 and sys.argv[2] == 'debug'
if debug:
    enable_logging(logging.DEBUG)

# Silence facebook_scraper's warning about the low page limit and the pytz
# deprecation warning raised by its date handling.
warnings.filterwarnings(action='ignore', message=r'A low page limit')
warnings.filterwarnings(action='ignore', category=PytzUsageWarning)

BASEDIR = path.dirname(__file__)

posts = []
try:
    for post in get_posts(
        sys.argv[1],
        pages=2,
        cookies=path.join(BASEDIR, '../../config/facebook.com_cookies.txt'),
    ):
        posts.append({
            'id': post['post_id'],
            'time': str(post['time']),
            # Split the post body into trimmed, non-empty lines.
            'texts': ([t.strip() for t in post['text'].split('\n') if t.strip()]
                      if post['text'] else []),
            'images': post['images'],
        })
        # Pause a random interval between posts to reduce the chance of
        # being rate-limited.
        time.sleep(randint(10, 15))
except RequestException:
    # On a network error, stop scraping and emit whatever was collected.
    pass

print(json.dumps(posts))