import json
import os
import time
from collections import defaultdict
from datetime import datetime
from pprint import pprint

import requests
from loguru import logger

from twi_api import get_list, login
from twi_parser import parse_timeline

# Per-list caches keyed by list URL: id and timestamp of the newest tweet seen.
LATEST_TWEET_ID_DICT = {}
LATEST_TWEET_TS_DICT = {}


def check_new_tweets(tweets, url):
    # Return the tweets newer than the last one recorded for this list URL,
    # then move the baseline forward to the newest tweet.
    global LATEST_TWEET_ID_DICT
    new_tweets = []
    if not tweets:
        # Guard added: an empty timeline would make tweets[0] below raise IndexError.
        return new_tweets
    if url in LATEST_TWEET_ID_DICT:
        for tweet in tweets:
            if tweet["rest_id"] == LATEST_TWEET_ID_DICT[url]:
                break  # reached the last tweet already reported
            if tweet["timestamp"] < LATEST_TWEET_TS_DICT[url]:
                break  # older than the recorded baseline
            if time.time() - tweet["timestamp"] > 1200:
                break  # older than 20 minutes; treat as stale
            new_tweets.append(tweet)
    LATEST_TWEET_ID_DICT[url] = tweets[0]["rest_id"]
    LATEST_TWEET_TS_DICT[url] = tweets[0]["timestamp"]
    return new_tweets


def filter_tweets(tweets, filter_list):
    # "only_image": keep only tweets that carry media attachments.
    if "only_image" in filter_list:
        tweets = [t for t in tweets if t["media"]]
    # "only_origin": drop quote tweets and retweets.
    if "only_origin" in filter_list:
        tweets = [t for t in tweets if (not t["quoted"]) and (not t["retweeted"])]
    return tweets


def check_timeline(config):
    # The list id is the last path segment of the configured list URL.
    list_id = int(config["url"].split("/")[-1])
    data = get_list(list_id)
    if not data:
        return []
    tweets = parse_timeline(data)
    new_tweets = check_new_tweets(tweets, config["url"])
    return filter_tweets(new_tweets, config["filter"])


if __name__ == "__main__":
    # First run: log in and cache authenticated headers in headers.json.
    if not os.path.exists("headers.json"):
        login()
    with open("config.json", "r") as f:
        config = json.load(f)
    # Fix: the default must be a dict ({}), not a list, since .items() is called below.
    check_list = config.get("check_list", {})
    check_interval = config.get("check_interval", 42)
    check_interval_slow = config.get("check_interval_slow", 600)
    slow_hours = config.get("slow_hours", [0, 1, 2, 3, 4, 5, 6])
    last_check_time = defaultdict(float)
    while True:
        # Payload shape: {group_id: [tweet, ...]} for every group with news.
        json_data = {}
        for group_id, group_config in check_list.items():
            group_interval = group_config.get("interval", check_interval)
            if time.time() - last_check_time[group_id] > group_interval:
                new_tweets = check_timeline(group_config)
                if new_tweets:
                    json_data[group_id] = new_tweets
                last_check_time[group_id] = time.time()
        if json_data:
            pprint(json_data)
            try:
                resp = requests.post(config["callback_url"],
                                     json=json_data, timeout=10)
                logger.info(resp.content)
            except Exception as e:
                logger.error(str(e))
        # Poll less often during the configured quiet hours.
        if datetime.now().hour in slow_hours:
            time.sleep(check_interval_slow)
        else:
            time.sleep(check_interval)
# with open("lovelive.json", 'r', encoding="utf8") as f: pprint(parse_timeline(json.load(f)))
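
# A config.json shape matching the keys read above; all values are
# illustrative placeholders, not required defaults.
#
#     {
#         "callback_url": "http://localhost:8080/tweets",
#         "check_interval": 42,
#         "check_interval_slow": 600,
#         "slow_hours": [0, 1, 2, 3, 4, 5, 6],
#         "check_list": {
#             "some_group": {
#                 "url": "https://twitter.com/i/lists/1234567890",
#                 "filter": ["only_image", "only_origin"],
#                 "interval": 60
#             }
#         }
#     }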