|
@@ -4,6 +4,8 @@ import datetime
|
|
|
from enum import Enum
|
|
from enum import Enum
|
|
|
|
|
|
|
|
import globals
|
|
import globals
|
|
|
|
|
+import myanimebot
|
|
|
|
|
+import utils
|
|
|
|
|
|
|
|
ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co'
|
|
ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co'
|
|
|
|
|
|
|
class MediaType(Enum):
    """Kind of AniList media an activity can refer to."""

    ANIME = "ANIME"
    MANGA = "MANGA"

    @staticmethod
    def from_str(label: str):
        """Convert an AniList activity/media type string to a MediaType.

        Accepts both the media form ("ANIME", "MANGA") and the activity-list
        form ("ANIME_LIST", "MANGA_LIST"), case-insensitively.

        Raises:
            NotImplementedError: if the label matches neither media type.
        """
        # Normalise once instead of calling upper() in every comparison.
        normalized = label.upper()
        if normalized in ('ANIME', 'ANIME_LIST'):
            return MediaType.ANIME
        elif normalized in ('MANGA', 'MANGA_LIST'):
            return MediaType.MANGA
        else:
            raise NotImplementedError('Error: Cannot convert "{}" to a MediaType'.format(label))
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
def get_mal_id_from_anilist_id(anilist_media_id, media_type: MediaType):
|
|
def get_mal_id_from_anilist_id(anilist_media_id, media_type: MediaType):
|
|
|
""" Converts an AniList media ID to a MyAnimeList ID and returns it """
|
|
""" Converts an AniList media ID to a MyAnimeList ID and returns it """
|
|
|
|
|
|
|
@@ -48,6 +60,27 @@ def get_mal_id_from_anilist_id(anilist_media_id, media_type: MediaType):
|
|
|
print(e)
|
|
print(e)
|
|
|
return None
|
|
return None
|
|
|
|
|
|
|
|
|
|
def get_thumbnail_from_anilist_id(anilist_media_id, media_type: MediaType):
    """ Returns the MAL thumbnail from an AniList media ID.

    Converts the AniList ID to a MyAnimeList ID, builds the corresponding
    MyAnimeList page URL and asks utils.getThumbnail for that page's image.

    Raises:
        Exception: if no MAL ID could be resolved, or if media_type is not
            a known MediaType.
    """
    print("Trying to get MAL ID from AniList ID {}".format(anilist_media_id))
    mal_id = get_mal_id_from_anilist_id(anilist_media_id, media_type)
    print("Got MAL ID {} from AniList ID {}".format(mal_id, anilist_media_id))

    # get_mal_id_from_anilist_id returns None on failure; without this guard
    # we would silently build a bogus ".../anime/None" URL, so fail loudly.
    if mal_id is None:
        raise Exception("Error when getting thumbnail from AniList ID {} : Could not resolve a MAL ID".format(anilist_media_id))

    # Building MyAnimeList URL
    mal_url = globals.MAL_URL
    if media_type == MediaType.ANIME:
        mal_url += "anime/"
    elif media_type == MediaType.MANGA:
        mal_url += "manga/"
    else:
        raise Exception("Error when getting thumbnail from AniList ID {} : Unknown Mediatype {}".format(anilist_media_id, media_type))
    mal_url += str(mal_id)

    print("Getting thumbnail from URL '{}'".format(mal_url))
    return utils.getThumbnail(mal_url)
|
|
|
|
|
+
|
|
|
|
|
|
|
|
def get_anilist_userId_from_name(user_name : str):
|
|
def get_anilist_userId_from_name(user_name : str):
|
|
|
""" Searches an AniList user by its name and returns its ID """
|
|
""" Searches an AniList user by its name and returns its ID """
|
|
@@ -162,6 +195,60 @@ def get_latest_activity(users_id):
|
|
|
return None
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
async def send_embed_to_channels(activity):
    """Build a Discord embed for an AniList activity and send it to every
    channel registered for the user's servers.

    activity: an AniList activity dict; this function reads
    activity["media"] (id, title.english, siteUrl), activity["type"],
    activity["user"]["name"], activity["status"], activity["progress"] and
    activity["createdAt"] (epoch seconds).
    """

    # Fetch user's data
    try:
        db_user = globals.conn.cursor(buffered=True)
        # NOTE(review): no WHERE clause — only the first t_users row is ever
        # used, regardless of which user produced the activity; looks like it
        # should filter on the activity's user. TODO confirm intent.
        db_user.execute("SELECT mal_user, servers FROM t_users")
        data_user = db_user.fetchone()
    except Exception as e:
        # TODO Catch exception
        # NOTE(review): quit() terminates the whole process on a DB error
        # inside an async handler — harsh for a bot; consider raising instead.
        globals.logger.critical("Database unavailable! (" + str(e) + ")")
        quit()

    # TODO Fetch and insert AniList thumbnail
    # Fetch image's data
    # cursor.execute("SELECT thumbnail FROM t_animes WHERE guid=%s LIMIT 1", [item.guid])
    # data_img = cursor.fetchone()

    # if data_img is None:
    try:
        # TODO Directly send malId instead
        # Resolve the MAL thumbnail for this media; MediaType.from_str maps
        # the activity type string (e.g. "ANIME_LIST") to a MediaType.
        image = get_thumbnail_from_anilist_id(activity["media"]["id"], MediaType.from_str(activity["type"]))

        # NOTE(review): title.english can be null on AniList — this
        # concatenation would then raise and fall into the except below.
        globals.logger.info("First time seeing this " + activity["media"]["title"]["english"] + ", adding thumbnail into database: " + image)
    except Exception as e:
        # Thumbnail is best-effort: on any failure, send the embed without one.
        globals.logger.warning("Error while getting the thumbnail: " + str(e))
        image = ""

    # cursor.execute("INSERT INTO t_animes (guid, title, thumbnail, found, discoverer, media) VALUES (%s, %s, %s, NOW(), %s, %s)", [item.guid, item.title, image, user, media])
    # globals.conn.commit()
    # else: image = data_img[0]


    # data_user[1] holds a comma-separated list of server ids for this user.
    for server in data_user[1].split(","):
        db_srv = globals.conn.cursor(buffered=True)
        db_srv.execute("SELECT channel FROM t_servers WHERE server = %s", [server])
        data_channel = db_srv.fetchone()

        # FIXME 'Completed None'
        # Iterate every registered channel row for this server and post the
        # embed to each channel it lists.
        while data_channel is not None:
            for channel in data_channel:
                await myanimebot.send_embed_wrapper(None,
                                                    channel,
                                                    globals.client,
                                                    myanimebot.build_embed(activity["user"]["name"],
                                                                           activity["media"]["title"]["english"],
                                                                           activity["media"]["siteUrl"],
                                                                           "{} {}".format(activity["status"], activity["progress"]),
                                                                           datetime.datetime.fromtimestamp(activity["createdAt"]),
                                                                           image))

            data_channel = db_srv.fetchone()
|
|
|
|
|
+
|
|
|
def insert_feed_db(activity):
|
|
def insert_feed_db(activity):
|
|
|
cursor = globals.conn.cursor(buffered=True)
|
|
cursor = globals.conn.cursor(buffered=True)
|
|
|
|
|
|
|
@@ -175,7 +262,7 @@ def insert_feed_db(activity):
|
|
|
globals.conn.commit()
|
|
globals.conn.commit()
|
|
|
|
|
|
|
|
|
|
|
|
|
-def process_new_activities(last_activity_date):
|
|
|
|
|
|
|
+async def process_new_activities(last_activity_date):
|
|
|
""" Fetch and process all newest activities """
|
|
""" Fetch and process all newest activities """
|
|
|
|
|
|
|
|
continue_fetching = True
|
|
continue_fetching = True
|
|
@@ -194,15 +281,19 @@ def process_new_activities(last_activity_date):
|
|
|
|
|
|
|
|
# Get time difference between now and activity creation date
|
|
# Get time difference between now and activity creation date
|
|
|
diffTime = datetime.datetime.now(globals.timezone) - datetime.datetime.fromtimestamp(activity["createdAt"], globals.timezone)
|
|
diffTime = datetime.datetime.now(globals.timezone) - datetime.datetime.fromtimestamp(activity["createdAt"], globals.timezone)
|
|
|
|
|
+
|
|
|
|
|
+ print("Time difference between feed and now = {}".format(diffTime))
|
|
|
# If the activity is older than the last_activity_date, we processed all the newest activities
|
|
# If the activity is older than the last_activity_date, we processed all the newest activities
|
|
|
# Also, if the time difference is bigger than the config's "secondMax", we can stop processing them
|
|
# Also, if the time difference is bigger than the config's "secondMax", we can stop processing them
|
|
|
- if activity["createdAt"] < last_activity_date or diffTime.total_seconds() > globals.secondMax:
|
|
|
|
|
|
|
+ if activity["createdAt"] <= last_activity_date or diffTime.total_seconds() > globals.secondMax:
|
|
|
|
|
+ # FIXME If two or more feeds are published at the same time, this would skip them
|
|
|
continue_fetching = False
|
|
continue_fetching = False
|
|
|
break
|
|
break
|
|
|
# Process activity
|
|
# Process activity
|
|
|
# TODO Add logger infos
|
|
# TODO Add logger infos
|
|
|
insert_feed_db(activity)
|
|
insert_feed_db(activity)
|
|
|
# TODO Create embed and send to channels
|
|
# TODO Create embed and send to channels
|
|
|
|
|
+ await send_embed_to_channels(activity)
|
|
|
|
|
|
|
|
# Load next activities page
|
|
# Load next activities page
|
|
|
# TODO How can I avoid duplicate if insertion in between? With storing ids?
|
|
# TODO How can I avoid duplicate if insertion in between? With storing ids?
|
|
@@ -213,18 +304,22 @@ def process_new_activities(last_activity_date):
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_last_activity_date_db():
    """Return the publication time of the most recent stored AniList feed.

    Returns the `published` timestamp of the newest t_feeds row for the
    AniList service as epoch seconds (float), or 0 when no row exists.
    """
    # Refresh database
    # NOTE(review): commit() on this read path presumably ends the current
    # transaction so the following SELECT sees rows committed by other
    # connections (e.g. under REPEATABLE READ) — TODO confirm.
    globals.conn.commit()

    # Get last activity date
    cursor = globals.conn.cursor(buffered=True)
    cursor.execute("SELECT published FROM t_feeds WHERE service=%s ORDER BY published DESC LIMIT 1", [globals.SERVICE_ANILIST])
    data = cursor.fetchone()

    print("Getting last activity date : {}".format(data))
    # No stored feed yet: report epoch 0 so every activity counts as new.
    if data is None or len(data) == 0:
        return 0
    else:
        # `published` comes back as a datetime; convert to epoch seconds to
        # match the createdAt values returned by the AniList API.
        return data[0].timestamp()
|
|
|
|
|
|
|
|
|
|
|
|
|
-def check_new_activities():
|
|
|
|
|
|
|
+async def check_new_activities():
|
|
|
""" Check if there is new activities and process them """
|
|
""" Check if there is new activities and process them """
|
|
|
|
|
|
|
|
# last_activity_date = 1608340203 # TODO SELECT DATE IN DB
|
|
# last_activity_date = 1608340203 # TODO SELECT DATE IN DB
|
|
@@ -238,13 +333,18 @@ def check_new_activities():
|
|
|
# If the latest activity is more recent than the last we stored
|
|
# If the latest activity is more recent than the last we stored
|
|
|
if last_activity_date < latest_activity["createdAt"]:
|
|
if last_activity_date < latest_activity["createdAt"]:
|
|
|
print("Latest activity is more recent")
|
|
print("Latest activity is more recent")
|
|
|
- process_new_activities(last_activity_date)
|
|
|
|
|
|
|
+ await process_new_activities(last_activity_date)
|
|
|
|
|
|
|
|
|
|
|
|
|
# [x] Convertir AniList ID en MAL ID
|
|
# [x] Convertir AniList ID en MAL ID
|
|
|
# [ ] Recuperer utilisateurs qui nous interessent
|
|
# [ ] Recuperer utilisateurs qui nous interessent
|
|
|
# [X] Recuperer activites de ces users
|
|
# [X] Recuperer activites de ces users
|
|
|
# [X] Traiter les donnees et les mettre en DB
|
|
# [X] Traiter les donnees et les mettre en DB
|
|
|
-# [ ] Creer embed et envoyer messages
|
|
|
|
|
|
|
+# [X] Creer embed et envoyer messages
|
|
|
# [ ] Faire task pour fetch automatiquement
|
|
# [ ] Faire task pour fetch automatiquement
|
|
|
-# [ ] Rajouter requests dans la liste de dependances pip (Site de Penta)
|
|
|
|
|
|
|
+# [ ] Rajouter requests dans la liste de dependances pip (Site de Penta)
|
|
|
|
|
+
|
|
|
|
|
+# TODO Changer titre (Pour l'instant c'est MAL de XXX)
|
|
|
|
|
+# TODO Bien renvoyer vers AniList (Liens/Liste/Anime)
|
|
|
|
|
+# TODO Recuperer image d'AniList
|
|
|
|
|
+# TODO Comment eviter doublons MAL/AniList
|