1
0

anilist.py 15 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433
  1. import asyncio
  2. import datetime
  3. import time
  4. from enum import Enum
  5. from typing import Dict, List
  6. import requests
  7. import myanimebot.globals as globals
  8. import myanimebot.utils as utils
  9. ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co'
  10. class MediaListStatus(Enum):
  11. CURRENT=0
  12. PLANNING=1
  13. COMPLETED=2
  14. DROPPED=3
  15. PAUSED=4
  16. REPEATING=5
  17. @staticmethod
  18. def from_str(label: str):
  19. if label.upper().startswith('READ') or \
  20. label.upper().startswith('WATCHED') :
  21. return MediaListStatus.CURRENT
  22. elif label.upper().startswith('PLANS'):
  23. return MediaListStatus.PLANNING
  24. elif label.upper().startswith('COMPLETED'):
  25. return MediaListStatus.COMPLETED
  26. elif label.upper().startswith('DROPPED'):
  27. return MediaListStatus.DROPPED
  28. elif label.upper().startswith('PAUSED'):
  29. return MediaListStatus.PAUSED
  30. elif label.upper().startswith('REREAD') or \
  31. label.upper().startswith('REWATCHED'):
  32. return MediaListStatus.REPEATING
  33. else:
  34. raise NotImplementedError('Error: Cannot convert "{}" to a MediaListStatus'.format(label))
  35. def get_media_name(activity):
  36. ''' Returns the media name in english if possible '''
  37. english_name = activity["media"]["title"]["english"]
  38. if english_name is not None:
  39. return english_name
  40. romaji_name = activity["media"]["title"]["romaji"]
  41. if romaji_name is not None:
  42. return romaji_name
  43. native_name = activity["media"]["title"]["native"]
  44. if native_name is not None:
  45. return native_name
  46. return ''
  47. def get_progress(activity):
  48. progress = activity["progress"]
  49. if progress is None:
  50. return '?'
  51. return progress
  52. def build_status_string(activity):
  53. status_str = activity["status"].capitalize()
  54. status = MediaListStatus.from_str(status_str)
  55. progress = get_progress(activity)
  56. episodes = ''
  57. media_label = ''
  58. media_type = utils.MediaType.from_str(activity["type"])
  59. # TODO Manage Completed/Dropped/Planned episodes/chapters count
  60. if status == MediaListStatus.CURRENT \
  61. or status == MediaListStatus.REPEATING:
  62. if media_type == utils.MediaType.ANIME:
  63. episodes = activity["media"]["episodes"]
  64. if episodes is None:
  65. episodes = '?'
  66. media_label = 'episodes'
  67. elif media_type == utils.MediaType.MANGA:
  68. episodes = activity["media"]["chapters"]
  69. if episodes is None:
  70. episodes = '?'
  71. media_label = 'chapters'
  72. return '{} | {} of {} {}'.format(status_str, progress, episodes, media_label)
  73. else:
  74. return '{}'.format(status_str)
  75. def build_feed_from_activity(activity, user : utils.User):
  76. if activity is None: return None
  77. media = utils.Media(name=get_media_name(activity),
  78. url=activity["media"]["siteUrl"],
  79. episodes=utils.Media.get_number_episodes(activity),
  80. image=activity["media"]["coverImage"]["large"],
  81. type=utils.MediaType.from_str(activity["media"]["type"]))
  82. feed = utils.Feed(service=utils.Service.ANILIST,
  83. date_publication=datetime.datetime.fromtimestamp(activity["createdAt"], globals.timezone),
  84. user=user,
  85. status=build_status_string(activity),
  86. description=activity["status"],
  87. media=media)
  88. return feed
  89. def get_anilist_userId_from_name(user_name : str) -> int:
  90. """ Searches an AniList user by its name and returns its ID """
  91. query = '''query($userName: String){
  92. User(name: $userName) {
  93. id
  94. }
  95. }'''
  96. variables = {
  97. 'userName': user_name
  98. }
  99. try:
  100. response = requests.post(ANILIST_GRAPHQL_URL, json={'query': query, 'variables': variables})
  101. response.raise_for_status()
  102. return response.json()["data"]["User"]["id"]
  103. except requests.HTTPError as e:
  104. #TODO Correct error response
  105. print('ERROR WRONG RESPONSE CODE')
  106. except Exception as e:
  107. #TODO Correct error response
  108. print('UNKNOWN Error when trying to get user id :')
  109. print(e)
  110. return None
  111. def get_latest_users_activities(users : List[utils.User], page: int, perPage = 5) -> List[utils.Feed]:
  112. """ Get latest users' activities """
  113. query = '''query ($userIds: [Int], $page: Int, $perPage: Int) {
  114. Page (page: $page, perPage: $perPage) {
  115. activities (userId_in: $userIds, sort: ID_DESC) {
  116. __typename
  117. ... on ListActivity {
  118. id
  119. type
  120. status
  121. progress
  122. isLocked
  123. createdAt
  124. user {
  125. id
  126. name
  127. }
  128. media {
  129. id
  130. siteUrl
  131. episodes
  132. chapters
  133. type
  134. title {
  135. romaji
  136. english
  137. native
  138. }
  139. coverImage {
  140. large
  141. }
  142. }
  143. }
  144. }
  145. }
  146. }'''
  147. variables = {
  148. "userIds": [user.service_id for user in users],
  149. "perPage": perPage,
  150. "page": page
  151. }
  152. try:
  153. # Execute GraphQL query
  154. response = requests.post(ANILIST_GRAPHQL_URL, json={'query': query, 'variables': variables})
  155. response.raise_for_status()
  156. data = response.json()["data"]["Page"]["activities"]
  157. # Create feeds from data
  158. feeds = []
  159. for activity in data:
  160. # Check if activity is a ListActivity
  161. if activity["__typename"] != 'ListActivity':
  162. continue
  163. # Find corresponding user for this ListActivity
  164. user = next((user for user in users if user.name == activity["user"]["name"]), None)
  165. if user is None:
  166. raise RuntimeError('Cannot find {} in our registered users'.format(activity["user"]["name"]))
  167. # Add new builded feed
  168. feeds.append(build_feed_from_activity(activity, user))
  169. return feeds
  170. except requests.HTTPError as e:
  171. #TODO Correct error response
  172. print('ERROR WRONG RESPONSE CODE')
  173. except Exception as e:
  174. #TODO Correct error response
  175. print('UNKNOWN Error when trying to get the users\' activities :')
  176. print(e)
  177. return []
  178. def check_username_validity(username) -> bool:
  179. """ Check if the AniList username exists """
  180. query = '''query($name: String) {
  181. User(name: $name) {
  182. name
  183. }
  184. }'''
  185. variables = {
  186. 'name': username
  187. }
  188. try:
  189. response = requests.post(ANILIST_GRAPHQL_URL, json={'query': query, 'variables': variables})
  190. response.raise_for_status()
  191. return response.json()["data"]["User"]["name"] == username
  192. except requests.HTTPError as e:
  193. return False
  194. except Exception as e:
  195. #TODO Correct error response
  196. print('UNKNOWN Error when trying to get mal id : {}'.format(e))
  197. return False
  198. def get_latest_activity(users : List[utils.User]):
  199. """ Get the latest users' activity """
  200. # TODO Will fail if last activity is not a ListActivity
  201. query = '''query ($userIds: [Int]) {
  202. Activity(userId_in: $userIds, sort: ID_DESC) {
  203. __typename
  204. ... on ListActivity {
  205. id
  206. userId
  207. createdAt
  208. }
  209. }
  210. }'''
  211. variables = {
  212. "userIds": [user.service_id for user in users]
  213. }
  214. try:
  215. response = requests.post(ANILIST_GRAPHQL_URL, json={'query': query, 'variables': variables})
  216. response.raise_for_status()
  217. return response.json()["data"]["Activity"]
  218. except requests.HTTPError as e:
  219. #TODO Correct error response
  220. print('ERROR WRONG RESPONSE CODE')
  221. except Exception as e:
  222. #TODO Correct error response
  223. print('UNKNOWN Error when trying to get the latest activity :')
  224. print(e)
  225. return None
  226. def get_users_db():
  227. ''' Returns the registered users using AniList '''
  228. # TODO Make generic execute
  229. cursor = globals.conn.cursor(buffered=True, dictionary=True)
  230. cursor.execute("SELECT id, {}, servers FROM t_users WHERE service = %s".format(globals.DB_USER_NAME), [globals.SERVICE_ANILIST])
  231. users_data = cursor.fetchall()
  232. cursor.close()
  233. return users_data
  234. def get_users() -> List[utils.User]:
  235. users = []
  236. users_data = get_users_db()
  237. if users_data is not None:
  238. for user_data in users_data:
  239. users.append(utils.User(id=user_data["id"],
  240. service_id=get_anilist_userId_from_name(user_data[globals.DB_USER_NAME]),
  241. name=user_data[globals.DB_USER_NAME],
  242. servers=user_data["servers"].split(',')))
  243. return users
  244. def get_users_id(users_data) -> List[int]:
  245. ''' Returns the id of the registered users using AniList '''
  246. users_ids = []
  247. # Get users using AniList
  248. if users_data is not None:
  249. print("Users found: {}".format(users_data))
  250. for user_data in users_data:
  251. users_ids.append(get_anilist_userId_from_name(user_data[globals.DB_USER_NAME]))
  252. # TODO Normalement pas besoin de recuperer les ids vu que je peux faire la recherche avec les noms
  253. return users_ids
  254. async def send_embed_to_channels(activity : utils.Feed):
  255. # TODO Doc
  256. for server in activity.user.servers:
  257. data_channels = utils.get_channels(server)
  258. if data_channels is not None:
  259. for channel in data_channels:
  260. await utils.send_embed_wrapper(None,
  261. channel["channel"],
  262. globals.client,
  263. utils.build_embed(activity.user.name,
  264. activity.media.name,
  265. activity.media.url,
  266. activity.status,
  267. activity.date_publication,
  268. activity.media.image,
  269. activity.service))
  270. def insert_feed_db(activity: utils.Feed):
  271. cursor = globals.conn.cursor(buffered=True)
  272. cursor.execute("INSERT INTO t_feeds (published, title, url, user, found, type, service) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, NOW(), %s, %s)",
  273. (activity.date_publication.timestamp(),
  274. activity.media.name,
  275. activity.media.url,
  276. activity.user.name,
  277. activity.description, # TODO Create enum to make it generic
  278. globals.SERVICE_ANILIST))
  279. globals.conn.commit()
  280. async def process_new_activities(last_activity_date, users : List[utils.User]):
  281. """ Fetch and process all newest activities """
  282. continue_fetching = True
  283. page_number = 1
  284. while continue_fetching:
  285. # Get activities
  286. activities = get_latest_users_activities(users, page_number)
  287. # Processing them
  288. for activity in activities:
  289. print(activity) # TODO Remove, DEBUG
  290. # Get time difference between now and activity creation date
  291. diffTime = datetime.datetime.now(globals.timezone) - activity.date_publication
  292. print("Time difference between feed and now = {}".format(diffTime))
  293. # If the activity is older than the last_activity_date, we processed all the newest activities
  294. # Also, if the time difference is bigger than the config's "secondMax", we can stop processing them
  295. if activity.date_publication.timestamp() <= last_activity_date \
  296. or diffTime.total_seconds() > globals.secondMax:
  297. # FIXME If two or more feeds are published at the same time, this would skip them
  298. continue_fetching = False
  299. break
  300. # Process activity
  301. # TODO Add logger infos
  302. insert_feed_db(activity)
  303. # TODO Create embed and send to channels
  304. await send_embed_to_channels(activity)
  305. # Load next activities page
  306. # TODO How can I avoid duplicate if insertion in between? With storing ids?
  307. if continue_fetching:
  308. print('Fetching next page') # TODO Remove, Debug
  309. page_number += 1
  310. time.sleep(1)
  311. def get_last_activity_date_db() -> float:
  312. # Refresh database
  313. globals.conn.commit()
  314. # Get last activity date
  315. cursor = globals.conn.cursor(buffered=True)
  316. cursor.execute("SELECT published FROM t_feeds WHERE service=%s ORDER BY published DESC LIMIT 1", [globals.SERVICE_ANILIST])
  317. data = cursor.fetchone()
  318. if data is None or len(data) == 0:
  319. return 0.0
  320. else:
  321. return data[0].timestamp()
  322. async def check_new_activities():
  323. """ Check if there is new activities and process them """
  324. last_activity_date = get_last_activity_date_db()
  325. # Get latest activity on AniList
  326. users = get_users()
  327. latest_activity = get_latest_activity(users)
  328. if latest_activity is not None:
  329. # If the latest activity is more recent than the last we stored
  330. print('Last registered = {} | {} = latest feed'.format(last_activity_date, latest_activity["createdAt"]))
  331. if last_activity_date < latest_activity["createdAt"]:
  332. globals.logger.debug("Found a more recent AniList feed")
  333. await process_new_activities(last_activity_date, users)
async def background_check_feed(asyncioloop):
    ''' Main function that check the AniList feeds.

    Runs until the Discord client closes: waits for the client to be ready,
    then polls AniList every globals.ANILIST_SECONDS_BETWEEN_FETCHES seconds.
    Errors are logged but never propagated, so the background task survives
    transient failures.

    asyncioloop -- event loop handle; not used by the body itself.
    '''
    globals.logger.info("Starting up Anilist.background_check_feed")
    # Do not start polling before the Discord client is connected
    await globals.client.wait_until_ready()
    globals.logger.debug("Discord client connected, unlocking Anilist.background_check_feed...")
    while not globals.client.is_closed():
        globals.logger.debug('Fetching Anilist feeds')
        try:
            await check_new_activities()
        except Exception as e:
            # Swallow and log: one failed fetch must not kill the loop
            globals.logger.error('Error while fetching Anilist feeds : ({})'.format(e))
        await asyncio.sleep(globals.ANILIST_SECONDS_BETWEEN_FETCHES)
# TODO Link back properly to AniList (links/list/anime)
# TODO How to avoid MAL/AniList duplicates -> probably not worth doing
# TODO Insert anime into DB
# TODO Unify feed status labels between MAL and AniList