1
0

anilist.py 15 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434
  1. import asyncio
  2. import datetime
  3. import time
  4. from enum import Enum
  5. from typing import Dict, List
  6. import requests
  7. import myanimebot.globals as globals
  8. import myanimebot.myanimebot as mab
  9. import myanimebot.utils as utils
  10. ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co'
  11. class MediaListStatus(Enum):
  12. CURRENT=0
  13. PLANNING=1
  14. COMPLETED=2
  15. DROPPED=3
  16. PAUSED=4
  17. REPEATING=5
  18. @staticmethod
  19. def from_str(label: str):
  20. if label.upper().startswith('READ') or \
  21. label.upper().startswith('WATCHED') :
  22. return MediaListStatus.CURRENT
  23. elif label.upper().startswith('PLANS'):
  24. return MediaListStatus.PLANNING
  25. elif label.upper().startswith('COMPLETED'):
  26. return MediaListStatus.COMPLETED
  27. elif label.upper().startswith('DROPPED'):
  28. return MediaListStatus.DROPPED
  29. elif label.upper().startswith('PAUSED'):
  30. return MediaListStatus.PAUSED
  31. elif label.upper().startswith('REREAD') or \
  32. label.upper().startswith('REWATCHED'):
  33. return MediaListStatus.REPEATING
  34. else:
  35. raise NotImplementedError('Error: Cannot convert "{}" to a MediaListStatus'.format(label))
  36. def get_media_name(activity):
  37. ''' Returns the media name in english if possible '''
  38. english_name = activity["media"]["title"]["english"]
  39. if english_name is not None:
  40. return english_name
  41. romaji_name = activity["media"]["title"]["romaji"]
  42. if romaji_name is not None:
  43. return romaji_name
  44. native_name = activity["media"]["title"]["native"]
  45. if native_name is not None:
  46. return native_name
  47. return ''
  48. def get_progress(activity):
  49. progress = activity["progress"]
  50. if progress is None:
  51. return '?'
  52. return progress
  53. def build_status_string(activity):
  54. status_str = activity["status"].capitalize()
  55. status = MediaListStatus.from_str(status_str)
  56. progress = get_progress(activity)
  57. episodes = ''
  58. media_label = ''
  59. media_type = utils.MediaType.from_str(activity["type"])
  60. # TODO Manage Completed/Dropped/Planned episodes/chapters count
  61. if status == MediaListStatus.CURRENT \
  62. or status == MediaListStatus.REPEATING:
  63. if media_type == utils.MediaType.ANIME:
  64. episodes = activity["media"]["episodes"]
  65. if episodes is None:
  66. episodes = '?'
  67. media_label = 'episodes'
  68. elif media_type == utils.MediaType.MANGA:
  69. episodes = activity["media"]["chapters"]
  70. if episodes is None:
  71. episodes = '?'
  72. media_label = 'chapters'
  73. return '{} | {} of {} {}'.format(status_str, progress, episodes, media_label)
  74. else:
  75. return '{}'.format(status_str)
  76. def build_feed_from_activity(activity, user : utils.User):
  77. if activity is None: return None
  78. media = utils.Media(name=get_media_name(activity),
  79. url=activity["media"]["siteUrl"],
  80. episodes=utils.Media.get_number_episodes(activity),
  81. image=activity["media"]["coverImage"]["large"],
  82. type=utils.MediaType.from_str(activity["media"]["type"]))
  83. feed = utils.Feed(service=utils.Service.ANILIST,
  84. date_publication=datetime.datetime.fromtimestamp(activity["createdAt"], globals.timezone),
  85. user=user,
  86. status=build_status_string(activity),
  87. description=activity["status"],
  88. media=media)
  89. return feed
  90. def get_anilist_userId_from_name(user_name : str) -> int:
  91. """ Searches an AniList user by its name and returns its ID """
  92. query = '''query($userName: String){
  93. User(name: $userName) {
  94. id
  95. }
  96. }'''
  97. variables = {
  98. 'userName': user_name
  99. }
  100. try:
  101. response = requests.post(ANILIST_GRAPHQL_URL, json={'query': query, 'variables': variables})
  102. response.raise_for_status()
  103. return response.json()["data"]["User"]["id"]
  104. except requests.HTTPError as e:
  105. #TODO Correct error response
  106. print('ERROR WRONG RESPONSE CODE')
  107. except Exception as e:
  108. #TODO Correct error response
  109. print('UNKNOWN Error when trying to get user id :')
  110. print(e)
  111. return None
  112. def get_latest_users_activities(users : List[utils.User], page: int, perPage = 5) -> List[utils.Feed]:
  113. """ Get latest users' activities """
  114. query = '''query ($userIds: [Int], $page: Int, $perPage: Int) {
  115. Page (page: $page, perPage: $perPage) {
  116. activities (userId_in: $userIds, sort: ID_DESC) {
  117. __typename
  118. ... on ListActivity {
  119. id
  120. type
  121. status
  122. progress
  123. isLocked
  124. createdAt
  125. user {
  126. id
  127. name
  128. }
  129. media {
  130. id
  131. siteUrl
  132. episodes
  133. chapters
  134. type
  135. title {
  136. romaji
  137. english
  138. native
  139. }
  140. coverImage {
  141. large
  142. }
  143. }
  144. }
  145. }
  146. }
  147. }'''
  148. variables = {
  149. "userIds": [user.service_id for user in users],
  150. "perPage": perPage,
  151. "page": page
  152. }
  153. try:
  154. # Execute GraphQL query
  155. response = requests.post(ANILIST_GRAPHQL_URL, json={'query': query, 'variables': variables})
  156. response.raise_for_status()
  157. data = response.json()["data"]["Page"]["activities"]
  158. # Create feeds from data
  159. feeds = []
  160. for activity in data:
  161. # Check if activity is a ListActivity
  162. if activity["__typename"] != 'ListActivity':
  163. continue
  164. # Find corresponding user for this ListActivity
  165. user = next((user for user in users if user.name == activity["user"]["name"]), None)
  166. if user is None:
  167. raise RuntimeError('Cannot find {} in our registered users'.format(activity["user"]["name"]))
  168. # Add new builded feed
  169. feeds.append(build_feed_from_activity(activity, user))
  170. return feeds
  171. except requests.HTTPError as e:
  172. #TODO Correct error response
  173. print('ERROR WRONG RESPONSE CODE')
  174. except Exception as e:
  175. #TODO Correct error response
  176. print('UNKNOWN Error when trying to get the users\' activities :')
  177. print(e)
  178. return []
  179. def check_username_validity(username) -> bool:
  180. """ Check if the AniList username exists """
  181. query = '''query($name: String) {
  182. User(name: $name) {
  183. name
  184. }
  185. }'''
  186. variables = {
  187. 'name': username
  188. }
  189. try:
  190. response = requests.post(ANILIST_GRAPHQL_URL, json={'query': query, 'variables': variables})
  191. response.raise_for_status()
  192. return response.json()["data"]["User"]["name"] == username
  193. except requests.HTTPError as e:
  194. return False
  195. except Exception as e:
  196. #TODO Correct error response
  197. print('UNKNOWN Error when trying to get mal id : {}'.format(e))
  198. return False
  199. def get_latest_activity(users : List[utils.User]):
  200. """ Get the latest users' activity """
  201. # TODO Will fail if last activity is not a ListActivity
  202. query = '''query ($userIds: [Int]) {
  203. Activity(userId_in: $userIds, sort: ID_DESC) {
  204. __typename
  205. ... on ListActivity {
  206. id
  207. userId
  208. createdAt
  209. }
  210. }
  211. }'''
  212. variables = {
  213. "userIds": [user.service_id for user in users]
  214. }
  215. try:
  216. response = requests.post(ANILIST_GRAPHQL_URL, json={'query': query, 'variables': variables})
  217. response.raise_for_status()
  218. return response.json()["data"]["Activity"]
  219. except requests.HTTPError as e:
  220. #TODO Correct error response
  221. print('ERROR WRONG RESPONSE CODE')
  222. except Exception as e:
  223. #TODO Correct error response
  224. print('UNKNOWN Error when trying to get the latest activity :')
  225. print(e)
  226. return None
  227. def get_users_db():
  228. ''' Returns the registered users using AniList '''
  229. # TODO Make generic execute
  230. cursor = globals.conn.cursor(buffered=True, dictionary=True)
  231. cursor.execute("SELECT id, {}, servers FROM t_users WHERE service = %s".format(globals.DB_USER_NAME), [globals.SERVICE_ANILIST])
  232. users_data = cursor.fetchall()
  233. cursor.close()
  234. return users_data
  235. def get_users() -> List[utils.User]:
  236. users = []
  237. users_data = get_users_db()
  238. if users_data is not None:
  239. for user_data in users_data:
  240. users.append(utils.User(id=user_data["id"],
  241. service_id=get_anilist_userId_from_name(user_data[globals.DB_USER_NAME]),
  242. name=user_data[globals.DB_USER_NAME],
  243. servers=user_data["servers"].split(',')))
  244. return users
  245. def get_users_id(users_data) -> List[int]:
  246. ''' Returns the id of the registered users using AniList '''
  247. users_ids = []
  248. # Get users using AniList
  249. if users_data is not None:
  250. print("Users found: {}".format(users_data))
  251. for user_data in users_data:
  252. users_ids.append(get_anilist_userId_from_name(user_data[globals.DB_USER_NAME]))
  253. # TODO Normalement pas besoin de recuperer les ids vu que je peux faire la recherche avec les noms
  254. return users_ids
  255. async def send_embed_to_channels(activity : utils.Feed):
  256. # TODO Doc
  257. for server in activity.user.servers:
  258. data_channels = utils.get_channels(server)
  259. if data_channels is not None:
  260. for channel in data_channels:
  261. await mab.send_embed_wrapper(None,
  262. channel["channel"],
  263. globals.client,
  264. mab.build_embed(activity.user.name,
  265. activity.media.name,
  266. activity.media.url,
  267. activity.status,
  268. activity.date_publication,
  269. activity.media.image,
  270. activity.service))
  271. def insert_feed_db(activity: utils.Feed):
  272. cursor = globals.conn.cursor(buffered=True)
  273. cursor.execute("INSERT INTO t_feeds (published, title, url, user, found, type, service) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, NOW(), %s, %s)",
  274. (activity.date_publication.timestamp(),
  275. activity.media.name,
  276. activity.media.url,
  277. activity.user.name,
  278. activity.description, # TODO Create enum to make it generic
  279. globals.SERVICE_ANILIST))
  280. globals.conn.commit()
  281. async def process_new_activities(last_activity_date, users : List[utils.User]):
  282. """ Fetch and process all newest activities """
  283. continue_fetching = True
  284. page_number = 1
  285. while continue_fetching:
  286. # Get activities
  287. activities = get_latest_users_activities(users, page_number)
  288. # Processing them
  289. for activity in activities:
  290. print(activity) # TODO Remove, DEBUG
  291. # Get time difference between now and activity creation date
  292. diffTime = datetime.datetime.now(globals.timezone) - activity.date_publication
  293. print("Time difference between feed and now = {}".format(diffTime))
  294. # If the activity is older than the last_activity_date, we processed all the newest activities
  295. # Also, if the time difference is bigger than the config's "secondMax", we can stop processing them
  296. if activity.date_publication.timestamp() <= last_activity_date \
  297. or diffTime.total_seconds() > globals.secondMax:
  298. # FIXME If two or more feeds are published at the same time, this would skip them
  299. continue_fetching = False
  300. break
  301. # Process activity
  302. # TODO Add logger infos
  303. insert_feed_db(activity)
  304. # TODO Create embed and send to channels
  305. await send_embed_to_channels(activity)
  306. # Load next activities page
  307. # TODO How can I avoid duplicate if insertion in between? With storing ids?
  308. if continue_fetching:
  309. print('Fetching next page') # TODO Remove, Debug
  310. page_number += 1
  311. time.sleep(1)
  312. def get_last_activity_date_db() -> float:
  313. # Refresh database
  314. globals.conn.commit()
  315. # Get last activity date
  316. cursor = globals.conn.cursor(buffered=True)
  317. cursor.execute("SELECT published FROM t_feeds WHERE service=%s ORDER BY published DESC LIMIT 1", [globals.SERVICE_ANILIST])
  318. data = cursor.fetchone()
  319. if data is None or len(data) == 0:
  320. return 0.0
  321. else:
  322. return data[0].timestamp()
  323. async def check_new_activities():
  324. """ Check if there is new activities and process them """
  325. last_activity_date = get_last_activity_date_db()
  326. # Get latest activity on AniList
  327. users = get_users()
  328. latest_activity = get_latest_activity(users)
  329. if latest_activity is not None:
  330. # If the latest activity is more recent than the last we stored
  331. print('Last registered = {} | {} = latest feed'.format(last_activity_date, latest_activity["createdAt"]))
  332. if last_activity_date < latest_activity["createdAt"]:
  333. globals.logger.debug("Found a more recent AniList feed")
  334. await process_new_activities(last_activity_date, users)
async def background_check_feed(asyncioloop):
    ''' Main background task: periodically checks the AniList feeds.

    Runs until the Discord client is closed, polling every
    globals.ANILIST_SECONDS_BETWEEN_FETCHES seconds. '''
    globals.logger.info("Starting up Anilist.background_check_feed")
    # Do not poll before the Discord client is connected
    await globals.client.wait_until_ready()
    globals.logger.debug("Discord client connected, unlocking Anilist.background_check_feed...")
    while not globals.client.is_closed():
        globals.logger.debug('Fetching Anilist feeds')
        try:
            await check_new_activities()
        except Exception as e:
            # Catch-all so a single failed poll never kills the background task
            globals.logger.error('Error while fetching Anilist feeds : ({})'.format(e))
        await asyncio.sleep(globals.ANILIST_SECONDS_BETWEEN_FETCHES)
# TODO Properly link back to AniList (links/list/anime)
# TODO How to avoid MAL/AniList duplicates -> probably better not to handle it
# TODO Insert anime into DB
# TODO Unify the feed status labels between MAL and AniList