Coverage for src/analyse.py: 0%

260 statements  

« prev     ^ index     » next       coverage.py v7.8.2, created at 2025-06-03 19:06 +0000

1import logging 

2 

3import pandas as pd 

4 

5from src.configuration import store, config 

6from src.static.static_values_enum import Edition, Element, CardType, Rarity, ManaCap, MatchType, Format 

7 

8 

def get_image_url_markdown(card_name, level, edition):
    """Return a markdown image tag for a card rendered at the given level/edition."""
    name = str(card_name)
    edition_folder = Edition(edition).name
    # Spaces must be percent-encoded in the URL but kept in the alt text.
    encoded = name.replace(" ", "%20")
    url = (
        "https://images.hive.blog/100x0/"
        "https://d36mxiodymuqjm.cloudfront.net/cards_by_level/"
        f"{edition_folder}/{encoded}_lv{level}.png"
    )
    return f"![{name}]({url})"

16 

17 

def get_art_url_markdown(card_name):
    """Return a markdown image tag pointing at the card's art asset."""
    name = str(card_name)
    # Percent-encode spaces for the URL; the alt text keeps them.
    encoded = name.replace(" ", "%20")
    return f"![{name}](https://d36mxiodymuqjm.cloudfront.net/card_art/{encoded}.png)"

24 

25 

def get_art_url(card_name):
    """Return the direct URL of the card's art asset."""
    encoded = str(card_name).replace(" ", "%20")
    return f"https://d36mxiodymuqjm.cloudfront.net/card_art/{encoded}.png"

31 

32 

def get_image_url(card_name, level, edition):
    """Return the direct URL of the card image for the given level and edition."""
    edition_folder = Edition(edition).name
    encoded = str(card_name).replace(" ", "%20")
    return (
        "https://d36mxiodymuqjm.cloudfront.net/cards_by_level/"
        f"{edition_folder}/{encoded}_lv{level}.png"
    )

39 

40 

def get_losing_df(filter_account=None, filter_match_type=None, filter_type=None):
    """Aggregate heavy-loss battles per card and attach image URLs.

    Reads store.losing_big, applies the optional filters, counts losses per
    (card, level, edition) combination and returns the result sorted by
    loss count, descending. An empty (filtered) frame is returned as-is.
    """
    losses = filter_battles(store.losing_big, filter_account, filter_match_type, filter_type)
    if losses.empty:
        return losses

    # One row per card/level/edition; count any non-null column ('xp') as losses.
    grouped = losses.groupby(
        ['card_detail_id', 'card_name', 'level', 'edition'], as_index=False
    ).agg(number_of_losses=pd.NamedAgg(column='xp', aggfunc='count'))

    # Image links for display in the UI.
    grouped['url_markdown'] = grouped.apply(
        lambda r: get_image_url_markdown(r['card_name'], r['level'], r['edition']), axis=1)
    grouped['url'] = grouped.apply(
        lambda r: get_image_url(r['card_name'], r['level'], r['edition']), axis=1)

    grouped.sort_values('number_of_losses', ascending=False, inplace=True)
    return grouped

56 

57 

def get_losing_battles_count(filter_account=None, filter_match_type=None, filter_type=None):
    """Count distinct heavy-loss battles matching the filters.

    Returns the string 'NA' when no matching battles are recorded.
    """
    battles = filter_battles(store.losing_big, filter_account, filter_match_type, filter_type)
    if battles.empty:
        return 'NA'
    return battles.battle_id.unique().size

63 

64 

def filter_battles(df, filter_account=None, filter_match_type=None, filter_type=None):
    """Return a copy of df restricted by account, match type and card type.

    Passing None or the sentinel 'ALL' for any filter disables that filter.
    """
    result = df.copy()

    # Normalize the UI sentinel 'ALL' to "no filter".
    account = None if filter_account == 'ALL' else filter_account
    card_type = None if filter_type == 'ALL' else filter_type
    match_type = None if filter_match_type == 'ALL' else filter_match_type

    if result.empty:
        logging.info('No battles found at all')
        return result

    if account:
        result = result.loc[result.account == account]
    if match_type:
        result = result.loc[result.match_type == match_type]
    if card_type:
        result = result.loc[result.card_type == card_type]
    return result

87 

88 

def get_top_3_losing_account(account, filter_match_type):
    """Return the three opponents with the most heavy-loss battles for an account."""
    if store.losing_big.empty:
        return store.losing_big

    battles = filter_battles(store.losing_big,
                             filter_account=account,
                             filter_match_type=filter_match_type)
    # Count each battle once per opponent, then rank opponents by battle count.
    per_opponent = (battles[['battle_id', 'opponent']]
                    .drop_duplicates(subset=['battle_id', 'opponent'])
                    .groupby(['opponent'], as_index=False)
                    .count())
    per_opponent.sort_values('battle_id', ascending=False, inplace=True)
    return per_opponent.head(3)

99 

100 

def process_battles_win_percentage(df, group_levels=False):
    """Compute per-card win/loss statistics from raw battle rows.

    Groups df per card (and per level unless group_levels is True), counts
    wins and losses, derives ratio/percentage columns and card image URLs,
    and returns the result sorted by battle count then win percentage,
    descending.

    :param df: battle rows; read columns include the card identity columns,
        'account', 'result' ('win'/'loss') and 'level'.
    :param group_levels: when True, collapse all levels of a card into one
        row; the reported 'level' becomes the highest level seen for the card.
    :return: aggregated DataFrame; an empty input is returned unchanged.
    """
    if df.empty:
        return df

    # Card identity columns; 'result' is included so wins and losses are
    # aggregated as separate groups and can be split apart afterwards.
    group_by_columns = ['card_detail_id',
                        'card_name',
                        'card_type',
                        'rarity',
                        'edition',
                        'color',
                        'secondary_color',
                        'result']
    # Columns used to re-join the win rows with the loss rows (no 'result').
    merge_columns = ['card_detail_id',
                     'card_name',
                     'card_type',
                     'rarity',
                     'edition',
                     'color',
                     'secondary_color']

    if not group_levels:
        group_by_columns.append('level')
        merge_columns.append('level')

    total_df = pd.DataFrame()
    if not df.empty:
        # Count rows per card+result; 'account' is only used as a
        # non-null column to count on.
        grouped = df.groupby(group_by_columns, as_index=False, dropna=False)
        new_df = grouped.agg(count=pd.NamedAgg(column='account', aggfunc='count'))
        # Split into win and loss counts, then outer-merge so cards with
        # only wins or only losses are still kept.
        win = new_df.loc[(new_df.result == 'win')].rename(columns={"count": "win", }).drop(['result'], axis=1)
        loss = new_df.loc[(new_df.result == 'loss')].rename(columns={"count": "loss", }).drop(['result'], axis=1)
        total_df = win.merge(loss, on=merge_columns, how='outer')
        # A missing side of the merge means zero wins or zero losses.
        total_df = total_df.fillna(0)

    if group_levels:
        # Report the highest level observed for each card across all rows.
        total_df['level'] = total_df.apply(lambda row: df.loc[df.card_detail_id == row.card_detail_id].level.max(),
                                           axis=1)

    # Derived statistics. NOTE(review): win_to_loss_ratio becomes inf for
    # cards with zero losses (pandas division by zero) — confirm intended.
    total_df['win_to_loss_ratio'] = total_df.win / total_df.loss
    total_df['battles'] = total_df.win + total_df.loss
    total_df['win_ratio'] = total_df.win / total_df.battles
    total_df['win_percentage'] = total_df.win_ratio * 100
    total_df = total_df.round(2)

    # Card image links for display in the UI.
    total_df['url_markdown'] = total_df.apply(lambda row: get_image_url_markdown(row['card_name'],
                                                                                 row['level'],
                                                                                 row['edition']), axis=1)
    total_df['url'] = total_df.apply(lambda row: get_image_url(row['card_name'],
                                                               row['level'],
                                                               row['edition']), axis=1)

    total_df.sort_values(['battles', 'win_percentage'], ascending=False, inplace=True)

    return total_df

154 

155 

def filter_element(input_df, filter_settings):
    """Keep rows whose primary or secondary color matches an enabled element.

    With no elements enabled — or all of them — the input is returned
    unfiltered.
    """
    if input_df.empty:
        return input_df

    enabled_colors = [element.value for element in Element
                      if filter_settings.get(element.name)]

    # Empty or full selection means "show everything".
    if not enabled_colors or len(enabled_colors) == len(Element):
        return input_df
    mask = input_df.color.isin(enabled_colors) | input_df.secondary_color.isin(enabled_colors)
    return input_df.loc[mask]

170 

171 

def filter_edition(input_df, filter_settings):
    """Keep rows from the editions enabled in filter_settings.

    With no editions enabled — or all of them — the input is returned
    unfiltered.
    """
    if input_df.empty:
        return input_df

    selected_editions = [edition.value for edition in Edition
                         if filter_settings.get(edition.name)]

    # Empty or full selection means "show everything".
    if not selected_editions or len(selected_editions) == len(Edition):
        return input_df
    return input_df.loc[input_df.edition.isin(selected_editions)]

188 

189 

def filter_match_type(input_df, filter_settings):
    """Keep rows from the match types enabled in filter_settings.

    With no match types enabled — or all of them — the input is returned
    unfiltered.
    """
    if input_df.empty:
        return input_df

    selected_types = [match_type.value for match_type in MatchType
                      if filter_settings.get(match_type.name)]

    # Empty or full selection means "show everything".
    if not selected_types or len(selected_types) == len(MatchType):
        return input_df
    return input_df.loc[input_df.match_type.isin(selected_types)]

206 

207 

def filter_card_type(input_df, filter_settings):
    """Keep rows from the card types enabled in filter_settings.

    With no card types enabled — or all of them — the input is returned
    unfiltered.
    """
    if input_df.empty:
        return input_df

    selected_types = [card_type.value for card_type in CardType
                      if filter_settings.get(card_type.name)]

    # Empty or full selection means "show everything".
    if not selected_types or len(selected_types) == len(CardType):
        return input_df
    return input_df.loc[input_df.card_type.isin(selected_types)]

222 

223 

def filter_rarity(input_df, filter_settings):
    """Keep rows from the rarities enabled in filter_settings.

    With no rarities enabled — or all of them — the input is returned
    unfiltered.
    """
    if input_df.empty:
        return input_df

    selected_rarities = [rarity.value for rarity in Rarity
                         if filter_settings.get(rarity.name)]

    # Empty or full selection means "show everything".
    if not selected_rarities or len(selected_rarities) == len(Rarity):
        return input_df
    return input_df.loc[input_df.rarity.isin(selected_rarities)]

238 

239 

def filter_battle_count(input_df, filter_settings):
    """Drop cards with fewer battles than the 'minimal-battles' threshold, when set."""
    if input_df.empty or 'minimal-battles' not in filter_settings:
        return input_df
    minimum = filter_settings['minimal-battles']
    return input_df.loc[input_df.battles >= minimum]

245 

246 

def filter_mana_cap(input_df, filter_settings):
    """Keep rows whose mana_cap falls in any enabled ManaCap bucket.

    Each ManaCap value is a 'min-max' string. When no bucket is enabled the
    input is returned unchanged.
    """
    if input_df.empty:
        return input_df

    selected = pd.DataFrame()
    any_bucket_active = False
    for bucket in ManaCap:
        if not filter_settings.get(bucket.name):
            continue
        any_bucket_active = True
        low, high = (int(part) for part in bucket.value.split('-'))
        in_range = input_df.loc[(input_df.mana_cap >= low) & (input_df.mana_cap <= high)]
        selected = pd.concat([selected, in_range])

    return selected if any_bucket_active else input_df

268 

269 

def filter_format(input_df, filter_settings):
    """Keep rows matching any battle format enabled in filter_settings.

    Formats are keyed by enum value. When no format is enabled the input is
    returned unchanged.
    """
    if input_df.empty:
        return input_df

    selected = pd.DataFrame()
    any_format_active = False
    for battle_format in Format:
        if not filter_settings.get(battle_format.value):
            continue
        any_format_active = True
        matching = input_df.loc[(input_df.format == battle_format.value)]
        selected = pd.concat([selected, matching])

    return selected if any_format_active else input_df

288 

289 

def filter_date(input_df, filter_settings):
    """Keep rows created strictly after filter_settings['from_date'].

    When a 'from_date' filter is active, the returned frame's 'created_date'
    column is converted to datetime (as before). Fix: work on a copy instead
    of assigning the converted column back onto the caller's DataFrame — the
    original implementation mutated the shared input frame as a side effect.

    :param input_df: battle rows with a 'created_date' column.
    :param filter_settings: dict, optionally containing 'from_date'.
    :return: filtered DataFrame (input unchanged when no filter applies).
    """
    if input_df.empty:
        return input_df
    if 'from_date' in filter_settings:
        from_date = filter_settings['from_date']
        input_df = input_df.copy()  # never mutate the caller's frame
        input_df['created_date'] = pd.to_datetime(input_df['created_date'])
        input_df = input_df.loc[input_df.created_date > pd.to_datetime(from_date)]
    return input_df

298 

299 

def filter_rule_sets(input_df, filter_settings):
    """Keep rows where any of the three ruleset columns matches a selected rule set.

    An absent or empty 'rule_sets' selection leaves the input unfiltered.
    """
    if input_df.empty or 'rule_sets' not in filter_settings:
        return input_df
    rule_sets = filter_settings['rule_sets']
    if not rule_sets:
        return input_df
    mask = (input_df.ruleset1.isin(rule_sets)
            | input_df.ruleset2.isin(rule_sets)
            | input_df.ruleset3.isin(rule_sets))
    return input_df.loc[mask]

308 

309 

def sort_by(input_df, filter_settings):
    """Sort descending by the columns listed in filter_settings['sort_by'].

    The UI key 'percentage' maps to the 'win_percentage' column; all other
    keys are used as column names directly.
    """
    if input_df.empty or 'sort_by' not in filter_settings:
        return input_df
    columns = ['win_percentage' if key == 'percentage' else key
               for key in filter_settings['sort_by']]
    return input_df.sort_values(columns, ascending=False)

320 

321 

def get_daily_battle_stats(daily_df):
    """Build per-day win/loss/battle counts for ranked battles.

    :param daily_df: battle rows; read columns include 'match_type',
        'battle_id', 'result', 'created_date' and 'format'.
    :return: DataFrame with one row per (created_date, format) and columns
        win / loss / battles; empty DataFrame when the input is empty.
    """
    result_df = pd.DataFrame()
    if not daily_df.empty:
        # Select Ranked battle only
        daily_df = daily_df.loc[(daily_df.match_type == MatchType.RANKED.value)]

        # Truncate timestamps to the calendar day
        daily_df.loc[:, 'created_date'] = pd.to_datetime(daily_df.loc[:, 'created_date']).dt.date

        # First group on battle_id: collapse the many card rows of one
        # battle into a single row (result/date/format are per-battle).
        daily_df = daily_df.groupby(['battle_id'], as_index=False).agg({'result': 'first',
                                                                        'created_date': 'first',
                                                                        'format': 'first'})
        # Second group on day: count wins and losses separately, then
        # outer-merge so days with only wins or only losses survive.
        win_df = daily_df.loc[daily_df.result == 'win'].groupby(
            ['created_date', 'result', 'format'], as_index=False).agg({'result': 'count'})
        loss_df = daily_df.loc[daily_df.result == 'loss'].groupby(
            ['created_date', 'result', 'format'], as_index=False).agg({'result': 'count'})
        result_df = pd.merge(left=win_df, right=loss_df, on=['created_date', 'format'], how='outer')
        result_df.fillna(0, inplace=True)  # missing merge side means zero battles that day
        result_df.rename(columns={"result_x": "win", "result_y": "loss"}, inplace=True)
        result_df['battles'] = result_df.win + result_df.loss
    return result_df

345 

346 

def get_battles_with_used_card(df, card_name):
    """Return every row of every battle in which the named card was played."""
    if df.empty:
        return pd.DataFrame()
    # Battles where the card appears, then all rows of those battles.
    battle_ids_with_card = df.loc[df.card_name == card_name].battle_id.tolist()
    return df.loc[df.battle_id.isin(battle_ids_with_card)]

354 

355 

def get_losing_battles(df, battle_ids):
    """Return the rows of df belonging to the given battle ids."""
    if df.empty:
        return pd.DataFrame()
    return df.loc[df.battle_id.isin(battle_ids)]

362 

363 

def has_ability(cards, name, level, abilities):
    """Check whether the named card carries any of the given abilities.

    For summoners the 'abilities' entry in stats is a flat collection; for
    other cards it is indexed per level, so only the first `level` entries
    are inspected. Returns False for unknown cards or cards without an
    'abilities' entry.
    """
    match = cards.loc[(cards.name == name)]
    if match.empty:
        return False

    # Per-card stats dictionary from the card-details frame.
    stats = match['stats'].iloc[0]
    if 'abilities' not in stats:
        return False

    if match.iloc[0].type == CardType.summoner.value:
        return any(ability in stats['abilities'] for ability in abilities)

    # Non-summoner: abilities are unlocked level by level.
    return any(ability in stats['abilities'][i]
               for i in range(level)
               for ability in abilities)

386 

387 

def filter_rows(row, cards, abilities):
    """Adapter for DataFrame.apply: does this row's card have any of the abilities?"""
    name = row['card_name']
    level = row['level']
    return has_ability(cards, name, level, abilities)

390 

391 

def filter_abilities(df, filter_settings):
    """Keep rows whose card has at least one of the selected abilities.

    An absent or empty 'abilities' selection leaves the input unfiltered.
    """
    cards = config.card_details_df
    if df.empty:
        return df
    wanted = filter_settings.get('abilities')
    if wanted:
        mask = df.apply(filter_rows, axis=1, args=(cards, wanted,))
        df = df[mask]
    return df

397 return df