Coverage for src / analyse.py: 0%
260 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-01 10:28 +0000
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-01 10:28 +0000
1import logging
3import pandas as pd
5from src.configuration import store, config
6from src.static.static_values_enum import Element, CardType, Rarity, ManaCap, MatchType, Format, \
7 edition_img_mapping, Edition
def get_image_url_markdown(card_name, level, edition):
    """Return a markdown image tag for a card rendered at a given level.

    :param card_name: card display name; spaces are URL-encoded in the link
        but kept verbatim in the markdown alt text.
    :param level: card level baked into the image filename (``_lv<level>``).
    :param edition: edition key looked up in ``edition_img_mapping`` to get
        the edition folder name (``None`` folder if the key is unknown).
    :return: ``![<name>](<url>)`` markdown string.
    """
    base_card_url = 'https://images.hive.blog/100x0/https://d36mxiodymuqjm.cloudfront.net/cards_by_level/'
    edition_name = edition_img_mapping.get(edition)
    encoded_name = str(card_name).replace(" ", "%20")
    card_url = f"{base_card_url}{edition_name}/{encoded_name}_lv{level}.png"
    return f"![{card_name}]({card_url})"
def get_art_url_markdown(card_name):
    """Return a markdown image tag pointing at a card's full art.

    :param card_name: card display name; spaces are URL-encoded in the link
        but kept verbatim in the markdown alt text.
    :return: ``![<name>](<art url>)`` markdown string.
    """
    base_card_url = 'https://d36mxiodymuqjm.cloudfront.net/card_art/'
    encoded_name = str(card_name).replace(" ", "%20")
    return f"![{card_name}]({base_card_url}{encoded_name}.png)"
def get_art_url(card_name):
    """Return the CDN URL of a card's full art (spaces URL-encoded).

    :param card_name: card display name.
    :return: ``https://.../card_art/<name>.png`` URL string.
    """
    base_card_url = 'https://d36mxiodymuqjm.cloudfront.net/card_art/'
    encoded_name = str(card_name).replace(" ", "%20")
    return f"{base_card_url}{encoded_name}.png"
def get_image_url(card_name, level, edition):
    """Return the CDN URL for a card image at a given level.

    :param card_name: card display name; spaces are URL-encoded.
    :param level: card level baked into the filename (``_lv<level>``).
    :param edition: edition key looked up in ``edition_img_mapping`` to get
        the edition folder name (``None`` folder if the key is unknown).
    :return: ``https://.../cards_by_level/<edition>/<name>_lv<level>.png``.
    """
    base_card_url = 'https://d36mxiodymuqjm.cloudfront.net/cards_by_level/'
    edition_name = edition_img_mapping.get(edition)
    encoded_name = str(card_name).replace(" ", "%20")
    return f"{base_card_url}{edition_name}/{encoded_name}_lv{level}.png"
def get_losing_df(filter_account=None, filter_match_type=None, filter_type=None):
    """Aggregate the stored big-loss battles into per-card loss counts.

    Applies the optional account / match-type / card-type filters, then counts
    losses per (card, level, edition) and attaches image URL columns.

    :return: DataFrame sorted by ``number_of_losses`` descending, or the
        (empty) filtered frame when nothing matched.
    """
    losses = filter_battles(store.losing_big, filter_account, filter_match_type, filter_type)
    if losses.empty:
        return losses
    losses = losses.groupby(
        ['card_detail_id', 'card_name', 'level', 'edition'],
        as_index=False,
    ).agg(number_of_losses=pd.NamedAgg(column='xp', aggfunc='count'))
    losses['url_markdown'] = losses.apply(
        lambda r: get_image_url_markdown(r['card_name'], r['level'], r['edition']),
        axis=1)
    losses['url'] = losses.apply(
        lambda r: get_image_url(r['card_name'], r['level'], r['edition']),
        axis=1)
    return losses.sort_values('number_of_losses', ascending=False)
def get_losing_battles_count(filter_account=None, filter_match_type=None, filter_type=None):
    """Count distinct big-loss battles matching the optional filters.

    :return: number of unique battle ids, or the string ``'NA'`` when the
        filtered frame is empty.
    """
    battles = filter_battles(store.losing_big, filter_account, filter_match_type, filter_type)
    if battles.empty:
        return 'NA'
    return battles.battle_id.unique().size
def filter_battles(df, filter_account=None, filter_match_type=None, filter_type=None):
    """Return a copy of *df* narrowed by account, match type and card type.

    The literal string ``'ALL'`` (the UI sentinel) and falsy values both mean
    "do not filter on this dimension".

    :param df: battle DataFrame with account / match_type / card_type columns.
    :return: filtered copy; an empty input is returned as-is (after logging).
    """
    result = df.copy()
    # Normalise the 'ALL' sentinel to "no filter".
    account = None if filter_account == 'ALL' else filter_account
    card_type = None if filter_type == 'ALL' else filter_type
    match_type = None if filter_match_type == 'ALL' else filter_match_type

    if result.empty:
        logging.info('No battles found at all')
        return result

    if account:
        result = result.loc[result.account == account]
    if match_type:
        result = result.loc[result.match_type == match_type]
    if card_type:
        result = result.loc[result.card_type == card_type]
    return result
def get_top_3_losing_account(account, filter_match_type):
    """Return the three opponents the account lost big against most often.

    Counts distinct battles per opponent in ``store.losing_big`` after
    applying the account and match-type filters.

    :return: DataFrame with opponent and battle_id (count) columns, top 3
        rows by count; the empty store frame when no data is loaded.
    """
    if store.losing_big.empty:
        return store.losing_big
    battles = filter_battles(store.losing_big,
                             filter_account=account,
                             filter_match_type=filter_match_type)
    opponents = (battles[['battle_id', 'opponent']]
                 .drop_duplicates(subset=['battle_id', 'opponent'])
                 .groupby(['opponent'], as_index=False)
                 .count()
                 .sort_values('battle_id', ascending=False))
    return opponents.head(3)
def process_battles_win_percentage(df: pd.DataFrame, group_levels: bool = False) -> pd.DataFrame:
    """Aggregate per-card battle rows into win/loss counts and percentages.

    Groups *df* by card identity (and by ``level`` too unless
    ``group_levels`` is True), splits the counts into win and loss columns,
    and derives ratio/percentage columns plus image URL columns.

    :param df: battle DataFrame with one row per card per battle; must have a
        ``result`` column with 'win'/'loss' values and an ``account`` column
        used only as the count target.
    :param group_levels: when True, levels are collapsed and the ``level``
        column is rebuilt as the highest level seen for each card.
    :return: aggregated DataFrame sorted by battles then win_percentage
        descending; the input unchanged when it is empty.
    """
    if df.empty:
        return df

    group_by_columns = ['card_detail_id',
                        'card_name',
                        'card_type',
                        'rarity',
                        'edition',
                        'color',
                        'secondary_color',
                        'result']
    merge_columns = ['card_detail_id',
                     'card_name',
                     'card_type',
                     'rarity',
                     'edition',
                     'color',
                     'secondary_color']

    # Per-level breakdown is the default; grouping levels drops 'level' from
    # both the group keys and the merge keys.
    if not group_levels:
        group_by_columns.append('level')
        merge_columns.append('level')

    total_df = pd.DataFrame()
    # NOTE(review): df cannot be empty here (guarded above); this second
    # check is redundant but harmless.
    if not df.empty:
        # dropna=False keeps rows whose group keys contain NaN.
        grouped = df.groupby(group_by_columns, as_index=False, dropna=False)
        new_df = grouped.agg(count=pd.NamedAgg(column='account', aggfunc='count'))
        # Split the single count column into separate win / loss frames, then
        # outer-merge so cards with only wins or only losses are kept.
        win = new_df.loc[(new_df.result == 'win')].rename(columns={"count": "win", }).drop(['result'], axis=1)
        loss = new_df.loc[(new_df.result == 'loss')].rename(columns={"count": "loss", }).drop(['result'], axis=1)
        total_df = win.merge(loss, on=merge_columns, how='outer')
        total_df = total_df.fillna(0)

        # When levels are collapsed, re-attach the max level seen per card
        # (O(n*m) lookup per row — acceptable for these table sizes).
        if group_levels:
            total_df['level'] = total_df.apply(lambda row: df.loc[df.card_detail_id == row.card_detail_id].level.max(),
                                               axis=1)

        # win_to_loss_ratio is inf for cards with zero losses (pandas
        # division by zero yields inf rather than raising).
        total_df['win_to_loss_ratio'] = total_df.win / total_df.loss
        total_df['battles'] = total_df.win + total_df.loss
        total_df['win_ratio'] = total_df.win / total_df.battles
        total_df['win_percentage'] = total_df.win_ratio * 100
        total_df = total_df.round(2)

        total_df['url_markdown'] = total_df.apply(lambda row: get_image_url_markdown(row['card_name'],
                                                                                     row['level'],
                                                                                     row['edition']), axis=1)
        total_df['url'] = total_df.apply(lambda row: get_image_url(row['card_name'],
                                                                   row['level'],
                                                                   row['edition']), axis=1)

        total_df.sort_values(['battles', 'win_percentage'], ascending=False, inplace=True)

    return total_df
def filter_element(input_df, filter_settings):
    """Keep rows whose color or secondary_color matches an enabled Element.

    :param filter_settings: dict keyed by ``Element`` names with truthy
        values for enabled elements.
    :return: filtered DataFrame; unchanged when the input is empty, when no
        element is enabled, or when every element is enabled.
    """
    if input_df.empty:
        return input_df
    active_colors = [element.value for element in Element
                     if filter_settings.get(element.name)]
    # No selection (or everything selected) means the filter is a no-op.
    if not active_colors or len(active_colors) == len(Element):
        return input_df
    mask = (input_df.color.isin(active_colors)
            | input_df.secondary_color.isin(active_colors))
    return input_df.loc[mask]
def filter_edition(input_df, filter_settings):
    """Keep rows whose edition is one of the enabled Edition toggles.

    :param filter_settings: dict keyed by ``Edition`` names with truthy
        values for enabled editions.
    :return: filtered DataFrame; unchanged when the input is empty, when no
        edition is enabled, or when every edition is enabled.
    """
    if input_df.empty:
        return input_df
    active_editions = [edition.value for edition in Edition
                       if filter_settings.get(edition.name)]
    # No selection (or everything selected) means the filter is a no-op.
    if not active_editions or len(active_editions) == len(Edition):
        return input_df
    return input_df.loc[input_df.edition.isin(active_editions)]
def filter_match_type(input_df, filter_settings):
    """Keep rows whose match_type is one of the enabled MatchType toggles.

    :param filter_settings: dict keyed by ``MatchType`` names with truthy
        values for enabled match types.
    :return: filtered DataFrame; unchanged when the input is empty, when no
        match type is enabled, or when every match type is enabled.
    """
    if input_df.empty:
        return input_df
    active_types = [match_type.value for match_type in MatchType
                    if filter_settings.get(match_type.name)]
    # No selection (or everything selected) means the filter is a no-op.
    if not active_types or len(active_types) == len(MatchType):
        return input_df
    return input_df.loc[input_df.match_type.isin(active_types)]
def filter_card_type(input_df, filter_settings):
    """Keep rows whose card_type is one of the enabled CardType toggles.

    :param filter_settings: dict keyed by ``CardType`` names with truthy
        values for enabled card types.
    :return: filtered DataFrame; unchanged when the input is empty, when no
        card type is enabled, or when every card type is enabled.
    """
    if input_df.empty:
        return input_df
    active_types = [card_type.value for card_type in CardType
                    if filter_settings.get(card_type.name)]
    # No selection (or everything selected) means the filter is a no-op.
    if not active_types or len(active_types) == len(CardType):
        return input_df
    return input_df.loc[input_df.card_type.isin(active_types)]
def filter_rarity(input_df, filter_settings):
    """Keep rows whose rarity is one of the enabled Rarity toggles.

    :param filter_settings: dict keyed by ``Rarity`` names with truthy
        values for enabled rarities.
    :return: filtered DataFrame; unchanged when the input is empty, when no
        rarity is enabled, or when every rarity is enabled.
    """
    if input_df.empty:
        return input_df
    active_rarities = [rarity.value for rarity in Rarity
                       if filter_settings.get(rarity.name)]
    # No selection (or everything selected) means the filter is a no-op.
    if not active_rarities or len(active_rarities) == len(Rarity):
        return input_df
    return input_df.loc[input_df.rarity.isin(active_rarities)]
def filter_battle_count(input_df, filter_settings):
    """Drop rows with fewer battles than filter_settings['minimal-battles'].

    :return: filtered DataFrame; unchanged when the input is empty or no
        minimum is configured.
    """
    if input_df.empty or 'minimal-battles' not in filter_settings:
        return input_df
    minimum = filter_settings['minimal-battles']
    return input_df.loc[input_df.battles >= minimum]
def filter_mana_cap(input_df, filter_settings):
    """Keep rows whose mana_cap falls inside any enabled ManaCap bracket.

    Each ``ManaCap`` value is a ``'<low>-<high>'`` string; rows matching any
    enabled bracket are concatenated (inclusive bounds on both ends).

    :return: concatenation of matching rows; the input unchanged when it is
        empty or no bracket is enabled.
    """
    if input_df.empty:
        return input_df

    selected = pd.DataFrame()
    any_bracket_active = False
    for bracket in ManaCap:
        if not filter_settings.get(bracket.name):
            continue
        any_bracket_active = True
        bounds = bracket.value.split('-')
        low, high = int(bounds[0]), int(bounds[1])
        in_range = input_df.loc[(input_df.mana_cap >= low) & (input_df.mana_cap <= high)]
        selected = pd.concat([selected, in_range])

    return selected if any_bracket_active else input_df
def filter_format(input_df, filter_settings):
    """Keep rows whose format matches any enabled Format toggle.

    NOTE: unlike the other filters, the settings dict is keyed by the enum
    *value*, not the enum name.

    :return: concatenation of matching rows; the input unchanged when it is
        empty or no format is enabled.
    """
    if input_df.empty:
        return input_df

    matched = pd.DataFrame()
    any_format_active = False
    for battle_format in Format:
        if filter_settings.get(battle_format.value):
            any_format_active = True
            rows = input_df.loc[(input_df.format == battle_format.value)]
            matched = pd.concat([matched, rows])

    return matched if any_format_active else input_df
def filter_date(input_df, filter_settings):
    """Keep rows created strictly after filter_settings['from_date'].

    The returned frame has its ``created_date`` column converted to datetime
    (as before), but the fix here is that the caller's DataFrame is no longer
    mutated — the original assigned the converted column back onto the
    argument in place.

    :param input_df: DataFrame with a ``created_date`` column parseable by
        ``pd.to_datetime``.
    :param filter_settings: dict that may contain a ``'from_date'`` entry
        (anything ``pd.to_datetime`` accepts).
    :return: filtered DataFrame, or the input unchanged when it is empty or
        no ``from_date`` is configured.
    """
    if input_df.empty or 'from_date' not in filter_settings:
        return input_df
    # Work on a copy so the caller's DataFrame keeps its original dtypes.
    result = input_df.copy()
    result.created_date = pd.to_datetime(result.created_date)
    from_date = pd.to_datetime(filter_settings['from_date'])
    # Strictly greater: rows created exactly at from_date are excluded.
    return result.loc[result.created_date > from_date]
def filter_rule_sets(input_df, filter_settings):
    """Keep rows where any of ruleset1/2/3 is in the configured rule sets.

    :param filter_settings: dict that may contain a ``'rule_sets'`` list; an
        empty list (or a missing key) disables the filter.
    :return: filtered DataFrame, or the input unchanged.
    """
    if input_df.empty or 'rule_sets' not in filter_settings:
        return input_df
    rule_sets = filter_settings['rule_sets']
    if not rule_sets:
        return input_df
    mask = (input_df.ruleset1.isin(rule_sets)
            | input_df.ruleset2.isin(rule_sets)
            | input_df.ruleset3.isin(rule_sets))
    return input_df.loc[mask]
def sort_by(input_df, filter_settings):
    """Sort descending by the columns named in filter_settings['sort_by'].

    The UI sends ``'percentage'`` as shorthand for the ``win_percentage``
    column; every other entry is used as a column name verbatim.

    :return: sorted DataFrame, or the input unchanged when it is empty or no
        sort order is configured.
    """
    if input_df.empty or 'sort_by' not in filter_settings:
        return input_df
    columns = ['win_percentage' if name == 'percentage' else name
               for name in filter_settings['sort_by']]
    return input_df.sort_values(columns, ascending=False)
def get_daily_battle_stats(daily_df: pd.DataFrame) -> pd.DataFrame:
    """Summarise ranked battles into per-day, per-format win/loss counts.

    Keeps only ranked matches, collapses timestamps to calendar dates,
    deduplicates to one row per battle, then counts wins and losses per
    (date, format) pair.

    :param daily_df: battle DataFrame with one row per card per battle
        (battle_id, match_type, result, created_date, format columns).
    :return: DataFrame with created_date, format, win, loss and battles
        columns; empty DataFrame when the input is empty.
    """
    result_df = pd.DataFrame()
    if not daily_df.empty:
        # Keep ranked battles only.
        daily_df = daily_df.loc[(daily_df.match_type == MatchType.RANKED.value)]

        # Truncate timestamps to calendar dates so battles group per day.
        daily_df.loc[:, 'created_date'] = pd.to_datetime(daily_df.loc[:, 'created_date']).dt.date

        # First collapse to one row per battle ('first' is safe because
        # result/date/format are identical within a battle_id).
        daily_df = daily_df.groupby(['battle_id'], as_index=False).agg({'result': 'first',
                                                                        'created_date': 'first',
                                                                        'format': 'first'})
        # Then count battles per day and format, separately for wins and
        # losses ('result' is both a group key and the count target here, so
        # the counted column keeps the name 'result').
        win_df = daily_df.loc[daily_df.result == 'win'].groupby(
            ['created_date', 'result', 'format'], as_index=False).agg({'result': 'count'})
        loss_df = daily_df.loc[daily_df.result == 'loss'].groupby(
            ['created_date', 'result', 'format'], as_index=False).agg({'result': 'count'})
        # Outer merge keeps days that have only wins or only losses; the
        # clashing 'result' columns become result_x / result_y.
        result_df = pd.merge(left=win_df, right=loss_df, on=['created_date', 'format'], how='outer')
        result_df.fillna(0, inplace=True)
        result_df.rename(columns={"result_x": "win", "result_y": "loss"}, inplace=True)
        result_df['battles'] = result_df.win + result_df.loss
    return result_df
def get_battles_with_used_card(df, card_name):
    """Return every row of battles in which the named card was played.

    Collects the battle ids where ``card_name`` appears, then returns all
    rows (every card) belonging to those battles.

    :return: matching rows, or an empty DataFrame when *df* is empty.
    """
    if df.empty:
        return pd.DataFrame()
    battle_ids = df.loc[(df.card_name == card_name)].battle_id.tolist()
    return df.loc[df.battle_id.isin(battle_ids)]
def get_losing_battles(df, battle_ids):
    """Return the rows of *df* whose battle_id is in *battle_ids*.

    :return: matching rows, or an empty DataFrame when *df* is empty.
    """
    if df.empty:
        return pd.DataFrame()
    return df.loc[df.battle_id.isin(battle_ids)]
def has_ability(cards, name, level, abilities):
    """Check whether the named card has any of the given abilities at *level*.

    Summoner abilities are stored as a flat list; for other card types
    ``stats['abilities']`` is indexed per level (entry *i* holds the
    abilities gained at level *i + 1*), and levels 1..*level* are scanned.

    :param cards: card-details DataFrame with ``name``, ``type`` and
        ``stats`` columns (``stats`` holds a dict per card).
    :return: True when any requested ability is present; False when the card
        is unknown or has no 'abilities' entry in its stats.
    """
    matched = cards.loc[(cards.name == name)]
    if matched.empty:
        return False

    stats = matched['stats'].iloc[0]
    if 'abilities' not in stats:
        return False

    if matched.iloc[0].type == CardType.summoner.value:
        # Summoners: one flat ability list, level-independent.
        return any(ability in stats['abilities'] for ability in abilities)

    # Monsters: scan the ability tiers unlocked up to the requested level.
    for tier_index in range(level):
        tier = stats['abilities'][tier_index]
        if any(ability in tier for ability in abilities):
            return True
    return False
def filter_rows(row, cards, abilities):
    """Row predicate: True when the row's card has any of *abilities* at its level."""
    card_name, card_level = row['card_name'], row['level']
    return has_ability(cards, card_name, card_level, abilities)
def filter_abilities(df, filter_settings):
    """Keep rows whose card has any ability listed in filter_settings['abilities'].

    Uses ``config.card_details_df`` as the ability lookup table; a missing or
    empty 'abilities' entry disables the filter.

    :return: filtered DataFrame, or the input unchanged.
    """
    cards = config.card_details_df
    if df.empty:
        return df
    wanted = filter_settings.get('abilities')
    if wanted:
        mask = df.apply(filter_rows, axis=1, args=(cards, wanted,))
        df = df[mask]
    return df