add temp scripts
parent 4043970511, commit 1eced2ee66
@@ -0,0 +1,47 @@
import argparse
from collections import Counter
from datetime import datetime
import json

from progress.bar import IncrementalBar

from vcinema_utils import VCinemaUtils
from wiki_pages.FilmsByCountry import draw_map


def get_date_map(token_id, token_secret, year):
    # Fetch every VCinema viewing, then attach each film's IMDb country data.
    print("Getting viewings")
    viewings = VCinemaUtils.get_vcinema_viewings(token_id, token_secret)

    with IncrementalBar('Retrieving movie data', max=len(viewings), suffix='%(percent).1f%% - %(eta)ds remaining', check_tty=False) as bar:
        VCinemaUtils.add_imdb_data_to_viewings(viewings, ["countries"], bar)

    # Keep only viewings from before 31 December of the previous year, then group them by country.
    viewings_before_year = VCinemaUtils.get_viewings_before_date(viewings, datetime(day=31, month=12, year=year - 1))
    viewings_by_country = VCinemaUtils.filter_viewings(viewings_before_year, "countries")

    # Draw the world map from the per-country data and save it as a PNG.
    country_counter = Counter(viewings_by_country)
    png_data = draw_map(country_counter)

    with open("map-2022.png", "wb") as f:
        f.write(png_data)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Update wiki pages.')
    parser.add_argument('year', type=int)
    args = parser.parse_args()

    with open('../token.json') as json_file:
        token = json.load(json_file)

    get_date_map(token['token_id'], token['token_secret'], args.year)
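For reference, both temp scripts read their API credentials from ../token.json, which is not part of this commit. Going only by the token['token_id'] and token['token_secret'] lookups above, a minimal sketch of a compatible file (placeholder values, assumed layout) could be written with:

import json

# Placeholder credentials; only the two keys the scripts actually read are assumed here.
# The path matches the relative '../token.json' that the scripts open.
with open("../token.json", "w") as f:
    json.dump({"token_id": "<your token id>", "token_secret": "<your token secret>"}, f, indent=2)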
@@ -0,0 +1,53 @@
import argparse
from collections import OrderedDict
from datetime import datetime
import json

from progress.bar import IncrementalBar

from wiki_pages import KeywordScores
from vcinema_utils import VCinemaUtils


def get_new_years(token_id, token_secret, year):
    # Fetch every VCinema viewing, then attach each film's IMDb keyword data.
    print("Getting viewings")
    viewings = VCinemaUtils.get_vcinema_viewings(token_id, token_secret)

    with IncrementalBar('Retrieving movie data', max=len(viewings), suffix='%(percent).1f%% - %(eta)ds remaining', check_tty=False) as bar:
        VCinemaUtils.add_imdb_data_to_viewings(viewings, ["keywords"], bar)

    # Keep only viewings from before 31 December of the previous year.
    viewings_before_year = VCinemaUtils.get_viewings_before_date(viewings, datetime(day=31, month=12, year=year - 1))

    scores = KeywordScores.get_keyword_scores(viewings_before_year)

    # Keep keywords scoring at least 1.0, ordered from highest to lowest score.
    keyword_data = {k: v for k, v in scores.items() if 'score' in v and v['score'] >= 1.0}
    keyword_data = OrderedDict(sorted(keyword_data.items(), key=lambda t: t[1]['score'], reverse=True))

    # Build a Markdown table with one row per remaining keyword.
    table = "| Keyword | Number of VCinema Films | Total IMDb entries | Score |\n| - | - | - | - |"

    for keyword, data in keyword_data.items():
        row_data = [
            str(keyword),
            str(len(data['vcinema_films'])),
            str(data['total']),
            str(round(data['score'], 3)),
        ]
        table += "\n" + " | ".join(row_data)

    # return table
    print(table)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Update wiki pages.')
    parser.add_argument('year', type=int)
    args = parser.parse_args()

    with open('../token.json') as json_file:
        token = json.load(json_file)

    get_new_years(token['token_id'], token['token_secret'], args.year)
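As an aside, the filter / sort / tabulate step above assumes each entry of scores carries a 'vcinema_films' collection, a 'total' count and a 'score'. A small self-contained sketch with made-up data (purely illustrative, not part of the committed script) exercising the same logic:

from collections import OrderedDict

# Made-up entries in the shape the loop above expects.
scores = {
    "zombie": {"vcinema_films": ["tt0000001", "tt0000002"], "total": 800, "score": 2.5},
    "heist": {"vcinema_films": ["tt0000003"], "total": 4000, "score": 0.25},
}

# Keep keywords scoring at least 1.0, highest score first.
keyword_data = OrderedDict(
    sorted(
        ((k, v) for k, v in scores.items() if v.get("score", 0) >= 1.0),
        key=lambda item: item[1]["score"],
        reverse=True,
    )
)

# One Markdown row per keyword, matching the header used above.
table = "| Keyword | Number of VCinema Films | Total IMDb entries | Score |\n| - | - | - | - |"
for keyword, data in keyword_data.items():
    table += "\n" + " | ".join(
        [keyword, str(len(data["vcinema_films"])), str(data["total"]), str(round(data["score"], 3))]
    )

print(table)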