Diffstat (limited to 'scripts')
-rw-r--r--  scripts/datafile.py           |  30
-rw-r--r--  scripts/editions.py           |  36
-rw-r--r--  scripts/generate-json.py      |  31
-rw-r--r--  scripts/players.py            |  91
-rw-r--r--  scripts/pyranking/__init__.py |   0
-rw-r--r--  scripts/pyranking/db.py       |  13
-rw-r--r--  scripts/pyranking/fetch.py    |  42
-rw-r--r--  scripts/ranking.py            |  21
-rw-r--r--  scripts/static-menu.py        |  24
-rw-r--r--  scripts/static.py             |  34
10 files changed, 322 insertions, 0 deletions
diff --git a/scripts/datafile.py b/scripts/datafile.py
new file mode 100644
index 0000000..5cee451
--- /dev/null
+++ b/scripts/datafile.py
@@ -0,0 +1,30 @@
+import json, sys
+from decimal import Decimal
+
+from pyranking.fetch import fetch_ranking
+
+ranking_date = sys.argv[1]
+ranking = fetch_ranking(ranking_date)
+old_ranking = fetch_ranking(sys.argv[2], True) if len(sys.argv) > 2 else {}
+
+for row in ranking:
+    if row['pid'] in old_ranking:
+        row['place-change'] = old_ranking[row['pid']]['place'] - row['place']
+        row['place-change-class'] = 'success' if row['place-change'] > 0 else 'danger'
+        row['place-change'] = '%+d' % (row['place-change'])
+        if row['place-change'] == '+0':
+            row['place-change'] = '='
+            row['place-change-class'] = 'default'
+        for category in ['gender', 'age', 'region']:
+            if row[category] == old_ranking[row['pid']][category]:
+                row[category + '-change'] = old_ranking[row['pid']][category + '-place'] - row[category + '-place']
+                row[category + '-change-class'] = 'success' if row[category + '-change'] > 0 else 'danger'
+                row[category + '-change'] = '%+d' % (row[category + '-change'])
+                if row[category + '-change'] == '+0':
+                    row[category + '-change'] = '='
+                    row[category + '-change-class'] = 'default'
+    for field in row:
+        if isinstance(row[field], Decimal):
+            row[field] = float(row[field])
+
+print json.dumps(ranking)
diff --git a/scripts/editions.py b/scripts/editions.py
new file mode 100644
index 0000000..42d036f
--- /dev/null
+++ b/scripts/editions.py
@@ -0,0 +1,36 @@
+import copy, json, sys
+
+from bs4 import BeautifulSoup as bs4
+
+dates_config = json.load(file('config/dates.json'))
+output_file = bs4(file(sys.argv[1]), 'lxml')
+
+editions = {}
+for date_config in dates_config:
+    year = date_config['name'].split(' ')[1]
+    if year not in editions:
+        editions[year] = []
+    editions[year].append((
+        '%s (%s)' % (date_config['name'].split(' ')[0], date_config['index']),
+        date_config['url'],
+        date_config['date']
+    ))
+
+template = bs4(file('templates/ranking.html'), 'lxml')
+
+date_group = template.select('#editions')[0].extract()
+year_group = date_group.select('div[role="group"]')[0].extract()
+ranking_link = year_group.select('.btn-default')[0].extract()
+for year, dates in editions.iteritems():
+    group = copy.copy(year_group)
+    group.select('.year')[0].string = str(year)
+    for date in dates[::-1]:
+        link = copy.copy(ranking_link)
+        link.string = date[0]
+        link['href'] = date[1]
+        link['datetime'] = date[2]
+        group.append(link)
+    date_group.append(group)
+
+output_file.select('#editions')[0].replace_with(date_group)
+file(sys.argv[1], 'w').write(output_file.prettify().encode('utf-8'))
diff --git a/scripts/generate-json.py b/scripts/generate-json.py
new file mode 100644
index 0000000..54bb26d
--- /dev/null
+++ b/scripts/generate-json.py
@@ -0,0 +1,31 @@
+import json, sys
+from pyranking.db import cursor
+
+date = sys.argv[1]
+
+sql = '''SELECT
+    rankings.place,
+    players.id,
+    players.rank,
+    rankings.score
+FROM players
+LEFT JOIN rankings
+    ON players.id = rankings.pid
+    AND rankings.date = %(date)s
+'''
+cursor.execute(sql, {'date': date})
+
+result = {}
+for row in cursor.fetchall():
+    if row['place'] is not None:
+        result[row['id']] = {
+            'place': int(row['place']),
+            'rank': float(row['rank']),
+            'score': float(row['score'])
+        }
+    else:
+        result[row['id']] = {
+            'rank': float(row['rank'])
+        }
+
+print json.dumps(result)
diff --git a/scripts/players.py b/scripts/players.py
new file mode 100644
index 0000000..eddb2e9
--- /dev/null
+++ b/scripts/players.py
@@ -0,0 +1,91 @@
+import copy, json, os, sys
+from bs4 import BeautifulSoup as bs4
+from math import ceil
+from pyranking.fetch import fetch_ranking
+
+output_directory = sys.argv[1]
+
+dates = {}
+for date_config in json.load(file('config/dates.json')):
+    dates[date_config['date']] = date_config['url']
+
+players = {}
+
+for date in sorted(dates.keys()):
+    for player, ranking in fetch_ranking(date, True).iteritems():
+        if player not in players:
+            players[player] = {'rankings':{}}
+        players[player]['name'] = ranking['player']
+        players[player]['club'] = ranking['club']
+        players[player]['rankings'][date] = {'change': 'N','change-class':'primary'}
+        for field in ['place', 'score']:
+            players[player]['rankings'][date][field] = ranking[field]
+        for field in ['gender', 'age', 'region']:
+            players[player]['rankings'][date][field] = ranking[field]
+            players[player]['rankings'][date][field + '-place'] = ranking[field + '-place']
+            players[player]['rankings'][date][field + '-change'] = 'N'
+            players[player]['rankings'][date][field + '-change-class'] = 'primary'
+
+for pid, player in players.iteritems():
+
+    template = bs4(file('templates/player.html'), 'lxml')
+    template.select('h2.name')[0].insert(0, player['name'])
+    template.select('h3.club')[0].string = player['club']
+    template.select('a.pid-link')[0]['href'] = 'https://msc.com.pl/cezar/?p=21&pid=%d' % (pid)
+
+    missing_row = template.select('tr.missing')[0].extract()
+    normal_row = template.select('tr.normal')[0].extract()
+
+    for date in dates:
+        if date not in player['rankings']:
+            player['rankings'][date] = None
+    prev = None
+    for date, ranking in sorted(player['rankings'].iteritems(), lambda x,y: cmp(x[0], y[0])):
+        if prev is not None and ranking is not None:
+            ranking['change'] = prev['place'] - ranking['place']
+            for field in ['gender', 'age', 'region']:
+                if prev[field] == ranking[field]:
+                    ranking[field + '-change'] = prev[field + '-place'] - ranking[field + '-place']
+            for field in ['', 'gender-', 'age-', 'region-']:
+                if ranking[field+'change'] == 0:
+                    ranking[field+'change'] = '='
+                    ranking[field+'change-class'] = 'default'
+                elif ranking[field+'change'] == 'N':
+                    ranking[field+'change-class'] = 'primary'
+                else:
+                    ranking[field+'change-class'] = 'success' if ranking[field+'change'] > 0 else 'danger'
+                    ranking[field+'change'] = '%+d' % (ranking[field+'change'])
+        prev = ranking
+
+        row = copy.copy(missing_row) if ranking is None else copy.copy(normal_row)
+        rank_link = row.find('td').a
+        rank_link.string = '.'.join(date.split('-')[::-1])
+        base_rank_link = '../%s' % (dates[date])
+        if ranking is not None and ranking['place'] > 50:
+            rank_link['href'] = '../%s#page:%d' % (
+                dates[date], ceil(ranking['place'] / 50.0)
+            )
+        else:
+            rank_link['href'] = base_rank_link
+        if ranking is not None:
+            score_cell = row.select('.score span')[0]
+            score_cell.string = '%.2f' % (ranking['score'])
+            score_cell['title'] = str(ranking['score'])
+            for field in ['region', 'age', 'gender']:
+                link = row.select('td.'+field+' a')[0]
+                if ranking[field+'-place'] > 40:
+                    link['href'] = base_rank_link + '#%s:%s;page:%d' % (
+                        field, ranking[field], ceil(ranking[field+'-place'] / 40.0)
+                    )
+                else:
+                    link['href'] = base_rank_link + '#%s:%s' % (
+                        field, ranking[field]
+                    )
+                link.string = ranking[field] if len(ranking[field]) else '-'
+            for field in ['', 'region-', 'age-', 'gender-']:
+                row.select('td.'+field+'place')[0].string = '%d.' % (ranking[field+'place'])
+                change_label = row.select('td.'+field+'place-change span.label')[0]
+                change_label.string = ranking[field+'change']
+                change_label['class'] = change_label['class'] + ['label-'+ranking[field+'change-class']]
+        template.select('table.table tbody')[0].insert(0, row)
+    file(os.path.join(output_directory, '%d.html' % pid), 'w').write(template.prettify().encode('utf-8'))
diff --git a/scripts/pyranking/__init__.py b/scripts/pyranking/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/scripts/pyranking/__init__.py
diff --git a/scripts/pyranking/db.py b/scripts/pyranking/db.py
new file mode 100644
index 0000000..16595d3
--- /dev/null
+++ b/scripts/pyranking/db.py
@@ -0,0 +1,13 @@
+import json
+import mysql.connector
+
+settings = json.load(file('config/db.json'))
+
+connection = mysql.connector.connect(
+    user=settings['user'],
+    password=settings['pass'],
+    host=settings['host'],
+    port=settings['port'],
+    database=settings['db']
+)
+cursor = connection.cursor(dictionary=True, buffered=True)
diff --git a/scripts/pyranking/fetch.py b/scripts/pyranking/fetch.py
new file mode 100644
index 0000000..0d301d4
--- /dev/null
+++ b/scripts/pyranking/fetch.py
@@ -0,0 +1,42 @@
+from pyranking.db import cursor
+
+def fetch_ranking(date, assoc=False):
+    sql = '''SELECT
+    rankings.place,
+    rankings.pid,
+    CONCAT(players.name, " ", players.surname) player,
+    players.rank,
+    rankings.region, players.club,
+    REPLACE(rankings.flags, "K", "") age,
+    IF(rankings.flags LIKE 'K%', "K", "") gender,
+    rankings.score
+FROM rankings
+JOIN players
+    ON players.id = rankings.pid
+WHERE rankings.date = %(date)s
+ORDER BY rankings.place
+    '''
+    cursor.execute(sql, {'date': date})
+    ranks = {
+        'gender': {},
+        'age': {},
+        'region': {}
+    }
+    ranking = cursor.fetchall()
+    for row in ranking:
+        if row['gender'] == '':
+            row['gender'] = 'M'
+        for category in ['gender', 'age', 'region']:
+            if row[category] not in ranks[category]:
+                ranks[category][row[category]] = 0
+            ranks[category][row[category]] += 1
+            row[category + '-place'] = ranks[category][row[category]]
+        for category in ['place', 'gender', 'age', 'region']:
+            row[category + '-change'] = 'N'
+            row[category + '-change-class'] = 'info'
+    if assoc:
+        result = {}
+        for row in ranking:
+            result[row['pid']] = row
+        return result
+    return ranking
diff --git a/scripts/ranking.py b/scripts/ranking.py
new file mode 100644
index 0000000..4a16867
--- /dev/null
+++ b/scripts/ranking.py
@@ -0,0 +1,21 @@
+import os, sys
+from bs4 import BeautifulSoup as bs4
+
+ranking_date = sys.argv[3]
+subtitle = 'notowanie %s (%s), stan na %s' % (
+    sys.argv[1], sys.argv[2], '.'.join(ranking_date.split('-')[::-1])
+)
+
+table = bs4(file('templates/ranking.html'), 'lxml')
+table.select('.page-header h2 small')[0].string = subtitle
+
+table.select('table.data-table')[0]['data-ranking'] = 'data/%s.json' % (
+    ranking_date)
+
+script_src = table.select('script[src="res/ranking.js"]')[0]
+script_src['src'] = '%s?%d' % ('res/ranking.js', os.path.getmtime('http/res/ranking.js'))
+
+rawlink = table.select('a#rawlink')[0]
+rawlink['href'] = '%s/%s.csv' % (rawlink['href'], ranking_date)
+
+print table.prettify().encode('utf-8')
diff --git a/scripts/static-menu.py b/scripts/static-menu.py
new file mode 100644
index 0000000..9474d12
--- /dev/null
+++ b/scripts/static-menu.py
@@ -0,0 +1,24 @@
+import copy, json, os, sys
+
+from bs4 import BeautifulSoup as bs4
+
+config_file = sys.argv[1]
+content_file = sys.argv[2]
+link_prefix = sys.argv[3] if len(sys.argv) > 3 else ''
+
+content = bs4(file(content_file), 'lxml')
+
+for menu_container in content.select('.static-menu'):
+    menu_container.clear()
+    for menu_item in json.load(file(config_file)):
+        href = os.path.relpath(
+            os.path.join(link_prefix, menu_item['url']),
+            os.path.dirname(content_file)
+        )
+        link = bs4('<a class="btn btn-default"></a>', 'html.parser')
+        link.a['href'] = href
+        link.a['title'] = menu_item['header']
+        link.a.string = menu_item['label']
+        menu_container.append(link.a)
+
+file(content_file, 'w').write(content.prettify().encode('utf-8'))
diff --git a/scripts/static.py b/scripts/static.py
new file mode 100644
index 0000000..16d82ae
--- /dev/null
+++ b/scripts/static.py
@@ -0,0 +1,34 @@
+import copy, os, sys
+from bs4 import BeautifulSoup as bs4
+
+content_files = []
+page_header = ''
+
+arguments = sys.argv[1:]
+
+while True:
+    page_header = arguments[0]
+    arguments = arguments[1:]
+    if os.path.exists(page_header):
+        content_files.append(page_header)
+        page_header = ''
+    if len(arguments) == 0:
+        break
+
+template = bs4(file('templates/static.html'), 'lxml')
+
+content_wrapper = template.find('div', {'id': 'wrapper'}).extract()
+del content_wrapper['id']
+
+template.select('h2 small')[0].string = page_header
+
+footer = template.find('div', {'id': 'footer'})
+
+for content_file in content_files:
+    content = copy.copy(content_wrapper)
+    content.div.append(
+        bs4(file(content_file).read(), 'html.parser')
+    )
+    footer.insert_before(content)
+
+print template.prettify().encode('utf-8')
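
A minimal sketch of how the new pyranking package added above might be exercised, assuming config/db.json points at a populated rankings database; the driver script, the example date and the top10 variable are illustrative only and are not part of this commit:

    # hypothetical driver script, not included in this commit (Python 2, like the scripts above)
    import json
    from pyranking.fetch import fetch_ranking

    # fetch_ranking() returns rows ordered by place, each carrying
    # 'place', 'pid', 'player', 'score' and per-category '*-place' keys
    rows = fetch_ranking('2015-03-01')  # example date; substitute a real ranking date
    top10 = [
        {'place': int(r['place']), 'player': r['player'], 'score': float(r['score'])}
        for r in rows[:10]
    ]
    print json.dumps(top10)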