lichess_elite_uci / prepare.py
austindavis's picture
Upload prepare.py
5861145 verified
raw
history blame contribute delete
No virus
4.95 kB
import os
import re
import io
import requests
from zipfile import ZipFile
from tqdm import tqdm
import chess.pgn as pgn
import pandas as pd
from datasets import Dataset, DatasetInfo
tqdm.pandas()
# Monthly archives of the Lichess Elite database for 2023 (games played by
# 2500+ rated players against 2300+ rated opponents), hosted by nikonoel.
ZIP_URLS = [
    "https://database.nikonoel.fr/lichess_elite_2023-01.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-02.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-03.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-04.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-05.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-06.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-07.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-08.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-09.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-10.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-11.zip",
    "https://database.nikonoel.fr/lichess_elite_2023-12.zip",
]
def download_and_unzip(url, save_directory, force_download=False):
    """Download a ZIP archive from ``url`` into ``save_directory`` and extract it.

    The download is skipped when the archive already exists on disk, unless
    ``force_download`` is True. Extraction always runs, so previously
    downloaded archives still get unpacked.

    Parameters
    ----------
    url : str
        Direct link to a ``.zip`` archive.
    save_directory : str
        Directory that receives both the archive and its extracted contents.
    force_download : bool, optional
        Re-download even if the archive is already present (default False).
    """
    # The archive name is the last path component of the URL.
    filename = url.split("/")[-1]
    file_path = os.path.join(save_directory, filename)

    if force_download or not os.path.exists(file_path):
        print(f"Downloading {filename}...")
        # Stream to disk in chunks so multi-hundred-MB archives are not
        # buffered entirely in memory, and fail loudly on HTTP errors.
        with requests.get(url, stream=True, timeout=60) as response:
            response.raise_for_status()
            with open(file_path, "wb") as file:
                for chunk in response.iter_content(chunk_size=1 << 20):
                    file.write(chunk)
        print(f"Downloaded {filename}.")
    else:
        print(f"{filename} already exists. Skipping download.")

    # Unzip the archive (runs whether or not the download was skipped).
    with ZipFile(file_path, "r") as zip_ref:
        print(f"Unzipping {filename}...")
        zip_ref.extractall(save_directory)
        print(f"Unzipped {filename}.")
def parse_pgn_dataset_to_dataframe(pgn_file_path):
    """Parse a multi-game PGN file into a pandas DataFrame.

    Each game becomes one row: every PGN header tag (Event, White, Elo, ...)
    becomes a column, and the movetext is joined into a single
    space-separated string stored in a ``transcript`` column.

    Parameters
    ----------
    pgn_file_path : str
        Path to a PGN file containing one or more games.

    Returns
    -------
    pandas.DataFrame
        One row per game; columns are the union of all header tags seen,
        plus ``transcript``.
    """
    # Matches PGN header tag pairs such as: [Event "Rated Rapid game"]
    header_pattern = re.compile(r"\[([A-Za-z0-9]+) \"(.+?)\"\]")
    games_list = []
    current_game = {}
    transcript = []
    # Open explicitly as UTF-8 so parsing does not depend on the platform's
    # default encoding; undecodable bytes are dropped rather than raising.
    with open(pgn_file_path, "r", encoding="utf-8", errors="ignore") as file:
        for line in file:
            # Strip any remaining non-ASCII characters from the line.
            line = line.encode("utf-8").decode("ascii", "ignore")
            header_match = header_pattern.match(line)
            if header_match:
                # An "Event" tag marks the start of a new game: flush the
                # previous game (if any) before collecting new headers.
                if header_match.group(1) == "Event" and current_game:
                    current_game["transcript"] = " ".join(transcript).strip()
                    games_list.append(current_game)
                    current_game = {}
                    transcript = []
                current_game[header_match.group(1)] = header_match.group(2)
            else:
                # Movetext line: keep it unless it is blank or a bare
                # game-result line (1-0, 1/2-1/2, 0-1).
                clean_line = line.strip()
                if clean_line and not clean_line.startswith(("1-0", "1/2-1/2", "0-1")):
                    transcript.append(clean_line)
    # Flush the final game (no trailing "Event" tag follows it).
    if current_game:
        current_game["transcript"] = " ".join(transcript).strip()
        games_list.append(current_game)
    return pd.DataFrame(games_list)
def pgn_to_uci_transcript(pgn_transcript):
    """Convert a SAN movetext string into a space-separated UCI move string.

    Returns None when the transcript cannot be parsed as a game.
    """
    parsed = pgn.read_game(io.StringIO(pgn_transcript))
    if parsed is None:
        return None
    # Replay the mainline on a board, recording each move in UCI notation.
    position = parsed.board()
    uci_moves = []
    for move in parsed.mainline_moves():
        uci_moves.append(position.uci(move))
        position.push(move)
    return " ".join(uci_moves)
if __name__ == "__main__":
    save_directory = "."
    if not os.path.exists(save_directory):
        os.makedirs(save_directory)

    # Fetch and extract every monthly archive.
    for url in ZIP_URLS:
        download_and_unzip(url, save_directory)

    # Parse each extracted PGN file, keeping only rapid games.
    pgn_files = [file for file in os.listdir(
        save_directory) if file.endswith(".pgn")]
    file_dfs = []
    for pgn_file in pgn_files:
        print(f"Parsing PGN from: {pgn_file}")
        # Join with the directory so this also works when save_directory
        # is not the current working directory.
        df = parse_pgn_dataset_to_dataframe(
            os.path.join(save_directory, pgn_file))
        df = df[df["EventType"] == "rapid"]
        file_dfs.append(df)
    # ignore_index=True: concatenating the per-file frames would otherwise
    # produce a duplicated index, which to_feather refuses to serialize.
    df = pd.concat(file_dfs, ignore_index=True)

    # Cast columns made entirely of numeric strings (e.g. Elo ratings) to
    # int. Columns with missing values are skipped: str.isnumeric().all()
    # ignores NaN, but astype(int) cannot represent it and would raise.
    for column in df.columns:
        col = df[column]
        if col.notna().all() and col.str.isnumeric().all():
            df[column] = col.astype(int)

    # Convert SAN transcripts to UCI notation (slow; shows a progress bar).
    df["transcript"] = df["transcript"].progress_apply(pgn_to_uci_transcript)

    # Drop unresolved games and restore a default RangeIndex — feather does
    # not support serializing the non-default index a boolean mask leaves.
    df = df[df["Result"] != "*"].reset_index(drop=True)
    df.to_feather("elite_dataset.feather")

    ds_info = DatasetInfo(
        description="The Lichess Elite Dataset includes all (rapid) games from Lichess by players rated 2500+ against players rated 2300+ played during the year 2023. Only games with an outcome of 1/2-1/2, 1-0, or 0-1 are included."
    )
    ds = Dataset.from_pandas(df, info=ds_info)
    ds.push_to_hub("austindavis/chess_world_lichess_elite")