# code by Sett Sarverott A.A.B. @ 2026
# Inceptorium Apokryf for SJP
# under the terms of the GNU GPL 2.0 license
from huggingface_hub import login, upload_folder
import requests
import bs4
import markdownify
import zipfile
import urllib.parse  # explicit submodule import: bare `import urllib` does not expose `.parse`
import json

# Landing page that lists the downloadable inflection ("odmiany") archives.
sjpUrl = "https://sjp.pl/sl/odmiany/"

# Fetch the listing page; fail fast on HTTP errors instead of parsing an error body.
sourceserver = requests.get(sjpUrl, timeout=60)
sourceserver.raise_for_status()

# Keep a Markdown snapshot of the page alongside the dataset (UTF-8: content is Polish).
with open("./sjp.pl_sl_odmiany.md", "w", encoding="utf-8") as sjpPage:
    sjpPage.write(markdownify.markdownify(sourceserver.text))

# Explicit parser avoids bs4's GuessedAtParserWarning and platform-dependent parsing.
webpage = bs4.BeautifulSoup(sourceserver.text, "html.parser")

# Collect [anchor text, absolute URL] pairs for every link on the page.
links = list()
for link in webpage.find_all("a"):
    print(link)
    try:
        links.append([
            link.string,
            urllib.parse.urljoin(sjpUrl, str(link.get("href")))
        ])
    except Exception as error:
        # Best-effort scrape: log the malformed anchor and keep going.
        print("ERROR:", error)

# Persist the scraped link list; ensure_ascii=False keeps Polish text readable.
with open("./links.json", "w", encoding="utf-8") as sjpPage:
    sjpPage.write(json.dumps(links, ensure_ascii=False))
print(links)

# Download and extract every .zip archive linked from the page.
# NOTE: extraction now happens per download — previously it ran once after the
# loop, so with several archives only the last one would ever get unpacked
# (each download overwrites ./sjp.zip).
for link in links:
    print("ZIPTEST", link[1][-4:], link[1][-4:] == ".zip")
    if link[1][-4:] == ".zip":
        sjpDownload = requests.get(link[1], timeout=60)
        sjpDownload.raise_for_status()
        with open("./sjp.zip", 'wb') as fd:
            for chunk in sjpDownload.iter_content(chunk_size=128):
                fd.write(chunk)
        # Context manager closes the archive handle (the original leaked it).
        with zipfile.ZipFile("./sjp.zip") as archive:
            archive.extractall()

# Publish the working directory (markdown snapshot, links.json, extracted
# dictionary files) to the Hugging Face Hub as a dataset repo.
login()
upload_folder(
    folder_path=".",
    repo_id="Apokryf/SJP",
    repo_type="dataset"
)