# Packaging metadata (PEP 621) for the nedo-turkish-tokenizer distribution.
# NOTE(review): this file had been mangled into a pipe-delimited table
# ("| … |" on every line), which no TOML parser accepts; restored here.

[build-system]
requires = ["setuptools>=61", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "nedo-turkish-tokenizer"
version = "1.0.0"
description = "Turkish morphological tokenizer — TR-MMLU world record %92"
readme = "README.md"
license = { text = "MIT" }
authors = [{ name = "Ethosoft", email = "info@ethosoft.ai" }]
requires-python = ">=3.10"
keywords = ["turkish", "nlp", "tokenizer", "morphology", "huggingface"]
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
    "Natural Language :: Turkish",
]
# Runtime requirements, sorted alphabetically per PyPA convention.
dependencies = [
    "jpype1>=1.4.0",
    "requests>=2.28.0",
    "turkish-tokenizer>=0.1.0",
]

[project.optional-dependencies]
dev = ["huggingface_hub", "pytest"]

[project.urls]
Homepage = "https://huggingface.co/Ethosoft/NedoTurkishTokenizer"
Repository = "https://huggingface.co/Ethosoft/NedoTurkishTokenizer"

[tool.setuptools.packages.find]
where = ["."]
include = ["nedo_turkish_tokenizer*"]

[tool.setuptools.package-data]
# Bundle JAR files shipped inside the package's data/ directory.
# NOTE(review): presumably loaded at runtime through jpype1 — confirm
# the package actually reads data/*.jar before changing this glob.
nedo_turkish_tokenizer = ["data/*.jar"]