Upload 1971 files
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- feature_extractor/preprocessor_config.json +28 -0
- safety_checker/config.json +28 -0
- safety_checker/pytorch_model.bin +3 -0
- scheduler/scheduler_config.json +15 -0
- text_encoder/config.json +24 -0
- text_encoder/pytorch_model.bin +3 -0
- tokenizer/merges.txt +0 -0
- tokenizer/special_tokens_map.json +24 -0
- tokenizer/tokenizer_config.json +33 -0
- tokenizer/vocab.json +0 -0
- unet/config.json +66 -0
- unet/diffusion_pytorch_model.bin +3 -0
- vae/config.json +31 -0
- vae/diffusion_pytorch_model.bin +3 -0
- venv/.gitignore +2 -0
- venv/Lib/site-packages/PyYAML-6.0.2.dist-info/INSTALLER +1 -0
- venv/Lib/site-packages/PyYAML-6.0.2.dist-info/LICENSE +20 -0
- venv/Lib/site-packages/PyYAML-6.0.2.dist-info/METADATA +46 -0
- venv/Lib/site-packages/PyYAML-6.0.2.dist-info/RECORD +43 -0
- venv/Lib/site-packages/PyYAML-6.0.2.dist-info/WHEEL +5 -0
- venv/Lib/site-packages/PyYAML-6.0.2.dist-info/top_level.txt +2 -0
- venv/Lib/site-packages/__pycache__/_virtualenv.cpython-311.pyc +0 -0
- venv/Lib/site-packages/__pycache__/typing_extensions.cpython-311.pyc +0 -0
- venv/Lib/site-packages/_distutils_hack/__init__.py +227 -0
- venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc +0 -0
- venv/Lib/site-packages/_distutils_hack/override.py +1 -0
- venv/Lib/site-packages/_virtualenv.pth +3 -0
- venv/Lib/site-packages/_virtualenv.py +102 -0
- venv/Lib/site-packages/_yaml/__init__.py +33 -0
- venv/Lib/site-packages/_yaml/__pycache__/__init__.cpython-311.pyc +0 -0
- venv/Lib/site-packages/certifi-2024.12.14.dist-info/INSTALLER +1 -0
- venv/Lib/site-packages/certifi-2024.12.14.dist-info/LICENSE +20 -0
- venv/Lib/site-packages/certifi-2024.12.14.dist-info/METADATA +68 -0
- venv/Lib/site-packages/certifi-2024.12.14.dist-info/RECORD +14 -0
- venv/Lib/site-packages/certifi-2024.12.14.dist-info/WHEEL +5 -0
- venv/Lib/site-packages/certifi-2024.12.14.dist-info/top_level.txt +1 -0
- venv/Lib/site-packages/certifi/__init__.py +4 -0
- venv/Lib/site-packages/certifi/__main__.py +12 -0
- venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-311.pyc +0 -0
- venv/Lib/site-packages/certifi/__pycache__/__main__.cpython-311.pyc +0 -0
- venv/Lib/site-packages/certifi/__pycache__/core.cpython-311.pyc +0 -0
- venv/Lib/site-packages/certifi/cacert.pem +0 -0
- venv/Lib/site-packages/certifi/core.py +114 -0
- venv/Lib/site-packages/certifi/py.typed +0 -0
- venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER +1 -0
- venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/LICENSE +21 -0
- venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/METADATA +721 -0
- venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/RECORD +35 -0
- venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/WHEEL +5 -0
- venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt +2 -0
feature_extractor/preprocessor_config.json
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"crop_size": {
|
| 3 |
+
"height": 224,
|
| 4 |
+
"width": 224
|
| 5 |
+
},
|
| 6 |
+
"do_center_crop": true,
|
| 7 |
+
"do_convert_rgb": true,
|
| 8 |
+
"do_normalize": true,
|
| 9 |
+
"do_rescale": true,
|
| 10 |
+
"do_resize": true,
|
| 11 |
+
"feature_extractor_type": "CLIPFeatureExtractor",
|
| 12 |
+
"image_mean": [
|
| 13 |
+
0.48145466,
|
| 14 |
+
0.4578275,
|
| 15 |
+
0.40821073
|
| 16 |
+
],
|
| 17 |
+
"image_processor_type": "CLIPFeatureExtractor",
|
| 18 |
+
"image_std": [
|
| 19 |
+
0.26862954,
|
| 20 |
+
0.26130258,
|
| 21 |
+
0.27577711
|
| 22 |
+
],
|
| 23 |
+
"resample": 3,
|
| 24 |
+
"rescale_factor": 0.00392156862745098,
|
| 25 |
+
"size": {
|
| 26 |
+
"shortest_edge": 224
|
| 27 |
+
}
|
| 28 |
+
}
|
safety_checker/config.json
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_name_or_path": "CompVis/stable-diffusion-safety-checker",
|
| 3 |
+
"architectures": [
|
| 4 |
+
"StableDiffusionSafetyChecker"
|
| 5 |
+
],
|
| 6 |
+
"initializer_factor": 1.0,
|
| 7 |
+
"logit_scale_init_value": 2.6592,
|
| 8 |
+
"model_type": "clip",
|
| 9 |
+
"projection_dim": 768,
|
| 10 |
+
"text_config": {
|
| 11 |
+
"dropout": 0.0,
|
| 12 |
+
"hidden_size": 768,
|
| 13 |
+
"intermediate_size": 3072,
|
| 14 |
+
"model_type": "clip_text_model",
|
| 15 |
+
"num_attention_heads": 12
|
| 16 |
+
},
|
| 17 |
+
"torch_dtype": "float16",
|
| 18 |
+
"transformers_version": "4.33.1",
|
| 19 |
+
"vision_config": {
|
| 20 |
+
"dropout": 0.0,
|
| 21 |
+
"hidden_size": 1024,
|
| 22 |
+
"intermediate_size": 4096,
|
| 23 |
+
"model_type": "clip_vision_model",
|
| 24 |
+
"num_attention_heads": 16,
|
| 25 |
+
"num_hidden_layers": 24,
|
| 26 |
+
"patch_size": 14
|
| 27 |
+
}
|
| 28 |
+
}
|
safety_checker/pytorch_model.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e09c5d39c29d798137c7aa6f529bd44abe3616b054740a02d896f0381ca4f4f9
|
| 3 |
+
size 608099578
|
scheduler/scheduler_config.json
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_class_name": "PNDMScheduler",
|
| 3 |
+
"_diffusers_version": "0.21.2",
|
| 4 |
+
"beta_end": 0.012,
|
| 5 |
+
"beta_schedule": "scaled_linear",
|
| 6 |
+
"beta_start": 0.00085,
|
| 7 |
+
"clip_sample": false,
|
| 8 |
+
"num_train_timesteps": 1000,
|
| 9 |
+
"prediction_type": "epsilon",
|
| 10 |
+
"set_alpha_to_one": false,
|
| 11 |
+
"skip_prk_steps": true,
|
| 12 |
+
"steps_offset": 1,
|
| 13 |
+
"timestep_spacing": "leading",
|
| 14 |
+
"trained_betas": null
|
| 15 |
+
}
|
text_encoder/config.json
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"CLIPTextModel"
|
| 4 |
+
],
|
| 5 |
+
"attention_dropout": 0.0,
|
| 6 |
+
"bos_token_id": 0,
|
| 7 |
+
"dropout": 0.0,
|
| 8 |
+
"eos_token_id": 2,
|
| 9 |
+
"hidden_act": "quick_gelu",
|
| 10 |
+
"hidden_size": 768,
|
| 11 |
+
"initializer_factor": 1.0,
|
| 12 |
+
"initializer_range": 0.02,
|
| 13 |
+
"intermediate_size": 3072,
|
| 14 |
+
"layer_norm_eps": 1e-05,
|
| 15 |
+
"max_position_embeddings": 77,
|
| 16 |
+
"model_type": "clip_text_model",
|
| 17 |
+
"num_attention_heads": 12,
|
| 18 |
+
"num_hidden_layers": 12,
|
| 19 |
+
"pad_token_id": 1,
|
| 20 |
+
"projection_dim": 768,
|
| 21 |
+
"torch_dtype": "float16",
|
| 22 |
+
"transformers_version": "4.33.1",
|
| 23 |
+
"vocab_size": 49408
|
| 24 |
+
}
|
text_encoder/pytorch_model.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:05a0cff1c5e772d0b0b3c77329d2976130569b3f44126717f9796eba20ced688
|
| 3 |
+
size 246185562
|
tokenizer/merges.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
tokenizer/special_tokens_map.json
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"bos_token": {
|
| 3 |
+
"content": "<|startoftext|>",
|
| 4 |
+
"lstrip": false,
|
| 5 |
+
"normalized": true,
|
| 6 |
+
"rstrip": false,
|
| 7 |
+
"single_word": false
|
| 8 |
+
},
|
| 9 |
+
"eos_token": {
|
| 10 |
+
"content": "<|endoftext|>",
|
| 11 |
+
"lstrip": false,
|
| 12 |
+
"normalized": true,
|
| 13 |
+
"rstrip": false,
|
| 14 |
+
"single_word": false
|
| 15 |
+
},
|
| 16 |
+
"pad_token": "<|endoftext|>",
|
| 17 |
+
"unk_token": {
|
| 18 |
+
"content": "<|endoftext|>",
|
| 19 |
+
"lstrip": false,
|
| 20 |
+
"normalized": true,
|
| 21 |
+
"rstrip": false,
|
| 22 |
+
"single_word": false
|
| 23 |
+
}
|
| 24 |
+
}
|
tokenizer/tokenizer_config.json
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"add_prefix_space": false,
|
| 3 |
+
"bos_token": {
|
| 4 |
+
"__type": "AddedToken",
|
| 5 |
+
"content": "<|startoftext|>",
|
| 6 |
+
"lstrip": false,
|
| 7 |
+
"normalized": true,
|
| 8 |
+
"rstrip": false,
|
| 9 |
+
"single_word": false
|
| 10 |
+
},
|
| 11 |
+
"clean_up_tokenization_spaces": true,
|
| 12 |
+
"do_lower_case": true,
|
| 13 |
+
"eos_token": {
|
| 14 |
+
"__type": "AddedToken",
|
| 15 |
+
"content": "<|endoftext|>",
|
| 16 |
+
"lstrip": false,
|
| 17 |
+
"normalized": true,
|
| 18 |
+
"rstrip": false,
|
| 19 |
+
"single_word": false
|
| 20 |
+
},
|
| 21 |
+
"errors": "replace",
|
| 22 |
+
"model_max_length": 144,
|
| 23 |
+
"pad_token": "<|endoftext|>",
|
| 24 |
+
"tokenizer_class": "CLIPTokenizer",
|
| 25 |
+
"unk_token": {
|
| 26 |
+
"__type": "AddedToken",
|
| 27 |
+
"content": "<|endoftext|>",
|
| 28 |
+
"lstrip": false,
|
| 29 |
+
"normalized": true,
|
| 30 |
+
"rstrip": false,
|
| 31 |
+
"single_word": false
|
| 32 |
+
}
|
| 33 |
+
}
|
tokenizer/vocab.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
unet/config.json
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_class_name": "UNet2DConditionModel",
|
| 3 |
+
"_diffusers_version": "0.21.2",
|
| 4 |
+
"act_fn": "silu",
|
| 5 |
+
"addition_embed_type": null,
|
| 6 |
+
"addition_embed_type_num_heads": 64,
|
| 7 |
+
"addition_time_embed_dim": null,
|
| 8 |
+
"attention_head_dim": 8,
|
| 9 |
+
"attention_type": "default",
|
| 10 |
+
"block_out_channels": [
|
| 11 |
+
320,
|
| 12 |
+
640,
|
| 13 |
+
1280,
|
| 14 |
+
1280
|
| 15 |
+
],
|
| 16 |
+
"center_input_sample": false,
|
| 17 |
+
"class_embed_type": null,
|
| 18 |
+
"class_embeddings_concat": false,
|
| 19 |
+
"conv_in_kernel": 3,
|
| 20 |
+
"conv_out_kernel": 3,
|
| 21 |
+
"cross_attention_dim": 768,
|
| 22 |
+
"cross_attention_norm": null,
|
| 23 |
+
"down_block_types": [
|
| 24 |
+
"CrossAttnDownBlock2D",
|
| 25 |
+
"CrossAttnDownBlock2D",
|
| 26 |
+
"CrossAttnDownBlock2D",
|
| 27 |
+
"DownBlock2D"
|
| 28 |
+
],
|
| 29 |
+
"downsample_padding": 1,
|
| 30 |
+
"dropout": 0.0,
|
| 31 |
+
"dual_cross_attention": false,
|
| 32 |
+
"encoder_hid_dim": null,
|
| 33 |
+
"encoder_hid_dim_type": null,
|
| 34 |
+
"flip_sin_to_cos": true,
|
| 35 |
+
"freq_shift": 0,
|
| 36 |
+
"in_channels": 4,
|
| 37 |
+
"layers_per_block": 2,
|
| 38 |
+
"mid_block_only_cross_attention": null,
|
| 39 |
+
"mid_block_scale_factor": 1,
|
| 40 |
+
"mid_block_type": "UNetMidBlock2DCrossAttn",
|
| 41 |
+
"norm_eps": 1e-05,
|
| 42 |
+
"norm_num_groups": 32,
|
| 43 |
+
"num_attention_heads": null,
|
| 44 |
+
"num_class_embeds": null,
|
| 45 |
+
"only_cross_attention": false,
|
| 46 |
+
"out_channels": 4,
|
| 47 |
+
"projection_class_embeddings_input_dim": null,
|
| 48 |
+
"resnet_out_scale_factor": 1.0,
|
| 49 |
+
"resnet_skip_time_act": false,
|
| 50 |
+
"resnet_time_scale_shift": "default",
|
| 51 |
+
"sample_size": 64,
|
| 52 |
+
"time_cond_proj_dim": null,
|
| 53 |
+
"time_embedding_act_fn": null,
|
| 54 |
+
"time_embedding_dim": null,
|
| 55 |
+
"time_embedding_type": "positional",
|
| 56 |
+
"timestep_post_act": null,
|
| 57 |
+
"transformer_layers_per_block": 1,
|
| 58 |
+
"up_block_types": [
|
| 59 |
+
"UpBlock2D",
|
| 60 |
+
"CrossAttnUpBlock2D",
|
| 61 |
+
"CrossAttnUpBlock2D",
|
| 62 |
+
"CrossAttnUpBlock2D"
|
| 63 |
+
],
|
| 64 |
+
"upcast_attention": false,
|
| 65 |
+
"use_linear_projection": false
|
| 66 |
+
}
|
unet/diffusion_pytorch_model.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d765244af846b9f267703b9457b5fe4aed4d71de192cb8973d5089af975700db
|
| 3 |
+
size 1719324918
|
vae/config.json
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_class_name": "AutoencoderKL",
|
| 3 |
+
"_diffusers_version": "0.21.2",
|
| 4 |
+
"act_fn": "silu",
|
| 5 |
+
"block_out_channels": [
|
| 6 |
+
128,
|
| 7 |
+
256,
|
| 8 |
+
512,
|
| 9 |
+
512
|
| 10 |
+
],
|
| 11 |
+
"down_block_types": [
|
| 12 |
+
"DownEncoderBlock2D",
|
| 13 |
+
"DownEncoderBlock2D",
|
| 14 |
+
"DownEncoderBlock2D",
|
| 15 |
+
"DownEncoderBlock2D"
|
| 16 |
+
],
|
| 17 |
+
"force_upcast": true,
|
| 18 |
+
"in_channels": 3,
|
| 19 |
+
"latent_channels": 4,
|
| 20 |
+
"layers_per_block": 2,
|
| 21 |
+
"norm_num_groups": 32,
|
| 22 |
+
"out_channels": 3,
|
| 23 |
+
"sample_size": 512,
|
| 24 |
+
"scaling_factor": 0.18215,
|
| 25 |
+
"up_block_types": [
|
| 26 |
+
"UpDecoderBlock2D",
|
| 27 |
+
"UpDecoderBlock2D",
|
| 28 |
+
"UpDecoderBlock2D",
|
| 29 |
+
"UpDecoderBlock2D"
|
| 30 |
+
]
|
| 31 |
+
}
|
vae/diffusion_pytorch_model.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:939371966fe64a42ea0cd0de5933ac7dd52e117c98770a588ec26e4ab6c10f79
|
| 3 |
+
size 167404866
|
venv/.gitignore
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# created by virtualenv automatically
|
| 2 |
+
*
|
venv/Lib/site-packages/PyYAML-6.0.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
venv/Lib/site-packages/PyYAML-6.0.2.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2017-2021 Ingy döt Net
|
| 2 |
+
Copyright (c) 2006-2016 Kirill Simonov
|
| 3 |
+
|
| 4 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
| 5 |
+
this software and associated documentation files (the "Software"), to deal in
|
| 6 |
+
the Software without restriction, including without limitation the rights to
|
| 7 |
+
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
| 8 |
+
of the Software, and to permit persons to whom the Software is furnished to do
|
| 9 |
+
so, subject to the following conditions:
|
| 10 |
+
|
| 11 |
+
The above copyright notice and this permission notice shall be included in all
|
| 12 |
+
copies or substantial portions of the Software.
|
| 13 |
+
|
| 14 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 15 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 16 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 17 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 18 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 19 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 20 |
+
SOFTWARE.
|
venv/Lib/site-packages/PyYAML-6.0.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: PyYAML
|
| 3 |
+
Version: 6.0.2
|
| 4 |
+
Summary: YAML parser and emitter for Python
|
| 5 |
+
Home-page: https://pyyaml.org/
|
| 6 |
+
Download-URL: https://pypi.org/project/PyYAML/
|
| 7 |
+
Author: Kirill Simonov
|
| 8 |
+
Author-email: xi@resolvent.net
|
| 9 |
+
License: MIT
|
| 10 |
+
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
|
| 11 |
+
Project-URL: CI, https://github.com/yaml/pyyaml/actions
|
| 12 |
+
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
|
| 13 |
+
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
|
| 14 |
+
Project-URL: Source Code, https://github.com/yaml/pyyaml
|
| 15 |
+
Platform: Any
|
| 16 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 17 |
+
Classifier: Intended Audience :: Developers
|
| 18 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 19 |
+
Classifier: Operating System :: OS Independent
|
| 20 |
+
Classifier: Programming Language :: Cython
|
| 21 |
+
Classifier: Programming Language :: Python
|
| 22 |
+
Classifier: Programming Language :: Python :: 3
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 29 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 30 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 31 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 32 |
+
Classifier: Topic :: Text Processing :: Markup
|
| 33 |
+
Requires-Python: >=3.8
|
| 34 |
+
License-File: LICENSE
|
| 35 |
+
|
| 36 |
+
YAML is a data serialization format designed for human readability
|
| 37 |
+
and interaction with scripting languages. PyYAML is a YAML parser
|
| 38 |
+
and emitter for Python.
|
| 39 |
+
|
| 40 |
+
PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
|
| 41 |
+
support, capable extension API, and sensible error messages. PyYAML
|
| 42 |
+
supports standard YAML tags and provides Python-specific tags that
|
| 43 |
+
allow to represent an arbitrary Python object.
|
| 44 |
+
|
| 45 |
+
PyYAML is applicable for a broad range of tasks from complex
|
| 46 |
+
configuration files to object serialization and persistence.
|
venv/Lib/site-packages/PyYAML-6.0.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
PyYAML-6.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
|
| 3 |
+
PyYAML-6.0.2.dist-info/METADATA,sha256=9lwXqTOrXPts-jI2Lo5UwuaAYo0hiRA0BZqjch0WjAk,2106
|
| 4 |
+
PyYAML-6.0.2.dist-info/RECORD,,
|
| 5 |
+
PyYAML-6.0.2.dist-info/WHEEL,sha256=yEpuRje-u1Z_HrXQj-UTAfIAegW_HcP2GJ7Ek8BJkUM,102
|
| 6 |
+
PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
|
| 7 |
+
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
|
| 8 |
+
_yaml/__pycache__/__init__.cpython-311.pyc,,
|
| 9 |
+
yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311
|
| 10 |
+
yaml/__pycache__/__init__.cpython-311.pyc,,
|
| 11 |
+
yaml/__pycache__/composer.cpython-311.pyc,,
|
| 12 |
+
yaml/__pycache__/constructor.cpython-311.pyc,,
|
| 13 |
+
yaml/__pycache__/cyaml.cpython-311.pyc,,
|
| 14 |
+
yaml/__pycache__/dumper.cpython-311.pyc,,
|
| 15 |
+
yaml/__pycache__/emitter.cpython-311.pyc,,
|
| 16 |
+
yaml/__pycache__/error.cpython-311.pyc,,
|
| 17 |
+
yaml/__pycache__/events.cpython-311.pyc,,
|
| 18 |
+
yaml/__pycache__/loader.cpython-311.pyc,,
|
| 19 |
+
yaml/__pycache__/nodes.cpython-311.pyc,,
|
| 20 |
+
yaml/__pycache__/parser.cpython-311.pyc,,
|
| 21 |
+
yaml/__pycache__/reader.cpython-311.pyc,,
|
| 22 |
+
yaml/__pycache__/representer.cpython-311.pyc,,
|
| 23 |
+
yaml/__pycache__/resolver.cpython-311.pyc,,
|
| 24 |
+
yaml/__pycache__/scanner.cpython-311.pyc,,
|
| 25 |
+
yaml/__pycache__/serializer.cpython-311.pyc,,
|
| 26 |
+
yaml/__pycache__/tokens.cpython-311.pyc,,
|
| 27 |
+
yaml/_yaml.cp311-win_amd64.pyd,sha256=6BXrc7YC-BZJ911z64UDwJV3D0ay3GiyETPzbhl0iJc,272384
|
| 28 |
+
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
|
| 29 |
+
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
|
| 30 |
+
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
|
| 31 |
+
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
|
| 32 |
+
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
|
| 33 |
+
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
|
| 34 |
+
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
|
| 35 |
+
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
|
| 36 |
+
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
|
| 37 |
+
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
|
| 38 |
+
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
|
| 39 |
+
yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
|
| 40 |
+
yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
|
| 41 |
+
yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
|
| 42 |
+
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
|
| 43 |
+
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
|
venv/Lib/site-packages/PyYAML-6.0.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.44.0)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp311-cp311-win_amd64
|
| 5 |
+
|
venv/Lib/site-packages/PyYAML-6.0.2.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
_yaml
|
| 2 |
+
yaml
|
venv/Lib/site-packages/__pycache__/_virtualenv.cpython-311.pyc
ADDED
|
Binary file (4.8 kB). View file
|
|
|
venv/Lib/site-packages/__pycache__/typing_extensions.cpython-311.pyc
ADDED
|
Binary file (151 kB). View file
|
|
|
venv/Lib/site-packages/_distutils_hack/__init__.py
ADDED
|
@@ -0,0 +1,227 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# don't import any costly modules
|
| 2 |
+
import sys
|
| 3 |
+
import os
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
is_pypy = '__pypy__' in sys.builtin_module_names
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def warn_distutils_present():
|
| 10 |
+
if 'distutils' not in sys.modules:
|
| 11 |
+
return
|
| 12 |
+
if is_pypy and sys.version_info < (3, 7):
|
| 13 |
+
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
|
| 14 |
+
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
|
| 15 |
+
return
|
| 16 |
+
import warnings
|
| 17 |
+
|
| 18 |
+
warnings.warn(
|
| 19 |
+
"Distutils was imported before Setuptools, but importing Setuptools "
|
| 20 |
+
"also replaces the `distutils` module in `sys.modules`. This may lead "
|
| 21 |
+
"to undesirable behaviors or errors. To avoid these issues, avoid "
|
| 22 |
+
"using distutils directly, ensure that setuptools is installed in the "
|
| 23 |
+
"traditional way (e.g. not an editable install), and/or make sure "
|
| 24 |
+
"that setuptools is always imported before distutils."
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def clear_distutils():
|
| 29 |
+
if 'distutils' not in sys.modules:
|
| 30 |
+
return
|
| 31 |
+
import warnings
|
| 32 |
+
|
| 33 |
+
warnings.warn("Setuptools is replacing distutils.")
|
| 34 |
+
mods = [
|
| 35 |
+
name
|
| 36 |
+
for name in sys.modules
|
| 37 |
+
if name == "distutils" or name.startswith("distutils.")
|
| 38 |
+
]
|
| 39 |
+
for name in mods:
|
| 40 |
+
del sys.modules[name]
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def enabled():
|
| 44 |
+
"""
|
| 45 |
+
Allow selection of distutils by environment variable.
|
| 46 |
+
"""
|
| 47 |
+
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
|
| 48 |
+
return which == 'local'
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def ensure_local_distutils():
|
| 52 |
+
import importlib
|
| 53 |
+
|
| 54 |
+
clear_distutils()
|
| 55 |
+
|
| 56 |
+
# With the DistutilsMetaFinder in place,
|
| 57 |
+
# perform an import to cause distutils to be
|
| 58 |
+
# loaded from setuptools._distutils. Ref #2906.
|
| 59 |
+
with shim():
|
| 60 |
+
importlib.import_module('distutils')
|
| 61 |
+
|
| 62 |
+
# check that submodules load as expected
|
| 63 |
+
core = importlib.import_module('distutils.core')
|
| 64 |
+
assert '_distutils' in core.__file__, core.__file__
|
| 65 |
+
assert 'setuptools._distutils.log' not in sys.modules
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def do_override():
|
| 69 |
+
"""
|
| 70 |
+
Ensure that the local copy of distutils is preferred over stdlib.
|
| 71 |
+
|
| 72 |
+
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
|
| 73 |
+
for more motivation.
|
| 74 |
+
"""
|
| 75 |
+
if enabled():
|
| 76 |
+
warn_distutils_present()
|
| 77 |
+
ensure_local_distutils()
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class _TrivialRe:
|
| 81 |
+
def __init__(self, *patterns):
|
| 82 |
+
self._patterns = patterns
|
| 83 |
+
|
| 84 |
+
def match(self, string):
|
| 85 |
+
return all(pat in string for pat in self._patterns)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class DistutilsMetaFinder:
|
| 89 |
+
def find_spec(self, fullname, path, target=None):
|
| 90 |
+
# optimization: only consider top level modules and those
|
| 91 |
+
# found in the CPython test suite.
|
| 92 |
+
if path is not None and not fullname.startswith('test.'):
|
| 93 |
+
return
|
| 94 |
+
|
| 95 |
+
method_name = 'spec_for_{fullname}'.format(**locals())
|
| 96 |
+
method = getattr(self, method_name, lambda: None)
|
| 97 |
+
return method()
|
| 98 |
+
|
| 99 |
+
def spec_for_distutils(self):
|
| 100 |
+
if self.is_cpython():
|
| 101 |
+
return
|
| 102 |
+
|
| 103 |
+
import importlib
|
| 104 |
+
import importlib.abc
|
| 105 |
+
import importlib.util
|
| 106 |
+
|
| 107 |
+
try:
|
| 108 |
+
mod = importlib.import_module('setuptools._distutils')
|
| 109 |
+
except Exception:
|
| 110 |
+
# There are a couple of cases where setuptools._distutils
|
| 111 |
+
# may not be present:
|
| 112 |
+
# - An older Setuptools without a local distutils is
|
| 113 |
+
# taking precedence. Ref #2957.
|
| 114 |
+
# - Path manipulation during sitecustomize removes
|
| 115 |
+
# setuptools from the path but only after the hook
|
| 116 |
+
# has been loaded. Ref #2980.
|
| 117 |
+
# In either case, fall back to stdlib behavior.
|
| 118 |
+
return
|
| 119 |
+
|
| 120 |
+
class DistutilsLoader(importlib.abc.Loader):
|
| 121 |
+
def create_module(self, spec):
|
| 122 |
+
mod.__name__ = 'distutils'
|
| 123 |
+
return mod
|
| 124 |
+
|
| 125 |
+
def exec_module(self, module):
|
| 126 |
+
pass
|
| 127 |
+
|
| 128 |
+
return importlib.util.spec_from_loader(
|
| 129 |
+
'distutils', DistutilsLoader(), origin=mod.__file__
|
| 130 |
+
)
|
| 131 |
+
|
| 132 |
+
@staticmethod
|
| 133 |
+
def is_cpython():
|
| 134 |
+
"""
|
| 135 |
+
Suppress supplying distutils for CPython (build and tests).
|
| 136 |
+
Ref #2965 and #3007.
|
| 137 |
+
"""
|
| 138 |
+
return os.path.isfile('pybuilddir.txt')
|
| 139 |
+
|
| 140 |
+
def spec_for_pip(self):
|
| 141 |
+
"""
|
| 142 |
+
Ensure stdlib distutils when running under pip.
|
| 143 |
+
See pypa/pip#8761 for rationale.
|
| 144 |
+
"""
|
| 145 |
+
if sys.version_info >= (3, 12) or self.pip_imported_during_build():
|
| 146 |
+
return
|
| 147 |
+
clear_distutils()
|
| 148 |
+
self.spec_for_distutils = lambda: None
|
| 149 |
+
|
| 150 |
+
@classmethod
|
| 151 |
+
def pip_imported_during_build(cls):
|
| 152 |
+
"""
|
| 153 |
+
Detect if pip is being imported in a build script. Ref #2355.
|
| 154 |
+
"""
|
| 155 |
+
import traceback
|
| 156 |
+
|
| 157 |
+
return any(
|
| 158 |
+
cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
|
| 159 |
+
)
|
| 160 |
+
|
| 161 |
+
@staticmethod
|
| 162 |
+
def frame_file_is_setup(frame):
|
| 163 |
+
"""
|
| 164 |
+
Return True if the indicated frame suggests a setup.py file.
|
| 165 |
+
"""
|
| 166 |
+
# some frames may not have __file__ (#2940)
|
| 167 |
+
return frame.f_globals.get('__file__', '').endswith('setup.py')
|
| 168 |
+
|
| 169 |
+
def spec_for_sensitive_tests(self):
|
| 170 |
+
"""
|
| 171 |
+
Ensure stdlib distutils when running select tests under CPython.
|
| 172 |
+
|
| 173 |
+
python/cpython#91169
|
| 174 |
+
"""
|
| 175 |
+
clear_distutils()
|
| 176 |
+
self.spec_for_distutils = lambda: None
|
| 177 |
+
|
| 178 |
+
sensitive_tests = (
|
| 179 |
+
[
|
| 180 |
+
'test.test_distutils',
|
| 181 |
+
'test.test_peg_generator',
|
| 182 |
+
'test.test_importlib',
|
| 183 |
+
]
|
| 184 |
+
if sys.version_info < (3, 10)
|
| 185 |
+
else [
|
| 186 |
+
'test.test_distutils',
|
| 187 |
+
]
|
| 188 |
+
)
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
for name in DistutilsMetaFinder.sensitive_tests:
|
| 192 |
+
setattr(
|
| 193 |
+
DistutilsMetaFinder,
|
| 194 |
+
f'spec_for_{name}',
|
| 195 |
+
DistutilsMetaFinder.spec_for_sensitive_tests,
|
| 196 |
+
)
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
DISTUTILS_FINDER = DistutilsMetaFinder()
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
def add_shim():
|
| 203 |
+
DISTUTILS_FINDER in sys.meta_path or insert_shim()
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
class shim:
|
| 207 |
+
def __enter__(self):
|
| 208 |
+
insert_shim()
|
| 209 |
+
|
| 210 |
+
def __exit__(self, exc, value, tb):
|
| 211 |
+
_remove_shim()
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def insert_shim():
|
| 215 |
+
sys.meta_path.insert(0, DISTUTILS_FINDER)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def _remove_shim():
|
| 219 |
+
try:
|
| 220 |
+
sys.meta_path.remove(DISTUTILS_FINDER)
|
| 221 |
+
except ValueError:
|
| 222 |
+
pass
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
if sys.version_info < (3, 12):
|
| 226 |
+
# DistutilsMetaFinder can only be disabled in Python < 3.12 (PEP 632)
|
| 227 |
+
remove_shim = _remove_shim
|
venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (11.3 kB). View file
|
|
|
venv/Lib/site-packages/_distutils_hack/override.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
__import__('_distutils_hack').do_override()
|
venv/Lib/site-packages/_virtualenv.pth
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:69ac3d8f27e679c81b94ab30b3b56e9cd138219b1ba94a1fa3606d5a76a1433d
|
| 3 |
+
size 18
|
venv/Lib/site-packages/_virtualenv.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Patches that are applied at runtime to the virtual environment."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
from contextlib import suppress
|
| 8 |
+
|
| 9 |
+
VIRTUALENV_PATCH_FILE = os.path.join(__file__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def patch_dist(dist):
|
| 13 |
+
"""
|
| 14 |
+
Distutils allows user to configure some arguments via a configuration file:
|
| 15 |
+
https://docs.python.org/3/install/index.html#distutils-configuration-files.
|
| 16 |
+
|
| 17 |
+
Some of this arguments though don't make sense in context of the virtual environment files, let's fix them up.
|
| 18 |
+
""" # noqa: D205
|
| 19 |
+
# we cannot allow some install config as that would get packages installed outside of the virtual environment
|
| 20 |
+
old_parse_config_files = dist.Distribution.parse_config_files
|
| 21 |
+
|
| 22 |
+
def parse_config_files(self, *args, **kwargs):
|
| 23 |
+
result = old_parse_config_files(self, *args, **kwargs)
|
| 24 |
+
install = self.get_option_dict("install")
|
| 25 |
+
|
| 26 |
+
if "prefix" in install: # the prefix governs where to install the libraries
|
| 27 |
+
install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix)
|
| 28 |
+
for base in ("purelib", "platlib", "headers", "scripts", "data"):
|
| 29 |
+
key = f"install_{base}"
|
| 30 |
+
if key in install: # do not allow global configs to hijack venv paths
|
| 31 |
+
install.pop(key, None)
|
| 32 |
+
return result
|
| 33 |
+
|
| 34 |
+
dist.Distribution.parse_config_files = parse_config_files
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# Import hook that patches some modules to ignore configuration values that break package installation in case
|
| 38 |
+
# of virtual environments.
|
| 39 |
+
_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist"
|
| 40 |
+
# https://docs.python.org/3/library/importlib.html#setting-up-an-importer
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class _Finder:
|
| 44 |
+
"""A meta path finder that allows patching the imported distutils modules."""
|
| 45 |
+
|
| 46 |
+
fullname = None
|
| 47 |
+
|
| 48 |
+
# lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup,
|
| 49 |
+
# because there are gevent-based applications that need to be first to import threading by themselves.
|
| 50 |
+
# See https://github.com/pypa/virtualenv/issues/1895 for details.
|
| 51 |
+
lock = [] # noqa: RUF012
|
| 52 |
+
|
| 53 |
+
def find_spec(self, fullname, path, target=None): # noqa: ARG002
|
| 54 |
+
if fullname in _DISTUTILS_PATCH and self.fullname is None:
|
| 55 |
+
# initialize lock[0] lazily
|
| 56 |
+
if len(self.lock) == 0:
|
| 57 |
+
import threading
|
| 58 |
+
|
| 59 |
+
lock = threading.Lock()
|
| 60 |
+
# there is possibility that two threads T1 and T2 are simultaneously running into find_spec,
|
| 61 |
+
# observing .lock as empty, and further going into hereby initialization. However due to the GIL,
|
| 62 |
+
# list.append() operation is atomic and this way only one of the threads will "win" to put the lock
|
| 63 |
+
# - that every thread will use - into .lock[0].
|
| 64 |
+
# https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe
|
| 65 |
+
self.lock.append(lock)
|
| 66 |
+
|
| 67 |
+
from functools import partial
|
| 68 |
+
from importlib.util import find_spec
|
| 69 |
+
|
| 70 |
+
with self.lock[0]:
|
| 71 |
+
self.fullname = fullname
|
| 72 |
+
try:
|
| 73 |
+
spec = find_spec(fullname, path)
|
| 74 |
+
if spec is not None:
|
| 75 |
+
# https://www.python.org/dev/peps/pep-0451/#how-loading-will-work
|
| 76 |
+
is_new_api = hasattr(spec.loader, "exec_module")
|
| 77 |
+
func_name = "exec_module" if is_new_api else "load_module"
|
| 78 |
+
old = getattr(spec.loader, func_name)
|
| 79 |
+
func = self.exec_module if is_new_api else self.load_module
|
| 80 |
+
if old is not func:
|
| 81 |
+
with suppress(AttributeError): # C-Extension loaders are r/o such as zipimporter with <3.7
|
| 82 |
+
setattr(spec.loader, func_name, partial(func, old))
|
| 83 |
+
return spec
|
| 84 |
+
finally:
|
| 85 |
+
self.fullname = None
|
| 86 |
+
return None
|
| 87 |
+
|
| 88 |
+
@staticmethod
|
| 89 |
+
def exec_module(old, module):
|
| 90 |
+
old(module)
|
| 91 |
+
if module.__name__ in _DISTUTILS_PATCH:
|
| 92 |
+
patch_dist(module)
|
| 93 |
+
|
| 94 |
+
@staticmethod
|
| 95 |
+
def load_module(old, name):
|
| 96 |
+
module = old(name)
|
| 97 |
+
if module.__name__ in _DISTUTILS_PATCH:
|
| 98 |
+
patch_dist(module)
|
| 99 |
+
return module
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
sys.meta_path.insert(0, _Finder())
|
venv/Lib/site-packages/_yaml/__init__.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This is a stub package designed to roughly emulate the _yaml
|
| 2 |
+
# extension module, which previously existed as a standalone module
|
| 3 |
+
# and has been moved into the `yaml` package namespace.
|
| 4 |
+
# It does not perfectly mimic its old counterpart, but should get
|
| 5 |
+
# close enough for anyone who's relying on it even when they shouldn't.
|
| 6 |
+
import yaml
|
| 7 |
+
|
| 8 |
+
# in some circumstances, the yaml module we imoprted may be from a different version, so we need
|
| 9 |
+
# to tread carefully when poking at it here (it may not have the attributes we expect)
|
| 10 |
+
if not getattr(yaml, '__with_libyaml__', False):
|
| 11 |
+
from sys import version_info
|
| 12 |
+
|
| 13 |
+
exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
|
| 14 |
+
raise exc("No module named '_yaml'")
|
| 15 |
+
else:
|
| 16 |
+
from yaml._yaml import *
|
| 17 |
+
import warnings
|
| 18 |
+
warnings.warn(
|
| 19 |
+
'The _yaml extension module is now located at yaml._yaml'
|
| 20 |
+
' and its location is subject to change. To use the'
|
| 21 |
+
' LibYAML-based parser and emitter, import from `yaml`:'
|
| 22 |
+
' `from yaml import CLoader as Loader, CDumper as Dumper`.',
|
| 23 |
+
DeprecationWarning
|
| 24 |
+
)
|
| 25 |
+
del warnings
|
| 26 |
+
# Don't `del yaml` here because yaml is actually an existing
|
| 27 |
+
# namespace member of _yaml.
|
| 28 |
+
|
| 29 |
+
__name__ = '_yaml'
|
| 30 |
+
# If the module is top-level (i.e. not a part of any specific package)
|
| 31 |
+
# then the attribute should be set to ''.
|
| 32 |
+
# https://docs.python.org/3.8/library/types.html
|
| 33 |
+
__package__ = ''
|
venv/Lib/site-packages/_yaml/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (954 Bytes). View file
|
|
|
venv/Lib/site-packages/certifi-2024.12.14.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
venv/Lib/site-packages/certifi-2024.12.14.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
This package contains a modified version of ca-bundle.crt:
|
| 2 |
+
|
| 3 |
+
ca-bundle.crt -- Bundle of CA Root Certificates
|
| 4 |
+
|
| 5 |
+
This is a bundle of X.509 certificates of public Certificate Authorities
|
| 6 |
+
(CA). These were automatically extracted from Mozilla's root certificates
|
| 7 |
+
file (certdata.txt). This file can be found in the mozilla source tree:
|
| 8 |
+
https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
|
| 9 |
+
It contains the certificates in PEM format and therefore
|
| 10 |
+
can be directly used with curl / libcurl / php_curl, or with
|
| 11 |
+
an Apache+mod_ssl webserver for SSL client authentication.
|
| 12 |
+
Just configure this file as the SSLCACertificateFile.#
|
| 13 |
+
|
| 14 |
+
***** BEGIN LICENSE BLOCK *****
|
| 15 |
+
This Source Code Form is subject to the terms of the Mozilla Public License,
|
| 16 |
+
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
|
| 17 |
+
one at http://mozilla.org/MPL/2.0/.
|
| 18 |
+
|
| 19 |
+
***** END LICENSE BLOCK *****
|
| 20 |
+
@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
|
venv/Lib/site-packages/certifi-2024.12.14.dist-info/METADATA
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: certifi
|
| 3 |
+
Version: 2024.12.14
|
| 4 |
+
Summary: Python package for providing Mozilla's CA Bundle.
|
| 5 |
+
Home-page: https://github.com/certifi/python-certifi
|
| 6 |
+
Author: Kenneth Reitz
|
| 7 |
+
Author-email: me@kennethreitz.com
|
| 8 |
+
License: MPL-2.0
|
| 9 |
+
Project-URL: Source, https://github.com/certifi/python-certifi
|
| 10 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
|
| 13 |
+
Classifier: Natural Language :: English
|
| 14 |
+
Classifier: Programming Language :: Python
|
| 15 |
+
Classifier: Programming Language :: Python :: 3
|
| 16 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.6
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 25 |
+
Requires-Python: >=3.6
|
| 26 |
+
License-File: LICENSE
|
| 27 |
+
|
| 28 |
+
Certifi: Python SSL Certificates
|
| 29 |
+
================================
|
| 30 |
+
|
| 31 |
+
Certifi provides Mozilla's carefully curated collection of Root Certificates for
|
| 32 |
+
validating the trustworthiness of SSL certificates while verifying the identity
|
| 33 |
+
of TLS hosts. It has been extracted from the `Requests`_ project.
|
| 34 |
+
|
| 35 |
+
Installation
|
| 36 |
+
------------
|
| 37 |
+
|
| 38 |
+
``certifi`` is available on PyPI. Simply install it with ``pip``::
|
| 39 |
+
|
| 40 |
+
$ pip install certifi
|
| 41 |
+
|
| 42 |
+
Usage
|
| 43 |
+
-----
|
| 44 |
+
|
| 45 |
+
To reference the installed certificate authority (CA) bundle, you can use the
|
| 46 |
+
built-in function::
|
| 47 |
+
|
| 48 |
+
>>> import certifi
|
| 49 |
+
|
| 50 |
+
>>> certifi.where()
|
| 51 |
+
'/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
|
| 52 |
+
|
| 53 |
+
Or from the command line::
|
| 54 |
+
|
| 55 |
+
$ python -m certifi
|
| 56 |
+
/usr/local/lib/python3.7/site-packages/certifi/cacert.pem
|
| 57 |
+
|
| 58 |
+
Enjoy!
|
| 59 |
+
|
| 60 |
+
.. _`Requests`: https://requests.readthedocs.io/en/master/
|
| 61 |
+
|
| 62 |
+
Addition/Removal of Certificates
|
| 63 |
+
--------------------------------
|
| 64 |
+
|
| 65 |
+
Certifi does not support any addition/removal or other modification of the
|
| 66 |
+
CA trust store content. This project is intended to provide a reliable and
|
| 67 |
+
highly portable root of trust to python deployments. Look to upstream projects
|
| 68 |
+
for methods to use alternate trust.
|
venv/Lib/site-packages/certifi-2024.12.14.dist-info/RECORD
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
certifi-2024.12.14.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
certifi-2024.12.14.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
|
| 3 |
+
certifi-2024.12.14.dist-info/METADATA,sha256=z71eRGTFszr4qsHenZ_vG2Fd5bV9PBWmJgShthc8IkY,2274
|
| 4 |
+
certifi-2024.12.14.dist-info/RECORD,,
|
| 5 |
+
certifi-2024.12.14.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
|
| 6 |
+
certifi-2024.12.14.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
|
| 7 |
+
certifi/__init__.py,sha256=LqjNcwt1sYSS3uhPXrf6jJzVCuHtNVpuirg5rb7mVm8,94
|
| 8 |
+
certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
|
| 9 |
+
certifi/__pycache__/__init__.cpython-311.pyc,,
|
| 10 |
+
certifi/__pycache__/__main__.cpython-311.pyc,,
|
| 11 |
+
certifi/__pycache__/core.cpython-311.pyc,,
|
| 12 |
+
certifi/cacert.pem,sha256=gHiXJU84Oif0XkT0llbzeKurIUHt5DpK08JCCll90j8,294769
|
| 13 |
+
certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426
|
| 14 |
+
certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
venv/Lib/site-packages/certifi-2024.12.14.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (75.6.0)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
venv/Lib/site-packages/certifi-2024.12.14.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
certifi
|
venv/Lib/site-packages/certifi/__init__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .core import contents, where
|
| 2 |
+
|
| 3 |
+
__all__ = ["contents", "where"]
|
| 4 |
+
__version__ = "2024.12.14"
|
venv/Lib/site-packages/certifi/__main__.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import argparse
|
| 2 |
+
|
| 3 |
+
from certifi import contents, where
|
| 4 |
+
|
| 5 |
+
parser = argparse.ArgumentParser()
|
| 6 |
+
parser.add_argument("-c", "--contents", action="store_true")
|
| 7 |
+
args = parser.parse_args()
|
| 8 |
+
|
| 9 |
+
if args.contents:
|
| 10 |
+
print(contents())
|
| 11 |
+
else:
|
| 12 |
+
print(where())
|
venv/Lib/site-packages/certifi/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (349 Bytes). View file
|
|
|
venv/Lib/site-packages/certifi/__pycache__/__main__.cpython-311.pyc
ADDED
|
Binary file (738 Bytes). View file
|
|
|
venv/Lib/site-packages/certifi/__pycache__/core.cpython-311.pyc
ADDED
|
Binary file (3.78 kB). View file
|
|
|
venv/Lib/site-packages/certifi/cacert.pem
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
venv/Lib/site-packages/certifi/core.py
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
certifi.py
|
| 3 |
+
~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module returns the installation location of cacert.pem or its contents.
|
| 6 |
+
"""
|
| 7 |
+
import sys
|
| 8 |
+
import atexit
|
| 9 |
+
|
| 10 |
+
def exit_cacert_ctx() -> None:
|
| 11 |
+
_CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
if sys.version_info >= (3, 11):
|
| 15 |
+
|
| 16 |
+
from importlib.resources import as_file, files
|
| 17 |
+
|
| 18 |
+
_CACERT_CTX = None
|
| 19 |
+
_CACERT_PATH = None
|
| 20 |
+
|
| 21 |
+
def where() -> str:
|
| 22 |
+
# This is slightly terrible, but we want to delay extracting the file
|
| 23 |
+
# in cases where we're inside of a zipimport situation until someone
|
| 24 |
+
# actually calls where(), but we don't want to re-extract the file
|
| 25 |
+
# on every call of where(), so we'll do it once then store it in a
|
| 26 |
+
# global variable.
|
| 27 |
+
global _CACERT_CTX
|
| 28 |
+
global _CACERT_PATH
|
| 29 |
+
if _CACERT_PATH is None:
|
| 30 |
+
# This is slightly janky, the importlib.resources API wants you to
|
| 31 |
+
# manage the cleanup of this file, so it doesn't actually return a
|
| 32 |
+
# path, it returns a context manager that will give you the path
|
| 33 |
+
# when you enter it and will do any cleanup when you leave it. In
|
| 34 |
+
# the common case of not needing a temporary file, it will just
|
| 35 |
+
# return the file system location and the __exit__() is a no-op.
|
| 36 |
+
#
|
| 37 |
+
# We also have to hold onto the actual context manager, because
|
| 38 |
+
# it will do the cleanup whenever it gets garbage collected, so
|
| 39 |
+
# we will also store that at the global level as well.
|
| 40 |
+
_CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
|
| 41 |
+
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
| 42 |
+
atexit.register(exit_cacert_ctx)
|
| 43 |
+
|
| 44 |
+
return _CACERT_PATH
|
| 45 |
+
|
| 46 |
+
def contents() -> str:
|
| 47 |
+
return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
|
| 48 |
+
|
| 49 |
+
elif sys.version_info >= (3, 7):
|
| 50 |
+
|
| 51 |
+
from importlib.resources import path as get_path, read_text
|
| 52 |
+
|
| 53 |
+
_CACERT_CTX = None
|
| 54 |
+
_CACERT_PATH = None
|
| 55 |
+
|
| 56 |
+
def where() -> str:
|
| 57 |
+
# This is slightly terrible, but we want to delay extracting the
|
| 58 |
+
# file in cases where we're inside of a zipimport situation until
|
| 59 |
+
# someone actually calls where(), but we don't want to re-extract
|
| 60 |
+
# the file on every call of where(), so we'll do it once then store
|
| 61 |
+
# it in a global variable.
|
| 62 |
+
global _CACERT_CTX
|
| 63 |
+
global _CACERT_PATH
|
| 64 |
+
if _CACERT_PATH is None:
|
| 65 |
+
# This is slightly janky, the importlib.resources API wants you
|
| 66 |
+
# to manage the cleanup of this file, so it doesn't actually
|
| 67 |
+
# return a path, it returns a context manager that will give
|
| 68 |
+
# you the path when you enter it and will do any cleanup when
|
| 69 |
+
# you leave it. In the common case of not needing a temporary
|
| 70 |
+
# file, it will just return the file system location and the
|
| 71 |
+
# __exit__() is a no-op.
|
| 72 |
+
#
|
| 73 |
+
# We also have to hold onto the actual context manager, because
|
| 74 |
+
# it will do the cleanup whenever it gets garbage collected, so
|
| 75 |
+
# we will also store that at the global level as well.
|
| 76 |
+
_CACERT_CTX = get_path("certifi", "cacert.pem")
|
| 77 |
+
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
| 78 |
+
atexit.register(exit_cacert_ctx)
|
| 79 |
+
|
| 80 |
+
return _CACERT_PATH
|
| 81 |
+
|
| 82 |
+
def contents() -> str:
|
| 83 |
+
return read_text("certifi", "cacert.pem", encoding="ascii")
|
| 84 |
+
|
| 85 |
+
else:
|
| 86 |
+
import os
|
| 87 |
+
import types
|
| 88 |
+
from typing import Union
|
| 89 |
+
|
| 90 |
+
Package = Union[types.ModuleType, str]
|
| 91 |
+
Resource = Union[str, "os.PathLike"]
|
| 92 |
+
|
| 93 |
+
# This fallback will work for Python versions prior to 3.7 that lack the
|
| 94 |
+
# importlib.resources module but relies on the existing `where` function
|
| 95 |
+
# so won't address issues with environments like PyOxidizer that don't set
|
| 96 |
+
# __file__ on modules.
|
| 97 |
+
def read_text(
|
| 98 |
+
package: Package,
|
| 99 |
+
resource: Resource,
|
| 100 |
+
encoding: str = 'utf-8',
|
| 101 |
+
errors: str = 'strict'
|
| 102 |
+
) -> str:
|
| 103 |
+
with open(where(), encoding=encoding) as data:
|
| 104 |
+
return data.read()
|
| 105 |
+
|
| 106 |
+
# If we don't have importlib.resources, then we will just do the old logic
|
| 107 |
+
# of assuming we're on the filesystem and munge the path directly.
|
| 108 |
+
def where() -> str:
|
| 109 |
+
f = os.path.dirname(__file__)
|
| 110 |
+
|
| 111 |
+
return os.path.join(f, "cacert.pem")
|
| 112 |
+
|
| 113 |
+
def contents() -> str:
|
| 114 |
+
return read_text("certifi", "cacert.pem", encoding="ascii")
|
venv/Lib/site-packages/certifi/py.typed
ADDED
|
File without changes
|
venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2025 TAHRI Ahmed R.
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,721 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: charset-normalizer
|
| 3 |
+
Version: 3.4.1
|
| 4 |
+
Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
|
| 5 |
+
Author-email: "Ahmed R. TAHRI" <tahri.ahmed@proton.me>
|
| 6 |
+
Maintainer-email: "Ahmed R. TAHRI" <tahri.ahmed@proton.me>
|
| 7 |
+
License: MIT
|
| 8 |
+
Project-URL: Changelog, https://github.com/jawah/charset_normalizer/blob/master/CHANGELOG.md
|
| 9 |
+
Project-URL: Documentation, https://charset-normalizer.readthedocs.io/
|
| 10 |
+
Project-URL: Code, https://github.com/jawah/charset_normalizer
|
| 11 |
+
Project-URL: Issue tracker, https://github.com/jawah/charset_normalizer/issues
|
| 12 |
+
Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect
|
| 13 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 14 |
+
Classifier: Intended Audience :: Developers
|
| 15 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 16 |
+
Classifier: Operating System :: OS Independent
|
| 17 |
+
Classifier: Programming Language :: Python
|
| 18 |
+
Classifier: Programming Language :: Python :: 3
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 26 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 27 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 28 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 29 |
+
Classifier: Topic :: Text Processing :: Linguistic
|
| 30 |
+
Classifier: Topic :: Utilities
|
| 31 |
+
Classifier: Typing :: Typed
|
| 32 |
+
Requires-Python: >=3.7
|
| 33 |
+
Description-Content-Type: text/markdown
|
| 34 |
+
License-File: LICENSE
|
| 35 |
+
Provides-Extra: unicode-backport
|
| 36 |
+
|
| 37 |
+
<h1 align="center">Charset Detection, for Everyone 👋</h1>
|
| 38 |
+
|
| 39 |
+
<p align="center">
|
| 40 |
+
<sup>The Real First Universal Charset Detector</sup><br>
|
| 41 |
+
<a href="https://pypi.org/project/charset-normalizer">
|
| 42 |
+
<img src="https://img.shields.io/pypi/pyversions/charset_normalizer.svg?orange=blue" />
|
| 43 |
+
</a>
|
| 44 |
+
<a href="https://pepy.tech/project/charset-normalizer/">
|
| 45 |
+
<img alt="Download Count Total" src="https://static.pepy.tech/badge/charset-normalizer/month" />
|
| 46 |
+
</a>
|
| 47 |
+
<a href="https://bestpractices.coreinfrastructure.org/projects/7297">
|
| 48 |
+
<img src="https://bestpractices.coreinfrastructure.org/projects/7297/badge">
|
| 49 |
+
</a>
|
| 50 |
+
</p>
|
| 51 |
+
<p align="center">
|
| 52 |
+
<sup><i>Featured Packages</i></sup><br>
|
| 53 |
+
<a href="https://github.com/jawah/niquests">
|
| 54 |
+
<img alt="Static Badge" src="https://img.shields.io/badge/Niquests-Best_HTTP_Client-cyan">
|
| 55 |
+
</a>
|
| 56 |
+
<a href="https://github.com/jawah/wassima">
|
| 57 |
+
<img alt="Static Badge" src="https://img.shields.io/badge/Wassima-Certifi_Killer-cyan">
|
| 58 |
+
</a>
|
| 59 |
+
</p>
|
| 60 |
+
<p align="center">
|
| 61 |
+
<sup><i>In other language (unofficial port - by the community)</i></sup><br>
|
| 62 |
+
<a href="https://github.com/nickspring/charset-normalizer-rs">
|
| 63 |
+
<img alt="Static Badge" src="https://img.shields.io/badge/Rust-red">
|
| 64 |
+
</a>
|
| 65 |
+
</p>
|
| 66 |
+
|
| 67 |
+
> A library that helps you read text from an unknown charset encoding.<br /> Motivated by `chardet`,
|
| 68 |
+
> I'm trying to resolve the issue by taking a new approach.
|
| 69 |
+
> All IANA character set names for which the Python core library provides codecs are supported.
|
| 70 |
+
|
| 71 |
+
<p align="center">
|
| 72 |
+
>>>>> <a href="https://charsetnormalizerweb.ousret.now.sh" target="_blank">👉 Try Me Online Now, Then Adopt Me 👈 </a> <<<<<
|
| 73 |
+
</p>
|
| 74 |
+
|
| 75 |
+
This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
|
| 76 |
+
|
| 77 |
+
| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
|
| 78 |
+
|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|
|
| 79 |
+
| `Fast` | ❌ | ✅ | ✅ |
|
| 80 |
+
| `Universal**` | ❌ | ✅ | ❌ |
|
| 81 |
+
| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
|
| 82 |
+
| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
|
| 83 |
+
| `License` | LGPL-2.1<br>_restrictive_ | MIT | MPL-1.1<br>_restrictive_ |
|
| 84 |
+
| `Native Python` | ✅ | ✅ | ❌ |
|
| 85 |
+
| `Detect spoken language` | ❌ | ✅ | N/A |
|
| 86 |
+
| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
|
| 87 |
+
| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
|
| 88 |
+
| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
|
| 89 |
+
|
| 90 |
+
<p align="center">
|
| 91 |
+
<img src="https://i.imgflip.com/373iay.gif" alt="Reading Normalized Text" width="226"/><img src="https://media.tenor.com/images/c0180f70732a18b4965448d33adba3d0/tenor.gif" alt="Cat Reading Text" width="200"/>
|
| 92 |
+
</p>
|
| 93 |
+
|
| 94 |
+
*\*\* : They are clearly using specific code for a specific encoding even if covering most of the used ones*<br>
|
| 95 |
+
|
| 96 |
+
## ⚡ Performance
|
| 97 |
+
|
| 98 |
+
This package offers better performance than its counterpart, Chardet. Here are some numbers.
|
| 99 |
+
|
| 100 |
+
| Package | Accuracy | Mean per file (ms) | File per sec (est) |
|
| 101 |
+
|-----------------------------------------------|:--------:|:------------------:|:------------------:|
|
| 102 |
+
| [chardet](https://github.com/chardet/chardet) | 86 % | 63 ms | 16 file/sec |
|
| 103 |
+
| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
|
| 104 |
+
|
| 105 |
+
| Package | 99th percentile | 95th percentile | 50th percentile |
|
| 106 |
+
|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
|
| 107 |
+
| [chardet](https://github.com/chardet/chardet) | 265 ms | 71 ms | 7 ms |
|
| 108 |
+
| charset-normalizer | 100 ms | 50 ms | 5 ms |
|
| 109 |
+
|
| 110 |
+
_updated as of december 2024 using CPython 3.12_
|
| 111 |
+
|
| 112 |
+
Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
|
| 113 |
+
|
| 114 |
+
> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows.
|
| 115 |
+
> And yes, these results might change at any time. The dataset can be updated to include more files.
|
| 116 |
+
> The actual delays heavily depends on your CPU capabilities. The factors should remain the same.
|
| 117 |
+
> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability
|
| 118 |
+
> (e.g. Supported Encoding) Challenge-them if you want.
|
| 119 |
+
|
| 120 |
+
## ✨ Installation
|
| 121 |
+
|
| 122 |
+
Using pip:
|
| 123 |
+
|
| 124 |
+
```sh
|
| 125 |
+
pip install charset-normalizer -U
|
| 126 |
+
```
|
| 127 |
+
|
| 128 |
+
## 🚀 Basic Usage
|
| 129 |
+
|
| 130 |
+
### CLI
|
| 131 |
+
This package comes with a CLI.
|
| 132 |
+
|
| 133 |
+
```
|
| 134 |
+
usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
|
| 135 |
+
file [file ...]
|
| 136 |
+
|
| 137 |
+
The Real First Universal Charset Detector. Discover originating encoding used
|
| 138 |
+
on text file. Normalize text to unicode.
|
| 139 |
+
|
| 140 |
+
positional arguments:
|
| 141 |
+
  file                  File(s) to be analysed
|
| 142 |
+
|
| 143 |
+
optional arguments:
|
| 144 |
+
-h, --help show this help message and exit
|
| 145 |
+
-v, --verbose Display complementary information about file if any.
|
| 146 |
+
Stdout will contain logs about the detection process.
|
| 147 |
+
-a, --with-alternative
|
| 148 |
+
Output complementary possibilities if any. Top-level
|
| 149 |
+
JSON WILL be a list.
|
| 150 |
+
-n, --normalize Permit to normalize input file. If not set, program
|
| 151 |
+
does not write anything.
|
| 152 |
+
-m, --minimal Only output the charset detected to STDOUT. Disabling
|
| 153 |
+
JSON output.
|
| 154 |
+
-r, --replace Replace file when trying to normalize it instead of
|
| 155 |
+
creating a new one.
|
| 156 |
+
-f, --force Replace file without asking if you are sure, use this
|
| 157 |
+
flag with caution.
|
| 158 |
+
-t THRESHOLD, --threshold THRESHOLD
|
| 159 |
+
Define a custom maximum amount of chaos allowed in
|
| 160 |
+
decoded content. 0. <= chaos <= 1.
|
| 161 |
+
--version Show version information and exit.
|
| 162 |
+
```
|
| 163 |
+
|
| 164 |
+
```bash
|
| 165 |
+
normalizer ./data/sample.1.fr.srt
|
| 166 |
+
```
|
| 167 |
+
|
| 168 |
+
or
|
| 169 |
+
|
| 170 |
+
```bash
|
| 171 |
+
python -m charset_normalizer ./data/sample.1.fr.srt
|
| 172 |
+
```
|
| 173 |
+
|
| 174 |
+
🎉 Since version 1.4.0 the CLI produces easily usable stdout results in JSON format.
|
| 175 |
+
|
| 176 |
+
```json
|
| 177 |
+
{
|
| 178 |
+
"path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
|
| 179 |
+
"encoding": "cp1252",
|
| 180 |
+
"encoding_aliases": [
|
| 181 |
+
"1252",
|
| 182 |
+
"windows_1252"
|
| 183 |
+
],
|
| 184 |
+
"alternative_encodings": [
|
| 185 |
+
"cp1254",
|
| 186 |
+
"cp1256",
|
| 187 |
+
"cp1258",
|
| 188 |
+
"iso8859_14",
|
| 189 |
+
"iso8859_15",
|
| 190 |
+
"iso8859_16",
|
| 191 |
+
"iso8859_3",
|
| 192 |
+
"iso8859_9",
|
| 193 |
+
"latin_1",
|
| 194 |
+
"mbcs"
|
| 195 |
+
],
|
| 196 |
+
"language": "French",
|
| 197 |
+
"alphabets": [
|
| 198 |
+
"Basic Latin",
|
| 199 |
+
"Latin-1 Supplement"
|
| 200 |
+
],
|
| 201 |
+
"has_sig_or_bom": false,
|
| 202 |
+
"chaos": 0.149,
|
| 203 |
+
"coherence": 97.152,
|
| 204 |
+
"unicode_path": null,
|
| 205 |
+
"is_preferred": true
|
| 206 |
+
}
|
| 207 |
+
```
|
| 208 |
+
|
| 209 |
+
### Python
|
| 210 |
+
*Just print out normalized text*
|
| 211 |
+
```python
|
| 212 |
+
from charset_normalizer import from_path
|
| 213 |
+
|
| 214 |
+
results = from_path('./my_subtitle.srt')
|
| 215 |
+
|
| 216 |
+
print(str(results.best()))
|
| 217 |
+
```
|
| 218 |
+
|
| 219 |
+
*Upgrade your code without effort*
|
| 220 |
+
```python
|
| 221 |
+
from charset_normalizer import detect
|
| 222 |
+
```
|
| 223 |
+
|
| 224 |
+
The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.
|
| 225 |
+
|
| 226 |
+
See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
|
| 227 |
+
|
| 228 |
+
## 😇 Why
|
| 229 |
+
|
| 230 |
+
When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
|
| 231 |
+
reliable alternative using a completely different method. Also! I never back down on a good challenge!
|
| 232 |
+
|
| 233 |
+
I **don't care** about the **originating charset** encoding, because **two different tables** can
|
| 234 |
+
produce **two identical rendered string.**
|
| 235 |
+
What I want is to get readable text, the best I can.
|
| 236 |
+
|
| 237 |
+
In a way, **I'm brute forcing text decoding.** How cool is that ? 😎
|
| 238 |
+
|
| 239 |
+
Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair Unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode.
|
| 240 |
+
|
| 241 |
+
## 🍰 How
|
| 242 |
+
|
| 243 |
+
- Discard all charset encoding table that could not fit the binary content.
|
| 244 |
+
- Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding.
|
| 245 |
+
- Extract matches with the lowest mess detected.
|
| 246 |
+
- Additionally, we measure coherence / probe for a language.
|
| 247 |
+
|
| 248 |
+
**Wait a minute**, what is noise/mess and coherence according to **YOU ?**
|
| 249 |
+
|
| 250 |
+
*Noise :* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
|
| 251 |
+
**I established** some ground rules about **what is obvious** when **it seems like** a mess (aka. defining noise in rendered text).
|
| 252 |
+
I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to
|
| 253 |
+
improve or rewrite it.
|
| 254 |
+
|
| 255 |
+
*Coherence :* For each language on Earth, we have computed ranked letter-appearance occurrences (the best we can). So I thought
|
| 256 |
+
that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design.
|
| 257 |
+
|
| 258 |
+
## ⚡ Known limitations
|
| 259 |
+
|
| 260 |
+
- Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters))
|
| 261 |
+
- Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
|
| 262 |
+
|
| 263 |
+
## ⚠️ About Python EOLs
|
| 264 |
+
|
| 265 |
+
**If you are running:**
|
| 266 |
+
|
| 267 |
+
- Python >=2.7,<3.5: Unsupported
|
| 268 |
+
- Python 3.5: charset-normalizer < 2.1
|
| 269 |
+
- Python 3.6: charset-normalizer < 3.1
|
| 270 |
+
- Python 3.7: charset-normalizer < 4.0
|
| 271 |
+
|
| 272 |
+
Upgrade your Python interpreter as soon as possible.
|
| 273 |
+
|
| 274 |
+
## 👤 Contributing
|
| 275 |
+
|
| 276 |
+
Contributions, issues and feature requests are very much welcome.<br />
|
| 277 |
+
Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
|
| 278 |
+
|
| 279 |
+
## 📝 License
|
| 280 |
+
|
| 281 |
+
Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).<br />
|
| 282 |
+
This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
|
| 283 |
+
|
| 284 |
+
Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
|
| 285 |
+
|
| 286 |
+
## 💼 For Enterprise
|
| 287 |
+
|
| 288 |
+
Professional support for charset-normalizer is available as part of the [Tidelift
|
| 289 |
+
Subscription][1]. Tidelift gives software development teams a single source for
|
| 290 |
+
purchasing and maintaining their software, with professional grade assurances
|
| 291 |
+
from the experts who know it best, while seamlessly integrating with existing
|
| 292 |
+
tools.
|
| 293 |
+
|
| 294 |
+
[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme
|
| 295 |
+
|
| 296 |
+
[](https://www.bestpractices.dev/projects/7297)
|
| 297 |
+
|
| 298 |
+
# Changelog
|
| 299 |
+
All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
| 300 |
+
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
| 301 |
+
|
| 302 |
+
## [3.4.1](https://github.com/Ousret/charset_normalizer/compare/3.4.0...3.4.1) (2024-12-24)
|
| 303 |
+
|
| 304 |
+
### Changed
|
| 305 |
+
- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg` using setuptools as the build backend.
|
| 306 |
+
- Enforce annotation delayed loading for a simpler and consistent types in the project.
|
| 307 |
+
- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8
|
| 308 |
+
|
| 309 |
+
### Added
|
| 310 |
+
- pre-commit configuration.
|
| 311 |
+
- noxfile.
|
| 312 |
+
|
| 313 |
+
### Removed
|
| 314 |
+
- `build-requirements.txt` as per using `pyproject.toml` native build configuration.
|
| 315 |
+
- `bin/integration.py` and `bin/serve.py` in favor of downstream integration test (see noxfile).
|
| 316 |
+
- `setup.cfg` in favor of `pyproject.toml` metadata configuration.
|
| 317 |
+
- Unused `utils.range_scan` function.
|
| 318 |
+
|
| 319 |
+
### Fixed
|
| 320 |
+
- Converting content to Unicode bytes may insert `utf_8` instead of preferred `utf-8`. (#572)
|
| 321 |
+
- Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+
|
| 322 |
+
|
| 323 |
+
## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)
|
| 324 |
+
|
| 325 |
+
### Added
|
| 326 |
+
- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints.
|
| 327 |
+
- Support for Python 3.13 (#512)
|
| 328 |
+
|
| 329 |
+
### Fixed
|
| 330 |
+
- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch.
|
| 331 |
+
- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537)
|
| 332 |
+
- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381)
|
| 333 |
+
|
| 334 |
+
## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)
|
| 335 |
+
|
| 336 |
+
### Fixed
|
| 337 |
+
- Unintentional memory usage regression when using large payload that match several encoding (#376)
|
| 338 |
+
- Regression on some detection case showcased in the documentation (#371)
|
| 339 |
+
|
| 340 |
+
### Added
|
| 341 |
+
- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife)
|
| 342 |
+
|
| 343 |
+
## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)
|
| 344 |
+
|
| 345 |
+
### Changed
|
| 346 |
+
- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8
|
| 347 |
+
- Improved the general detection reliability based on reports from the community
|
| 348 |
+
|
| 349 |
+
## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)
|
| 350 |
+
|
| 351 |
+
### Added
|
| 352 |
+
- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`
|
| 353 |
+
- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323)
|
| 354 |
+
|
| 355 |
+
### Removed
|
| 356 |
+
- (internal) Redundant utils.is_ascii function and unused function is_private_use_only
|
| 357 |
+
- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant
|
| 358 |
+
|
| 359 |
+
### Changed
|
| 360 |
+
- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection
|
| 361 |
+
- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8
|
| 362 |
+
|
| 363 |
+
### Fixed
|
| 364 |
+
- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350)
|
| 365 |
+
|
| 366 |
+
## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)
|
| 367 |
+
|
| 368 |
+
### Changed
|
| 369 |
+
- Typehint for function `from_path` no longer enforce `PathLike` as its first argument
|
| 370 |
+
- Minor improvement over the global detection reliability
|
| 371 |
+
|
| 372 |
+
### Added
|
| 373 |
+
- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries
|
| 374 |
+
- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True)
|
| 375 |
+
- Explicit support for Python 3.12
|
| 376 |
+
|
| 377 |
+
### Fixed
|
| 378 |
+
- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)
|
| 379 |
+
|
| 380 |
+
## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)
|
| 381 |
+
|
| 382 |
+
### Added
|
| 383 |
+
- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262)
|
| 384 |
+
|
| 385 |
+
### Removed
|
| 386 |
+
- Support for Python 3.6 (PR #260)
|
| 387 |
+
|
| 388 |
+
### Changed
|
| 389 |
+
- Optional speedup provided by mypy/c 1.0.1
|
| 390 |
+
|
| 391 |
+
## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)
|
| 392 |
+
|
| 393 |
+
### Fixed
|
| 394 |
+
- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233)
|
| 395 |
+
|
| 396 |
+
### Changed
|
| 397 |
+
- Speedup provided by mypy/c 0.990 on Python >= 3.7
|
| 398 |
+
|
| 399 |
+
## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)
|
| 400 |
+
|
| 401 |
+
### Added
|
| 402 |
+
- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
|
| 403 |
+
- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
|
| 404 |
+
- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
|
| 405 |
+
- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
|
| 406 |
+
|
| 407 |
+
### Changed
|
| 408 |
+
- Build with static metadata using 'build' frontend
|
| 409 |
+
- Make the language detection stricter
|
| 410 |
+
- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
|
| 411 |
+
|
| 412 |
+
### Fixed
|
| 413 |
+
- CLI with opt --normalize fail when using full path for files
|
| 414 |
+
- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
|
| 415 |
+
- Sphinx warnings when generating the documentation
|
| 416 |
+
|
| 417 |
+
### Removed
|
| 418 |
+
- Coherence detector no longer return 'Simple English' instead return 'English'
|
| 419 |
+
- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
|
| 420 |
+
- Breaking: Method `first()` and `best()` from CharsetMatch
|
| 421 |
+
- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
|
| 422 |
+
- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
|
| 423 |
+
- Breaking: Top-level function `normalize`
|
| 424 |
+
- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
|
| 425 |
+
- Support for the backport `unicodedata2`
|
| 426 |
+
|
| 427 |
+
## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)
|
| 428 |
+
|
| 429 |
+
### Added
|
| 430 |
+
- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results
|
| 431 |
+
- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
|
| 432 |
+
- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
|
| 433 |
+
|
| 434 |
+
### Changed
|
| 435 |
+
- Build with static metadata using 'build' frontend
|
| 436 |
+
- Make the language detection stricter
|
| 437 |
+
|
| 438 |
+
### Fixed
|
| 439 |
+
- CLI with opt --normalize fail when using full path for files
|
| 440 |
+
- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it
|
| 441 |
+
|
| 442 |
+
### Removed
|
| 443 |
+
- Coherence detector no longer return 'Simple English' instead return 'English'
|
| 444 |
+
- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese'
|
| 445 |
+
|
| 446 |
+
## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)
|
| 447 |
+
|
| 448 |
+
### Added
|
| 449 |
+
- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl)
|
| 450 |
+
|
| 451 |
+
### Removed
|
| 452 |
+
- Breaking: Method `first()` and `best()` from CharsetMatch
|
| 453 |
+
- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII)
|
| 454 |
+
|
| 455 |
+
### Fixed
|
| 456 |
+
- Sphinx warnings when generating the documentation
|
| 457 |
+
|
| 458 |
+
## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)
|
| 459 |
+
|
| 460 |
+
### Changed
|
| 461 |
+
- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1
|
| 462 |
+
|
| 463 |
+
### Removed
|
| 464 |
+
- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
|
| 465 |
+
- Breaking: Top-level function `normalize`
|
| 466 |
+
- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
|
| 467 |
+
- Support for the backport `unicodedata2`
|
| 468 |
+
|
| 469 |
+
## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)
|
| 470 |
+
|
| 471 |
+
### Deprecated
|
| 472 |
+
- Function `normalize` scheduled for removal in 3.0
|
| 473 |
+
|
| 474 |
+
### Changed
|
| 475 |
+
- Removed useless call to decode in fn is_unprintable (#206)
|
| 476 |
+
|
| 477 |
+
### Fixed
|
| 478 |
+
- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)
|
| 479 |
+
|
| 480 |
+
## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)
|
| 481 |
+
|
| 482 |
+
### Added
|
| 483 |
+
- Output the Unicode table version when running the CLI with `--version` (PR #194)
|
| 484 |
+
|
| 485 |
+
### Changed
|
| 486 |
+
- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)
|
| 487 |
+
- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)
|
| 488 |
+
|
| 489 |
+
### Fixed
|
| 490 |
+
- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175)
|
| 491 |
+
- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)
|
| 492 |
+
|
| 493 |
+
### Removed
|
| 494 |
+
- Support for Python 3.5 (PR #192)
|
| 495 |
+
|
| 496 |
+
### Deprecated
|
| 497 |
+
- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)
|
| 498 |
+
|
| 499 |
+
## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)
|
| 500 |
+
|
| 501 |
+
### Fixed
|
| 502 |
+
- ASCII miss-detection on rare cases (PR #170)
|
| 503 |
+
|
| 504 |
+
## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)
|
| 505 |
+
|
| 506 |
+
### Added
|
| 507 |
+
- Explicit support for Python 3.11 (PR #164)
|
| 508 |
+
|
| 509 |
+
### Changed
|
| 510 |
+
- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)
|
| 511 |
+
|
| 512 |
+
## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)
|
| 513 |
+
|
| 514 |
+
### Fixed
|
| 515 |
+
- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154)
|
| 516 |
+
|
| 517 |
+
### Changed
|
| 518 |
+
- Skipping the language-detection (CD) on ASCII (PR #155)
|
| 519 |
+
|
| 520 |
+
## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)
|
| 521 |
+
|
| 522 |
+
### Changed
|
| 523 |
+
- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)
|
| 524 |
+
|
| 525 |
+
### Fixed
|
| 526 |
+
- Wrong logging level applied when setting kwarg `explain` to True (PR #146)
|
| 527 |
+
|
| 528 |
+
## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)
|
| 529 |
+
### Changed
|
| 530 |
+
- Improvement over Vietnamese detection (PR #126)
|
| 531 |
+
- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)
|
| 532 |
+
- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)
|
| 533 |
+
- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)
|
| 534 |
+
- Code style as refactored by Sourcery-AI (PR #131)
|
| 535 |
+
- Minor adjustment on the MD around european words (PR #133)
|
| 536 |
+
- Remove and replace SRTs from assets / tests (PR #139)
|
| 537 |
+
- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)
|
| 538 |
+
- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)
|
| 539 |
+
|
| 540 |
+
### Fixed
|
| 541 |
+
- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)
|
| 542 |
+
- Avoid using too insignificant chunk (PR #137)
|
| 543 |
+
|
| 544 |
+
### Added
|
| 545 |
+
- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)
|
| 546 |
+
- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)
|
| 547 |
+
|
| 548 |
+
## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)
|
| 549 |
+
### Added
|
| 550 |
+
- Add support for Kazakh (Cyrillic) language detection (PR #109)
|
| 551 |
+
|
| 552 |
+
### Changed
|
| 553 |
+
- Further, improve inferring the language from a given single-byte code page (PR #112)
|
| 554 |
+
- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)
|
| 555 |
+
- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)
|
| 556 |
+
- Various detection improvement (MD+CD) (PR #117)
|
| 557 |
+
|
| 558 |
+
### Removed
|
| 559 |
+
- Remove redundant logging entry about detected language(s) (PR #115)
|
| 560 |
+
|
| 561 |
+
### Fixed
|
| 562 |
+
- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)
|
| 563 |
+
|
| 564 |
+
## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)
|
| 565 |
+
### Fixed
|
| 566 |
+
- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100)
|
| 567 |
+
- Fix CLI crash when using --minimal output in certain cases (PR #103)
|
| 568 |
+
|
| 569 |
+
### Changed
|
| 570 |
+
- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)
|
| 571 |
+
|
| 572 |
+
## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)
|
| 573 |
+
### Changed
|
| 574 |
+
- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81)
|
| 575 |
+
- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)
|
| 576 |
+
- The Unicode detection is slightly improved (PR #93)
|
| 577 |
+
- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91)
|
| 578 |
+
|
| 579 |
+
### Removed
|
| 580 |
+
- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92)
|
| 581 |
+
|
| 582 |
+
### Fixed
|
| 583 |
+
- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95)
|
| 584 |
+
- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)
|
| 585 |
+
- The MANIFEST.in was not exhaustive (PR #78)
|
| 586 |
+
|
| 587 |
+
## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)
|
| 588 |
+
### Fixed
|
| 589 |
+
- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70)
|
| 590 |
+
- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)
|
| 591 |
+
- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)
|
| 592 |
+
- Submatch factoring could be wrong in rare edge cases (PR #72)
|
| 593 |
+
- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)
|
| 594 |
+
- Fix line endings from CRLF to LF for certain project files (PR #67)
|
| 595 |
+
|
| 596 |
+
### Changed
|
| 597 |
+
- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)
|
| 598 |
+
- Allow fallback on specified encoding if any (PR #71)
|
| 599 |
+
|
| 600 |
+
## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)
|
| 601 |
+
### Changed
|
| 602 |
+
- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)
|
| 603 |
+
- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)
|
| 604 |
+
|
| 605 |
+
## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)
|
| 606 |
+
### Fixed
|
| 607 |
+
- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)
|
| 608 |
+
|
| 609 |
+
### Changed
|
| 610 |
+
- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)
|
| 611 |
+
|
| 612 |
+
## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)
|
| 613 |
+
### Fixed
|
| 614 |
+
- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)
|
| 615 |
+
- Using explain=False permanently disable the verbose output in the current runtime (PR #47)
|
| 616 |
+
- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47)
|
| 617 |
+
- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)
|
| 618 |
+
|
| 619 |
+
### Changed
|
| 620 |
+
- Public function normalize default args values were not aligned with from_bytes (PR #53)
|
| 621 |
+
|
| 622 |
+
### Added
|
| 623 |
+
- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)
|
| 624 |
+
|
| 625 |
+
## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)
|
| 626 |
+
### Changed
|
| 627 |
+
- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.
|
| 628 |
+
- Accent has been made on UTF-8 detection, should perform rather instantaneous.
|
| 629 |
+
- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
|
| 630 |
+
- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)
|
| 631 |
+
- The program has been rewritten to ease readability and maintainability. (+ Using static typing)
|
| 632 |
+
- utf_7 detection has been reinstated.
|
| 633 |
+
|
| 634 |
+
### Removed
|
| 635 |
+
- This package no longer require anything when used with Python 3.5 (Dropped cached_property)
|
| 636 |
+
- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.
|
| 637 |
+
- The exception hook on UnicodeDecodeError has been removed.
|
| 638 |
+
|
| 639 |
+
### Deprecated
|
| 640 |
+
- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0
|
| 641 |
+
|
| 642 |
+
### Fixed
|
| 643 |
+
- The CLI output used the relative path of the file(s). Should be absolute.
|
| 644 |
+
|
| 645 |
+
## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)
|
| 646 |
+
### Fixed
|
| 647 |
+
- Logger configuration/usage no longer conflict with others (PR #44)
|
| 648 |
+
|
| 649 |
+
## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)
|
| 650 |
+
### Removed
|
| 651 |
+
- Using standard logging instead of using the package loguru.
|
| 652 |
+
- Dropping nose test framework in favor of the maintained pytest.
|
| 653 |
+
- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.
|
| 654 |
+
- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.
|
| 655 |
+
- Stop support for UTF-7 that does not contain a SIG.
|
| 656 |
+
- Dropping PrettyTable, replaced with pure JSON output in CLI.
|
| 657 |
+
|
| 658 |
+
### Fixed
|
| 659 |
+
- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process.
|
| 660 |
+
- Not searching properly for the BOM when trying utf32/16 parent codec.
|
| 661 |
+
|
| 662 |
+
### Changed
|
| 663 |
+
- Improving the package final size by compressing frequencies.json.
|
| 664 |
+
- Huge improvement over the larges payload.
|
| 665 |
+
|
| 666 |
+
### Added
|
| 667 |
+
- CLI now produces JSON consumable output.
|
| 668 |
+
- Return ASCII if given sequences fit. Given reasonable confidence.
|
| 669 |
+
|
| 670 |
+
## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)
|
| 671 |
+
|
| 672 |
+
### Fixed
|
| 673 |
+
- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)
|
| 674 |
+
|
| 675 |
+
## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)
|
| 676 |
+
|
| 677 |
+
### Fixed
|
| 678 |
+
- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)
|
| 679 |
+
|
| 680 |
+
## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)
|
| 681 |
+
|
| 682 |
+
### Fixed
|
| 683 |
+
- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)
|
| 684 |
+
|
| 685 |
+
## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)
|
| 686 |
+
|
| 687 |
+
### Changed
|
| 688 |
+
- Amend the previous release to allow prettytable 2.0 (PR #35)
|
| 689 |
+
|
| 690 |
+
## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)
|
| 691 |
+
|
| 692 |
+
### Fixed
|
| 693 |
+
- Fix error while using the package with a python pre-release interpreter (PR #33)
|
| 694 |
+
|
| 695 |
+
### Changed
|
| 696 |
+
- Dependencies refactoring, constraints revised.
|
| 697 |
+
|
| 698 |
+
### Added
|
| 699 |
+
- Add python 3.9 and 3.10 to the supported interpreters
|
| 700 |
+
|
| 701 |
+
MIT License
|
| 702 |
+
|
| 703 |
+
Copyright (c) 2025 TAHRI Ahmed R.
|
| 704 |
+
|
| 705 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 706 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 707 |
+
in the Software without restriction, including without limitation the rights
|
| 708 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 709 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 710 |
+
furnished to do so, subject to the following conditions:
|
| 711 |
+
|
| 712 |
+
The above copyright notice and this permission notice shall be included in all
|
| 713 |
+
copies or substantial portions of the Software.
|
| 714 |
+
|
| 715 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 716 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 717 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 718 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 719 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 720 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 721 |
+
SOFTWARE.
|
venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../Scripts/normalizer.exe,sha256=fhV7Irszb6Qvu8GfUCQzuFxbCuLEdt7Vp-3XGbbM7ys,108443
|
| 2 |
+
charset_normalizer-3.4.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
charset_normalizer-3.4.1.dist-info/LICENSE,sha256=GFd0hdNwTxpHne2OVzwJds_tMV_S_ReYP6mI2kwvcNE,1092
|
| 4 |
+
charset_normalizer-3.4.1.dist-info/METADATA,sha256=0_fAC3DknimRZusm6kkP4ylPD0JVzBq5mKHWLNBJM6w,36034
|
| 5 |
+
charset_normalizer-3.4.1.dist-info/RECORD,,
|
| 6 |
+
charset_normalizer-3.4.1.dist-info/WHEEL,sha256=nkBcd8Ko0v5sEcSagm2-x_RVrb8gBSkTa8VFFZ0Mr1o,101
|
| 7 |
+
charset_normalizer-3.4.1.dist-info/entry_points.txt,sha256=8C-Y3iXIfyXQ83Tpir2B8t-XLJYpxF5xbb38d_js-h4,65
|
| 8 |
+
charset_normalizer-3.4.1.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
|
| 9 |
+
charset_normalizer/__init__.py,sha256=0NT8MHi7SKq3juMqYfOdrkzjisK0L73lneNHH4qaUAs,1638
|
| 10 |
+
charset_normalizer/__main__.py,sha256=2sj_BS6H0sU25C1bMqz9DVwa6kOK9lchSEbSU-_iu7M,115
|
| 11 |
+
charset_normalizer/__pycache__/__init__.cpython-311.pyc,,
|
| 12 |
+
charset_normalizer/__pycache__/__main__.cpython-311.pyc,,
|
| 13 |
+
charset_normalizer/__pycache__/api.cpython-311.pyc,,
|
| 14 |
+
charset_normalizer/__pycache__/cd.cpython-311.pyc,,
|
| 15 |
+
charset_normalizer/__pycache__/constant.cpython-311.pyc,,
|
| 16 |
+
charset_normalizer/__pycache__/legacy.cpython-311.pyc,,
|
| 17 |
+
charset_normalizer/__pycache__/md.cpython-311.pyc,,
|
| 18 |
+
charset_normalizer/__pycache__/models.cpython-311.pyc,,
|
| 19 |
+
charset_normalizer/__pycache__/utils.cpython-311.pyc,,
|
| 20 |
+
charset_normalizer/__pycache__/version.cpython-311.pyc,,
|
| 21 |
+
charset_normalizer/api.py,sha256=2a0p2Gnhbdo9O6C04CNxTSN23fIbgOF20nxb0pWPNFM,23285
|
| 22 |
+
charset_normalizer/cd.py,sha256=uq8nVxRpR6Guc16ACvOWtL8KO3w7vYaCh8hHisuOyTg,12917
|
| 23 |
+
charset_normalizer/cli/__init__.py,sha256=d9MUx-1V_qD3x9igIy4JT4oC5CU0yjulk7QyZWeRFhg,144
|
| 24 |
+
charset_normalizer/cli/__main__.py,sha256=lZ89qRWun7FRxX0qm1GhK-m0DH0i048yiMAX1mVIuRg,10731
|
| 25 |
+
charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc,,
|
| 26 |
+
charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc,,
|
| 27 |
+
charset_normalizer/constant.py,sha256=7OKYi28cJjZxIcX3lQCwfK9ijoOgaVEbERww7SqqNSY,42475
|
| 28 |
+
charset_normalizer/legacy.py,sha256=v8An1aAQHUu036UWOhyIaDGkirZ0t4hfNVlyje5KInU,2394
|
| 29 |
+
charset_normalizer/md.cp311-win_amd64.pyd,sha256=4h9MQMKb4LEVRj57uKNllGpK_BUrn_9gKr1BxuDOaKI,10752
|
| 30 |
+
charset_normalizer/md.py,sha256=e452fhwIAguEUr3FJzG7QZvFgXI-dVLOh_M1ZUiFI6U,20666
|
| 31 |
+
charset_normalizer/md__mypyc.cp311-win_amd64.pyd,sha256=81AevOJCBfPcVBks2RfquaiZ_pNlcGUCU9TBRmOD7_E,121344
|
| 32 |
+
charset_normalizer/models.py,sha256=ZR2PE-fqf6dASZfqdE5Uhkmr0o1MciSdXOjuNqwkmvg,12754
|
| 33 |
+
charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 34 |
+
charset_normalizer/utils.py,sha256=oH9Q3WcAMwmsSB7uM8uDozz9DXnkYecbkTNbdnMbgzI,12410
|
| 35 |
+
charset_normalizer/version.py,sha256=7_thI7FzRQxEsbtUYwrJs3FCFWF666mw74H8mggPRR0,123
|
venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (75.6.0)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp311-cp311-win_amd64
|
| 5 |
+
|
venv/Lib/site-packages/charset_normalizer-3.4.1.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
normalizer = charset_normalizer:cli.cli_detect
|