#!/usr/bin/env -S uv run --script
#
# /// script
# requires-python = ">=3.12"
# dependencies = [
# ]
# ///
"""
Download UD metadata for the different releases from the lindat clarin-dspace
repository into `./etc/`:
- BibTeX "Howto Cite"
(use the item's own data from the webpage)
- dc.description
(use the clarin-dspace API to retrieve the item's metadata)
We assume a clarin-dspace infrastructure with:
- ".../repository/server/api/core/refbox/citations?type=bibtex&handle="
returning a bibtex entry for the given hdl
- ".../repository/server/api/core/items/" as the API endpoint (core/items)
"""
import json
import re
import argparse
import logging
from pathlib import Path
from urllib.error import URLError
from urllib.request import urlopen
from urllib.parse import urlparse
# URL prefix
# Repository API endpoints (see module docstring for the expected
# clarin-dspace layout):
# - refbox "citations" endpoint: returns a BibTeX entry for a given handle
citation_url_prefix = "https://lindat.mff.cuni.cz/repository/server/api/core/refbox/citations?type=bibtex&handle="
# - core/items endpoint: returns the full item metadata as JSON, keyed by
#   the repository-internal item id (not the handle)
description_url_prefix = "https://lindat.mff.cuni.cz/repository/server/api/core/items/"
# Handle resolver; fetching <prefix><handle> redirects to the item page
handle_url_prefix = "http://hdl.handle.net/"
# Expected redirect target; its last path segment is the internal item id
handle_redirect_url_prefix = "https://lindat.mff.cuni.cz/repository/items/"
# OUTPUTFILE(s) directories
outfile_pathprefix = "./etc/"
# Per-release output file name templates ({rev} = UD release number)
citation_outfile_name = "citation-{rev}"
description_outfile_name = "description-{rev}"
# List of URL postfixes
# Map of UD release number -> lindat handle suffix for that release's item.
url_postfixes = {
    # https://github.com/UniversalDependencies/docs/blob/pages-source/download.md
    # "2.18": "",
    "2.17": "11234/1-6036", # 339 treebanks, 186 languages, released November 15, 2025.
    "2.16": "11234/1-5901", # 319 treebanks, 179 languages, released May 15, 2025.
    "2.15": "11234/1-5787", # 296 treebanks, 168 languages, released November 15, 2024.
    "2.14": "11234/1-5502", # 283 treebanks, 161 languages, released May 15, 2024.
    "2.13": "11234/1-5287", # 259 treebanks, 148 languages, released November 15, 2023.
    "2.12": "11234/1-5150", # 245 treebanks, 141 languages, released May 15, 2023.
    "2.11": "11234/1-4923", # 243 treebanks, 138 languages, released November 15, 2022.
    "2.10": "11234/1-4758", # 228 treebanks, 130 languages, released May 15, 2022.
    "2.9": "11234/1-4611", # 217 treebanks, 122 languages, released November 15, 2021.
    "2.8": "11234/1-3687", # 202 treebanks, 114 languages, released May 15, 2021.
    "2.7": "11234/1-3424", # 183 treebanks, 104 languages, released November 15, 2020.
}
# ---- Command-line interface ----
parser = argparse.ArgumentParser(
    description=__doc__,
    formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('-o', '--override', action='store_true',
                    help='override output file if it already exists')
parser.add_argument('-v', '--verbose', action='count', default=0,
                    help='increase verbosity level')
args = parser.parse_args()

# ---- Logging ----
# Each -v lowers the threshold by one level (INFO -> DEBUG); the level is
# clamped so it never drops below DEBUG no matter how many -v are given.
verbosity = logging.INFO - args.verbose * 10
if verbosity < logging.DEBUG:
    verbosity = logging.DEBUG
logging.basicConfig(
    level=verbosity,
    format='%(asctime)s [%(levelname)s] %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
)
# Make sure the output directory exists before any downloads
# (previously a missing ./etc/ crashed on the first open()).
Path(outfile_pathprefix).mkdir(parents=True, exist_ok=True)

# Iterate over the releases: for each one fetch the BibTeX citation and the
# dc.description and write them to per-release files under ./etc/.
for rev, handle in url_postfixes.items():
    try:
        # --- BibTeX citation ------------------------------------------
        citation_url = citation_url_prefix + handle
        logging.debug(f"Using Citation URL: {citation_url}")
        # Send a GET request and parse the JSON reply
        with urlopen(citation_url) as response:
            data = json.loads(response.read().decode())
        # The refbox API wraps the BibTeX entry in a "metadata" field
        metadata = data.get("metadata")
        if metadata:
            # Replace uneven space(s) at the beginning of lines with fixed
            # number of spaces
            metadata = "\n".join(re.sub(r'^( )+', ' ', line)
                                 for line in metadata.split("\n"))
            output_fn = outfile_pathprefix + citation_outfile_name.format(rev=rev)
            if args.override or not Path(output_fn).exists():
                # utf-8 explicitly: BibTeX entries contain non-ASCII names
                with open(output_fn, "w", encoding="utf-8") as fh:
                    fh.write(metadata + "\n")
                logging.info(f"Successfully downloaded citation from {citation_url} and written to {output_fn}.")
            else:
                logging.info(f"Output {output_fn} already exists: Not overriding.")

        # --- dc.description -------------------------------------------
        handle_url = handle_url_prefix + handle
        logging.debug(f"Using handle URL: {handle_url}")
        # Resolve the handle; the repository redirects to the item page whose
        # last path segment is the internal item id used by the items API.
        with urlopen(handle_url) as response:
            resolved_url = response.url
        if resolved_url.startswith(handle_redirect_url_prefix):
            itemid = urlparse(resolved_url).path.rsplit("/", 1)[-1]
            description_url = description_url_prefix + itemid
            with urlopen(description_url) as item_response:
                item_data = json.loads(item_response.read().decode())
            # Guard the lookup: a missing/empty dc.description previously
            # raised an uncaught KeyError/IndexError and aborted the loop
            # for all remaining releases.
            try:
                description = item_data["metadata"]["dc.description"][0]["value"]
            except (KeyError, IndexError, TypeError):
                logging.warning(f"No dc.description found for {description_url}.")
                description = None
            if description:
                output_fn = outfile_pathprefix + description_outfile_name.format(rev=rev)
                if args.override or not Path(output_fn).exists():
                    with open(output_fn, "w", encoding="utf-8") as fh:
                        fh.write(description + "\n")
                    logging.info(f"Successfully downloaded description from {description_url} and written to {output_fn}.")
                else:
                    logging.info(f"Output {output_fn} already exists: Not overriding.")
    except URLError as e:
        logging.error(f"Error downloading: {e}")
    except json.JSONDecodeError as e:
        logging.error(f"Error decoding JSON: {e}")