iiegn commited on
Commit
285c5ff
·
verified ·
1 Parent(s): c85d2c3

Initial commit

Browse files
tools/00_fetch_ud_clarin-dspace_metadata.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env -S uv run --script
#
# /// script
# requires-python = ">=3.12"
# dependencies = [
# ]
# ///
"""
Download UD metadata for the different releases from the lindat clarin-dspace
repository into `./etc/`:
- BibTeX "Howto Cite"
  (use the item's own data from the webpage)
- dc.description
  (use the clarin-dspace API to retrieve the item's metadata)

We assume a clarin-dspace infrastructure with:
- ".../repository/server/api/core/refbox/citations?type=bibtex&handle="
  returning a bibtex entry for the given hdl
- ".../repository/server/api/core/items/" as the API endpoint (core/items)
"""
# NOTE: the docstring previously claimed the output directory was
# `./templates/`; the script actually writes to `./etc/` (see
# outfile_pathprefix below and tools/README.md).

import json
import re
import argparse
import logging
from pathlib import Path
from urllib.error import URLError
from urllib.request import urlopen
from urllib.parse import urlparse

# URL prefixes
citation_url_prefix = "https://lindat.mff.cuni.cz/repository/server/api/core/refbox/citations?type=bibtex&handle="
description_url_prefix = "https://lindat.mff.cuni.cz/repository/server/api/core/items/"
handle_url_prefix = "http://hdl.handle.net/"
handle_redirect_url_prefix = "https://lindat.mff.cuni.cz/repository/items/"

# OUTPUTFILE(s) directories
outfile_pathprefix = "./etc/"
citation_outfile_name = "citation-{rev}"
description_outfile_name = "description-{rev}"

# List of URL postfixes: UD release -> hdl handle suffix
url_postfixes = {
    # https://github.com/UniversalDependencies/docs/blob/bce7995aaa339c995c4e74a5568888fffec3d5a7/download.md?plain=1#L9
    #
    # "2.17": "", #
    "2.16": "11234/1-5901",  # 319 treebanks, 179 languages, released May 15, 2025.
    "2.15": "11234/1-5787",  # 296 treebanks, 168 languages, released November 15, 2024.
    "2.14": "11234/1-5502",  # 283 treebanks, 161 languages, released May 15, 2024.
    "2.13": "11234/1-5287",  # 259 treebanks, 148 languages, released November 15, 2023.
    "2.12": "11234/1-5150",  # 245 treebanks, 141 languages, released May 15, 2023.
    "2.11": "11234/1-4923",  # 243 treebanks, 138 languages, released November 15, 2022.
    "2.10": "11234/1-4758",  # 228 treebanks, 130 languages, released May 15, 2022.
    "2.9": "11234/1-4611",   # 217 treebanks, 122 languages, released November 15, 2021.
    "2.8": "11234/1-3687",   # 202 treebanks, 114 languages, released May 15, 2021.
    "2.7": "11234/1-3424",   # 183 treebanks, 104 languages, released November 15, 2020.
}

# Parse command line arguments
parser = argparse.ArgumentParser(description=__doc__,
                                 formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-o', '--override', action='store_true',
                    help='override output file if it already exists')
parser.add_argument('-v', '--verbose', action='count', default=0,
                    help='increase verbosity level')
args = parser.parse_args()

# Set up logging: each -v lowers the threshold by one level (10),
# clamped so it never drops below DEBUG.
logging.basicConfig(
    level = max(logging.DEBUG, logging.INFO - args.verbose * 10),
    format='%(asctime)s [%(levelname)s] %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)

# Iterate over the URL postfixes, fetching citation and description per release
for rev, handle in url_postfixes.items():

    try:
        # --- BibTeX citation -------------------------------------------
        citation_url = citation_url_prefix + handle
        logging.debug(f"Using Citation URL: {citation_url}")

        # Send a GET request to the URL and read the JSON content
        with urlopen(citation_url) as response:
            data = json.loads(response.read().decode())

        # Extract the metadata field (the BibTeX entry itself)
        metadata = data.get("metadata")

        # Write the metadata to the output file
        if metadata:
            # Replace uneven space(s) at the beginning of lines with a
            # fixed number of spaces
            metadata = "\n".join(re.sub(r"^ +", " ", line)
                                 for line in metadata.split("\n"))

            output_fn = outfile_pathprefix + citation_outfile_name.format(rev=rev)
            if args.override or not Path(output_fn).exists():
                with open(output_fn, "w") as fh:
                    fh.write(metadata + "\n")
                logging.info(f"Successfully downloaded citation from {citation_url} and written to {output_fn}.")
            else:
                logging.info(f"Output {output_fn} already exists: Not overriding.")

        # --- dc.description via the items API --------------------------
        # Resolve the hdl to the repository item URL; the trailing path
        # component of the redirect target is the item id.
        handle_url = handle_url_prefix + handle
        logging.debug(f"Using handle URL: {handle_url}")
        with urlopen(handle_url) as response:
            if not response.url.startswith(handle_redirect_url_prefix):
                # Previously an unexpected redirect target was skipped
                # silently; make the skip visible.
                logging.warning(
                    f"Unexpected redirect for {handle_url}: {response.url}")
                continue
            itemid = urlparse(response.url).path.rsplit("/", 1)[-1]

        description_url = description_url_prefix + itemid
        with urlopen(description_url) as response:
            data = json.loads(response.read().decode())
        description = data["metadata"]["dc.description"][0]["value"]
        if description:
            output_fn = outfile_pathprefix + description_outfile_name.format(rev=rev)
            if args.override or not Path(output_fn).exists():
                with open(output_fn, "w") as fh:
                    fh.write(description + "\n")
                logging.info(f"Successfully downloaded description from {description_url} and written to {output_fn}.")
            else:
                logging.info(f"Output {output_fn} already exists: Not overriding.")

    except URLError as e:
        logging.error(f"Error downloading: {e}")
    except json.JSONDecodeError as e:
        logging.error(f"Error decoding JSON: {e}")
    except (KeyError, IndexError) as e:
        # The items API answer did not have the expected
        # metadata/dc.description structure; previously this crashed the loop.
        logging.error(f"Unexpected API response structure: {e}")
tools/00_fetch_ud_codes_and_flags.sh ADDED
@@ -0,0 +1,91 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env bash
#

# This is a manual mapping of versions of codes_and_flags.yaml from:
# * https://github.com/UniversalDependencies/docs-automation/commits/master/codes_and_flags.yaml
# (identified by commit hash) matched to the closest date of UD versions from:
# * https://github.com/UniversalDependencies/docs
declare -A VER_MAPPING
VER_MAPPING["2.7"]="a5124b817c04ee6c8b01b80b2adad518b06aa9b4"
VER_MAPPING["2.8"]="26d595cd549e904a4d49b62fccb33602c9d18eb8"
VER_MAPPING["2.9"]="6167b04d6dc5d953f4007dcd75b5fbbf8c7645d6"
VER_MAPPING["2.10"]="953b1c6e70836b7a64b764e24f578ca787cfd89c"
VER_MAPPING["2.11"]="25ee28cd173c2fee5e3a7ad493a4c6e137e13f6b"
VER_MAPPING["2.12"]="32a51d28787795eea1c2e11b3572087a3dc228ac"
VER_MAPPING["2.13"]="6712e116f6fda486223e183f56ffb8921695d4a6"
VER_MAPPING["2.14"]="dabe6c89701b6127b867e27c0256eb4f9dfa9d46"
VER_MAPPING["2.15"]="b3b3d95333d603db6aede3601a3cf9929b3470ae"
VER_MAPPING["2.16"]="bc0cc9b94112253e3ea1e00b2dbe3a46c5f53fe3"
VER_MAPPING["latest"]="a47b209d5d42c31351cc393a86512fd913ac2bba"

###################
#############
#
# USAGE
#
usage() { OUTPUT=${1:-"verbose"}
    echo "Usage: $0 [-o]" >&2
    # quote: OUTPUT is a single word here, but unquoted [ x = y ] breaks
    # if it ever is not
    [ "$OUTPUT" = "short" ] && exit 0
    >&2 cat << EOF

Download versions of
* codes_and_flags.yaml
a yaml file mapping UD (long) language names to metadata.

For example:
German:
  flag: DE
  lcode: de
  iso3: deu
  family: IE
  genus: Germanic

Options:
  -h           Print this help message
  -o|--online  Enable online mode: (re-fetch files from GitHub)
EOF
}
#
######
############
##################

ONLINE_MODE=false

VALID_ARGS=$(getopt -o ho --long help,online -- "$@")
if [[ $? -ne 0 ]]; then
    usage "short"
    exit 1
fi
eval set -- "$VALID_ARGS"
while [ : ]
do
    case "$1" in
        -o | --online) ONLINE_MODE=true; shift ;;
        -h | --help) usage; shift; exit 0 ;;
        --) shift; break ;;
        *) >&2 echo "Unsupported option: $1"; usage; exit 1 ;;
    esac
done

set -uo pipefail

# Function to get one raw file from GitHub ($1 = raw URL, $2 = output file).
# Defined once with explicit parameters instead of being redefined on every
# loop iteration (the old per-iteration definition closed over loop vars).
get_codes_and_flags() {
    wget "$1" -O "$2"
}

for key in "${!VER_MAPPING[@]}"
do
    VER="${key}"
    GIT_HASH="${VER_MAPPING[${key}]}"

    CODES_AND_FLAGS_FN="etc/codes_and_flags-${VER}.yaml"
    # Human-browsable counterpart of the raw URL (for reference):
    #   https://github.com/UniversalDependencies/docs-automation/blob/${GIT_HASH}/codes_and_flags.yaml
    CODES_AND_FLAGS_RAW_URL="https://raw.githubusercontent.com/UniversalDependencies/docs-automation/${GIT_HASH}/codes_and_flags.yaml"

    # Download when online mode is requested or the cached copy is missing
    if [[ $ONLINE_MODE == true ]] || [[ ! -e "${CODES_AND_FLAGS_FN}" ]]; then
        get_codes_and_flags "${CODES_AND_FLAGS_RAW_URL}" "${CODES_AND_FLAGS_FN}"
    fi
done
tools/01_fetch_ud_repos.sh ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env bash
#
# This script is based on:
# https://github.com/UniversalDependencies/docs-automation
#
# ./fetch_ud_repos.sh -h
#

# Set default vars and names directories and output files
[ -e .env ] && . .env
ONLINE_MODE=false
UD_VER=${UD_VER:-"2.15"}
UDS_SUBDIR="UD_repos"; mkdir -p "${UDS_SUBDIR}"
GIT_API_OUTPUT=".UniversalDependencies.repos"
GIT_SUBMODULES_ADD=".UD_submodules_add.commands"

###################
#############
#
# USAGE
#
usage() { OUTPUT=${1:-"verbose"}
    echo "Usage: $0 [-o] [-r REV]" >&2
    [ "$OUTPUT" = "short" ] && exit 0
    >&2 cat << EOF

Identify all relevant UD repositories from GitHub:
* https://github.com/UniversalDependencies/
and generate a list of submodules to add to a (newly created) repository in the
subdirectory ${UDS_SUBDIR}/.

"${GIT_API_OUTPUT}" contains the (saved) list of GitHub repositories; the
file can be updated by running this script with the '-o|--online' option.
"${GIT_SUBMODULES_ADD}" contains the list of submodules to add to the main
repository.

Options:
  -h            Print this help message
  -o|--online   Enable online mode: (re-fetch UD repos from GitHub instead
                of using ${GIT_API_OUTPUT})
  -r|--rev VER  The UD GitHub tagged version to checkout (default: ${UD_VER})
EOF
}
#
######
############
##################

VALID_ARGS=$(getopt -o hor: --long help,online,rev: -- "$@")
if [[ $? -ne 0 ]]; then
    usage "short"
    exit 1
fi

eval set -- "$VALID_ARGS"
while [ : ]
do
    case "$1" in
        -o | --online) ONLINE_MODE=true; shift ;;
        -r | --rev) UD_VER="$2"; shift 2 ;;
        -h | --help) usage; shift; exit 0 ;;
        --) shift; break ;;
        *) >&2 echo "Unsupported option: $1"; usage; exit 1 ;;
    esac
done

# BUGFIX: derive the tag name *after* option parsing. Previously UD_REV was
# computed from the default UD_VER before '-r|--rev' was read, so a
# user-supplied revision never reached the generated commands below.
UD_REV="r${UD_VER}"

set -uo pipefail

# Function to get github API content
get_github_api_content() {

    # We are interested in these keys:
    #
    # "git_url": "git://github.com/UniversalDependencies/docs.git",
    # "ssh_url": "git@github.com:UniversalDependencies/docs.git",
    # "clone_url": "https://github.com/UniversalDependencies/docs.git",
    # "svn_url": "https://github.com/UniversalDependencies/docs",
    # "homepage": "http://universaldependencies.org/",
    #
    # -> "clone_url"

    # Three pages of 100 will last us to 300 repositories
    (for pg in 1 2 3; do
        wget "https://api.github.com/orgs/UniversalDependencies/repos?page=$pg&per_page=100" -O -
    done) \
        | grep clone_url \
        | grep -Po 'https://.*?(?=")' \
        > "${UDS_SUBDIR}/${GIT_API_OUTPUT}"
}

# Check if the online argument is present, or the cached list is missing
if [[ $ONLINE_MODE == true ]] || [[ ! -e "${UDS_SUBDIR}/${GIT_API_OUTPUT}" ]]; then
    # Download github API content
    get_github_api_content
fi


MSG=""
echo "" > "${UDS_SUBDIR}/${GIT_SUBMODULES_ADD}"
for r in $(sort "${UDS_SUBDIR}/${GIT_API_OUTPUT}")
do
    # repository directory name = last path component without ".git"
    l=$(echo "$r" | perl -pe 's/.*\///' | cut -f 1 -d.)

    # Process all potential UD_ repos (UD_v2 is not a treebank repository)
    if [[ $l == UD_* ]] && [[ $l != UD_v2 ]]
    then
        # Create a file for the full set
        echo "git submodule add --depth 1 -- $r $l" \
            >> "${UDS_SUBDIR}/${GIT_SUBMODULES_ADD}"

        # Missing subdir: remember it for the summary printed at the end
        if [[ ! -e "${UDS_SUBDIR}/$l" ]]
        then
            # git submodule add
            #   [-b <branch>] [-f|--force] [--name <name>] [--reference <repository>]
            #   [--ref-format <format>] [--depth <depth>] [--] <repository> [<path>]
            MSG="$MSG\n${UDS_SUBDIR}/$l"
        fi
    fi
done

# For every submodule: resolve the release tag on the remote, fetch exactly
# that commit (depth 1) and record it in a .tag-rVER marker file; otherwise
# report the missing revision.
echo "git submodule --quiet foreach 'export name; bash -c \"( REF=\$(git ls-remote --exit-code origin refs/tags/${UD_REV} | cut -f1) && [ ! -z \\\$REF ] && git fetch --depth=1 origin \\\$REF && echo \\\$REF > .tag-${UD_REV}) || ( echo No rev:${UD_REV} \\\$name )\" ' " >> "${UDS_SUBDIR}/${GIT_SUBMODULES_ADD}"


echo "Missing repos:"
echo -e "$MSG"

if [[ ! -e ${UDS_SUBDIR}/.git ]]
then
    echo "${UDS_SUBDIR}: "'missing .git: run `git init` in subdirectory!'
fi
tools/02_traverse_ud_repos.py ADDED
@@ -0,0 +1,261 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env -S uv run --script
#
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "pyyaml",
#     "load-dotenv",
# ]
# ///
"""
Collect relevant metadata from local UD directories:
- extracting the '# Summary' from the beginning and machine readable
  metadata from the end of the README.{md,txt} file
- using the UD directory name for collecting metadata from the
  codes_and_flags.yaml file
- collecting {dev,train,test}.conllu files.
"""

import json
import os
import xml.etree.ElementTree as ET
import argparse
import logging
from collections import defaultdict
from pathlib import Path

import yaml
from dotenv import load_dotenv


# Read a .env file (if present) so UD_VER can be configured without touching
# the code; falls back to "2.15". Module-level side effect, runs on import.
load_dotenv()
UD_VER = os.getenv('UD_VER', "2.15")

# Parse command line arguments (the module docstring doubles as --help text)
parser = argparse.ArgumentParser(description=__doc__,
                                 formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-o', '--override', action='store_true',
                    help='override output file if it already exists')
parser.add_argument('-v', '--verbose', action='count', default=0,
                    help='increase verbosity level')
args = parser.parse_args()

# Set up logging: each -v lowers the threshold by one level (10), clamped so
# it never drops below DEBUG (default is INFO).
logging.basicConfig(
    level = max(logging.DEBUG, logging.INFO - args.verbose * 10),
    format='%(asctime)s [%(levelname)s] %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)
+
50
def extract_metadata(file_path) -> dict:
    """
    Extract treebank metadata from a UD README.{md,txt} file.

    The text between the first markdown header (usually '# Summary') and the
    next header becomes the "summary"; machine-readable "Key: value" lines
    anywhere after the summary fill the remaining fields.

    Args:
        file_path (str): The path to the README.{md,txt} file.

    Returns:
        dict: The extracted metadata; fields not found stay None.
    """
    metadata = {
        "summary": None,
        "license": None,
        "genre": None,   # stored as a list (space-separated in the README)
        "lemmas": None,
        "upos": None,
        "xpos": None,
        "language": None,  # German:
        "flag": None,      # flag: DE
        "lcode": None,     # lcode: de
        "iso3": None,      # iso3: deu
        "family": None,    # family: IE
        "genus": None,     # genus: Germanic
        # "splits": None,  # {"dev": {"files": []},
        #                  #  "train": {"files": []},
        #                  #  "test": {"files": []},
        #                  #  "unknown":{"files": []}}
    }

    with open(file_path, 'r', encoding='utf-8') as file:
        lines = [line.strip() for line in file.readlines()]

    # Locate the first header and the following one; the lines in between
    # are the summary. Use `is None` checks (index 0 is a valid position).
    summary_start = None
    summary_end = None
    for i, line in enumerate(lines):
        if summary_start is None and line.startswith('# '):
            summary_start = i + 1
        elif summary_start is not None and line.startswith('# '):
            # BUGFIX: use the next header's index as the *exclusive* slice
            # end; the previous `i - 1` dropped the last summary line
            # whenever no blank line preceded the next header.
            summary_end = i
            break

    if summary_start is not None and summary_end is not None:
        # we have a summary
        metadata["summary"] = ' '.join(lines[summary_start:summary_end]).strip()

        # This is a (quite hackish) approach inspired by:
        # https://github.com/UniversalDependencies/LICENSE/blob/master/generate_license_for_lindat.pl
        # Scan the remainder of the file for "Key: value" lines.
        for line in lines[summary_end:]:
            if ":" not in line:
                continue
            key, val = line.split(":", 1)
            key = key.lower()
            if key in metadata:
                # 'genre' is a space-separated list; everything else a string
                if key == "genre":
                    metadata[key] = val.strip().split(" ")
                else:
                    metadata[key] = val.strip()
    return metadata
+
107
+
108
def traverse_directory(directory):
    """
    Traverses the directory and its first-level subdirectories, finds the
    specified files, and extracts the summary from the README.{md,txt} file.

    For each treebank subdirectory that carries a `.tag-rVER` marker file
    (written by tools/01_fetch_ud_repos.sh) this collects:
    - the {dev,train,test}.conllu files and their byte sizes,
    - README metadata via extract_metadata(),
    - sentence/token/word counts from stats.xml,
    - language metadata from etc/codes_and_flags-latest.yaml.

    Args:
        directory (str): The path to the directory.

    Returns:
        dict: A dictionary containing the extracted summaries for each file.
    """
    # item name -> nested metadata; inner factory gives a dict per key
    results = defaultdict(lambda: defaultdict(dict))

    # Language-name -> {flag, lcode, iso3, family, genus} lookup table
    with open(os.path.join('etc', f"codes_and_flags-latest.yaml"), 'r') as file:
        codes_and_flags = yaml.safe_load(file)
    logging.debug(codes_and_flags)

    for item in os.listdir(directory):
        # skip hidden entries (.git, .tag files at the top level, ...)
        if item.startswith("."):
            continue

        if os.path.isdir(os.path.join(directory, item)):
            dir_path = os.path.join(directory, item)
            logging.debug(dir_path)

            # Only process repos successfully pinned to this UD release
            tag_fn = os.path.join(dir_path, f".tag-r{UD_VER}")
            if not Path(tag_fn).exists():
                logging.info(f"No tag file:{tag_fn}: Ignoring content.")
                continue

            results[item]["splits"] = {
                "dev": {"files": [], "num_bytes": 0},
                "test": {"files": [], "num_bytes": 0},
                "train": {"files": [], "num_bytes": 0},
                "unknown": {"files": [], "num_bytes": 0}
            }
            for file in os.listdir(dir_path):
                if file.endswith(".conllu"):
                    file_path = os.path.join(dir_path, file)
                    # path as referenced from GitHub at the release tag
                    github_path = os.path.join(item, f"r{UD_VER}", file)
                    logging.debug(file_path)
                    # Classify the split by substring of the file name.
                    # NOTE(review): "dev" is tested before "test"/"train",
                    # so a name containing both matches "dev" first.
                    match file:
                        case x if "dev" in file:
                            subset = "dev"
                        case x if "test" in file:
                            subset = "test"
                            pass
                        case x if "train" in file:
                            subset = "train"
                        case _:
                            subset = "unknown"
                    results[item]["splits"][subset]["files"].append(github_path)

                    # accumulate on-disk size per split
                    sum_bytes = os.stat(file_path).st_size
                    results[item]["splits"][subset]["num_bytes"] += sum_bytes

                elif file.startswith("README") and (file.endswith(
                        tuple(["md", "txt"]))):
                    # Merge summary/license/... from the README
                    results[item].update(
                        extract_metadata(os.path.join(dir_path, file)))
                    # print(results[item])

                elif file == "stats.xml":
                    # Extract size values:
                    # <treebank>
                    #   <size>
                    #     <total><sentences>15589</sentences><tokens>287708</tokens><words>292756</words><fused>5048</fused></total>
                    #     <train><sentences>13813</sentences><tokens>259167</tokens><words>263777</words><fused>4610</fused></train>
                    #     <dev><sentences>799</sentences><tokens>12316</tokens><words>12480</words><fused>164</fused></dev>
                    #     <test><sentences>977</sentences><tokens>16225</tokens><words>16499</words><fused>274</fused></test>
                    #   </size>
                    tree = ET.parse(os.path.join(dir_path, file))
                    root = tree.getroot()
                    size_node = root.find('.//size')
                    if size_node is None:
                        continue

                    for child_node_name in ["train", "dev", "test"]:

                        child_node = size_node.find(child_node_name)
                        if child_node is None:
                            continue

                        for child_child_node_name in ["sentences", "tokens", "words"]:
                            # NOTE(review): assumes the child element is
                            # always present when the split element exists;
                            # a missing one would raise AttributeError.
                            value = child_node.find(child_child_node_name).text

                            # store counts as strings, only if positive
                            if value and int(value) > 0:
                                results[item]["splits"][child_node_name][f"num_{child_child_node_name}"] = value

            # 'Read' language name from the dirname (UD_Abaza-ATB/), extract
            # the relevant metadata from 'codes_and_flags.yaml' and add it to
            # the metadata.
            # ```
            # Abaza:
            #   flag: RU-ABAZA
            #   lcode: abq
            #   iso3: abq
            #   family: Northwest Caucasian
            # ```
            language = item[3:].rsplit("-", 1)[0].replace("_", " ")
            results[item]["language"] = language
            # print(f"item: {item}, language: {language}")
            # NOTE(review): a language missing from codes_and_flags raises
            # KeyError here — TODO confirm that is intended (fail fast).
            results[item].update(codes_and_flags[language])

            # 'Read' the corpus name from the dirname (UD_Abaza-ATB/) and
            # construct a short name for the data set:
            #   abq_atb
            corpus_name = item[3:].rsplit("-", 1)[1].lower()
            name = f"{results[item]["lcode"]}_{corpus_name}"
            results[item]["name"] = name

            # Delete empty
            # 1. file subsets
            for fileset_k,fileset_v in list(results[item]["splits"].items()):
                if not fileset_v["files"]:
                    del results[item]["splits"][fileset_k]

            # 2. sets without a summary, files or license
            # NOTE(review): after `del results[item]`, the following
            # `results[item][...]` accesses re-create the entry via the
            # defaultdict factory (empty), so the later checks also fire and
            # delete it again — net effect is deletion, but with spurious
            # prints; worth confirming/refactoring.
            if not results[item]["summary"]:
                del results[item]
                print(f"ITEM DELETED - no summary: {item}")

            if not any([value["files"] for value in
                        results[item]["splits"].values()]):
                print(results[item]["splits"])
                del results[item]
                print(f"ITEM DELETED - no files : {item}")

            if not results[item]["license"]:
                del results[item]
                print(f"ITEM DELETED - no license: {item}")

    return results
244
+
245
+
246
if __name__ == '__main__':
    directory = 'UD_repos'
    results = traverse_directory(directory)
    for file, metadata in results.items():
        print(f"Directory: {file}")
        # print(metadata)
        # print('---')

    # Write the metadata to json.
    # BUGFIX: the previous check `not open(output_fn, 'r').read()` raised
    # FileNotFoundError on the (common) first run when metadata.json did
    # not exist yet, and leaked the file handle. Use the same
    # existence/--override convention as the sibling tools.
    output_fn = 'metadata.json'
    if args.override or not Path(output_fn).exists():
        with open(output_fn, 'w') as fh:
            json.dump(results, fh, ensure_ascii=False)
        print(f"{output_fn} written")
    else:
        logging.info(f"Output {output_fn} already exists: Not overriding.")
tools/03_fill_universal_dependencies_tamplate.py ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env -S uv run --script
#
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "Jinja2",
#     "load-dotenv",
# ]
# ///
"""
Render the Jinja2 templates in `./templates/` with the collected UD metadata.

Inputs (produced by the earlier tools in this directory):
- `metadata.json` (02_traverse_ud_repos.py)
- `etc/description-VER` and `etc/citation-VER`
  (00_fetch_ud_clarin-dspace_metadata.py)

Outputs:
- `universal_dependencies-VER`
- `README-VER`
"""

import json
import os
import argparse
import logging
from pathlib import Path

import jinja2
from dotenv import load_dotenv


# Read a .env file (if present) so UD_VER can be configured without touching
# the code; falls back to "2.15".
load_dotenv()
UD_VER = os.getenv('UD_VER', "2.15")

# Parse command line arguments (the module docstring doubles as --help text)
parser = argparse.ArgumentParser(description=__doc__,
                                 formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-o', '--override', action='store_true',
                    help='override output file if it already exists')
parser.add_argument('-v', '--verbose', action='count', default=0,
                    help='increase verbosity level')
args = parser.parse_args()

# Set up logging: each -v lowers the threshold by one level (10), clamped
# so it never drops below DEBUG.
logging.basicConfig(
    level = max(logging.DEBUG, logging.INFO - args.verbose * 10),
    format='%(asctime)s [%(levelname)s] %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)


if __name__ == '__main__':
    # Read the metadata from json
    with open('metadata.json', 'r') as fh:
        metadata = json.load(fh)

    with open(f'etc/description-{UD_VER}', 'r') as fh:
        description = fh.read().strip()

    with open(f'etc/citation-{UD_VER}', 'r') as fh:
        citation = fh.read().strip()

    # template file -> rendered output file
    templates = {
        "templates/universal_dependencies.tmpl":
            f"universal_dependencies-{UD_VER}",
        "templates/README.tmpl":
            f"README-{UD_VER}",
    }
    for template_fn, output_fn in templates.items():
        if args.override or not Path(output_fn).exists():
            with open(template_fn, 'r') as fh:
                template = jinja2.Template(fh.read())

            with open(output_fn, 'w') as fh:
                output = template.render(citation=citation,
                                         description=description,
                                         data=metadata, ud_ver=UD_VER,)
                fh.write(output)
            print(f"{output_fn} written")
        else:
            logging.info(f"Output {output_fn} already exists: Not overriding.")
tools/README.md ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # README
2
+
3
+ ## 00 (outputs to `./etc/`)
4
+ ### `00_fetch_ud_clarin-dspace_metadata.py`
5
+ Download `./etc/citation-VER` and `./etc/description-VER` UD metadata files for
6
+ the different releases from the lindat clarin-dspace repository.
7
+
8
+ ### `00_fetch_ud_codes_and_flags.sh`
9
+ Download versioned `./etc/codes_and_flags-VER.yaml` files, mapping UD (long)
+ language names to metadata, from UD `docs-automation` at GitHub.
11
+
12
+
13
+ ## 01
14
+ Identify all relevant UD repositories from GitHub:
15
+ * https://github.com/UniversalDependencies/
16
+ and generate a list of submodules to add to a (newly created) repository in the
17
+ subdirectory `./UD_repos/`.
18
+
19
+
20
+ ## 02
21
+ Collect relevant metadata from local UD directories:
22
+ - extracting the '# Summary' from the beginning and machine readable
23
+ metadata from the end of the README.{md,txt} file
24
+ - using the UD directory name for collecting metadata from the
25
+ codes_and_flags.yaml file
26
+ - collecting {dev,train,test}.conllu files.
27
+
28
+
29
+ ## 03
30
+ Use `./templates/` and generate:
31
+ - `universal_dependencies.py`
32
+ - `README.md`