| text (string, lengths 1–93.6k) |
|---|
match data.book_fmt:
|
case "KFX":
|
return data.kfx_json[-1]["position"] + len( # type: ignore
|
data.kfx_json[-1]["content"] # type: ignore
|
)
|
case "AZW3" | "MOBI":
|
return len(data.mobi_html) # type: ignore
|
case _:
|
return 0
|
def create_files(data: ParseJobData, prefs: Prefs, notif: Any) -> None:
|
"""
|
This function runs in system Python subprocess for official(frozen) calibre build.
|
"""
|
is_epub = data.book_fmt == "EPUB"
|
data.plugin_path = Path(data.plugin_path)
|
insert_installed_libs(data.plugin_path)
|
nlp = load_spacy(
|
data.spacy_model,
|
data.book_path if data.create_x else None,
|
data.book_lang,
|
)
|
lemmas_conn = None
|
if data.create_ww:
|
lemmas_db_path = (
|
wiktionary_db_path(data.plugin_path, data.book_lang, prefs["gloss_lang"])
|
if is_epub
|
else kindle_db_path(data.plugin_path, data.book_lang, prefs)
|
)
|
lemmas_conn = sqlite3.connect(lemmas_db_path)
|
lemma_matcher = create_spacy_matcher(
|
nlp,
|
data.spacy_model,
|
data.book_lang,
|
not is_epub,
|
lemmas_conn,
|
data.plugin_path,
|
prefs,
|
)
|
if data.create_x:
|
mediawiki = MediaWiki(
|
prefs["mediawiki_api"],
|
data.book_lang,
|
data.useragent,
|
data.plugin_path,
|
prefs["zh_wiki_variant"],
|
)
|
wikidata = (
|
None
|
if len(prefs["mediawiki_api"]) > 0
|
else Wikidata(data.plugin_path, data.useragent)
|
)
|
custom_x_ray = load_custom_x_desc(data.book_path)
|
if is_epub:
|
if data.create_x:
|
wiki_commons = None
|
if prefs["mediawiki_api"] == "" and prefs["add_locator_map"]:
|
wiki_commons = Wikimedia_Commons(data.plugin_path, data.useragent)
|
epub = EPUB(
|
data.book_path,
|
mediawiki,
|
wiki_commons,
|
wikidata,
|
custom_x_ray,
|
lemmas_conn,
|
)
|
elif data.create_ww:
|
epub = EPUB(data.book_path, None, None, None, None, lemmas_conn)
|
for doc, (start, end, xhtml_path) in nlp.pipe(
|
epub.extract_epub(), as_tuples=True
|
):
|
intervals = []
|
if data.create_x:
|
intervals = find_named_entity(
|
start,
|
epub,
|
doc,
|
"",
|
data.book_lang,
|
None,
|
custom_x_ray,
|
xhtml_path,
|
end,
|
)
|
if data.create_ww:
|
interval_tree = None
|
if len(intervals) > 0:
|
random.shuffle(intervals)
|
interval_tree = IntervalTree()
|
interval_tree.insert_intervals(intervals)
|
epub_find_lemma(
|
doc,
|
lemma_matcher,
|
start,
|
end,
|
interval_tree,
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.