repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
facebook/ocamlrep | https://github.com/facebook/ocamlrep/blob/3ed6e41c17c6d05e19121b59beb8efbb1a7ce3b6/ocamlrep_ocamlpool/build.rs | ocamlrep_ocamlpool/build.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
// Assume an opam environment (`eval "$(opam env --switch=default
// --set-switch)"`) then to find the prevailing standard library caml
// headers, `OCAMLLIB=$(ocamlopt.opt -config | grep standard_library:
// | awk '{ print $2 }')`.
/// Locates the OCaml standard library directory (for the caml C headers) by
/// asking the toolchain itself — see the shell recipe in the comment above.
/// Panics if `sh`/`ocamlopt.opt` are unavailable or produce non-UTF-8 output,
/// which is acceptable for a build script.
fn ocamllib_dir() -> std::path::PathBuf {
    let mut sh = std::process::Command::new("sh");
    sh.args([
        "-c",
        "ocamlopt.opt -config | grep standard_library: | awk '{ print $2 }'",
    ]);
    let output = sh.output().unwrap().stdout;
    let proposed_path = std::path::Path::new(std::str::from_utf8(&output).unwrap().trim());
    // A supercaml 'ocamlopt.opt' can report standard library paths that don't
    // exist.
    if proposed_path.exists() {
        proposed_path.to_path_buf()
    } else {
        // Fallback to guessing the location given knowledge of where
        // 'ocamlopt.opt' itself is: <prefix>/bin/ocamlopt.opt -> <prefix>/lib/ocaml
        // (ancestors().nth(2) strips the binary name and the bin directory).
        let mut sh = std::process::Command::new("sh");
        sh.args(["-c", "which ocamlopt.opt"]);
        let output = sh.output().unwrap().stdout;
        std::path::Path::new(std::str::from_utf8(&output).unwrap().trim())
            .ancestors()
            .nth(2)
            .unwrap()
            .join("lib/ocaml")
    }
}
/// Build script: compiles ocamlpool.c against the OCaml runtime headers and
/// emits link directives for dependents (search path + the `camlrun` runtime).
fn main() {
    let ocaml_dir = ocamllib_dir();
    // Rebuild only when the C source changes.
    println!("cargo:rerun-if-changed=ocamlpool.c");
    cc::Build::new()
        .include(ocaml_dir.as_path())
        .file("ocamlpool.c")
        .compile("ocamlpool");
    println!("cargo:rustc-link-search=native={}", ocaml_dir.display());
    println!("cargo:rustc-link-lib=dylib=camlrun");
}
| rust | MIT | 3ed6e41c17c6d05e19121b59beb8efbb1a7ce3b6 | 2026-01-04T20:16:50.959951Z | false |
facebook/ocamlrep | https://github.com/facebook/ocamlrep/blob/3ed6e41c17c6d05e19121b59beb8efbb1a7ce3b6/ocamlrep_ocamlpool/test/ocamlpool_test.rs | ocamlrep_ocamlpool/test/ocamlpool_test.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
#![allow(unused_crate_dependencies)]
#![feature(exit_status_error)]
use ocamlrep_ocamlpool::FromOcamlRep;
use ocamlrep_ocamlpool::ocaml_ffi;
use ocamlrep_ocamlpool::ocaml_registered_function;
// FFI bindings to the C side (ocamlpool.c). `ocamlpool_enter`/`ocamlpool_leave`
// bracket a region in which `ocamlpool_reserve_block` may allocate.
unsafe extern "C" {
    fn ocamlpool_enter();
    fn ocamlpool_reserve_block(tag: u8, size: usize) -> usize;
    fn ocamlpool_leave();
}
// This test attempts to catch off by one issues in ocamlpool.c
// Magic constant needs to fulfill two requirements:
// Needs to be above the OCAMLPOOL_DEFAULT_SIZE constant in ocamlpool.h
// This requirement is easy to fulfill
// Needs to be the exact size of memory block allocated by ocamlpool_reserve_block
// which is given by the Chunk_size call in chunk_alloc in ocamlpool.c
// This requirement requires some magic
const MAGIC_MEMORY_SIZE: usize = 1053183;

// Declarations for OCaml functions callable from Rust — presumably registered
// on the OCaml side (see ocamlpool_test.ml); confirm in the macro's docs.
ocaml_registered_function! {
    fn f_unit_to_unit();
    fn f_one_arg_to_unit(x: i64);
    fn f_sum_tuple(args: (i64, i64)) -> i64;
}
// Functions exported to OCaml via the ocaml_ffi! macro.
ocaml_ffi! {
    // Allocates a block whose size exactly fills a chunk; an off-by-one in
    // ocamlpool.c's chunk accounting crashes here (see MAGIC_MEMORY_SIZE).
    fn test() {
        unsafe {
            ocamlpool_enter();
            // This line will crash on off by one error
            ocamlpool_reserve_block(0, MAGIC_MEMORY_SIZE);
            ocamlpool_leave();
        }
    }
    // Round-trips calls into the registered OCaml functions several times to
    // exercise the Rust -> OCaml calling path repeatedly.
    fn test_call_ocaml_from_rust() {
        for _ in 0..4 {
            unsafe {
                f_unit_to_unit();
                f_one_arg_to_unit(3);
                assert!(f_sum_tuple((3, 4)) == 7);
            }
        }
    }
}
// [Note: Test blocks for Cargo]
// -----------------------------
// With buck, where testing involves compiling OCaml we make use of
// `ocaml_binary` & `custom_unittest` rules.
//
// When testing with cargo we instead use `#[cfg(test_blocks)]` within which we
// compile OCaml "manually" using the rust `command` crate. Thus, in these cases
// the `#[cfg(test)]` blocks are for cargo only & not buck. We express that
// according to the following schema:
// ```
// rust_library(
// name = "foo_test",
// ...
// autocargo = {
// ...
// "test": True, // Yes, unittests for Cargo...
// ...
// },
// ...
// unittests=False, // No! No, no unittests for Buck!
// ...
// )
//```
//
// If in such a `#[cfg(test)]` block we now wish to use a crate not otherwise
// depended upon & put it in the target's `deps` section in `TARGETS` there
// will be an unused-crate error. If we put it in the target's `test_deps`
// section in `TARGETS` buck will rightly complain that `unittests=False` (so
// how can there be `test_deps`?).
//
// The workaround I employ is to add `allow(unused_crate_dependencies)` to this
// module. That way they can be enumerated in the `deps` and are thereby
// available for use in the `#[cfg(test)]` blocks.
#[cfg(test)]
mod tests {
    use anyhow::Result;
    use cargo_test_utils::*;
    use tempfile::TempDir;

    // End-to-end (Cargo-only; see [Note: Test blocks for Cargo] above):
    // compile ocamlpool_test.ml, link it against the cargo-built native
    // libraries, then run the produced binary.
    #[test]
    fn ocamlpool_test() -> Result<()> {
        let tmp_dir = TempDir::with_prefix("ocamlpool_test.")?;
        std::fs::copy(
            "ocamlpool_test.ml",
            tmp_dir.path().join("ocamlpool_test.ml"),
        )?;
        // Step 1: compile the OCaml driver to a .cmx object.
        let compile_cmd = cmd(
            "ocamlopt.opt",
            &[
                "-verbose",
                "-c",
                "ocamlpool_test.ml",
                "-o",
                "ocamlpool_test_ml.cmx",
            ],
            Some(tmp_dir.path()),
        );
        assert_eq!(run(compile_cmd).map_err(fmt_exit_status_err), Ok(()));
        // Step 2: link, pointing the C linker at cargo's target directory.
        let link_cmd = cmd(
            "ocamlopt.opt",
            &[
                "-verbose",
                "-o",
                "ocamlpool_test",
                "ocamlpool_test_ml.cmx",
                "-ccopt",
                &("-L".to_owned() + workspace_dir(&["target", build_flavor()]).to_str().unwrap()),
                "-cclib",
                "-locamlpool_test",
                "-cclib",
                "-locamlrep_ocamlpool",
            ],
            Some(tmp_dir.path()),
        );
        assert_eq!(run(link_cmd).map_err(fmt_exit_status_err), Ok(()));
        // Step 3: execute the linked binary; it drives the FFI entry points.
        let ocamlpool_test_cmd = cmd(
            tmp_dir
                .path()
                .join("ocamlpool_test")
                .as_path()
                .to_str()
                .unwrap(),
            &[],
            None,
        );
        assert_eq!(run(ocamlpool_test_cmd).map_err(fmt_exit_status_err), Ok(()));
        tmp_dir.close()?;
        Ok(())
    }
}
| rust | MIT | 3ed6e41c17c6d05e19121b59beb8efbb1a7ce3b6 | 2026-01-04T20:16:50.959951Z | false |
facebook/ocamlrep | https://github.com/facebook/ocamlrep/blob/3ed6e41c17c6d05e19121b59beb8efbb1a7ce3b6/signed_source/signed_source.rs | signed_source/signed_source.rs | // Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use bstr::ByteSlice;
use once_cell::sync::Lazy;
use regex::bytes::Regex;
/// This crate is a port of hphp/hack/src/utils/signed_source.ml, which was
/// based on a historical version of fbsource/tools/signedsource.py.
///
/// The signing token, which you must embed in the file you wish to sign.
/// Generally, you should put this in a header comment.
pub static SIGNING_TOKEN: &str = concat!(
"@",
"generated",
" ",
"<<SignedSource::*O*zOeWoEQle#+L!plEphiEmie@IsG>>"
);
/// Sign a source file into which you have previously embedded a signing token.
/// Signing modifies only the signing token, so the semantics of the file will
/// not change if the token is put in a comment.
///
/// Returns `TokenNotFoundError` if no signing token is present.
pub fn sign_file(data: &[u8]) -> Result<Vec<u8>, TokenNotFoundError> {
    // Normalize first: replace any existing signature or old-style token with
    // the current token, so re-signing already-signed data is idempotent.
    let data = SIGN_OR_OLD_TOKEN.replace_all(data, TOKEN.as_bytes());
    if !data.contains_str(TOKEN) {
        return Err(TokenNotFoundError);
    }
    // Hash is computed over the data with the *unsigned* token in place, then
    // the signature is substituted in; `verify_signature` mirrors this.
    let signature = format!("SignedSource<<{}>>", hash(&data));
    Ok(TOKEN_REGEX
        .replace_all(&data, signature.as_bytes())
        .into_owned())
}
/// Sign a UTF-8 source file into which you have previously embedded a signing
/// token.
///
/// Signing modifies only the signing token, so the semantics of the file
/// will not change if the token is put in a comment.
///
/// Returns `TokenNotFoundError` if no signing token is present.
pub fn sign_utf8_file(data: &str) -> Result<String, TokenNotFoundError> {
    // Delegate to the byte-level signer; only the token/signature span changes.
    let data = sign_file(data.as_bytes())?;
    // SAFETY: `data` was a valid `&str` before signing, and signing only
    // replaces ASCII characters with other ASCII characters, so the result
    // remains valid UTF-8.
    unsafe { Ok(String::from_utf8_unchecked(data)) }
}
/// Determine whether a file is signed. This does NOT verify the signature.
pub fn is_signed(data: &[u8]) -> bool {
    SIGNING_REGEX.is_match(data)
}

/// Outcome of `verify_signature`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum SignCheckResponse {
    Ok,       // signature present and matches the content hash
    Unsigned, // no signing marker found
    Invalid,  // signature present but does not match
}
/// Verify a file's signature.
pub fn verify_signature(data: &[u8]) -> SignCheckResponse {
    // Extract the embedded md5 hex digest from the signing marker.
    let expected_md5 = match SIGNING_REGEX.captures(data) {
        None => return SignCheckResponse::Unsigned,
        Some(caps) => match caps.get(1) {
            None => return SignCheckResponse::Unsigned,
            Some(cap) => cap.as_bytes(),
        },
    };
    // Recompute the hash with the signature replaced by a raw token, mirroring
    // how `sign_file` computed it. Try both the current and historical token,
    // since files signed long ago were hashed with OLD_TOKEN in place.
    for tok in [TOKEN, OLD_TOKEN] {
        let replacement = make_signing_token(tok);
        let unsigned_data = SIGNING_REGEX.replace_all(data, replacement.as_bytes());
        let actual_md5 = hash(&unsigned_data);
        if expected_md5 == actual_md5.as_bytes() {
            return SignCheckResponse::Ok;
        }
    }
    SignCheckResponse::Invalid
}
/// The placeholder embedded (in place of a signature) while hashing.
static TOKEN: &str = "<<SignedSource::*O*zOeWoEQle#+L!plEphiEmie@IsG>>";

/// This old token was historically used as the signing token. It was replaced
/// because it is 2 characters shorter than the final signature, and as a result,
/// signing data with the old token forced the entire string to be rewritten
/// (everything after the token needs to be shifted forwards 2 bytes).
/// In this implementation, we rewrite the entire string anyway.
static OLD_TOKEN: &str = "<<SignedSource::*O*zOeWoEQle#+L!plEphiEmie@I>>";
/// Builds the full "@generated <token>" marker. Assembled piecewise so this
/// source file never contains the contiguous generated-file marker itself.
fn make_signing_token(token: &str) -> String {
    let mut marker = String::with_capacity("@generated ".len() + token.len());
    marker.push('@');
    marker.push_str("generated");
    marker.push(' ');
    marker.push_str(token);
    marker
}
/// Regex fragment matching a signature; capture 1 is the hex digest.
static SIGNATURE_RE: &str = r"SignedSource<<([a-f0-9]+)>>";

/// Matches an existing signature or the old-style token, so re-signing
/// previously signed (or old-token) data first normalizes it to `TOKEN`.
static SIGN_OR_OLD_TOKEN: Lazy<Regex> =
    Lazy::new(|| Regex::new(&format!("{}|{}", SIGNATURE_RE, regex::escape(OLD_TOKEN))).unwrap());

/// Matches the full "@generated SignedSource<<hash>>" marker.
static SIGNING_REGEX: Lazy<Regex> =
    Lazy::new(|| Regex::new(&make_signing_token(SIGNATURE_RE)).unwrap());

/// Matches the literal token; escaped because it contains regex metacharacters.
/// BUGFIX: restored `&regex::escape(...)` — the source contained the mojibake
/// `®ex::escape(...)` (a mangled "&reg"), which does not compile.
static TOKEN_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(&regex::escape(TOKEN)).unwrap());
/// md5 of `data` as a lowercase hex string. md5 here is a checksum for
/// detecting accidental edits to generated files, not a security measure.
fn hash(data: &[u8]) -> String {
    use md5::Digest;
    let mut digest = md5::Md5::new();
    digest.update(data);
    hex::encode(digest.finalize())
}
/// Returned by `sign_file`/`sign_utf8_file` when the input contains neither
/// the signing token nor an existing signature to replace.
#[derive(Debug, thiserror::Error, PartialEq, Eq)]
#[error("Failed to sign file: input does not contain signing token")]
pub struct TokenNotFoundError;
#[cfg(test)]
mod test {
    use super::SIGNING_TOKEN;
    use super::SignCheckResponse;
    use super::TOKEN;
    use super::TokenNotFoundError;
    use super::is_signed;
    use super::make_signing_token;
    use super::sign_utf8_file;
    use super::verify_signature;

    // The "@" is split out via concat! so this test file does not itself
    // contain a well-formed generated-file marker.
    static NO_TOKEN: &str = concat!("// @", "generated\nfn foo() {}");
    static INVALID: &str = concat!(
        "// @",
        "generated SignedSource<<48ab1081d9394843f184debf0b251a18>>\nfn foo() {}"
    );
    static UNSIGNED: &str = concat!(
        "// @",
        "generated <<SignedSource::*O*zOeWoEQle#+L!plEphiEmie@IsG>>\nfn foo() {}"
    );
    // Below signature was manually verified to be equal to the OCaml
    // Signed_source output for `UNSIGNED`.
    static SIGNED: &str = concat!(
        "// @",
        "generated SignedSource<<38ab1081d9394843f184debf0b251a18>>\nfn foo() {}"
    );

    #[test]
    fn test_signing_token() {
        // We use `concat!` so that `SIGNING_TOKEN` can be a `&str` rather than
        // a `Lazy`, since `make_signing_token` can't be a `const fn` yet.
        // Verify that we're producing the same result.
        assert_eq!(SIGNING_TOKEN, make_signing_token(TOKEN))
    }

    #[test]
    fn test_sign_utf8_file() {
        // Signing is idempotent: re-signing already-signed data is a no-op.
        assert_eq!(sign_utf8_file(UNSIGNED), Ok(SIGNED.to_owned()));
        assert_eq!(sign_utf8_file(SIGNED), Ok(SIGNED.to_owned()));
        assert_eq!(sign_utf8_file(NO_TOKEN), Err(TokenNotFoundError));
    }

    #[test]
    fn test_is_signed() {
        assert!(is_signed(SIGNED.as_bytes()));
        assert!(is_signed(INVALID.as_bytes())); // `is_signed` doesn't validate
        assert!(!is_signed(NO_TOKEN.as_bytes()));
        assert!(!is_signed(UNSIGNED.as_bytes()));
    }

    #[test]
    fn test_verify_signature() {
        assert_eq!(verify_signature(SIGNED.as_bytes()), SignCheckResponse::Ok);
        assert_eq!(
            verify_signature(INVALID.as_bytes()),
            SignCheckResponse::Invalid
        );
        assert_eq!(
            verify_signature(NO_TOKEN.as_bytes()),
            SignCheckResponse::Unsigned
        );
        assert_eq!(
            verify_signature(UNSIGNED.as_bytes()),
            SignCheckResponse::Unsigned
        );
    }
}
| rust | MIT | 3ed6e41c17c6d05e19121b59beb8efbb1a7ce3b6 | 2026-01-04T20:16:50.959951Z | false |
kurtbuilds/checkexec | https://github.com/kurtbuilds/checkexec/blob/55cb83eb0a724581fc715e5b6763be4fc85e570a/src/main.rs | src/main.rs | use std::borrow::Cow;
use std::fmt::{Display};
use std::path::{Path};
use std::process::{exit, Command};
use clap::{App, AppSettings, Arg};
use std::fs;
use shell_escape::escape;
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Simple message-carrying error type for the whole binary.
struct Error {
    message: String,
}

// Debug and Display both render just the message, so that
// `fn main() -> Result<(), Error>` prints a clean, human-readable error
// instead of a struct dump.
impl std::fmt::Debug for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.message)
    }
}

impl std::error::Error for Error {}

/// Constructs an `Error` from `format!`-style arguments.
macro_rules! err {
    ($($arg:tt)*) => {
        Error {
            message: format!($($arg)*),
        }
    }
}
/// Treats every command argument that names an accessible file as a
/// dependency. Errors when none of the arguments point at an existing file,
/// since an empty inferred dependency list would make the check vacuous.
fn infer_dependencies<'a>(command: &[&'a str]) -> Result<Vec<&'a str>, Error> {
    let inferred_deps = command.iter()
        .filter_map(|s| fs::metadata(s).ok().map(|_| *s))
        .collect::<Vec<&str>>();
    if inferred_deps.is_empty() {
        Err(err!("--infer must find at least one accessible file in command arguments. Command arguments are: {}",
            command.iter().map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" ")
        ))
    } else {
        Ok(inferred_deps)
    }
}
/// `make`-style rebuild rule: returns Ok(true) when the target is missing or
/// any dependency was modified more recently than the target; Ok(false) when
/// the target is up to date. Errors if a dependency (or a modification time)
/// cannot be read.
fn should_execute<T: AsRef<Path> + Display>(target: &str, dependencies: &[T]) -> Result<bool, Error> {
    match fs::metadata(target) {
        Ok(meta) => {
            // Report an error instead of panicking when the platform or
            // filesystem cannot supply a modification time (previously an
            // `unwrap()` here would crash the whole tool).
            let modified = meta
                .modified()
                .map_err(|_| err!("{}: Could not read file modification time", target))?;
            for dependency in dependencies {
                let dep_meta = fs::metadata(dependency)
                    .map_err(|_| err!("{}: Could not read file metadata", &dependency))?;
                let dep_modified = dep_meta
                    .modified()
                    .map_err(|_| err!("{}: Could not read file modification time", &dependency))?;
                if dep_modified > modified {
                    return Ok(true);
                }
            }
            Ok(false)
        }
        // Target doesn't exist yet: always run the command.
        Err(_) => Ok(true),
    }
}
/// CLI entry point: `checkexec <target> [dependencies..] -- <command..>`.
/// Runs `<command>` only when `should_execute` says the target is stale.
fn main() -> std::result::Result<(), Error> {
    let args = App::new("checkexec")
        .version(VERSION)
        .about("Conditionally run a command (like `make`)")
        .setting(AppSettings::ArgRequiredElseHelp)
        // Everything after `--` is captured verbatim as the command.
        .setting(AppSettings::TrailingVarArg)
        .arg(Arg::new("target")
            .help("The file created by this checkexec execution.")
            .required(true)
        )
        .arg(Arg::new("verbose")
            .long("verbose")
            .short('v')
            .takes_value(false)
        )
        .arg(Arg::new("infer")
            .long("infer")
            .takes_value(false)
            .conflicts_with("dependencies")
            .help("Infer the dependency list. The inference takes all arguments to the command, filters it for files, and uses that list. \
--infer causes checkexec to fail if it creates an empty dependency list.")
        )
        .arg(Arg::new("dependencies").min_values(0)
            .help("The list of files")
        )
        .arg(Arg::new("command").min_values(1)
            .last(true)
            .required(true)
            .help("The command to execute if the check passes.")
        )
        .get_matches();
    let verbose = args.is_present("verbose");
    let target = args.value_of("target").unwrap();
    // Arguments to the command (executable name skipped) — used for --infer.
    let command_args = args.values_of("command").unwrap().into_iter().skip(1).collect::<Vec<&str>>();
    let dependencies = if args.is_present("infer") {
        infer_dependencies(&command_args)?
    } else {
        args.values_of("dependencies").map(|d| d.collect::<Vec<&str>>()).unwrap_or_default()
    }
    // Dependencies may arrive as newline-delimited lists (e.g. from shell
    // command substitution), so split each argument on newlines.
    .iter()
    .flat_map(|s| s.split('\n'))
    .collect::<Vec<&str>>();
    if verbose {
        eprintln!("Found {} dependencies:\n{}", dependencies.len(), dependencies.iter().map(|d| escape(Cow::Borrowed(d))).collect::<Vec<_>>().join("\n"));
    }
    if should_execute(target, &dependencies)? {
        let command = args.values_of("command").unwrap().collect::<Vec<&str>>();
        if verbose {
            eprintln!("{} {}", command[0], command.iter().skip(1).map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" "));
        }
        let output = Command::new(command[0])
            .args(command[1..].iter())
            .status()
            .map_err(|_| err!("{}: command not found", command[0]))?;
        // Propagate the child's exit code.
        // NOTE(review): `code()` is None when the child is killed by a signal
        // (Unix), so this unwrap can panic — confirm whether a fixed nonzero
        // exit would be preferable in that case.
        exit(output.code().unwrap());
    }
    Ok(())
}
#[cfg(test)]
mod test {
    use std::io::Write;
    use super::*;
    use tempfile::{TempDir, tempdir};

    // Holds the TempDir alongside the paths so the directory outlives the test.
    struct TempFiles {
        #[allow(dead_code)]
        dir: TempDir,
        pub files: Vec<String>,
    }

    // Create an empty file at `path`.
    fn touch(path: &str) -> std::io::Result<()> {
        let mut file = fs::File::create(path).unwrap();
        file.write_all(b"")
    }

    // Creates `touched` files (with strictly increasing mtimes) and returns
    // paths for `untouched` additional files that are never actually created.
    fn touch_and_untouch(touched: usize, untouched: usize) -> TempFiles {
        let tempdir = tempdir().unwrap();
        let dir = tempdir.path();
        let mut files: Vec<String> = Vec::new();
        files.extend((0..touched).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string()));
        files.extend((touched..(touched + untouched)).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string()));
        for file in files.iter().take(touched) {
            touch(file).unwrap();
            // tries to eliminate ties between files. 1ms should be more than enough
            // and we dont need a ton of tests for this program where 1ms is noticeable.
            // apparently 1ms isn't enough for github actions because ...... reasons?
            std::thread::sleep(std::time::Duration::from_millis(10));
        }
        TempFiles {
            dir: tempdir,
            files,
        }
    }

    #[test]
    fn test_infer_dependencies() {
        let TempFiles { dir: _dir, files } = touch_and_untouch(3, 0);
        let dependencies = infer_dependencies(&["cc",
            &files[0],
            &files[1]]).unwrap();
        assert_eq!(dependencies, vec![&files[0], &files[1]]);
    }

    #[test]
    fn test_no_inferred_dependencies_errors() {
        let TempFiles { dir: _dir, files } = touch_and_untouch(0, 1);
        assert!(infer_dependencies(&["cc",
            &files[0]]).is_err())
    }

    #[test]
    fn test_should_execute_errors_on_failed_dependency_access() {
        let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1);
        assert!(should_execute(&files[0], &files[1..]).is_err(), "Should have failed to access file");
    }

    #[test]
    fn test_should_execute_target_doesnt_exist() {
        let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1);
        assert!(should_execute(&files[1], &files[0..1]).unwrap(), "Should execute because target doesn't exist");
    }

    #[test]
    fn test_should_not_execute_newer_target() {
        let TempFiles { dir: _dir, files } = touch_and_untouch(2, 0);
        assert!(!should_execute(&files[1], &files[0..1]).unwrap(), "Should not execute because target is newer");
    }

    #[test]
    fn test_should_execute_newer_dependencies() {
        let TempFiles { dir: _dir, files } = touch_and_untouch(2, 0);
        assert!(should_execute(&files[0], &files[1..]).unwrap())
    }
}
| rust | MIT | 55cb83eb0a724581fc715e5b6763be4fc85e570a | 2026-01-04T20:17:04.062470Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/error_correction.rs | src/error_correction.rs | use crate::{
constants::{GEN_POLYNOMIALS, NUM_BLOCKS, NUM_DATA_MODULES, NUM_EC_CODEWORDS},
data::Data,
math::{EXP_TABLE, LOG_TABLE},
};
/// Pads `data` to full capacity, computes Reed-Solomon error correction per
/// block, and returns all codewords in the final interleaved transmission
/// order (plus a zeroed byte for remainder bits, if the version has any).
pub fn ecc_and_sequence(mut data: Data) -> Vec<u8> {
    let modules = NUM_DATA_MODULES[data.version.0] as usize;
    let codewords = modules / 8;
    let remainder_bits = modules % 8;

    let num_ec_codewords = NUM_EC_CODEWORDS[data.version.0][data.ecl as usize] as usize;
    let num_data_codewords = codewords - num_ec_codewords;

    // terminator: up to 4 zero bits, fewer if there's no room left
    let remainder_data_bits = (num_data_codewords * 8) - (data.bits.len());
    let term_len = if remainder_data_bits < 4 {
        remainder_data_bits
    } else {
        4
    };
    data.bits.push_n(0, term_len);

    // byte align
    let byte_pad = (8 - (data.bits.len() % 8)) % 8;
    data.bits.push_n(0, byte_pad);

    // fill data capacity with the alternating pad bytes 0b1110_1100 / 0b0001_0001
    // (the XOR flips between the two)
    let data_pad = num_data_codewords - (data.bits.len() / 8);
    let mut alternating_byte = 0b1110_1100;
    for _ in 0..data_pad {
        data.bits.push_n(alternating_byte, 8);
        alternating_byte ^= 0b1111_1101;
    }

    // Blocks are split into two groups; each group 2 block carries one extra
    // data codeword.
    let blocks = NUM_BLOCKS[data.version.0][data.ecl as usize] as usize;
    let group_2_blocks = codewords % blocks;
    let group_1_blocks = blocks - group_2_blocks;

    let byte_vec = data.bits.to_bytes();

    let data_per_g1_block = num_data_codewords / blocks;
    let data_per_g2_block = data_per_g1_block + 1;

    let ecc_per_block = num_ec_codewords / blocks;
    let mut interleaved = vec![0; codewords + (remainder_bits + 7) / 8];

    // Interleave data codewords column-major across blocks: output position is
    // (codeword index within block) * blocks + (block index).
    for i in 0..group_1_blocks * data_per_g1_block {
        let col = i % data_per_g1_block;
        let row = i / data_per_g1_block;
        interleaved[col * blocks + row] = byte_vec[i];
    }
    for i in 0..group_2_blocks * data_per_g2_block {
        let col = i % data_per_g2_block;
        let row = i / data_per_g2_block;
        // 0 iff last column, else group_1_blocks
        // (the extra final column exists only in group 2 blocks)
        let row_offset = (1 - (col / (data_per_g2_block - 1))) * group_1_blocks;
        interleaved[col * blocks + row + row_offset] =
            byte_vec[i + (group_1_blocks * data_per_g1_block)];
    }

    // Compute EC codewords per block and interleave them after the data.
    let divisor = &GEN_POLYNOMIALS[ecc_per_block][..ecc_per_block];
    for i in 0..group_1_blocks {
        let start = i * data_per_g1_block;
        let ec_codewords = remainder(&byte_vec[(start)..(start + data_per_g1_block)], divisor);
        for j in 0..ec_codewords.len() {
            interleaved[num_data_codewords + j * blocks + i] = ec_codewords[j];
        }
    }
    let group_2_start = group_1_blocks * data_per_g1_block;
    for i in 0..group_2_blocks {
        let start = group_2_start + i * data_per_g2_block;
        let ec_codewords = remainder(&byte_vec[(start)..(start + data_per_g2_block)], divisor);
        for j in 0..ec_codewords.len() {
            interleaved[num_data_codewords + j * blocks + i + group_1_blocks] = ec_codewords[j];
        }
    }

    interleaved
}
// todo
// benchmark potential optimizations
/// Polynomial remainder of `data`·x^n divided by `generator` over GF(2^8) —
/// i.e. the Reed-Solomon error-correction codewords for one block.
pub fn remainder(data: &[u8], generator: &[u8]) -> Vec<u8> {
    let num_codewords = generator.len();
    // Fixed scratch buffer; 123 + 30 presumably bounds the max data codewords
    // per block plus the max EC codewords per block — TODO confirm against the
    // tables in constants.rs (panics if data.len() + num_codewords exceeds it).
    let mut base = [0; 123 + 30];
    base[..data.len()].copy_from_slice(data);
    for i in 0..data.len() {
        if base[i] == 0 {
            continue;
        }
        // Multiplication in GF(2^8) via log/exp tables: log of the leading
        // coefficient is added to each generator coefficient's exponent.
        let alpha_diff = LOG_TABLE[base[i] as usize];
        // `generator` entries appear to be stored as alpha exponents with the
        // leading 1 coefficient implicit (note the `i + j + 1` offset) —
        // see GEN_POLYNOMIALS in constants.rs.
        for j in 0..num_codewords {
            base[i + j + 1] ^= EXP_TABLE[(generator[j] as usize + alpha_diff as usize) % 255];
        }
    }
    base[data.len()..(data.len() + num_codewords)].to_vec()
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/lib.rs | src/lib.rs | pub mod constants;
pub mod math;
pub mod data;
pub mod encoding;
pub mod error_correction;
pub mod mask;
pub mod matrix;
pub mod qr_code;
pub mod bit_info;
pub mod qart;
pub mod render;
#[cfg(feature = "wasm")]
mod wasm;
use crate::data::Data;
use crate::qr_code::{Mask, Mode, Version, ECL};
use encoding::encoding_mode;
use qart::{Qart, WeightPixel};
use qr_code::QrCode;
#[cfg(feature = "wasm")]
use wasm_bindgen::prelude::*;
// Options controlling QR generation; built fluently via the methods below.
#[cfg_attr(feature = "wasm", wasm_bindgen)]
#[derive(Debug)]
pub struct QrOptions {
    min_version: Version,
    min_ecl: ECL,
    mode: Option<Mode>, // None = auto-detect from the input
    mask: Option<Mask>, // None = choose by penalty score
    strict_version: bool,
    strict_ecl: bool,
}
#[cfg_attr(feature = "wasm", wasm_bindgen)]
impl QrOptions {
    /// Defaults: version >= 1, ECL >= Low, auto mode, auto mask, non-strict.
    #[cfg_attr(feature = "wasm", wasm_bindgen(constructor))]
    pub fn new() -> Self {
        QrOptions {
            min_version: Version(1),
            strict_version: false,
            min_ecl: ECL::Low,
            strict_ecl: false,
            mode: None,
            mask: None,
        }
    }
    /// Smallest version (symbol size) to consider.
    pub fn min_version(mut self, version: Version) -> Self {
        self.min_version = version;
        self
    }
    /// Smallest error correction level to consider.
    pub fn min_ecl(mut self, ecl: ECL) -> Self {
        self.min_ecl = ecl;
        self
    }
    /// `None` auto-selects the densest mode that can represent the input.
    pub fn mode(mut self, mode: Option<Mode>) -> Self {
        self.mode = mode;
        self
    }
    /// `None` auto-selects the mask with the best penalty score.
    pub fn mask(mut self, mask: Option<Mask>) -> Self {
        self.mask = mask;
        self
    }
    /// If true, `min_version` is treated as a hard requirement rather than a
    /// lower bound — presumably enforced in `Data::new_verbose`; confirm there.
    pub fn strict_version(mut self, strict: bool) -> Self {
        self.strict_version = strict;
        self
    }
    /// If true, `min_ecl` is treated as a hard requirement rather than a
    /// lower bound — presumably enforced in `Data::new_verbose`; confirm there.
    pub fn strict_ecl(mut self, strict: bool) -> Self {
        self.strict_ecl = strict;
        self
    }
}
#[cfg_attr(feature = "wasm", wasm_bindgen)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum QrError {
    InvalidEncoding,    // requested mode cannot represent the input
    ExceedsMaxCapacity, // input fits no allowed version/ECL combination
}
/// Generates a QR code for `input` under the given options.
/// Resolves mode/version/ECL first, then builds and masks the matrix.
pub fn generate(input: &str, qr_options: &QrOptions) -> Result<QrCode, QrError> {
    let data = resolve_data(input, qr_options)?;
    Ok(QrCode::new(data, qr_options.mask))
}
#[cfg_attr(feature = "wasm", wasm_bindgen)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum QartError {
    InvalidEncoding,
    ExceedsMaxCapacity,
    InvalidPixelWeights, // pixel_weights length != qr_width * qr_width
}

impl From<QrError> for QartError {
    // QartError is a strict superset of QrError; map shared variants across.
    fn from(value: QrError) -> Self {
        match value {
            QrError::InvalidEncoding => QartError::InvalidEncoding,
            QrError::ExceedsMaxCapacity => QartError::ExceedsMaxCapacity,
        }
    }
}
/// Generates a QArt-style code: like `generate`, but steers free pixels toward
/// `pixel_weights`, which must supply one weight per module of the final code.
pub fn generate_qart(
    input: &str,
    qr_options: &QrOptions,
    pixel_weights: &[WeightPixel],
) -> Result<QrCode, QartError> {
    let data = match resolve_data(input, qr_options) {
        Ok(data) => data,
        Err(err) => return Err(err.into()),
    };
    // Modules per side for this version; pixel_weights must cover the square.
    let qr_width = data.version.0 * 4 + 17;
    if pixel_weights.len() != qr_width * qr_width {
        return Err(QartError::InvalidPixelWeights);
    }
    // Qart requires a concrete mask; default to M0 when none was specified.
    let qart = Qart::new(data, qr_options.mask.unwrap_or(Mask::M0));
    let qr_code = qart.to_qr_code(&pixel_weights);
    Ok(qr_code)
}
/// Picks the encoding mode and delegates version/ECL selection to
/// `Data::new_verbose`. A user-specified mode is honored only if it can
/// actually represent `input` (e.g. Numeric cannot encode letters).
fn resolve_data(input: &str, qr_options: &QrOptions) -> Result<Data, QrError> {
    let mut mode = Mode::Byte;
    if let Some(specified) = qr_options.mode {
        if specified != Mode::Byte {
            let lowest = encoding_mode(input);
            // Mode discriminants are ordered Numeric < Alphanumeric < Byte, so
            // a larger value means a more general encoding is required.
            if (lowest as u8) > (specified as u8) {
                return Err(QrError::InvalidEncoding);
            }
            mode = specified;
        }
    } else {
        mode = encoding_mode(input);
    }
    let data = Data::new_verbose(
        input,
        mode,
        qr_options.min_version,
        qr_options.strict_version,
        qr_options.min_ecl,
        qr_options.strict_ecl,
    );
    // None means the input fits no allowed version/ECL combination.
    match data {
        Some(x) => Ok(x),
        None => Err(QrError::ExceedsMaxCapacity),
    }
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/math.rs | src/math.rs | // note: a 256 * 256 mult table is a possible alternative
pub const EXP_TABLE: [u8; 255] = exp_table();
pub const LOG_TABLE: [u8; 256] = log_table();

/// Powers of 2 in GF(2^8): `EXP_TABLE[i] == 2^i` for i in 0..=254.
/// The field is reduced by the QR polynomial x^8 + x^4 + x^3 + x^2 + 1 (0x11d).
const fn exp_table() -> [u8; 255] {
    let mut array = [0; 255];
    array[0] = 1;
    let mut i = 1;
    while i < 255 {
        // Multiply the previous power by 2 (shift left); when the shift
        // overflows out of 8 bits, reduce by the field polynomial.
        array[i] = array[i - 1] << 1;
        if array[i - 1] & 0b1000_0000 != 0 {
            // 2^4 + 2^3 + 2^2 + 2^0
            array[i] ^= 0b0001_1101;
        }
        i += 1;
    }
    array
}

/// Discrete log base 2 in GF(2^8): `LOG_TABLE[x] == i` such that 2^i == x,
/// for x in 1..=255. `LOG_TABLE[0]` is 0 but meaningless (0 has no log);
/// callers must not look it up.
const fn log_table() -> [u8; 256] {
    // Built as the inverse permutation of EXP_TABLE in one O(n) pass: 2 is a
    // primitive element, so EXP_TABLE hits every value 1..=255 exactly once.
    // (Previously this did an O(n^2) linear search per entry.)
    let mut array = [0; 256];
    let mut i = 0;
    while i < 255 {
        array[EXP_TABLE[i] as usize] = i as u8;
        i += 1;
    }
    array
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/bit_info.rs | src/bit_info.rs | use std::ops::BitOrAssign;
use crate::{
constants::{NUM_BLOCKS, NUM_DATA_MODULES, NUM_EC_CODEWORDS},
matrix::{Matrix, Module},
qr_code::{Mask, Mode, Version, ECL},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Info {
    /// Module except meaning changes when DATA set
    pub module: Module,
    /// 0-indexed error correction block
    pub block: u8,
    /// bit index within block
    pub bit: u16,
}

impl Info {
    // When the DATA flag is set, the other flag bits are repurposed to
    // classify the module's role rather than describe its appearance.
    pub const DATA: Module = Module(Module::DATA.0 | Module::ON.0);
    pub const EC: Module = Module(Module::DATA.0 | Module::MODIFIER.0);
    pub const REMAINDER: Module = Module(Module::DATA.0);
}

impl From<Info> for Module {
    // Drops the block/bit coordinates, keeping only the module flags.
    fn from(value: Info) -> Self {
        value.module
    }
}

impl From<Module> for Info {
    // Wraps a plain module with zeroed block/bit coordinates.
    fn from(module: Module) -> Self {
        Info {
            module,
            block: 0,
            bit: 0,
        }
    }
}

impl BitOrAssign<Module> for Info {
    // Lets matrix helpers that OR flags onto cells work on `Info` cells too.
    fn bitor_assign(&mut self, rhs: Module) {
        self.module |= rhs;
    }
}
/// A matrix annotating, for each module, its role (data / EC / remainder) and
/// its (block, bit) position within the interleaved codeword sequence.
#[derive(Debug)]
pub struct BitInfo {
    pub matrix: Matrix<Info>,
    pub mode: Mode,
    pub version: Version,
    pub ecl: ECL,
    pub mask: Mask,
}
impl BitInfo {
    /// Builds the annotated matrix: function patterns are placed first, then
    /// every data-region module is labeled DATA / EC / REMAINDER along with
    /// the (block, bit) coordinates that invert the codeword interleaving
    /// performed by `ecc_and_sequence`.
    pub fn new(mode: Mode, version: Version, ecl: ECL, mask: Mask) -> Self {
        let mut bit_info = BitInfo {
            matrix: Matrix::new(
                version,
                Info {
                    module: Module(0),
                    block: 0,
                    bit: 0,
                },
            ),
            mode,
            version,
            ecl,
            mask,
        };
        bit_info.matrix.set_finder();
        bit_info.matrix.set_alignment();
        bit_info.matrix.set_timing();
        bit_info.matrix.set_format(bit_info.ecl, mask);
        bit_info.matrix.set_version();

        let modules = NUM_DATA_MODULES[bit_info.version.0] as usize;
        let codewords = modules / 8;
        let num_ec_codewords = NUM_EC_CODEWORDS[bit_info.version.0][bit_info.ecl as usize] as usize;
        let num_data_codewords = codewords - num_ec_codewords;

        let blocks = NUM_BLOCKS[bit_info.version.0][bit_info.ecl as usize] as usize;
        let group_2_blocks = codewords % blocks;
        let group_1_blocks = blocks - group_2_blocks;
        let data_per_g1_block = num_data_codewords / blocks;

        // Both bounds are in *bits* (modules), not bytes.
        let data_end = num_data_codewords * 8;
        let ecc_end = codewords * 8;

        let mut i = 0;
        let mut block = 0;
        let mut bit = 0;
        bit_info.matrix.set_data(|| {
            let val = Info {
                module: match i {
                    j if j < data_end => Info::DATA,
                    // BUGFIX: was `j < ecc_end * 8`, but `ecc_end` is already
                    // a bit count — the extra *8 misclassified the remainder
                    // bits as EC and made Info::REMAINDER unreachable.
                    j if j < ecc_end => Info::EC,
                    _ => Info::REMAINDER,
                },
                block,
                bit,
            };
            bit += 1;
            i += 1;
            if i % 8 != 0 {
                return val;
            }
            // On each byte boundary, compute the (block, bit) coordinates of
            // the next byte by inverting the interleaving order.
            if i < data_end {
                let byte_i = i / 8;
                let col = byte_i / blocks;
                let row = if col < data_per_g1_block {
                    byte_i % blocks
                } else {
                    // The final column exists only in group 2 blocks.
                    (byte_i + group_1_blocks) % blocks
                };
                block = row as u8;
                bit = (col * 8) as u16;
            } else if i < ecc_end {
                let ecc_i = (i / 8) - num_data_codewords;
                let col = ecc_i / blocks;
                let row = ecc_i % blocks;
                block = row as u8;
                bit = if row < group_1_blocks {
                    (data_per_g1_block + col) * 8
                } else {
                    // Group 2 data blocks are one codeword longer.
                    (data_per_g1_block + 1 + col) * 8
                } as u16
            } else {
                block = 0;
                bit = 0;
            }
            val
        });

        bit_info
    }
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/mask.rs | src/mask.rs | use crate::matrix::{Matrix, Module};
// todo UNTESTED CODE: HERE BE DRAGONS
// if score is wrong for all masks, then this still works
/// Mask penalty score (lower is better), summing the four standard penalties:
/// same-color runs and finder-lookalikes per row/column, 2x2 same-color
/// blocks, and deviation of the dark-module proportion from 50%.
pub fn score(matrix: &Matrix<Module>) -> u32 {
    // todo what are perf implications of scoring all masks
    // 8 masks * 5 iterations (blocks + rows are non sequential access)

    // Penalty N4: 10 points per 5% the dark proportion deviates from 50%.
    fn dark_proportion(matrix: &Matrix<Module>) -> u32 {
        let mut dark = 0;
        for y in 0..matrix.width {
            for x in 0..matrix.width {
                // BUGFIX: was `has(Module::DATA)`, which counts data-region
                // modules regardless of color; darkness is the ON flag, as in
                // the other penalty helpers below.
                if matrix.get(x, y).has(Module::ON) {
                    dark += 1;
                }
            }
        }
        // BUGFIX: was `(dark * 20) / (20 * w * w)`, which cancels to
        // dark / w^2 and is 0 for any real matrix; the penalty needs the
        // dark percentage.
        let percent = (dark * 100) / ((matrix.width as u32) * (matrix.width as u32));
        let middle = 50;
        let diff = if percent < middle {
            middle - percent
        } else {
            percent - middle
        };
        let k = (diff) / 5;
        10 * k
    }

    // Penalty N2: 3 points per 2x2 block of same-colored modules.
    fn blocks(matrix: &Matrix<Module>) -> u32 {
        let mut score = 0;
        for y in 0..matrix.width - 1 {
            for x in 0..matrix.width - 1 {
                let curr = matrix.get(x, y).has(Module::ON);
                let tr = matrix.get(x + 1, y).has(Module::ON);
                let bl = matrix.get(x, y + 1).has(Module::ON);
                let br = matrix.get(x + 1, y + 1).has(Module::ON);
                if curr == tr && curr == bl && curr == br {
                    score += 3;
                }
            }
        }
        score
    }

    // Penalties N1 + N3: detects streaks >= 5 (3 points, +1 per extra module)
    // and finder-like 1011101 patterns flanked by 4 light modules (40 points).
    fn line_patterns(matrix: &Matrix<Module>, col: bool) -> u32 {
        let mut score = 0;
        // Swap the roles of x and y to scan columns with the same loop.
        let (y_mult, x_mult) = match col {
            true => (matrix.width, 1),
            false => (1, matrix.width),
        };
        let pattern_1 = 0b0000_1011101;
        let pattern_2 = 0b1011101_0000;
        for y in 0..matrix.width {
            let mut streak = 1;
            let mut streak_v = matrix.value[y * y_mult + 0].has(Module::ON);
            // Sliding 11-bit window of the most recent module colors.
            let mut window: u16 = streak_v as u16;
            for x in 1..matrix.width {
                let curr = matrix.value[y * y_mult + x * x_mult].has(Module::ON);
                if curr == streak_v {
                    streak += 1;
                    if streak == 5 {
                        score += 3;
                    } else if streak > 5 {
                        score += 1;
                    }
                } else {
                    streak = 1;
                    streak_v = curr;
                }
                window <<= 1;
                window |= curr as u16;
                // 10 = pattern.len() - 1
                if x >= 10 {
                    window &= 0b111_1111_1111;
                    if window == pattern_1 || window == pattern_2 {
                        score += 40;
                    }
                }
            }
        }
        score
    }

    dark_proportion(matrix)
        + blocks(matrix)
        + line_patterns(matrix, true)
        + line_patterns(matrix, false)
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/render.rs | src/render.rs | #[cfg(feature = "svg")]
pub mod svg;
#[cfg(feature = "text")]
pub mod text;
#[cfg(feature = "svg")]
use crate::qr_code::QrCode;
#[cfg(feature = "svg")]
// Builder-style configuration for rendering a QrCode as SVG.
pub struct RenderData<'m> {
    qr_code: &'m QrCode,
    foreground: String, // color string for dark modules (e.g. "#000")
    background: String, // color string for the background (e.g. "#fff")
    unit: usize,        // output units per module
    margin: usize,      // quiet zone on each side, in modules
    toggle_options: u8, // bitset indexed by `Toggle`
}

#[cfg(feature = "svg")]
// Bit positions within `RenderData::toggle_options`.
pub enum Toggle {
    Background,
    BackgroundPixels,
    ForegroundPixels,
}
#[cfg(feature = "svg")]
impl<'m> RenderData<'m> {
    /// Defaults: black on white, unit 1, 2-module margin, with Background and
    /// ForegroundPixels enabled via the toggles below.
    pub fn new(qr_code: &'m QrCode) -> Self {
        RenderData {
            qr_code,
            foreground: "#000".into(),
            background: "#fff".into(),
            unit: 1,
            margin: 2,
            toggle_options: 0,
        }
        .toggle(Toggle::Background)
        .toggle(Toggle::ForegroundPixels)
    }
    /// Total rendered width, including the margin on both sides.
    pub fn width(&self) -> usize {
        (self.qr_code.matrix.width + self.margin * 2) * self.unit
    }
    pub fn unit(mut self, unit: usize) -> Self {
        self.unit = unit;
        self
    }
    pub fn margin(mut self, margin: usize) -> Self {
        self.margin = margin;
        self
    }
    pub fn foreground(mut self, foreground: String) -> Self {
        self.foreground = foreground;
        self
    }
    pub fn background(mut self, background: String) -> Self {
        self.background = background;
        self
    }
    /// Replaces the whole option bitset at once.
    pub fn toggle_options(mut self, toggle_options: u8) -> Self {
        self.toggle_options = toggle_options;
        self
    }
    /// Flips (XORs) a single option bit — a toggle, not a set.
    pub fn toggle(mut self, toggle: Toggle) -> Self {
        self.toggle_options ^= 1 << toggle as u8;
        self
    }
    pub fn toggled(&self, option: Toggle) -> bool {
        (self.toggle_options >> option as u8) & 1 == 1
    }
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/qr_code.rs | src/qr_code.rs | use crate::{
data::Data,
error_correction::ecc_and_sequence,
mask::score,
matrix::{Matrix, Module},
};
#[cfg(feature = "wasm")]
use wasm_bindgen::prelude::*;
/// Character encoding mode for the data segment.
#[cfg_attr(feature = "wasm", wasm_bindgen)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum Mode {
    /// Digits 0-9 only (densest packing: 3 digits per 10 bits).
    Numeric,
    /// 45-character set: 0-9, A-Z, space, $ % * + - . / : (2 chars per 11 bits).
    Alphanumeric,
    /// Arbitrary bytes, 8 bits each.
    Byte,
    // no plans for Kanji, ECI, StructuredAppend, FNC1,
}
/// Error correction level; comments give the approximate percentage of
/// codewords that can be recovered.
#[cfg_attr(feature = "wasm", wasm_bindgen)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum ECL {
    Low, // ~7%
    Medium, // ~15%
    Quartile, // ~25%
    High, // ~30%
}
/// Symbol version, valid range 1..=40; side length in modules is
/// `4 * version + 17`.
#[cfg_attr(feature = "wasm", wasm_bindgen)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct Version(pub usize);
#[cfg_attr(feature = "wasm", wasm_bindgen)]
impl Version {
    /// # Panics
    /// Panics if `version` is outside `1..=40`.
    #[cfg_attr(feature = "wasm", wasm_bindgen(constructor))]
    pub fn new(version: usize) -> Self {
        assert!(version >= 1 && version <= 40);
        Version(version)
    }
}
/// The eight mask patterns; the flip condition for each is in [`mask_fn`].
#[cfg_attr(feature = "wasm", wasm_bindgen)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum Mask {
    M0,
    M1,
    M2,
    M3,
    M4,
    M5,
    M6,
    M7,
}
/// A fully constructed symbol: the module matrix plus the parameters that
/// produced it.
#[derive(Debug)]
pub struct QrCode {
    pub matrix: Matrix<Module>,
    pub mode: Mode,
    pub version: Version,
    pub ecl: ECL,
    pub mask: Mask,
}
// Use a Vec while in pure-Rust land; for the wasm target the matrix is
// copied out anyway, so a static buffer could be used there instead.
impl QrCode {
    /// Builds a QR code from encoded `data`. If `mask` is `None`, all eight
    /// mask patterns are scored and the lowest-penalty one is kept.
    pub fn new(data: Data, mask: Option<Mask>) -> Self {
        let mut qr_code = QrCode {
            matrix: Matrix::new(data.version, Module(0)),
            mode: data.mode,
            version: data.version,
            ecl: data.ecl,
            // When auto-selecting, start from M0; it may be replaced below.
            mask: if let Some(mask) = mask {
                mask
            } else {
                Mask::M0
            },
        };
        // Function patterns go in first so set_data can skip reserved modules.
        qr_code.matrix.set_finder();
        qr_code.matrix.set_alignment();
        qr_code.matrix.set_timing();
        qr_code.matrix.set_format(qr_code.ecl, qr_code.mask);
        qr_code.matrix.set_version();
        let data = ecc_and_sequence(data);
        // Stream the interleaved codewords into the matrix bit by bit,
        // most significant bit of each byte first.
        let mut i = 0;
        qr_code.matrix.set_data(|| {
            let val = Module::DATA | ((data[i / 8] >> (7 - (i % 8))) & 1).into();
            i += 1;
            val
        });
        qr_code.apply_mask(qr_code.mask);
        if let None = mask {
            // Try the remaining masks. Applying a mask is a XOR, so applying
            // it a second time undoes it; each candidate is applied on top of
            // undoing the previous one.
            let mut min_score = score(&qr_code.matrix);
            let mut min_mask = qr_code.mask;
            for m in [
                Mask::M1,
                Mask::M2,
                Mask::M3,
                Mask::M4,
                Mask::M5,
                Mask::M6,
                Mask::M7,
            ] {
                // undo prev mask
                qr_code.apply_mask(qr_code.mask);
                qr_code.mask = m;
                qr_code.apply_mask(qr_code.mask);
                qr_code.matrix.set_format(qr_code.ecl, qr_code.mask);
                let score = score(&qr_code.matrix);
                if score < min_score {
                    min_score = score;
                    min_mask = qr_code.mask;
                }
            }
            // Re-apply the winner (the loop leaves M7 applied).
            if min_mask != qr_code.mask {
                // undo prev mask
                qr_code.apply_mask(qr_code.mask);
                qr_code.mask = min_mask;
                qr_code.apply_mask(qr_code.mask);
                qr_code.matrix.set_format(qr_code.ecl, qr_code.mask);
            }
        }
        qr_code
    }
    /// XORs `mask` over every DATA module. Self-inverse: applying the same
    /// mask twice is a no-op, which `new` relies on to switch masks.
    fn apply_mask(&mut self, mask: Mask) {
        let mask_bit = mask_fn(mask);
        for y in 0..self.matrix.width {
            for x in 0..self.matrix.width {
                let module = self.matrix.get_mut(x, y);
                if module.has(Module::DATA) {
                    *module ^= (mask_bit(x as u16, y as u16) as u8).into();
                }
            }
        }
    }
}
/// Returns the predicate for `mask`: `true` means the module at
/// (col, row) gets flipped. These are the eight mask generation conditions
/// from the QR specification (ISO/IEC 18004).
pub fn mask_fn(mask: Mask) -> fn(u16, u16) -> bool {
    match mask {
        Mask::M0 => |col: u16, row: u16| (row + col) % 2 == 0,
        Mask::M1 => |_: u16, row: u16| (row) % 2 == 0,
        Mask::M2 => |col: u16, _: u16| (col) % 3 == 0,
        Mask::M3 => |col: u16, row: u16| (row + col) % 3 == 0,
        Mask::M4 => |col: u16, row: u16| ((row / 2) + (col / 3)) % 2 == 0,
        Mask::M5 => |col: u16, row: u16| ((row * col) % 2 + (row * col) % 3) == 0,
        Mask::M6 => |col: u16, row: u16| ((row * col) % 2 + (row * col) % 3) % 2 == 0,
        Mask::M7 => |col: u16, row: u16| ((row + col) % 2 + (row * col) % 3) % 2 == 0,
    }
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/qart.rs | src/qart.rs | use crate::{
bit_info::{BitInfo, Info},
constants::{GEN_POLYNOMIALS, NUM_BLOCKS, NUM_DATA_MODULES, NUM_EC_CODEWORDS},
data::{BitVec, Data},
error_correction::remainder,
matrix::{Matrix, Module},
qr_code::{mask_fn, Mask, QrCode},
};
/// A single target bit plus an importance weight, packed into one byte:
/// bit 0 is the on/off value, bits 1..=7 hold the weight (0-127).
///
/// `#[repr(transparent)]` guarantees the same layout as `u8` (matching
/// `Module` in matrix.rs); the wasm bindings rely on this when
/// reinterpreting `&[u8]` as `&[WeightPixel]`.
#[derive(Debug, Clone, Copy)]
#[repr(transparent)]
pub struct WeightPixel(pub u8);
impl WeightPixel {
    /// Packs `value` (on/off) and `weight` (0-127; 127 marks a fixed data
    /// bit). Weights above 127 overflow the 7-bit field and must not be
    /// passed.
    pub fn new(value: bool, weight: u8) -> Self {
        WeightPixel(value as u8 | (weight << 1))
    }
    /// The on/off value.
    pub fn value(&self) -> bool {
        self.0 & 1 == 1
    }
    /// Importance of `value`: 0 (free bit) to 127 (fixed bit).
    pub fn weight(&self) -> u8 {
        self.0 >> 1
    }
}
/// Intermediate state for QArt-style generation: per-block codeword bits
/// plus a weight for every bit saying how strongly it must keep its value.
#[derive(Debug)]
pub struct Qart {
    pub bit_info: BitInfo,
    /// One entry per ECC block: data codewords followed by ECC codewords.
    pub blocks: Vec<BitVec>,
    /// One weight per bit of each block; 127 = real payload bit (fixed),
    /// 0 = free bit the image is allowed to dictate.
    pub block_weights: Vec<Vec<WeightPixel>>,
}
impl Qart {
    /// Splits `data` into ECC blocks and records which bits are real
    /// payload (weight 127) versus padding that an image may overwrite
    /// (weight 0).
    pub fn new(mut data: Data, mask: Mask) -> Self {
        let modules = NUM_DATA_MODULES[data.version.0] as usize;
        let codewords = modules / 8;
        let num_ec_codewords = NUM_EC_CODEWORDS[data.version.0][data.ecl as usize] as usize;
        let num_data_codewords = codewords - num_ec_codewords;
        let blocks = NUM_BLOCKS[data.version.0][data.ecl as usize] as usize;
        // Group 2 blocks carry one extra data codeword each.
        let group_2_blocks = codewords % blocks;
        let group_1_blocks = blocks - group_2_blocks;
        let data_per_g1_block = num_data_codewords / blocks;
        let ecc_per_block = num_ec_codewords / blocks;
        // terminator seems required
        let remainder_data_bits = (num_data_codewords * 8) - (data.bits.len());
        let term_len = if remainder_data_bits < 4 {
            remainder_data_bits
        } else {
            4
        };
        data.bits.push_n(0, term_len);
        // Bits past this index are padding and get weight 0 below.
        let orig_data_bit_len = data.bits.len();
        if data.bits.len() < num_data_codewords * 8 {
            // TODO filling with 0 creates checkerboard
            // perhaps fill with random noise
            data.bits.resize(num_data_codewords * 8, 0b11101100);
        }
        let mut blocks = vec![];
        let mut block_weights = vec![];
        let mut data_i = 0;
        for i in 0..(group_1_blocks + group_2_blocks) as usize {
            let data_per_block = if i < group_1_blocks as usize {
                data_per_g1_block
            } else {
                data_per_g1_block + 1
            };
            let byte_start = data_i / 8;
            // Block = its slice of data codewords + freshly computed ECC.
            let mut data_codewords =
                data.bits.as_ref()[byte_start..(byte_start + data_per_block)].to_vec();
            let mut ecc = remainder(
                &data_codewords,
                &GEN_POLYNOMIALS[ecc_per_block][..ecc_per_block],
            );
            data_codewords.append(&mut ecc);
            blocks.push(data_codewords.into());
            block_weights.push(Vec::with_capacity(data_per_block + ecc_per_block));
            for j in 0..(data_per_block + ecc_per_block) * 8 {
                // Real payload bits are fixed (weight 127); padding and ECC
                // bits are free (weight 0) — the solver may change them.
                if data_i < orig_data_bit_len && j < (data_per_block * 8) {
                    block_weights[i].push(WeightPixel::new(data.bits.get(data_i), 127));
                    data_i += 1;
                } else {
                    block_weights[i].push(WeightPixel::new(false, 0));
                }
            }
        }
        Qart {
            bit_info: BitInfo::new(data.mode, data.version, data.ecl, mask),
            blocks,
            block_weights,
        }
    }
    /// Solves each block so its free bits follow `pixel_weights` (one entry
    /// per module, row-major, pre-mask), then assembles the final matrix.
    ///
    /// # Panics
    /// Panics if `pixel_weights.len() != width * width`.
    pub fn to_qr_code(mut self, pixel_weights: &[WeightPixel]) -> QrCode {
        let width = self.bit_info.version.0 * 4 + 17;
        assert_eq!(pixel_weights.len(), width * width);
        let modules = NUM_DATA_MODULES[self.bit_info.version.0] as usize;
        let codewords = modules / 8;
        let num_ec_codewords =
            NUM_EC_CODEWORDS[self.bit_info.version.0][self.bit_info.ecl as usize] as usize;
        let num_data_codewords = codewords - num_ec_codewords;
        let blocks = NUM_BLOCKS[self.bit_info.version.0][self.bit_info.ecl as usize] as usize;
        let group_2_blocks = codewords % blocks;
        let group_1_blocks = blocks - group_2_blocks;
        let data_per_g1_block = num_data_codewords / blocks;
        let ecc_per_block = num_ec_codewords / blocks;
        let mask = mask_fn(self.bit_info.mask);
        // Copy the image's desired values (pre-masked) onto every free bit.
        for y in 0..width {
            for x in 0..width {
                let bit = self.bit_info.matrix.get(x, y);
                if !bit.module.has(Module::DATA) || bit.module == Info::REMAINDER {
                    continue;
                }
                // TODO reconsider randomizing "dead" pixels
                // must be in image, and needs to not depend on order
                if self.block_weights[bit.block as usize][bit.bit as usize].weight() < 127 {
                    let value = mask(x as u16, y as u16) ^ pixel_weights[y * width + x].value();
                    self.block_weights[bit.block as usize][bit.bit as usize] =
                        WeightPixel::new(value, pixel_weights[y * width + x].weight())
                }
            }
        }
        // Basis codewords: each has exactly one data bit set plus its ECC,
        // so XORing any of them into a block keeps the block a valid codeword.
        let mut g1_basis = vec![];
        for i in 0..data_per_g1_block * 8 {
            let mut v: BitVec = vec![0; data_per_g1_block].into();
            v.set(i);
            v.append(&mut remainder(
                v.as_ref(),
                &GEN_POLYNOMIALS[ecc_per_block][..ecc_per_block],
            ));
            g1_basis.push(v);
        }
        let mut g2_basis = vec![];
        for i in 0..(data_per_g1_block + 1) * 8 {
            let mut v: BitVec = vec![0; data_per_g1_block + 1].into();
            v.set(i);
            v.append(&mut remainder(
                v.as_ref(),
                &GEN_POLYNOMIALS[ecc_per_block][..ecc_per_block],
            ));
            g2_basis.push(v);
        }
        for i in 0..group_1_blocks {
            apply_first_matches(&mut self.blocks[i], &self.block_weights[i], &g1_basis);
        }
        for i in 0..group_2_blocks {
            apply_first_matches(
                &mut self.blocks[group_1_blocks + i],
                &self.block_weights[group_1_blocks + i],
                &g2_basis,
            );
        }
        // Assemble the final matrix: function patterns pass through,
        // remainder bits come straight from the image, data bits from the
        // solved blocks; the mask is applied on the way out.
        let mut matrix = Matrix::new(self.bit_info.version, Module(0));
        for y in 0..width {
            for x in 0..width {
                let info = self.bit_info.matrix.get(x, y);
                if !info.module.has(Module::DATA) {
                    matrix.set(x, y, info.module);
                } else if info.module == Info::REMAINDER {
                    let on = mask(x as u16, y as u16) ^ pixel_weights[y * width + x].value();
                    matrix.set(x, y, Module::DATA | (Module(on as u8)));
                } else {
                    let on = mask(x as u16, y as u16)
                        ^ self.blocks[info.block as usize].get(info.bit as usize);
                    matrix.set(x, y, Module::DATA | Module(on as u8));
                }
            }
        }
        QrCode {
            matrix,
            mode: self.bit_info.mode,
            version: self.bit_info.version,
            ecl: self.bit_info.ecl,
            mask: self.bit_info.mask,
        }
    }
}
// based on https://github.com/andrewyur/qart b/c go version too confusing
//
// my understanding so far:
// first, apply matching basis to control a desired bit
// -> remove that basis from pool and XOR against all other basis vectors with that bit set
// -> therefore, no future basis vector can affect that bit
// repeat
//
// questions:
// why does greedy work?
// -> implies arbitrary subset of basis vectors can mostly span error correction bit vector space
// -> is this because num data bits >> num error correction bits?
// is it worth matching bits in order of location/contrast/importance
// -> unordered (current) works surprisingly well
/// Greedily adjusts `block` (a valid RS codeword) so that every bit with a
/// nonzero weight matches its target value, by XORing in codeword-space
/// basis vectors; XORing two codewords yields a codeword, so validity is
/// preserved. Bits whose weight is 0 are left to fall where they may.
fn apply_first_matches(
    block: &mut BitVec,
    block_weight: &Vec<WeightPixel>,
    basis_matrix: &Vec<BitVec>,
) {
    // Work on an owned copy; vectors are consumed (`take`n) as they are
    // used as pivots.
    let mut basis_matrix: Vec<Option<BitVec>> = basis_matrix
        .iter()
        .map(|basis| Some(basis.as_ref().to_vec().into()))
        .collect();
    for (i, pixel) in block_weight.iter().enumerate() {
        if pixel.weight() == 0 {
            continue;
        }
        // Elimination step: take the first remaining basis vector with
        // bit i set, and XOR it out of every later remaining vector with
        // bit i set — afterwards no remaining vector can disturb bit i.
        let mut found: Option<BitVec> = None;
        for basis_opt in basis_matrix.iter_mut() {
            if let Some(basis) = basis_opt {
                if !basis.get(i) {
                    continue;
                }
                if let Some(found) = found.as_ref() {
                    let basis = basis.as_mut();
                    let found = found.as_ref();
                    for k in 0..basis.len() {
                        basis[k] ^= found[k]
                    }
                } else {
                    found = basis_opt.take();
                }
            }
        }
        // Use the pivot to flip bit i when it disagrees with the target.
        // If no pivot exists, bit i is no longer controllable and stays.
        if let Some(found) = found {
            if block.get(i) != pixel.value() {
                let block = block.as_mut();
                let found = found.as_ref();
                for j in 0..block.len() {
                    block[j] ^= found[j];
                }
            }
        }
    }
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/wasm.rs | src/wasm.rs | use crate::{
bit_info::BitInfo,
qr_code::{Mask, Mode, QrCode, Version, ECL},
QartError, QrError, QrOptions,
};
use wasm_bindgen::prelude::*;
// wee_alloc trades allocation speed for a much smaller wasm binary.
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
/// wasm entry point: encodes `input` and returns a JS object
/// `{ matrix, mode, version, ecl, mask }` (see `qr_code_to_obj`).
#[wasm_bindgen]
pub fn generate(input: &str, qr_options: &QrOptions) -> Result<JsValue, QrError> {
    // Route Rust panics to the browser console instead of aborting silently.
    console_error_panic_hook::set_once();
    let qr_code = match crate::generate(input, qr_options) {
        Ok(m) => m,
        Err(e) => return Err(e),
    };
    Ok(qr_code_to_obj(qr_code))
}
/// wasm entry point for QArt generation; `pixel_weights` holds one packed
/// byte per module (see the `WeightPixel` bit layout in qart.rs).
#[wasm_bindgen(js_name = generateQart)]
pub fn generate_qart(
    input: &str,
    qr_options: &QrOptions,
    pixel_weights: &[u8],
) -> Result<JsValue, QartError> {
    console_error_panic_hook::set_once();
    // SAFETY(review): reinterprets `&[u8]` as `&[WeightPixel]`. This is only
    // guaranteed sound if `WeightPixel` is `#[repr(transparent)]` over `u8` —
    // confirm that layout guarantee on the type definition.
    let pixel_weights = unsafe { std::mem::transmute(pixel_weights) };
    let qr_code = match crate::generate_qart(input, qr_options, pixel_weights) {
        Ok(m) => m,
        Err(e) => return Err(e),
    };
    Ok(qr_code_to_obj(qr_code))
}
/// Converts a [`QrCode`] into a plain JS object
/// `{ matrix: Uint8Array, mode, version, ecl, mask }`.
fn qr_code_to_obj(qr_code: QrCode) -> JsValue {
    let u = js_sys::Uint8Array::new_with_length(qr_code.matrix.value.len() as u32);
    // SAFETY: `Module` is `#[repr(transparent)]` over `u8` (see matrix.rs),
    // so `&[Module]` and `&[u8]` share a layout.
    u.copy_from(unsafe { std::mem::transmute(qr_code.matrix.value.as_slice()) });
    let obj = js_sys::Object::new();
    // If these error, it's not recoverable
    let _ = js_sys::Reflect::set(&obj, &"matrix".into(), &u);
    let _ = js_sys::Reflect::set(&obj, &"mode".into(), &JsValue::from(qr_code.mode));
    let _ = js_sys::Reflect::set(&obj, &"version".into(), &JsValue::from(qr_code.version.0));
    let _ = js_sys::Reflect::set(&obj, &"ecl".into(), &JsValue::from(qr_code.ecl));
    let _ = js_sys::Reflect::set(&obj, &"mask".into(), &JsValue::from(qr_code.mask));
    obj.into()
}
/// Debug/visualization export of [`BitInfo`] as a JS object
/// `{ matrix: Uint32Array, mode, version, ecl, mask }`.
#[wasm_bindgen(js_name = internalBitInfo)]
pub fn internal_bit_info(mode: Mode, version: Version, ecl: ECL, mask: Mask) -> JsValue {
    console_error_panic_hook::set_once();
    let bit_info = BitInfo::new(mode, version, ecl, mask);
    let u = js_sys::Uint32Array::new_with_length(bit_info.matrix.value.len() as u32);
    // SAFETY(review): assumes each element of `bit_info.matrix.value` is
    // layout-compatible with `u32` — confirm against the BitInfo definition.
    u.copy_from(unsafe { std::mem::transmute(bit_info.matrix.value.as_slice()) });
    let obj = js_sys::Object::new();
    // If these error, it's not recoverable
    let _ = js_sys::Reflect::set(&obj, &"matrix".into(), &u);
    let _ = js_sys::Reflect::set(&obj, &"mode".into(), &JsValue::from(bit_info.mode));
    let _ = js_sys::Reflect::set(&obj, &"version".into(), &JsValue::from(bit_info.version.0));
    let _ = js_sys::Reflect::set(&obj, &"ecl".into(), &JsValue::from(bit_info.ecl));
    let _ = js_sys::Reflect::set(&obj, &"mask".into(), &JsValue::from(bit_info.mask));
    obj.into()
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/constants.rs | src/constants.rs | use crate::{
math::{EXP_TABLE, LOG_TABLE},
qr_code::ECL,
};
/// Modules available for data + ECC bits, indexed by version (index 0 unused).
pub const NUM_DATA_MODULES: [u16; 41] = num_data_modules();
/// Total error correction codewords per symbol, indexed by [version][ECL].
pub const NUM_EC_CODEWORDS: [[u16; 4]; 41] = num_ec_codewords();
/// Number of ECC blocks the codewords are split into, indexed by [version][ECL].
pub const NUM_BLOCKS: [[u8; 4]; 41] = num_blocks();
/// All generator polynomials for up to 30 error correction codewords.
/// The coefficients are stored as their exponent, starting from the second largest degree.
/// This EXCLUDES the coefficient of the largest degree, which is a^0.
pub const GEN_POLYNOMIALS: [[u8; 30]; 31] = gen_polynomials();
/// 18-bit version information (6 data bits + 12 BCH bits), indexed by version.
pub const VERSION_INFO: [usize; 41] = version_info();
/// 15-bit masked format information, indexed by [ECL][mask].
pub const FORMAT_INFO: [[u32; 8]; 4] = format_info();
/// Counts the modules left for data + ECC bits per version: the full grid
/// minus finder, format, timing, alignment, and version areas.
const fn num_data_modules() -> [u16; 41] {
    let mut table = [0; 41];
    let mut version = 1;
    while version <= 40 {
        let width = 4 * version + 17;
        let mut modules = width * width;
        modules -= 64 * 3; // finder markers + separator
        modules -= 31; // format
        modules -= 2 * (width - 16); // timing
        // (total 5x5 alignment patterns, patterns overlapping the timing
        // belts — each overlap gives back 5 modules)
        let (align, overlap) = match version {
            1 => (0, 0),
            x if x <= 6 => (1, 0),
            x if x <= 13 => (6, 2),
            x if x <= 20 => (13, 4),
            x if x <= 27 => (22, 6),
            x if x <= 34 => (33, 8),
            x if x <= 40 => (46, 10),
            _ => unreachable!(),
        };
        modules -= align * 25;
        modules += overlap * 5;
        if version >= 7 {
            modules -= 36; // 2 version
        }
        table[version] = modules as u16;
        version += 1;
    }
    table
}
/// Spec table of total error correction codewords per symbol,
/// indexed by [version][ECL as usize] (index 0 unused).
const fn num_ec_codewords() -> [[u16; 4]; 41] {
    let mut table = [[0; 4]; 41];
    table[1] = [7, 10, 13, 17];
    table[2] = [10, 16, 22, 28];
    table[3] = [15, 26, 36, 44];
    table[4] = [20, 36, 52, 64];
    table[5] = [26, 48, 72, 88];
    table[6] = [36, 64, 96, 112];
    table[7] = [40, 72, 108, 130];
    table[8] = [48, 88, 132, 156];
    table[9] = [60, 110, 160, 192];
    table[10] = [72, 130, 192, 224];
    table[11] = [80, 150, 224, 264];
    table[12] = [96, 176, 260, 308];
    table[13] = [104, 198, 288, 352];
    table[14] = [120, 216, 320, 384];
    table[15] = [132, 240, 360, 432];
    table[16] = [144, 280, 408, 480];
    table[17] = [168, 308, 448, 532];
    table[18] = [180, 338, 504, 588];
    table[19] = [196, 364, 546, 650];
    table[20] = [224, 416, 600, 700];
    table[21] = [224, 442, 644, 750];
    table[22] = [252, 476, 690, 816];
    table[23] = [270, 504, 750, 900];
    table[24] = [300, 560, 810, 960];
    table[25] = [312, 588, 870, 1050];
    table[26] = [336, 644, 952, 1110];
    table[27] = [360, 700, 1020, 1200];
    table[28] = [390, 728, 1050, 1260];
    table[29] = [420, 784, 1140, 1350];
    table[30] = [450, 812, 1200, 1440];
    table[31] = [480, 868, 1290, 1530];
    table[32] = [510, 924, 1350, 1620];
    table[33] = [540, 980, 1440, 1710];
    table[34] = [570, 1036, 1530, 1800];
    table[35] = [570, 1064, 1590, 1890];
    table[36] = [600, 1120, 1680, 1980];
    table[37] = [630, 1204, 1770, 2100];
    table[38] = [660, 1260, 1860, 2220];
    table[39] = [720, 1316, 1950, 2310];
    table[40] = [750, 1372, 2040, 2430];
    table
}
/// Derives the block count per (version, ECL). Instead of storing the spec
/// table, it searches for a per-block correctable count in 8..=15 that
/// evenly divides the total, then patches the known exceptions.
const fn num_blocks() -> [[u8; 4]; 41] {
    let mut table = [[0; 4]; 41];
    let mut version = 1;
    while version <= 40 {
        let mut ecl = 0;
        while ecl < 4 {
            let codewords = NUM_EC_CODEWORDS[version][ecl];
            // Reed-Solomon can correct up to (ecc codewords)/2 errors.
            let correctable = codewords / 2;
            if correctable <= 15 {
                table[version][ecl] = 1;
                ecl += 1;
                continue;
            }
            let mut per_block = 15;
            while per_block >= 8 {
                if correctable % per_block == 0 {
                    let mut blocks = correctable / per_block;
                    if blocks == 3 {
                        // Edgecase: there are never 3 blocks
                        blocks += 1;
                    }
                    table[version][ecl] = blocks as u8; // max is 81
                    break;
                }
                per_block -= 1;
            }
            ecl += 1;
        }
        version += 1;
    }
    // More edgecases
    table[15][ECL::Medium as usize] = 10;
    table[19][ECL::Medium as usize] = 14;
    table[38][ECL::Medium as usize] = 45;
    table
}
/// Builds the Reed-Solomon generator polynomial table over GF(256) by
/// repeatedly multiplying the previous polynomial by (x - a^(i-1));
/// coefficients are stored as exponents of the generator a.
const fn gen_polynomials() -> [[u8; 30]; 31] {
    let mut table = [[0; 30]; 31];
    // In this loop, i is the number of error correcting codewords this polynomial is for
    // So, each loop multiplies the previous polynomial by x - a^(i-1)
    let mut i = 2;
    while i <= 30 {
        // Multiply prev last coefficent by a^(i-1)
        table[i][i - 1] = ((table[i - 1][i - 2] as usize + i - 1) % 255) as u8;
        let mut j = i - 2;
        while j > 0 {
            // Add like terms
            // coefficient of same power from previous polynomial (multiplied by a^i-1)
            let exp = ((table[i - 1][j - 1] as usize + i - 1) % 255) as u8;
            // coefficient of 1 lesser power from previous polynomial (multiplied by x)
            let coeff = EXP_TABLE[table[i - 1][j] as usize] ^ EXP_TABLE[exp as usize];
            table[i][j] = LOG_TABLE[coeff as usize];
            j -= 1;
        }
        // Same logic as above, b/c first coefficient always 0
        let coeff = EXP_TABLE[table[i - 1][0] as usize] ^ EXP_TABLE[i - 1];
        table[i][0] = LOG_TABLE[coeff as usize];
        i += 1;
    }
    table
}
/// Precomputes the 18-bit version information strings for versions 7-40:
/// the 6 version bits followed by a 12-bit BCH error correction remainder.
const fn version_info() -> [usize; 41] {
    // BCH(18, 6) generator polynomial for the version information.
    const GENERATOR: usize = 0b1_1111_0010_0101;
    let mut table = [0; 41];
    let mut v = 7;
    while v <= 40 {
        let data = v << 12;
        // Polynomial long division over GF(2): XOR the generator, aligned
        // to the remainder's highest set bit, until the remainder fits in
        // 12 bits.
        let mut rem: usize = data;
        while rem >= 1 << 12 {
            let shift = (usize::BITS - rem.leading_zeros()) - 13;
            rem ^= GENERATOR << shift;
        }
        table[v] = data | rem;
        v += 1;
    }
    table
}
/// Precomputes the 15-bit format strings: 2 ECL bits + 3 mask bits,
/// extended with a 10-bit BCH(15, 5) remainder, then XORed with the fixed
/// pattern 0b10101_0000010010 from the specification.
const fn format_info() -> [[u32; 8]; 4] {
    let mut array = [[0; 8]; 4];
    let mut i = 0;
    let ecls = [ECL::Low, ECL::Medium, ECL::Quartile, ECL::High];
    while i < 4 {
        let ecl = ecls[i];
        // Spec encoding of the ECL bits (not the enum order):
        // L = 01, M = 00, Q = 11, H = 10.
        let value = match ecl {
            ECL::Low => 1,
            ECL::Medium => 0,
            ECL::Quartile => 3,
            ECL::High => 2,
        };
        let mut mask = 0;
        while mask < 8 {
            let format = ((((value) << 3) | mask as u8) as u32) << 10;
            // Polynomial long division by the BCH generator 0b101_0011_0111.
            let mut dividend = format;
            while dividend >= 0b100_0000_0000 {
                let mut divisor = 0b101_0011_0111;
                divisor <<= (32 - dividend.leading_zeros()) - 11;
                dividend ^= divisor;
            }
            array[i][mask] = (format | dividend) ^ 0b10101_0000010010;
            mask += 1;
        }
        i += 1;
    }
    array
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::qr_code::Mask;
    // Expected values are the worked examples given in the QR specification.
    #[test]
    fn information_works() {
        assert_eq!(VERSION_INFO[7], 0x07C94);
        assert_eq!(VERSION_INFO[21], 0x15683);
        assert_eq!(VERSION_INFO[40], 0x28C69);
    }
    #[test]
    fn format_information_works() {
        assert_eq!(FORMAT_INFO[ECL::Medium as usize][Mask::M0 as usize], 0x5412);
        assert_eq!(FORMAT_INFO[ECL::High as usize][Mask::M0 as usize], 0x1689);
        assert_eq!(FORMAT_INFO[ECL::High as usize][Mask::M7 as usize], 0x083B);
    }
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/data.rs | src/data.rs | use crate::{
constants::{NUM_DATA_MODULES, NUM_EC_CODEWORDS},
encoding::{encode_alphanumeric, encode_byte, encode_numeric, num_cci_bits},
qr_code::{Mode, Version, ECL},
};
/// The encoded payload bits together with the parameters chosen for them.
#[derive(Debug)]
pub struct Data {
    /// Mode indicator + character count + packed payload (no ECC, no padding).
    pub bits: BitVec,
    pub mode: Mode,
    pub version: Version,
    pub ecl: ECL,
}
impl Data {
    /// Encodes `text`, auto-growing the version and upgrading the ECL when
    /// there is room. Returns `None` when the payload cannot fit in any
    /// version at `min_ecl`.
    pub fn new(text: &str, mode: Mode, min_version: Version, min_ecl: ECL) -> Option<Self> {
        Self::new_verbose(text, mode, min_version, false, min_ecl, false)
    }
    /// Like [`Data::new`], but `strict_version` / `strict_ecl` forbid
    /// growing the version or upgrading the ECL respectively.
    ///
    /// Returns `None` when the payload does not fit under the constraints.
    pub fn new_verbose(
        text: &str,
        mode: Mode,
        min_version: Version,
        strict_version: bool,
        min_ecl: ECL,
        strict_ecl: bool,
    ) -> Option<Self> {
        // Bit cost: 4-bit mode indicator + character count indicator + payload.
        let mut bits = 0;
        bits += 4 + num_cci_bits(min_version, mode);
        // NOTE: byte length; Numeric/Alphanumeric inputs are ASCII.
        let char_len = text.len();
        match mode {
            Mode::Numeric => {
                // 3 digits per 10 bits; 2-digit tail = 7 bits, 1-digit = 4.
                bits += (char_len / 3) * 10;
                match char_len % 3 {
                    2 => bits += 7,
                    1 => bits += 4,
                    _ => (),
                }
            }
            Mode::Alphanumeric => {
                // 2 chars per 11 bits; odd trailing char = 6 bits.
                bits += (char_len / 2) * 11 + (char_len % 2) * 6;
            }
            Mode::Byte => {
                bits += char_len * 8;
            }
        }
        let mut data_codewords = (NUM_DATA_MODULES[min_version.0] / 8) as usize;
        let mut min_version = min_version.0;
        let mut req_codewords = (bits + 7) / 8;
        // Grow the version until the payload fits (or we reach version 40).
        while req_codewords
            > (data_codewords - NUM_EC_CODEWORDS[min_version][min_ecl as usize] as usize)
            && min_version < 40
        {
            if strict_version {
                return None;
            }
            min_version += 1;
            data_codewords = (NUM_DATA_MODULES[min_version] / 8) as usize;
            // char count indicator length increase
            match mode {
                Mode::Byte => {
                    if min_version == 10 {
                        bits += 8;
                    }
                }
                _ => {
                    if min_version == 10 || min_version == 27 {
                        bits += 2;
                    }
                }
            }
            req_codewords = (bits + 7) / 8;
        }
        // The loop above stops at version 40 even when the data still does
        // not fit, so re-check capacity here. (The previous
        // `min_version > 40` check was unreachable — the loop never goes
        // past 40 — and let oversized payloads through.)
        if req_codewords > data_codewords - NUM_EC_CODEWORDS[min_version][min_ecl as usize] as usize
        {
            return None;
        }
        // Upgrade to the highest ECL that still fits in the chosen version.
        let mut max_ecl = min_ecl;
        if !strict_ecl {
            let ecls = [ECL::Low, ECL::Medium, ECL::Quartile, ECL::High];
            for new_ecl in (min_ecl as usize + 1..ecls.len()).rev() {
                if req_codewords <= data_codewords - NUM_EC_CODEWORDS[min_version][new_ecl] as usize
                {
                    max_ecl = ecls[new_ecl];
                    break;
                }
            }
        }
        let mut data = Data {
            bits: BitVec::with_capacity(data_codewords * 8),
            mode,
            version: Version(min_version),
            ecl: max_ecl,
        };
        match mode {
            Mode::Numeric => encode_numeric(&mut data, text),
            Mode::Alphanumeric => encode_alphanumeric(&mut data, text),
            Mode::Byte => encode_byte(&mut data, text),
        }
        Some(data)
    }
}
/// A bit buffer backed by bytes, filled most-significant-bit first.
/// `len` is the logical length in bits; the last byte may carry up to 7
/// unused trailing bits.
#[derive(Debug, PartialEq, Eq)]
pub struct BitVec {
    value: Vec<u8>,
    len: usize,
}
impl BitVec {
    /// Creates an empty bit buffer.
    pub fn new() -> Self {
        BitVec {
            value: Vec::new(),
            len: 0,
        }
    }
    /// Creates an empty buffer with room for `capacity` bits.
    pub fn with_capacity(capacity: usize) -> Self {
        BitVec {
            value: Vec::with_capacity((capacity + 7) / 8),
            len: 0,
        }
    }
    /// Grows (or shrinks) to `new_len` bits; newly created bytes are
    /// filled with `byte`.
    pub fn resize(&mut self, new_len: usize, byte: u8) {
        self.value.resize((new_len + 7) / 8, byte);
        self.len = new_len;
    }
    /// Appends whole bytes, draining `other`. `self` must be byte aligned.
    pub fn append(&mut self, other: &mut Vec<u8>) {
        // Record the length BEFORE appending: `Vec::append` drains `other`,
        // so the old code (`self.len += other.len() * 8` afterwards) always
        // added 0 and the bit length never grew.
        self.len += other.len() * 8;
        self.value.append(other);
    }
    /// Consumes the buffer and returns the underlying bytes.
    pub fn to_bytes(self) -> Vec<u8> {
        self.value
    }
    /// Sets bit `i` to 1.
    pub fn set(&mut self, i: usize) {
        // OR instead of plain assignment so other bits already set in the
        // same byte are preserved.
        self.value[i / 8] |= 1 << (7 - (i % 8));
    }
    /// Returns bit `i`.
    pub fn get(&self, i: usize) -> bool {
        ((self.value[i / 8] >> (7 - (i % 8))) & 1) == 1
    }
    /// Logical length in bits.
    pub fn len(&self) -> usize {
        self.len
    }
    /// Pushes the low `n` bits of `input`, most significant first.
    pub fn push_n(&mut self, input: usize, n: usize) {
        if n == 0 {
            // Guard: the aligned fast path below indexes the last byte,
            // which does not exist yet on an empty, byte-aligned buffer.
            return;
        }
        let gap = (8 - (self.len % 8)) % 8;
        self.len += n;
        if gap >= n {
            // Fits entirely inside the current partial byte.
            let i = self.value.len() - 1;
            self.value[i] |= (input << (gap - n)) as u8;
            return;
        }
        let mut n = n - gap;
        if gap > 0 {
            // Fill the remainder of the current partial byte.
            let i = self.value.len() - 1;
            self.value[i] |= (input >> n) as u8;
        }
        // Emit full bytes, then any leftover bits high-aligned in a new byte.
        while n >= 8 {
            n -= 8;
            self.value.push((input >> n) as u8);
        }
        if n > 0 {
            self.value.push((input << (8 - n)) as u8);
        }
    }
}
impl From<Vec<u8>> for BitVec {
    fn from(value: Vec<u8>) -> Self {
        BitVec {
            len: value.len() * 8,
            value,
        }
    }
}
impl AsRef<[u8]> for BitVec {
    fn as_ref(&self) -> &[u8] {
        &self.value
    }
}
impl AsMut<[u8]> for BitVec {
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.value
    }
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/encoding.rs | src/encoding.rs | use crate::{
data::Data,
qr_code::{Mode, Version},
};
/// Picks the densest [`Mode`] able to represent `input`: Numeric when every
/// byte is a digit, Alphanumeric when everything is in the base-45 charset,
/// and Byte as soon as any other byte appears.
pub fn encoding_mode(input: &str) -> Mode {
    let mut mode = Mode::Numeric;
    for b in input.bytes() {
        match b {
            // Digits never force an upgrade.
            b'0'..=b'9' => {}
            // In the 45-char set: at least Alphanumeric is required.
            _ if byte_to_b45(b) < 45 => mode = Mode::Alphanumeric,
            // Anything else settles the question immediately.
            _ => return Mode::Byte,
        }
    }
    mode
}
// input fits in u8 b/c numeric
/// Appends the Numeric segment: mode indicator `0b0001`, character count,
/// then digits packed 3 per 10 bits with a 2-digit (7 bit) or
/// 1-digit (4 bit) tail. `input` must contain only ASCII digits.
pub fn encode_numeric(data: &mut Data, input: &str) {
    data.bits.push_n(0b0001, 4);
    data.bits
        .push_n(input.len(), num_cci_bits(data.version, Mode::Numeric));
    let input = input.as_bytes();
    for i in 0..(input.len() / 3) {
        // Three digits become one value in 0..=999, stored in 10 bits.
        let group = (input[i * 3] - b'0') as usize * 100
            + (input[i * 3 + 1] - b'0') as usize * 10
            + (input[i * 3 + 2] - b'0') as usize;
        data.bits.push_n(group, 10);
    }
    match input.len() % 3 {
        1 => {
            let group = input[input.len() - 1] - b'0';
            data.bits.push_n(group.into(), 4);
        }
        2 => {
            let group = (input[input.len() - 2] - b'0') * 10 + (input[input.len() - 1] - b'0');
            data.bits.push_n(group.into(), 7);
        }
        _ => (),
    }
}
/// Appends the Alphanumeric segment: mode indicator `0b0010`, character
/// count, then base-45 values packed 2 per 11 bits; an odd trailing
/// character takes 6 bits. `input` must contain only the 45-char set.
pub fn encode_alphanumeric(qrdata: &mut Data, input: &str) {
    qrdata.bits.push_n(0b0010, 4);
    qrdata.bits.push_n(
        input.len(),
        num_cci_bits(qrdata.version, Mode::Alphanumeric),
    );
    let input = input.as_bytes();
    for i in 0..(input.len() / 2) {
        // Pair value = first * 45 + second, stored in 11 bits.
        let group =
            byte_to_b45(input[i * 2]) as usize * 45 + byte_to_b45(input[i * 2 + 1]) as usize;
        qrdata.bits.push_n(group, 11);
    }
    if (input.len() & 1) == 1 {
        qrdata
            .bits
            .push_n(byte_to_b45(input[input.len() - 1]).into(), 6);
    }
}
/// Appends the Byte segment: mode indicator `0b0100`, character count,
/// then the raw bytes of `input`, 8 bits each.
pub fn encode_byte(qrdata: &mut Data, input: &str) {
    qrdata.bits.push_n(0b0100, 4);
    qrdata
        .bits
        .push_n(input.len(), num_cci_bits(qrdata.version, Mode::Byte));
    for c in input.as_bytes() {
        qrdata.bits.push_n((*c).into(), 8);
    }
}
/// Number of character-count-indicator bits for `mode` at `version`,
/// per the QR specification's character count indicator table.
pub fn num_cci_bits(version: Version, mode: Mode) -> usize {
    let v = version.0;
    match mode {
        Mode::Byte => {
            if v < 10 {
                8
            } else {
                16
            }
        }
        Mode::Numeric | Mode::Alphanumeric => {
            let base = if mode == Mode::Numeric { 10 } else { 9 };
            // +2 bits for versions 10-26, +4 for versions 27-40.
            let extra = if v > 26 {
                4
            } else if v > 9 {
                2
            } else {
                0
            };
            base + extra
        }
    }
}
/// Maps an ASCII byte to its QR alphanumeric (base-45) value, or 255 when
/// the byte is outside the alphanumeric charset. Callers needing
/// validation can test for `< 45`.
fn byte_to_b45(c: u8) -> u8 {
    match c {
        b'0'..=b'9' => c - b'0',
        b'A'..=b'Z' => c - b'A' + 10,
        b' ' => 36,
        b'$' => 37,
        b'%' => 38,
        b'*' => 39,
        b'+' => 40,
        b'-' => 41,
        b'.' => 42,
        b'/' => 43,
        b':' => 44,
        _ => 255,
    }
}
#[cfg(test)]
mod tests {
    use crate::{data::BitVec, qr_code::ECL};
    use super::*;
    /// Parses a string of '0'/'1' characters (anything else is a visual
    /// separator and is skipped) into a BitVec, MSB first.
    fn get_data_bits(bits: &str) -> BitVec {
        let mut v = BitVec::new();
        let mut i = 0;
        let mut num = 0;
        for c in bits.chars() {
            match c {
                '1' => {
                    num += 1 << (7 - i);
                    i += 1;
                }
                '0' => i += 1,
                _ => continue,
            }
            if i == 8 {
                // Flush the completed byte: all 8 bits, not 1. The previous
                // `push_n(num, 1)` desynced both the bit length and the
                // contents, so the expected values could never match.
                v.push_n(num, 8);
                num = 0;
                i = 0;
            }
        }
        if i > 0 {
            // Flush the trailing partial byte: its `i` bits sit in the high
            // end of `num`, so shift them down before pushing.
            v.push_n(num >> (8 - i), i);
        }
        v
    }
    #[test]
    fn encode_numeric_works() {
        let data = Data::new("1", Mode::Numeric, Version(1), ECL::Low).unwrap();
        assert_eq!(data.bits, get_data_bits("0001 0000000001 0001"));
        let data = Data::new("99", Mode::Numeric, Version(1), ECL::Low).unwrap();
        assert_eq!(data.bits, get_data_bits("0001 0000000010 1100011"));
        let data = Data::new("123456", Mode::Numeric, Version(1), ECL::Low).unwrap();
        assert_eq!(
            data.bits,
            get_data_bits("0001 0000000110 0001111011 0111001000")
        );
    }
    #[test]
    fn encode_alphanumeric_works() {
        let data = Data::new("1", Mode::Alphanumeric, Version(1), ECL::Low).unwrap();
        assert_eq!(data.bits, get_data_bits("0010 000000001 000001"));
        let data = Data::new("99", Mode::Alphanumeric, Version(1), ECL::Low).unwrap();
        assert_eq!(data.bits, get_data_bits("0010 000000010 00110011110"));
        let data = Data::new("ABC1::4", Mode::Alphanumeric, Version(1), ECL::Low).unwrap();
        assert_eq!(
            data.bits,
            get_data_bits("0010 000000111 00111001101 01000011101 11111101000 000100")
        );
    }
    #[test]
    fn encode_byte_works() {
        let data = Data::new("0", Mode::Byte, Version(1), ECL::Low).unwrap();
        assert_eq!(data.bits, get_data_bits("0100 00000001 00110000"));
    }
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/matrix.rs | src/matrix.rs | use std::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign};
use crate::{
constants::{FORMAT_INFO, VERSION_INFO},
qr_code::{Mask, Version, ECL},
};
/// A square module grid stored row-major. `T` is anything convertible
/// to/from [`Module`], so the same placement code serves both the rendered
/// matrix (`Module`) and analysis matrices with richer per-module data.
#[derive(Debug)]
pub struct Matrix<T: Copy + From<Module> + Into<Module> + BitOrAssign<Module>> {
    pub value: Vec<T>,
    pub width: usize,
}
impl<T: Copy + From<Module> + Into<Module> + BitOrAssign<Module>> Matrix<T> {
    /// Creates a `width * width` matrix filled with `init`, where
    /// `width = 4 * version + 17`.
    pub fn new(version: Version, init: T) -> Self {
        let width = version.0 * 4 + 17;
        Matrix {
            value: vec![init; (width) * (width)],
            width,
        }
    }
    /// Returns the module at (x, y). No bounds check beyond the Vec's own.
    pub fn get(&self, x: usize, y: usize) -> T {
        self.value[y * self.width + x]
    }
    /// Mutable access to the module at (x, y).
    pub fn get_mut(&mut self, x: usize, y: usize) -> &mut T {
        &mut (self.value[y * self.width + x])
    }
    /// Overwrites the module at (x, y).
    pub fn set(&mut self, x: usize, y: usize, value: T) {
        self.value[y * self.width + x] = value;
    }
    /// Draws the three 7x7 finder patterns (top-left, bottom-left,
    /// top-right): an ON ring, an off ring, and a 3x3 ON core tagged
    /// `FINDER_CENTER`.
    pub fn set_finder(&mut self) {
        for (x, mut y) in [(0, 0), (0, self.width - 7), (self.width - 7, 0)] {
            // Top edge.
            for i in 0..7 {
                self.set(x + i, y, (Module::FINDER | Module::ON).into());
            }
            y += 1;
            // Second row: ON at both ends, off in between.
            self.set(x + 0, y, (Module::FINDER | Module::ON).into());
            for i in 1..6 {
                self.set(x + i, y, Module::FINDER.into());
            }
            self.set(x + 6, y, (Module::FINDER | Module::ON).into());
            y += 1;
            // Three middle rows containing the 3x3 core.
            for _ in 0..3 {
                self.set(x + 0, y, (Module::FINDER | Module::ON).into());
                self.set(x + 1, y, (Module::FINDER).into());
                self.set(x + 2, y, (Module::FINDER_CENTER | Module::ON).into());
                self.set(x + 3, y, (Module::FINDER_CENTER | Module::ON).into());
                self.set(x + 4, y, (Module::FINDER_CENTER | Module::ON).into());
                self.set(x + 5, y, (Module::FINDER).into());
                self.set(x + 6, y, (Module::FINDER | Module::ON).into());
                y += 1;
            }
            // Second-to-last row mirrors the second row.
            self.set(x + 0, y, (Module::FINDER | Module::ON).into());
            for i in 1..6 {
                self.set(x + i, y, (Module::FINDER).into());
            }
            self.set(x + 6, y, (Module::FINDER | Module::ON).into());
            y += 1;
            // Bottom edge.
            for i in 0..7 {
                self.set(x + i, y, (Module::FINDER | Module::ON).into());
            }
        }
    }
    /// Draws every 5x5 alignment pattern for this version, skipping the
    /// three corners occupied by finder patterns. Version 1 has none.
    pub fn set_alignment(&mut self) {
        let version = (self.width - 17) / 4;
        if version == 1 {
            return;
        }
        let first = 6;
        let last = self.width - 7;
        // Number of alignment coordinates per axis.
        let len = version / 7 + 2;
        let mut coords = Vec::with_capacity(len);
        coords.push(first);
        if version >= 7 {
            // Interior coordinates step back from `last` by a per-version
            // offset (ALIGN_OFFSETS defined elsewhere in the crate).
            for i in (1..len - 1).rev() {
                coords.push((last - i * ALIGN_OFFSETS[version - 7]) as usize);
            }
        }
        coords.push(last);
        for i in 0..len {
            for j in 0..len {
                // Skip the three corners covered by finder patterns.
                if (i == 0 && (j == 0 || j == len - 1)) || (i == len - 1 && j == 0) {
                    continue;
                }
                // (col, row) is the top-left of the centered 5x5 pattern.
                let col = coords[i] - 2;
                let row = coords[j] - 2;
                // Left column.
                for i in 0..5 {
                    self.set(col, row + i, (Module::ALIGNMENT | Module::ON).into());
                }
                // Interior columns: ON edges, off interior.
                for i in 1..4 {
                    self.set(col + i, row + 0, (Module::ALIGNMENT | Module::ON).into());
                    self.set(col + i, row + 1, (Module::ALIGNMENT).into());
                    self.set(col + i, row + 2, (Module::ALIGNMENT).into());
                    self.set(col + i, row + 3, (Module::ALIGNMENT).into());
                    self.set(col + i, row + 4, (Module::ALIGNMENT | Module::ON).into());
                }
                // Single ON center module.
                self.set(
                    col + 2,
                    row + 2,
                    (Module::ALIGNMENT_CENTER | Module::ON).into(),
                );
                // Right column.
                for i in 0..5 {
                    self.set(col + 4, row + i, (Module::ALIGNMENT | Module::ON).into())
                }
            }
        }
    }
    /// Draws the two alternating timing belts (row 6 and column 6).
    pub fn set_timing(&mut self) {
        // overlaps with alignment pattern so must |=
        let len = self.width - 16;
        for i in 0..len {
            let module = Module::TIMING | ((i as u8 & 1) ^ 1).into();
            *self.get_mut(8 + i, 6) |= module;
        }
        for i in 0..len {
            let module = Module::TIMING | ((i as u8 & 1) ^ 1).into();
            *self.get_mut(6, 8 + i) |= module;
        }
    }
    /// Places both copies of the 15-bit format info for (ecl, mask), plus
    /// the always-on dark module next to the bottom-left finder.
    pub fn set_format(&mut self, ecl: ECL, mask: Mask) {
        let format_info = FORMAT_INFO[ecl as usize][mask as usize];
        for i in 0..15 {
            let on = ((format_info >> i) as u8 & 1).into();
            // First copy: wraps around the top-left finder, skipping the
            // timing row/column.
            let y = match i {
                i if i < 6 => i,
                6 => 7,
                _ => 8,
            };
            let x = match i {
                i if i < 8 => 8,
                8 => 7,
                _ => 14 - i,
            };
            self.set(x, y, (Module::FORMAT | on).into());
            // Second copy: below the top-right finder and right of the
            // bottom-left finder.
            let y = match i {
                i if i < 8 => 8,
                _ => self.width - (15 - i),
            };
            let x = match i {
                i if i < 8 => self.width - (i + 1),
                _ => 8,
            };
            self.set(x, y, (Module::FORMAT_COPY | on).into());
        }
        // always set bit, not part of format info
        self.set(8, self.width - 8, (Module::FORMAT_COPY | Module::ON).into());
    }
    /// Places both copies of the 18-bit version info (versions 7 and up):
    /// 3x6 blocks beside the top-right and bottom-left finders.
    pub fn set_version(&mut self) {
        let version = (self.width - 17) / 4;
        if version < 7 {
            return;
        }
        let info = VERSION_INFO[version];
        for i in 0..18 {
            let on = ((info >> i) as u8 & 1).into();
            let x = i / 3;
            let y = i % 3;
            self.set(x, y + self.width - 11, (Module::VERSION | on).into());
            self.set(y + self.width - 11, x, (Module::VERSION_COPY | on).into());
        }
    }
    /// This must run AFTER alignment, timing, version placed
    ///
    /// Walks the standard zigzag order — two-module-wide columns from the
    /// right edge, alternating up/down — calling `get_value` once for each
    /// still-unset module (those equal to `Module(0)`).
    pub fn set_data(&mut self, mut get_value: impl FnMut() -> T) {
        let mut col = self.width - 1;
        let mut row = self.width - 1;
        let mut row_dir = -1;
        // Current column pair runs between `row_end` and its far end;
        // `row_len` is the signed distance used to flip `row_end`.
        let mut row_end = 9;
        let mut row_len = (self.width - 10) as isize;
        loop {
            loop {
                if self.get(col, row).into() == Module(0) {
                    self.set(col, row, get_value());
                }
                if self.get(col - 1, row).into() == Module(0) {
                    self.set(col - 1, row, get_value());
                }
                if row == row_end {
                    break;
                }
                row = ((row as isize) + row_dir) as usize;
            }
            if col == 1 {
                break;
            }
            col -= 2;
            row_dir *= -1;
            // passed first finder
            if col == self.width - 9 {
                row_len = (self.width - 1) as isize;
                row_end = 0;
            }
            // between left finders
            else if col == 8 {
                row_len = (self.width - 18) as isize;
                row_end = 9;
                row = self.width - 9;
            } else {
                // vertical timing belt
                if col == 6 {
                    col -= 1;
                }
                row_end = (row_end as isize + row_len * row_dir) as usize;
            }
        }
    }
}
/// Bit-flag byte describing one QR module (pixel): the low bit is the
/// dark/light state, the remaining bits record which functional pattern
/// the module belongs to (see the associated constants below).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[repr(transparent)]
pub struct Module(pub u8);
impl Module {
    // bit flags
    pub const ON: Module = Module(1 << 0);
    pub const DATA: Module = Module(1 << 1);
    pub const FINDER: Module = Module(1 << 2);
    pub const ALIGNMENT: Module = Module(1 << 3);
    pub const TIMING: Module = Module(1 << 4);
    pub const FORMAT: Module = Module(1 << 5);
    pub const VERSION: Module = Module(1 << 6);
    pub const MODIFIER: Module = Module(1 << 7);
    // modified flags
    pub const FINDER_CENTER: Module = Module(Module::FINDER.0 | Module::MODIFIER.0);
    pub const ALIGNMENT_CENTER: Module = Module(Module::ALIGNMENT.0 | Module::MODIFIER.0);
    pub const FORMAT_COPY: Module = Module(Module::FORMAT.0 | Module::MODIFIER.0);
    pub const VERSION_COPY: Module = Module(Module::VERSION.0 | Module::MODIFIER.0);
    /// Returns true if self contains all flags set in `flags`, aka a superset
    pub fn has(self, flags: Module) -> bool {
        self.0 & flags.0 == flags.0
    }
    /// Returns true if self contains any flag set in `flag`, aka an intersection
    pub fn any(self, flag: Module) -> bool {
        self.0 & flag.0 != 0
    }
    /// Sets the given flags, leaving all other flags untouched.
    pub fn set(&mut self, flags: Module) {
        self.0 |= flags.0;
    }
}
impl From<u8> for Module {
fn from(value: u8) -> Self {
Module(value)
}
}
impl BitAnd for Module {
    type Output = Module;
    /// Intersection of two flag sets.
    fn bitand(self, rhs: Self) -> Self::Output {
        Self(self.0 & rhs.0)
    }
}
impl BitAndAssign for Module {
    /// In-place bitwise AND, matching the `BitAnd` impl.
    fn bitand_assign(&mut self, rhs: Self) {
        // BUG FIX: this previously did `self.0 |= rhs.0` (copy-pasted
        // from `BitOrAssign`), so `a &= b` behaved like `a |= b` and
        // violated the contract that `a &= b` equals `a = a & b`.
        self.0 &= rhs.0;
    }
}
impl BitOr for Module {
    type Output = Module;
    /// Union of two flag sets.
    fn bitor(self, rhs: Self) -> Self::Output {
        Self(self.0 | rhs.0)
    }
}
impl BitOrAssign for Module {
    /// In-place union, equivalent to `*self = *self | rhs`.
    fn bitor_assign(&mut self, rhs: Self) {
        *self = Self(self.0 | rhs.0);
    }
}
impl BitXor for Module {
    type Output = Module;
    /// Symmetric difference of two flag sets.
    fn bitxor(self, rhs: Self) -> Self::Output {
        Self(self.0 ^ rhs.0)
    }
}
impl BitXorAssign for Module {
    /// In-place symmetric difference, equivalent to `*self = *self ^ rhs`.
    fn bitxor_assign(&mut self, rhs: Self) {
        *self = Self(self.0 ^ rhs.0);
    }
}
// Spacing between adjacent alignment-pattern center coordinates,
// indexed by `version - 7` (inline comments give the version ranges).
// Versions 1-6 have at most one alignment pattern and need no entry.
// NOTE(review): values appear to follow the ISO/IEC 18004 alignment
// coordinate table — verify against the spec before changing.
const ALIGN_OFFSETS: [usize; 34] = [
    16, 18, 20, 22, 24, 26, 28, // 7-13
    20, 22, 24, 24, 26, 28, 28, // 14-20
    22, 24, 24, 26, 26, 28, 28, // 21-27
    24, 24, 26, 26, 26, 28, 28, // 28-34
    24, 26, 26, 26, 28, 28, // 35-40
];
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/render/svg.rs | src/render/svg.rs | use crate::matrix::Module;
use super::{RenderData, Toggle};
/// Renders the QR code as a standalone SVG document string.
/// Layers are emitted in order: optional background rect, optional
/// light-module path, optional dark-module path.
pub fn render_svg(render: &RenderData) -> String {
    let w = render.width();
    // Rough capacity guess: ~40 bytes of path data per module drawn.
    let mut out = String::with_capacity(40 * (w * w) / 2);
    out.push_str(&format!(
        r#"<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 {} {}">"#,
        w, w
    ));
    if render.toggled(Toggle::Background) {
        out.push_str(&format!(
            r#"<rect width="{}" height="{}" fill="{}"/>"#,
            w, w, render.background
        ));
    }
    if render.toggled(Toggle::BackgroundPixels) {
        render_pixels(render, &mut out, false);
    }
    if render.toggled(Toggle::ForegroundPixels) {
        render_pixels(render, &mut out, true);
    }
    out.push_str("</svg>");
    out
}
/// Appends one `<path>` element to `output` covering every module whose
/// ON state matches `on`, filled with the matching foreground/background
/// color. Each module becomes an `M…h…v…h…z` square subpath.
fn render_pixels(render: &RenderData, output: &mut String, on: bool) {
    use std::fmt::Write;
    let fill = if on {
        &render.foreground
    } else {
        &render.background
    };
    // PERF: `write!` formats directly into `output`; the previous
    // `push_str(&format!(…))` allocated a temporary String per module.
    // `fmt::Write` for String is infallible, so the Result is ignored.
    let _ = write!(output, "<path fill=\"{}\" d=\"", fill);
    for y in 0..render.qr_code.matrix.width {
        for x in 0..render.qr_code.matrix.width {
            let module_on = render.qr_code.matrix.get(x, y).has(Module::ON);
            if module_on != on {
                continue;
            }
            // keep module centered if size != unit
            let _ = write!(
                output,
                "M{},{}h{}v{}h-{}z",
                (x as u32 + render.margin as u32) * render.unit as u32,
                (y as u32 + render.margin as u32) * render.unit as u32,
                render.unit,
                render.unit,
                render.unit
            );
        }
    }
    output.push_str("\"/>");
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/src/render/text.rs | src/render/text.rs | use crate::matrix::Module;
use super::RenderData;
/// Renders the QR code as terminal text, packing two matrix rows into
/// one line of half-block characters (█ ▀ ▄ and space).
pub fn render_utf8(render: &RenderData) -> String {
    // row length +1 for \n and take ceil of rows / 2 if odd
    let mut result = String::with_capacity((render.width() + 1) * (render.width() + 1) / 2);
    let start = render.margin;
    let end = render.qr_code.matrix.width + start;
    // Whether the module at (x, y) in full-render coordinates is dark;
    // anything in the margin band counts as light.
    let dark = |x: usize, y: usize| {
        y >= start && y < end && render.qr_code.matrix.get(x - start, y - start).has(Module::ON)
    };
    for y in (0..render.width()).step_by(2) {
        for x in 0..render.width() {
            if x < start || x >= end {
                result.push(' ');
                continue;
            }
            // Top half from row y, bottom half from row y + 1.
            result.push(match (dark(x, y), dark(x, y + 1)) {
                (true, true) => '█',
                (true, false) => '▀',
                (false, true) => '▄',
                (false, false) => ' ',
            });
        }
        result.push('\n');
    }
    result
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/benches/qr.rs | benches/qr.rs | // copied from https://github.com/erwanvivien/fast_qr/blob/master/benches/qr.rs
use criterion::{criterion_group, criterion_main, Criterion, Throughput};
use fuqr::QrOptions;
use std::hint::black_box;
use std::time::Duration;
/// Benchmarks fuqr against fast_qr and qrcode generating the same URL at
/// three fixed (version, ECL) combinations, reporting byte throughput.
fn bench_fastqr_qrcode(c: &mut Criterion) {
    let bytes: &[u8] = b"https://example.com/";
    // One tuple per benchmark group: the same symbol parameters spelled
    // in each crate's own types so all three build identical codes.
    for (
        id,
        fuqr_version,
        fuqr_level,
        fast_qr_version,
        fast_qr_level,
        qrcode_version,
        qrcode_level,
    ) in &[
        (
            "V03H",
            fuqr::qr_code::Version::new(3),
            fuqr::qr_code::ECL::High,
            fast_qr::Version::V03,
            fast_qr::ECL::H,
            qrcode::Version::Normal(3),
            qrcode::EcLevel::H,
        ),
        (
            "V10H",
            fuqr::qr_code::Version::new(10),
            fuqr::qr_code::ECL::High,
            fast_qr::Version::V10,
            fast_qr::ECL::H,
            qrcode::Version::Normal(10),
            qrcode::EcLevel::H,
        ),
        (
            "V40H",
            fuqr::qr_code::Version::new(40),
            fuqr::qr_code::ECL::High,
            fast_qr::Version::V40,
            fast_qr::ECL::H,
            qrcode::Version::Normal(40),
            qrcode::EcLevel::H,
        ),
    ] {
        let mut group = c.benchmark_group(*id);
        group.measurement_time(Duration::from_secs(10));
        group.throughput(Throughput::Bytes(bytes.len() as u64));
        group.sample_size(200);
        group.bench_function("fuqr", |b| {
            b.iter(|| {
                fuqr::generate(
                    black_box("https://example.com/"),
                    &QrOptions::new()
                        .min_ecl(*fuqr_level)
                        .min_version(*fuqr_version),
                )
            })
        });
        group.bench_function("qrcode", |b| {
            b.iter(|| {
                qrcode::QrCode::with_version(
                    black_box(b"https://example.com/"),
                    *qrcode_version,
                    *qrcode_level,
                )
                .unwrap()
            })
        });
        group.bench_function("fast_qr", |b| {
            b.iter(|| {
                fast_qr::QRBuilder::new(black_box("https://example.com/"))
                    .ecl(*fast_qr_level)
                    .version(*fast_qr_version)
                    .build()
                    .unwrap()
            })
        });
        group.finish();
    }
}
// Criterion harness entry points.
criterion_group!(benches, bench_fastqr_qrcode);
criterion_main!(benches);
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/examples/bad_apple.rs | examples/bad_apple.rs | use fuqr::{
generate_qart,
matrix::Module,
qart::WeightPixel,
qr_code::{Mode, Version},
QrOptions,
};
use image::{ImageBuffer, Rgb};
use ffmpeg::software::scaling::{context::Context, flag::Flags};
use ffmpeg_next as ffmpeg;
// Overlay a dither pattern on solid regions (helps scanners; see below).
const USE_PATTERN: bool = true;
// Fixed QR version for every frame.
const QR_VERSION: usize = 13;
// Source video aspect ratio (width : height).
const X_ASPECT: usize = 4;
const Y_ASPECT: usize = 3;
// Increase padding for more control over error correction bits
const PAD_L: usize = 2;
const PAD_R: usize = 2;
// Output frame rate; source is assumed 30fps, so keep every Nth frame.
const FPS: u32 = 5;
const NTH_FRAME: u32 = 30 / FPS;
const QR_WIDTH: usize = QR_VERSION * 4 + 17;
// Embedded image dimensions inside the QR, after horizontal padding.
const IMG_WIDTH: usize = QR_WIDTH - (PAD_L + PAD_R);
const IMG_HEIGHT: usize = ((IMG_WIDTH * Y_ASPECT) / X_ASPECT) | 1; // force odd
// Vertical padding is asymmetric because IMG_HEIGHT is forced odd.
const PAD_T: usize = (QR_WIDTH - IMG_HEIGHT as usize) / 2 - 1;
const PAD_B: usize = (QR_WIDTH - IMG_HEIGHT as usize) / 2 + 1;
/// Decodes the Bad Apple video with ffmpeg, downscales every Nth frame
/// to the embedded-image size, and writes one QArt frame PNG per sample.
fn main() {
    ffmpeg::init().unwrap();
    // video excluded in .gitignore
    let mut format_context = ffmpeg::format::input("examples/bad_apple/bad_apple.mp4").unwrap();
    let stream = format_context
        .streams()
        .best(ffmpeg::media::Type::Video)
        .ok_or(ffmpeg::Error::StreamNotFound)
        .unwrap();
    let video_stream_index = stream.index();
    let decoder_context =
        ffmpeg::codec::context::Context::from_parameters(stream.parameters()).unwrap();
    let mut decoder = decoder_context.decoder().video().unwrap();
    // Scaler converts decoded frames to RGB24 at the target size.
    let mut scaler = Context::get(
        decoder.format(),
        decoder.width(),
        decoder.height(),
        ffmpeg::format::Pixel::RGB24,
        IMG_WIDTH as u32,
        IMG_HEIGHT as u32,
        Flags::BILINEAR,
    )
    .unwrap();
    let mut frame_index = 0;
    // Drains all frames currently available from the decoder; called
    // after each packet and once more after EOF to flush.
    let mut receive_and_process = |decoder: &mut ffmpeg::decoder::Video| {
        let mut decoded = ffmpeg::frame::Video::empty();
        while decoder.receive_frame(&mut decoded).is_ok() {
            if frame_index % NTH_FRAME == 0 {
                let mut rgb_frame = ffmpeg::frame::Video::empty();
                scaler.run(&decoded, &mut rgb_frame).unwrap();
                save_qr_frame(rgb_frame.data(0), rgb_frame.stride(0), frame_index);
            }
            frame_index += 1;
        }
    };
    for (stream, packet) in format_context.packets() {
        if stream.index() != video_stream_index {
            continue;
        }
        decoder.send_packet(&packet).unwrap();
        receive_and_process(&mut decoder);
    }
    decoder.send_eof().unwrap();
    receive_and_process(&mut decoder);
}
/// Converts one RGB24 video frame into QArt pixel weights (rotated 90°
/// CCW so the portrait QR holds a landscape frame), generates the QArt
/// code with the current lyric as payload, then saves the frame rotated
/// back 90° CW as a PNG.
fn save_qr_frame(frame: &[u8], frame_stride: usize, frame_index: u32) {
    let mut weights = vec![WeightPixel::new(false, 0); QR_WIDTH * QR_WIDTH];
    for y in 0..IMG_HEIGHT {
        for x in 0..IMG_WIDTH {
            let offset = (y * frame_stride) + x * 3;
            // Red channel only — source is grayscale, so any channel works.
            let r = frame[offset];
            // SOLID BLOCKS OF COLOR BREAK RECOGNITION
            // apply different pattern to white and black sections to help
            let value = if USE_PATTERN {
                if r < 127 {
                    // black pattern
                    (x + y) % 6 != 0 || (IMG_WIDTH - 1 - x + y) % 6 != 0
                } else {
                    // white pattern
                    x % 6 == (y % 6) || x % 6 == (IMG_WIDTH - 1 - y) % 6
                }
            } else {
                r < 127
            };
            // NOTICE inverted x and swapped x and y
            // 90deg ccw rotation
            weights[(QR_WIDTH - 1 - (x + PAD_L)) * QR_WIDTH + (y + PAD_T)] =
                WeightPixel::new(value, 127);
        }
    }
    let qr_options = QrOptions::new()
        .mode(Some(Mode::Byte))
        .min_version(Version(QR_VERSION))
        .strict_version(true)
        .strict_ecl(true);
    let qr_code = generate_qart(&get_lyric(frame_index / 3), &qr_options, &weights).unwrap();
    let margin = 2;
    let out_width = QR_WIDTH + 2 * margin;
    let img_buf = ImageBuffer::from_fn(out_width as u32, out_width as u32, |rot_x, rot_y| {
        // NOTICE
        // undo rotation with 90deg cw rotation
        let x = rot_y as usize;
        let y = out_width - 1 - rot_x as usize;
        if x < margin || y < margin || x > out_width - 1 - margin || y > out_width - 1 - margin {
            return Rgb([255, 255, 255]);
        }
        let qr_x = (x - margin) as usize;
        let qr_y = (y - margin) as usize;
        let module = qr_code.matrix.get(qr_x, qr_y);
        // NOTICE PAD IS FOR ROTATED IMAGE
        // Inside the image area, override timing/alignment modules with
        // the desired pixel so the picture is uninterrupted.
        let on = if (module.has(Module::TIMING) || module.has(Module::ALIGNMENT))
            && (qr_x >= PAD_T
                && qr_x <= QR_WIDTH - 1 - PAD_B
                && qr_y >= PAD_L
                && qr_y <= QR_WIDTH - 1 - PAD_R)
        {
            weights[(qr_y * QR_WIDTH) + qr_x].value().clone()
        } else {
            module.has(Module::ON)
        };
        if on {
            Rgb([0 as u8, 0, 0])
        } else {
            Rgb([255, 255, 255])
        }
    });
    img_buf
        .save(format!(
            "examples/bad_apple/frames/frame_{:05}.png",
            frame_index
        ))
        .unwrap();
}
pub fn get_lyric(deca_second: u32) -> &'static str {
// each tenth of second
match deca_second {
0..=184 => "๐ถ",
185..=224 => "We're no strangers to love",
225..=239 => "You know the rules",
240..=267 => "and so do I (do I)",
268..=289 => "A full commitment's what I'm",
290..=309 => "thinking of",
310..=329 => "You wouldn't get this from",
330..=354 => "any other guy",
355..=377 => "I just want to tell you",
378..=399 => "how I'm feeling",
400..=429 => "Gotta make you understand",
430..=449 => "Never gonna give you up",
450..=469 => "Never gonna let you down",
470..=489 => "Never gonna run around",
490..=514 => "and desert you",
515..=534 => "Never gonna make you cry",
535..=554 => "Never gonna say goodbye",
555..=579 => "Never gonna tell a lie",
580..=604 => "and hurt you",
605..=624 => "We've known each other",
625..=647 => "for so long",
648..=669 => "Your heart's been aching but",
670..=689 => "you're too shy to say it (say it)",
690..=709 => "Inside, we both know what's been",
710..=729 => "going on (going on)",
730..=749 => "We know the game and we're",
750..=769 => "gonna play it",
770..=799 => "And if you ask me",
800..=819 => "how I'm feeling",
820..=839 => "Don't tell me you're too",
840..=849 => "blind to see",
850..=869 => "Never gonna give you up",
870..=889 => "Never gonna let you down",
890..=909 => "Never gonna run around",
910..=934 => "and desert you",
935..=954 => "Never gonna make you cry",
955..=974 => "Never gonna say goodbye",
975..=999 => "Never gonna tell a lie",
1000..=1019 => "and hurt you",
1020..=1039 => "Never gonna give you up",
1040..=1059 => "Never gonna let you down",
1060..=1079 => "Never gonna run around",
1080..=1104 => "and desert you",
1105..=1124 => "Never gonna make you cry",
1125..=1144 => "Never gonna say goodbye",
1145..=1169 => "Never gonna tell a lie",
1170..=1194 => "and hurt you",
1195..=1214 => "๐ถ(hoooooo)๐ถ",
1215..=1234 => "(give you up)",
1235..=1259 => "๐ถ(hoooooo)๐ถ",
1260..=1279 => "(give you up)",
1280..=1299 => "Never gonna give, never gonna give",
1300..=1319 => "(give you up)",
1320..=1339 => "Never gonna give, never gonna give",
1340..=1364 => "(give you up)",
1365..=1384 => "We've known each other",
1385..=1407 => "for so long",
1408..=1429 => "Your heart's been aching but",
1430..=1449 => "you're too shy to say it (say it)",
1450..=1469 => "Inside, we both know what's been",
1470..=1489 => "going on (going on)",
1490..=1509 => "We know the game and we're",
1510..=1529 => "gonna play it",
1530..=1559 => "I just want to tell you",
1560..=1581 => "how I'm feeling",
1582..=1609 => "Gotta make you understand",
1610..=1629 => "Never gonna give you up",
1630..=1649 => "Never gonna let you down",
1650..=1669 => "Never gonna run around",
1670..=1694 => "and desert you",
1695..=1714 => "Never gonna make you cry",
1715..=1734 => "Never gonna say goodbye",
1735..=1759 => "Never gonna tell a lie",
1760..=1779 => "and hurt you",
1780..=1799 => "Never gonna give you up",
1800..=1819 => "Never gonna let you down",
1820..=1839 => "Never gonna run around",
1840..=1864 => "and desert you",
1865..=1884 => "Never gonna make you cry",
1885..=1904 => "Never gonna say goodbye",
1905..=1929 => "Never gonna tell a lie",
1930..=1949 => "and hurt you",
1950..=1969 => "Never gonna give you up",
1970..=1989 => "Never gonna let you down",
1990..=2009 => "Never gonna run around",
2010..=2034 => "and desert you",
2035..=2054 => "Never gonna make you cry",
2055..=2074 => "Never gonna say goodbye",
2075..=2099 => "Never gonna tell a lie",
2100..=2200 => "and hurt you",
_ => unreachable!("invalid frame"),
}
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/examples/layers.rs | examples/layers.rs | use std::{fs::File, io::BufReader};
use fuqr::{
generate,
matrix::{Matrix, Module},
QrOptions,
};
use image::{
codecs::gif::{GifDecoder, GifEncoder},
imageops::{self, FilterType},
AnimationDecoder, Delay, DynamicImage, Frame, GenericImage, GenericImageView, ImageBuffer,
ImageError, Rgba,
};
/// Re-encodes `gif_path` as an animated GIF with the QR code drawn
/// around/over each frame. `bg_cover_size` draws QR pixels under the
/// gif, `fg_cover_size` draws them on top; 0 skips that layer.
fn overlay(
    matrix: &Matrix<Module>,
    gif_path: &str,
    out_path: &str,
    pixel_size: u32,
    bg_cover_size: u32,
    fg_cover_size: u32,
) -> Result<(), ImageError> {
    let margin = 2;
    let width = (matrix.width as u32 + margin * 2) * pixel_size;
    let out = File::create(out_path)?;
    let mut encoder = GifEncoder::new(out);
    encoder.set_repeat(image::codecs::gif::Repeat::Infinite)?;
    let overlay = BufReader::new(File::open(gif_path)?);
    let decoder = GifDecoder::new(overlay)?;
    let o_frames = decoder.into_frames();
    for o_frame in o_frames {
        // Start from a white canvas each frame.
        let mut img_buf = ImageBuffer::from_pixel(width, width, Rgba([255, 255, 255, 255]));
        if bg_cover_size > 0 {
            img_buf = draw_qr(
                DynamicImage::ImageRgba8(img_buf),
                matrix,
                margin,
                pixel_size,
                bg_cover_size,
            )?
            .to_rgba8();
        }
        let o_frame = o_frame.unwrap();
        // Scale the gif frame to canvas height, preserving aspect ratio,
        // and center it horizontally.
        let ratio = o_frame.buffer().width() as f64 / o_frame.buffer().height() as f64;
        let o_width = (width as f64 * ratio) as u32;
        let o_frame = imageops::resize(o_frame.buffer(), o_width, width, FilterType::Nearest);
        imageops::overlay(
            &mut img_buf,
            &o_frame,
            (width as i64 - o_width as i64) / 2,
            0,
        );
        if fg_cover_size > 0 {
            img_buf = draw_qr(
                DynamicImage::ImageRgba8(img_buf),
                matrix,
                margin,
                pixel_size,
                fg_cover_size,
            )?
            .to_rgba8();
        }
        let frame = Frame::from_parts(img_buf, 0, 0, Delay::from_numer_denom_ms(1000, 6));
        encoder.encode_frame(frame)?;
    }
    Ok(())
}
/// Draws the QR code over a grayscale photo background and saves a PNG.
fn background(matrix: &Matrix<Module>) -> Result<(), ImageError> {
    let img = image::open("examples/misc/jeancarloemer.jpg")?;
    let pixel_size = 6;
    let margin = 2;
    let width = (matrix.width as u32 + margin * 2) * pixel_size;
    let img = img
        .resize(
            width,
            width,
            // Nearest is fastest and noisiest resize filter
            FilterType::Nearest,
        )
        .grayscale();
    // Small cover size (2 of 6 px) keeps most of the photo visible.
    let img = draw_qr(img, matrix, margin, pixel_size, 2)?;
    img.save("tmp/layers_background.png")?;
    Ok(())
}
/// Stamps the QR code onto `img`: finder patterns are drawn at full
/// `pixel_size`, every other module as a centered `cover_size` square —
/// and only where the underlying image isn't already dark/light enough
/// to read as that module.
fn draw_qr(
    mut img: DynamicImage,
    matrix: &Matrix<Module>,
    margin: u32,
    pixel_size: u32,
    cover_size: u32,
) -> Result<DynamicImage, ImageError> {
    let size = (matrix.width as u32 + margin * 2) * pixel_size;
    assert_eq!(size, img.width());
    assert_eq!(size, img.height());
    // Offset that centers the cover square inside its module cell.
    let gap = (pixel_size - cover_size) / 2;
    // One luma sample per module, used to decide whether the image
    // already reads as dark/light at that cell.
    let luma = img
        .resize(
            matrix.width as u32,
            matrix.width as u32,
            FilterType::Nearest,
        )
        .grayscale();
    for y in 0..matrix.width {
        for x in 0..matrix.width {
            let module = matrix.get(x, y);
            let on = module.has(Module::ON);
            let pixel = if on {
                [0, 0, 0, 255]
            } else {
                [255, 255, 255, 255]
            };
            // Finder patterns are always painted solid for reliability.
            if module.has(Module::FINDER) {
                for dy in 0..pixel_size {
                    for dx in 0..pixel_size {
                        img.put_pixel(
                            (x as u32 + margin) * pixel_size + dx,
                            (y as u32 + margin) * pixel_size + dy,
                            image::Rgba(pixel),
                        )
                    }
                }
            }
            // QR code scanners use local blackpoint thresholds,
            // or at least a global blackpoint based on image heuristics
            // We'll keep things simple
            let l = luma.get_pixel(x as u32, y as u32).0[0];
            if (on && l > 200) || (!on && l < 50) {
                continue;
            }
            for dy in 0..cover_size {
                for dx in 0..cover_size {
                    img.put_pixel(
                        (x as u32 + margin) * pixel_size + dx + gap,
                        (y as u32 + margin) * pixel_size + dy + gap,
                        image::Rgba(pixel),
                    )
                }
            }
        }
    }
    Ok(img)
}
/// Generates the three demo outputs: a minimal-cover gif overlay, a
/// maximal-cover gif overlay, and a photo-background PNG.
fn main() -> Result<(), ImageError> {
    let qr_code = generate("https://github.com/zhengkyl/fuqr", &QrOptions::new()).unwrap();
    overlay(
        &qr_code.matrix,
        "examples/misc/spin.gif",
        "tmp/layers_min.gif",
        6,
        3,
        0,
    )?;
    overlay(
        &qr_code.matrix,
        "examples/misc/4floss.gif",
        "tmp/layers_max.gif",
        6,
        6,
        1,
    )?;
    background(&qr_code.matrix)?;
    Ok(())
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/examples/weave.rs | examples/weave.rs | use fuqr::{
generate,
matrix::{Matrix, Module},
QrOptions,
};
use image::ImageError;
/// Renders the QR code as an over/under "weave": dark modules become
/// vertical strips, light modules horizontal strips, with `gap` pixels
/// of contrasting border suggesting the crossing. `flip` swaps the two
/// colors/orientations. Finder centers are drawn solid (gap = 0).
fn weave(
    matrix: &Matrix<Module>,
    gap: u32,
    flip: bool,
) -> image::ImageBuffer<image::Rgb<u8>, Vec<u8>> {
    // Pixels per module.
    let density: u32 = 11;
    let margin = 2;
    let size = matrix.width + margin + margin;
    let mut img_buf = image::ImageBuffer::from_pixel(
        size as u32 * density,
        size as u32 * density,
        image::Rgb([180 as u8, 180, 180]),
    );
    let black = [0 as u8, 0, 0];
    let white = [255 as u8, 255, 255];
    for y in 0..size {
        for x in 0..size {
            let in_qr = y >= margin
                && y < matrix.width + margin
                && x >= margin
                && x < matrix.width + margin;
            // NOTE(review): px is derived from y and py from x, which
            // transposes the output — presumably intentional for this
            // art style; verify the result still scans.
            let px = y as u32 * density;
            let py = x as u32 * density;
            let gap = if in_qr
                && matrix
                    .get(x - margin, y - margin)
                    .has(Module::FINDER_CENTER)
            {
                0
            } else {
                gap
            };
            let (black, white) = if flip { (white, black) } else { (black, white) };
            if (in_qr && matrix.get(x - margin, y - margin).has(Module::ON)) ^ flip {
                // Dark module: strip along one axis plus contrasting caps.
                for dx in 0..density {
                    for dy in gap..density - gap {
                        let p = img_buf.get_pixel_mut(px + dx, py + dy);
                        *p = image::Rgb(black);
                    }
                }
                for dx in gap..density - gap {
                    for dy in 0..gap {
                        let p = img_buf.get_pixel_mut(px + dx, py + dy);
                        *p = image::Rgb(white);
                    }
                    for dy in density - gap..density {
                        let p = img_buf.get_pixel_mut(px + dx, py + dy);
                        *p = image::Rgb(white);
                    }
                }
            } else {
                // Light module: strip along the other axis plus caps.
                for dy in 0..density {
                    for dx in gap..density - gap {
                        let p = img_buf.get_pixel_mut(px + dx, py + dy);
                        *p = image::Rgb(white);
                    }
                }
                for dy in gap..density - gap {
                    for dx in 0..gap {
                        let p = img_buf.get_pixel_mut(px + dx, py + dy);
                        *p = image::Rgb(black);
                    }
                    for dx in density - gap..density {
                        let p = img_buf.get_pixel_mut(px + dx, py + dy);
                        *p = image::Rgb(black);
                    }
                }
            }
        }
    }
    img_buf
}
/// Like `weave`, but the strips are drawn as diagonals: each module is
/// painted as a sheared parallelogram whose ends extend into like-
/// colored neighbors (`*_overflow`) so adjacent modules connect.
/// The row-by-row `start`/`length` math traces the diamond profile of
/// the sheared strip; `d_gap` trims its edges for the woven look.
fn diag(
    matrix: &Matrix<Module>,
    d_gap: isize,
    flip: bool,
) -> image::ImageBuffer<image::Rgb<u8>, Vec<u8>> {
    // Pixels per module.
    let density = 11;
    let margin = 2;
    let size = matrix.width as isize + margin + margin;
    let end = matrix.width as isize + margin;
    // Dark-module lookup in margin-inclusive coordinates; the margin
    // band reads as light.
    let get_matrix = |x, y| {
        if y < margin || y >= end || x < margin || x >= end {
            return false;
        }
        return matrix
            .get((x - margin) as usize, (y - margin) as usize)
            .has(Module::ON);
    };
    let mut img_buf = image::ImageBuffer::from_pixel(
        (size * density) as u32,
        (size * density) as u32,
        image::Rgb([180 as u8, 180, 180]),
    );
    let black = [0 as u8, 0, 0];
    let white = [255 as u8, 255, 255];
    for y in 0..size {
        for x in 0..size {
            let in_qr = y >= margin && y < end && x >= margin && x < end;
            let px = x * density;
            let py = y * density;
            let (black, white) = if flip { (white, black) } else { (black, white) };
            // Finder centers are drawn solid (no gap).
            let gap = if in_qr
                && matrix
                    .get((x - margin) as usize, (y - margin) as usize)
                    .has(Module::FINDER_CENTER)
            {
                0
            } else {
                d_gap
            };
            if get_matrix(x, y) ^ flip {
                // Extend further (4px) into same-colored neighbors so
                // consecutive dark modules form one continuous stroke.
                let top_overflow = if get_matrix(x + 1, y) ^ flip { 4 } else { gap };
                let bot_overflow = if get_matrix(x, y + 1) ^ flip { 4 } else { gap };
                for dy in -(top_overflow)..density + bot_overflow {
                    if py + dy < 0 || py + dy >= size * density {
                        continue;
                    }
                    let mut start = density / 2 - dy;
                    let mut length = density;
                    if dy <= density / 2 - top_overflow {
                        length = 2 * (dy + top_overflow) + 1;
                    }
                    if dy >= density / 2 + bot_overflow {
                        start = dy - (density / 2 + bot_overflow) - bot_overflow;
                        length = 2 * (density - 1 + bot_overflow - dy) + 1;
                    }
                    for dx in start + gap..start + length - gap {
                        if (px + dx) < 0 || px + dx >= size * density {
                            continue;
                        }
                        let p = img_buf.get_pixel_mut((px + dx) as u32, (py + dy) as u32);
                        *p = image::Rgb(black);
                    }
                }
            } else {
                // Light modules shear the opposite way.
                let top_overflow = 7;
                let bot_overflow = if !get_matrix(x, y + 1) ^ flip { 4 } else { gap };
                for dy in -(top_overflow)..density + bot_overflow {
                    if py + dy < 0 || py + dy >= size * density {
                        continue;
                    }
                    let mut start = -density / 2 + dy;
                    let mut length = density;
                    if dy <= density / 2 - top_overflow {
                        start = -dy + density / 2 - top_overflow - top_overflow;
                        length = 2 * (dy + top_overflow) + 1;
                    }
                    if dy >= density / 2 + bot_overflow {
                        length = 2 * (density - 1 + bot_overflow - dy) + 1;
                    }
                    for dx in start + gap..start + length - gap {
                        if (px + dx) < 0 || px + dx >= size * density {
                            continue;
                        }
                        let p = img_buf.get_pixel_mut((px + dx) as u32, (py + dy) as u32);
                        *p = image::Rgb(white);
                    }
                }
            }
        }
    }
    img_buf
}
/// Renders the three weave variants (thick, thin/flipped, diagonal).
fn main() -> Result<(), ImageError> {
    let qr_code = generate("https://github.com/zhengkyl/fuqr", &QrOptions::new()).unwrap();
    let img_buf = weave(&qr_code.matrix, 1, false);
    img_buf.save("tmp/weave_thick.png").unwrap();
    let img_buf = weave(&qr_code.matrix, 3, true);
    img_buf.save("tmp/weave_thin.png").unwrap();
    let img_buf = diag(&qr_code.matrix, 1, false);
    img_buf.save("tmp/weave_diag.png").unwrap();
    Ok(())
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/examples/nested.rs | examples/nested.rs | use fuqr::{
generate, generate_qart, matrix::Module, qart::WeightPixel, qr_code::Version, QrOptions,
};
use image::{ImageBuffer, Rgb};
/// Renders both nesting demos: matryoshka-style and inner/outer donut.
fn main() {
    doll();
    donut();
}
/// Matryoshka QR: generates a small code, then repeatedly embeds the
/// previous code (plus quiet zone) as the QArt image of a larger one,
/// and saves the final result as a PNG.
fn doll() {
    let margin = 2;
    let version = 1;
    let mut prev_width = version * 4 + 17;
    let mut prev_full_width = prev_width + 2 * margin;
    // Innermost code, payload "1", at the smallest version.
    let mut prev_matrix = generate(
        "1",
        &QrOptions::new()
            .strict_ecl(true)
            .min_version(Version(version)),
    )
    .unwrap()
    .matrix;
    for (message, version) in [("2", 6), ("3", 11)] {
        let width = version * 4 + 17;
        let mut weights = vec![WeightPixel::new(false, 0); width * width];
        let pad = (width - prev_full_width) / 2;
        // Copy the previous code (with a light quiet zone) into the
        // centered weight region of the next, larger code.
        for y in 0..prev_full_width {
            for x in 0..prev_full_width {
                let value = if x < margin
                    || y < margin
                    || x > prev_full_width - 1 - margin
                    || y > prev_full_width - 1 - margin
                {
                    false
                } else {
                    prev_matrix.get(x - margin, y - margin).has(Module::ON)
                };
                weights[(y + pad) * width + x + pad] = WeightPixel::new(value, 127);
            }
        }
        let mut matrix = generate_qart(
            message,
            &QrOptions::new()
                .strict_ecl(true)
                .min_version(Version(version)),
            &weights,
        )
        .unwrap()
        .matrix;
        // Override timing/alignment modules with the embedded image
        // wherever a weighted pixel was requested.
        for y in 0..width {
            for x in 0..width {
                let module = matrix.get(x, y);
                let weight = weights[y * width + x];
                if (module.has(Module::TIMING) || module.has(Module::ALIGNMENT))
                    && weight.weight() > 0
                {
                    matrix.set(x, y, Module(weight.value() as u8));
                }
            }
        }
        prev_matrix = matrix;
        prev_width = version * 4 + 17;
        prev_full_width = prev_width + 2 * margin;
    }
    // Render the outermost code at 5 px per module with a white margin.
    let scale = 5;
    let img_buf = ImageBuffer::from_fn(
        (prev_full_width * scale) as u32,
        (prev_full_width * scale) as u32,
        |x, y| {
            let x = x as usize / scale;
            let y = y as usize / scale;
            if x < margin
                || y < margin
                || x > prev_full_width - 1 - margin
                || y > prev_full_width - 1 - margin
            {
                return Rgb([255, 255, 255]);
            }
            let qr_x = (x - margin) as usize;
            let qr_y = (y - margin) as usize;
            let module = prev_matrix.get(qr_x, qr_y);
            if module.has(Module::ON) {
                Rgb([0 as u8, 0, 0])
            } else {
                Rgb([255, 255, 255])
            }
        },
    );
    img_buf.save("examples/nesting_doll.png").unwrap();
}
/// Two codes in one image: each module cell shows the "FAR" code as its
/// outer ring and the "NEAR" code as a small centered square, so the
/// decoded payload depends on viewing distance.
fn donut() {
    let qr_inner = generate("NEAR", &QrOptions::new()).unwrap();
    let qr_outer = generate("FAR", &QrOptions::new()).unwrap();
    let version = 1;
    let margin = 2;
    let width = version * 4 + 17;
    let full_width = width + 2 * margin;
    // Inner square is 3 px inside an 11 px cell.
    let inner_scale = 3;
    let outer_scale = 11;
    let gap = (outer_scale - inner_scale) / 2;
    let mut img_buf = ImageBuffer::from_pixel(
        full_width * outer_scale,
        full_width * outer_scale,
        Rgb([255, 255, 255]),
    );
    let scaled_margin = margin * outer_scale;
    for y in 0..width {
        for x in 0..width {
            let outer = if qr_outer.matrix.get(x as usize, y as usize).has(Module::ON) {
                Rgb([0 as u8, 0, 0])
            } else {
                Rgb([255, 255, 255])
            };
            let inner = if qr_inner.matrix.get(x as usize, y as usize).has(Module::ON) {
                Rgb([0 as u8, 0, 0])
            } else {
                Rgb([255, 255, 255])
            };
            let sy = y * outer_scale;
            let sx = x * outer_scale;
            for dy in 0..outer_scale {
                for dx in 0..outer_scale {
                    img_buf.put_pixel(
                        sx + dx + scaled_margin,
                        sy + dy + scaled_margin,
                        // Center of the cell shows the inner code,
                        // the surrounding ring shows the outer code.
                        if dx >= gap
                            && dy >= gap
                            && dx < gap + inner_scale
                            && dy < gap + inner_scale
                        {
                            inner
                        } else {
                            outer
                        },
                    );
                }
            }
        }
    }
    img_buf.save("examples/nesting_donut.png").unwrap();
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
zhengkyl/fuqr | https://github.com/zhengkyl/fuqr/blob/34df61784a809b2de44b21803501a841d8135dfc/examples/scale.rs | examples/scale.rs | use std::fs::File;
use fuqr::{
generate,
matrix::{Matrix, Module},
QrOptions,
};
use image::{codecs::gif::GifEncoder, Delay, Frame, ImageError, Rgb, Rgba};
/// Renders dark modules as squares whose size grows with distance from
/// the matrix center, producing a radial "bloom" effect PNG.
fn circle(matrix: &Matrix<Module>) -> Result<(), ImageError> {
    let center = matrix.width / 2;
    let margin = 2;
    let unit = 10;
    // Pixel size ranges from min_size at the center to max_size at the
    // corners, linearly in euclidean distance.
    let max_size = 18;
    let min_size = 3;
    let max_dist = f64::sqrt(2.0 * center as f64 * center as f64);
    let per_dist = (max_size - min_size) as f64 / max_dist as f64;
    let size = (matrix.width as u32 + margin * 2) * unit;
    let mut buf: image::ImageBuffer<Rgb<u8>, Vec<u8>> =
        image::ImageBuffer::from_pixel(size, size, Rgb([255, 255, 255]));
    for y in 0..matrix.width {
        for x in 0..matrix.width {
            if !matrix.get(x, y).has(Module::ON) {
                continue;
            }
            let dx = isize::abs(x as isize - (center as isize)) as f64;
            let dy = isize::abs(y as isize - (center as isize)) as f64;
            let dist = f64::sqrt(dx * dx + dy * dy);
            let pixel_size = (per_dist * dist) as u32 + min_size;
            // Center the (possibly oversized) square on its module cell.
            let offset = (unit as isize - pixel_size as isize) / 2;
            for dy in 0..pixel_size {
                for dx in 0..pixel_size {
                    let xi = (x as u32 + margin) * unit + dx;
                    let yi = (y as u32 + margin) * unit + dy;
                    let pixel = buf.get_pixel_mut(
                        (xi as isize + offset) as u32,
                        (yi as isize + offset) as u32,
                    );
                    *pixel = image::Rgb([0, 0, 0])
                }
            }
        }
    }
    buf.save("tmp/scale_circle.png")?;
    Ok(())
}
/// Animated GIF where module size pulses along diagonal stripes that
/// sweep across the code over 50 frames.
fn stripes(matrix: &Matrix<Module>) -> Result<(), ImageError> {
    let out = File::create("tmp/scale_stripes.gif")?;
    let mut encoder = GifEncoder::new(out);
    encoder.set_repeat(image::codecs::gif::Repeat::Infinite)?;
    // Triangle wave over the anti-diagonal index drives the size.
    let period = 100;
    let middle = period / 2;
    let unit = 10;
    let margin = 2;
    let size = (matrix.width as u32 + margin * 2) * unit;
    for j in 0..50 {
        let mut buf: image::ImageBuffer<Rgba<u8>, Vec<u8>> =
            image::ImageBuffer::from_pixel(size, size, Rgba([255, 255, 255, 255]));
        for y in 0..matrix.width {
            for x in 0..matrix.width {
                if !matrix.get(x, y).has(Module::ON) {
                    continue;
                }
                let index = (x + y) as isize;
                let pos = isize::abs(middle as isize - ((index * 5 + (j * 2)) % period) as isize);
                // scale is a percentage (50..=150) of the module unit.
                let scale = 150 - 2 * pos;
                let pixel_size = scale as u32 * unit / 100;
                let offset = (unit as isize - pixel_size as isize) / 2;
                for dy in 0..pixel_size {
                    for dx in 0..pixel_size {
                        let xi = (x as u32 + margin) * unit + dx;
                        let yi = (y as u32 + margin) * unit + dy;
                        let pixel = buf.get_pixel_mut(
                            (xi as isize + offset) as u32,
                            (yi as isize + offset) as u32,
                        );
                        *pixel = image::Rgba([0, 0, 0, 255])
                    }
                }
            }
        }
        // gifs are limited to 50fps, any higher and it resets to 10fps
        let frame = Frame::from_parts(buf, 0, 0, Delay::from_numer_denom_ms(1000, 30));
        encoder.encode_frame(frame)?;
    }
    Ok(())
}
/// Animated GIF like `stripes`, but with a per-column phase offset so
/// the size pulse travels as a wavy front instead of straight stripes.
fn waves(matrix: &Matrix<Module>) -> Result<(), ImageError> {
    let out = File::create("tmp/scale_waves.gif")?;
    let mut encoder = GifEncoder::new(out);
    encoder.set_repeat(image::codecs::gif::Repeat::Infinite)?;
    let unit = 10;
    let margin = 2;
    let size = (matrix.width as u32 + margin * 2) * unit;
    let period = 100;
    let middle = period / 3; // half -> smooth, anything less has an edge
    // Horizontal sub-period producing the wave's zig-zag phase shift.
    let x_period = 10;
    let x_middle = x_period / 2;
    for j in (0..50).rev() {
        let mut buf: image::ImageBuffer<Rgba<u8>, Vec<u8>> =
            image::ImageBuffer::from_pixel(size, size, Rgba([255, 255, 255, 255]));
        for y in 0..matrix.width {
            for x in 0..matrix.width {
                if !matrix.get(x, y).has(Module::ON) {
                    continue;
                }
                let index = (x + y) as isize;
                let offset_x = isize::abs(x_middle - (x as isize % x_period));
                let pos = isize::abs(
                    middle as isize - (((index + offset_x) * 3 + (j * 2)) % period) as isize,
                );
                let scale = 180 - 2 * pos;
                let pixel_size = scale as u32 * unit / 100;
                let offset = (unit as isize - pixel_size as isize) / 2;
                for dy in 0..pixel_size {
                    for dx in 0..pixel_size {
                        let xi = (x as u32 + margin) * unit + dx;
                        let yi = (y as u32 + margin) * unit + dy;
                        let pixel = buf.get_pixel_mut(
                            (xi as isize + offset) as u32,
                            (yi as isize + offset) as u32,
                        );
                        *pixel = image::Rgba([0, 0, 0, 255])
                    }
                }
            }
        }
        // gifs are limited to 50fps, any higher and it resets to 10fps
        let frame = Frame::from_parts(buf, 0, 0, Delay::from_numer_denom_ms(1000, 30));
        encoder.encode_frame(frame)?;
    }
    Ok(())
}
/// Renders all three scaling demos (static circle, stripes gif, waves gif).
fn main() -> Result<(), ImageError> {
    let qr_code = generate("https://github.com/zhengkyl/fuqr", &QrOptions::new()).unwrap();
    circle(&qr_code.matrix)?;
    stripes(&qr_code.matrix)?;
    waves(&qr_code.matrix)?;
    Ok(())
}
| rust | MIT | 34df61784a809b2de44b21803501a841d8135dfc | 2026-01-04T20:16:52.445539Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/attribute.rs | ibus-sys/src/attribute.rs | use crate::glib::guint;
// Bindgen-style FFI surface for IBus text attributes. The constant names and
// values mirror the C enums, so they must not be changed.

// IBusAttrType enum values (kind of attribute carried by an IBusAttribute).
pub const IBusAttrType_IBUS_ATTR_TYPE_UNDERLINE: IBusAttrType = 1;
pub const IBusAttrType_IBUS_ATTR_TYPE_FOREGROUND: IBusAttrType = 2;
pub const IBusAttrType_IBUS_ATTR_TYPE_BACKGROUND: IBusAttrType = 3;
#[doc = " IBusAttrType:\n @IBUS_ATTR_TYPE_UNDERLINE: Decorate with underline.\n @IBUS_ATTR_TYPE_FOREGROUND: Foreground color.\n @IBUS_ATTR_TYPE_BACKGROUND: Background color.\n\n Type enumeration of IBusText attribute."]
pub type IBusAttrType = ::std::os::raw::c_uint;
// IBusAttrUnderline enum values (underline style when the attribute type is
// UNDERLINE).
pub const IBusAttrUnderline_IBUS_ATTR_UNDERLINE_NONE: IBusAttrUnderline = 0;
pub const IBusAttrUnderline_IBUS_ATTR_UNDERLINE_SINGLE: IBusAttrUnderline = 1;
pub const IBusAttrUnderline_IBUS_ATTR_UNDERLINE_DOUBLE: IBusAttrUnderline = 2;
pub const IBusAttrUnderline_IBUS_ATTR_UNDERLINE_LOW: IBusAttrUnderline = 3;
pub const IBusAttrUnderline_IBUS_ATTR_UNDERLINE_ERROR: IBusAttrUnderline = 4;
#[doc = " IBusAttrUnderline:\n @IBUS_ATTR_UNDERLINE_NONE: No underline.\n @IBUS_ATTR_UNDERLINE_SINGLE: Single underline.\n @IBUS_ATTR_UNDERLINE_DOUBLE: Double underline.\n @IBUS_ATTR_UNDERLINE_LOW: Low underline ? FIXME\n @IBUS_ATTR_UNDERLINE_ERROR: Error underline\n\n Type of IBusText attribute."]
pub type IBusAttrUnderline = ::std::os::raw::c_uint;
#[doc = " IBusAttribute:\n @type: IBusAttributeType\n @value: Value for the type.\n @start_index: The starting index, inclusive.\n @end_index: The ending index, exclusive.\n\n Signify the type, value and scope of the attribute.\n The scope starts from @start_index till the @end_index-1."]
// Opaque, size-only stand-in for the C IBusAttribute struct; in this crate it
// is only ever handled behind raw pointers.
pub type IBusAttribute = [u64; 8usize];
extern "C" {
    // attribute
    #[doc = " ibus_attribute_new:\n @type: Type of the attribute.\n @value: Value of the attribute.\n @start_index: Where attribute starts.\n @end_index: Where attribute ends.\n\n Creates a new IBusAttribute.\n\n Returns: (transfer none): A newly allocated IBusAttribute."]
    pub fn ibus_attribute_new(
        type_: guint,
        value: guint,
        start_index: guint,
        end_index: guint,
    ) -> *mut IBusAttribute;
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/engine.rs | ibus-sys/src/engine.rs | use crate::glib::{gboolean, guint};
use crate::lookup_table::IBusLookupTable;
use crate::prop_list::IBusPropList;
use crate::property::IBusProperty;
use crate::text::IBusText;
// Raw declarations for the subset of the ibus_engine_* C API used by this
// crate; signatures mirror the ibus headers (bindgen-style).
extern "C" {
    // engine
    // Commits `text` to the client application.
    pub fn ibus_engine_commit_text(engine: *mut IBusEngine, text: *mut IBusText);
    pub fn ibus_engine_hide_lookup_table(engine: *mut IBusEngine);
    #[doc = " ibus_engine_update_preedit_text:\n @engine: An IBusEngine.\n @text: Update content.\n @cursor_pos: Current position of cursor\n @visible: Whether the pre-edit buffer is visible.\n\n Update the pre-edit buffer.\n\n (Note: The text object will be released, if it is floating.\n If caller want to keep the object, caller should make the object\n sink by g_object_ref_sink.)"]
    pub fn ibus_engine_update_preedit_text(
        engine: *mut IBusEngine,
        text: *mut IBusText,
        cursor_pos: guint,
        visible: gboolean,
    );
    #[doc = " ibus_engine_hide_preedit_text:\n @engine: An IBusEngine.\n\n Hide the pre-edit buffer."]
    pub fn ibus_engine_hide_preedit_text(engine: *mut IBusEngine);
    #[doc = " ibus_engine_hide_auxiliary_text:\n @engine: An IBusEngine.\n\n Hide the auxiliary bar."]
    pub fn ibus_engine_hide_auxiliary_text(engine: *mut IBusEngine);
    #[doc = " ibus_engine_update_auxiliary_text:\n @engine: An IBusEngine.\n @text: Update content.\n @visible: Whether the auxiliary text bar is visible.\n\n Update the auxiliary bar.\n\n (Note: The text object will be released, if it is floating.\n If caller want to keep the object, caller should make the object\n sink by g_object_ref_sink.)"]
    pub fn ibus_engine_update_auxiliary_text(
        engine: *mut IBusEngine,
        text: *mut IBusText,
        visible: gboolean,
    );
    #[doc = " ibus_engine_update_lookup_table:\n @engine: An IBusEngine.\n @lookup_table: An lookup_table.\n @visible: Whether the lookup_table is visible.\n\n Update the lookup table.\n\n (Note: The table object will be released, if it is floating.\n If caller want to keep the object, caller should make the object\n sink by g_object_ref_sink.)"]
    pub fn ibus_engine_update_lookup_table(
        engine: *mut IBusEngine,
        lookup_table: *mut IBusLookupTable,
        visible: gboolean,
    );
    // Registers the engine's property list (toolbar/menu entries).
    pub fn ibus_engine_register_properties(engine: *mut IBusEngine, prop_list: *mut IBusPropList);
    pub fn ibus_engine_update_property(engine: *mut IBusEngine, prop: *mut IBusProperty);
}
// Opaque, size-only stand-in for the C IBusEngine struct; every use above
// goes through a raw pointer, so the layout details are irrelevant here.
pub type IBusEngine = [u64; 11usize];
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/lib.rs | ibus-sys/src/lib.rs | #![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
// Crate root for the raw ibus FFI bindings; each module wraps one area of
// the libibus C API.
pub mod attr_list;
pub mod attribute; // IBusAttribute / text-attribute enums
pub mod core; // core types such as IBusSerializable
pub mod engine; // ibus_engine_* input-method engine calls
pub mod glib; // minimal GLib aliases (guint, gboolean, gpointer, ...)
pub mod ibus_key; // IBUS_KEY_* keysym constants
pub mod keys;
pub mod lookup_table; // IBusLookupTable candidate window
pub mod prop_list; // IBusPropList
pub mod property; // IBusProperty
pub mod text; // IBusText
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/lookup_table.rs | ibus-sys/src/lookup_table.rs | use crate::core::IBusSerializable;
use crate::glib::{g_object_ref_sink, gboolean, gint, gpointer, guint, GArray};
use crate::text::IBusText;
// Mirror of the C IBusLookupTable struct; #[repr(C)] means field order and
// types must match the ibus headers exactly — do not reorder.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct IBusLookupTable {
    pub parent: IBusSerializable, // C "base class" header
    pub page_size: guint,         // candidates shown per page
    pub cursor_pos: guint,        // index of the highlighted candidate
    pub cursor_visible: gboolean,
    pub round: gboolean, // TRUE => paging wraps around
    pub orientation: gint,
    pub candidates: *mut GArray, // owned/managed by the C side
    pub labels: *mut GArray,     // owned/managed by the C side
}
impl IBusLookupTable {
    /// Creates a new lookup table via `ibus_lookup_table_new` and returns it
    /// by value.
    ///
    /// The C constructor returns a floating reference; `g_object_ref_sink`
    /// claims ownership before the struct is copied out.
    pub fn new(
        page_size: guint,
        cursor_pos: guint,
        cursor_visible: gboolean,
        round: gboolean,
    ) -> Self {
        unsafe {
            // SAFETY: ibus_lookup_table_new returns a valid (floating) object
            // pointer, which is sunk — making us the owner — before it is
            // dereferenced.
            let lookup_table = g_object_ref_sink(ibus_lookup_table_new(
                page_size,
                cursor_pos,
                cursor_visible,
                round,
            ) as gpointer) as *mut IBusLookupTable;
            *lookup_table
        }
    }
    /// Number of candidates currently stored in the table.
    pub fn get_number_of_candidates(&self) -> guint {
        // SAFETY: `self` is a valid IBusLookupTable for the duration of the call.
        unsafe { ibus_lookup_table_get_number_of_candidates(self as *const Self) }
    }
    /// Removes all candidates from the table.
    pub fn clear(&mut self) {
        // SAFETY: `self` is a valid, exclusively borrowed table.
        unsafe { ibus_lookup_table_clear(self as *mut Self) }
    }
    /// Moves the cursor up one candidate; returns the C call's gboolean
    /// result as a bool.
    pub fn cursor_up(&mut self) -> bool {
        // SAFETY: `self` is a valid, exclusively borrowed table.
        // gboolean is TRUE for any nonzero value, so test `!= 0` instead of
        // comparing against 1 specifically.
        unsafe { ibus_lookup_table_cursor_up(self as *mut _) != 0 }
    }
    /// Moves the cursor down one candidate; returns the C call's gboolean
    /// result as a bool.
    pub fn cursor_down(&mut self) -> bool {
        // SAFETY: `self` is a valid, exclusively borrowed table.
        unsafe { ibus_lookup_table_cursor_down(self as *mut _) != 0 }
    }
    /// Index of the currently highlighted candidate.
    pub fn get_cursor_pos(&mut self) -> guint {
        // SAFETY: `self` is a valid, exclusively borrowed table.
        unsafe { ibus_lookup_table_get_cursor_pos(self as *mut _) }
    }
    /// Moves the highlight to `cursor_pos`.
    pub fn set_cursor_pos(&mut self, cursor_pos: guint) {
        // SAFETY: `self` is a valid, exclusively borrowed table.
        unsafe { ibus_lookup_table_set_cursor_pos(self as *mut _, cursor_pos) }
    }
    /// Number of candidates shown per page.
    pub fn get_page_size(&mut self) -> guint {
        // SAFETY: `self` is a valid, exclusively borrowed table.
        unsafe { ibus_lookup_table_get_page_size(self as *mut _) }
    }
    /// Flips to the previous page; returns the C call's gboolean result.
    pub fn page_up(&mut self) -> bool {
        // SAFETY: `self` is a valid, exclusively borrowed table.
        unsafe { ibus_lookup_table_page_up(self as *mut _) != 0 }
    }
    /// Flips to the next page; returns the C call's gboolean result.
    pub fn page_down(&mut self) -> bool {
        // SAFETY: `self` is a valid, exclusively borrowed table.
        unsafe { ibus_lookup_table_page_down(self as *mut _) != 0 }
    }
    /// Appends `text` as a new candidate; per the C API docs the library takes
    /// its own reference on `text`.
    pub fn append_candidate(&mut self, text: *mut IBusText) {
        // SAFETY: `self` is valid and the caller guarantees `text` points to
        // a valid IBusText.
        unsafe { ibus_lookup_table_append_candidate(self as *mut _, text as *mut _) }
    }
}
// Raw declarations for the ibus_lookup_table_* C API wrapped by the impl
// above; signatures mirror the ibus headers (bindgen-style).
extern "C" {
    // lookup
    #[doc = " ibus_lookup_table_new:\n @page_size: number of candidate shown per page, the max value is 16.\n @cursor_pos: position index of cursor.\n @cursor_visible: whether the cursor is visible.\n @round: TRUE for lookup table wrap around.\n\n Craetes a new #IBusLookupTable.\n\n Returns: A newly allocated #IBusLookupTable."]
    pub fn ibus_lookup_table_new(
        page_size: guint,
        cursor_pos: guint,
        cursor_visible: gboolean,
        round: gboolean,
    ) -> *mut IBusLookupTable;
    pub fn ibus_lookup_table_clear(table: *mut IBusLookupTable);
    #[doc = " ibus_lookup_table_get_number_of_candidates:\n @table: An IBusLookupTable.\n\n Return the number of candidate in the table.\n\n Returns: The number of candidates in the table"]
    pub fn ibus_lookup_table_get_number_of_candidates(table: *const IBusLookupTable) -> guint;
    #[doc = " ibus_lookup_table_append_candidate:\n @table: An IBusLookupTable.\n @text: candidate word/phrase to be appended (in IBusText format).\n\n Append a candidate word/phrase to IBusLookupTable, and increase reference."]
    fn ibus_lookup_table_append_candidate(table: *mut IBusLookupTable, text: *mut IBusText);
    // Cursor movement / paging; each returns a gboolean (nonzero == TRUE).
    fn ibus_lookup_table_cursor_up(table: *mut IBusLookupTable) -> gboolean;
    fn ibus_lookup_table_cursor_down(table: *mut IBusLookupTable) -> gboolean;
    pub fn ibus_lookup_table_get_cursor_pos(table: *mut IBusLookupTable) -> guint;
    fn ibus_lookup_table_set_cursor_pos(table: *mut IBusLookupTable, cursor_pos: guint);
    fn ibus_lookup_table_get_page_size(table: *mut IBusLookupTable) -> guint;
    fn ibus_lookup_table_page_up(table: *mut IBusLookupTable) -> gboolean;
    fn ibus_lookup_table_page_down(table: *mut IBusLookupTable) -> gboolean;
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/ibus_key.rs | ibus-sys/src/ibus_key.rs | #![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
#![allow(dead_code)]
// IBUS_KEY_* keysym constants (bindgen-style); names and values mirror the
// ibuskeysyms.h header, which in turn follows the X11 keysym values.

// TTY function keys.
pub const IBUS_KEY_VoidSymbol: u32 = 16777215;
pub const IBUS_KEY_BackSpace: u32 = 65288;
pub const IBUS_KEY_Tab: u32 = 65289;
pub const IBUS_KEY_Linefeed: u32 = 65290;
pub const IBUS_KEY_Clear: u32 = 65291;
pub const IBUS_KEY_Return: u32 = 65293;
pub const IBUS_KEY_Pause: u32 = 65299;
pub const IBUS_KEY_Scroll_Lock: u32 = 65300;
pub const IBUS_KEY_Sys_Req: u32 = 65301;
pub const IBUS_KEY_Escape: u32 = 65307;
pub const IBUS_KEY_Delete: u32 = 65535;
// International & character-composition keys.
pub const IBUS_KEY_Multi_key: u32 = 65312;
pub const IBUS_KEY_Codeinput: u32 = 65335;
pub const IBUS_KEY_SingleCandidate: u32 = 65340;
pub const IBUS_KEY_MultipleCandidate: u32 = 65341;
pub const IBUS_KEY_PreviousCandidate: u32 = 65342;
// Japanese keyboard support.
pub const IBUS_KEY_Kanji: u32 = 65313;
pub const IBUS_KEY_Muhenkan: u32 = 65314;
pub const IBUS_KEY_Henkan_Mode: u32 = 65315;
pub const IBUS_KEY_Henkan: u32 = 65315;
pub const IBUS_KEY_Romaji: u32 = 65316;
pub const IBUS_KEY_Hiragana: u32 = 65317;
pub const IBUS_KEY_Katakana: u32 = 65318;
pub const IBUS_KEY_Hiragana_Katakana: u32 = 65319;
pub const IBUS_KEY_Zenkaku: u32 = 65320;
pub const IBUS_KEY_Hankaku: u32 = 65321;
pub const IBUS_KEY_Zenkaku_Hankaku: u32 = 65322;
pub const IBUS_KEY_Touroku: u32 = 65323;
pub const IBUS_KEY_Massyo: u32 = 65324;
pub const IBUS_KEY_Kana_Lock: u32 = 65325;
pub const IBUS_KEY_Kana_Shift: u32 = 65326;
pub const IBUS_KEY_Eisu_Shift: u32 = 65327;
pub const IBUS_KEY_Eisu_toggle: u32 = 65328;
pub const IBUS_KEY_Kanji_Bangou: u32 = 65335;
pub const IBUS_KEY_Zen_Koho: u32 = 65341;
pub const IBUS_KEY_Mae_Koho: u32 = 65342;
// Cursor control & motion.
pub const IBUS_KEY_Home: u32 = 65360;
pub const IBUS_KEY_Left: u32 = 65361;
pub const IBUS_KEY_Up: u32 = 65362;
pub const IBUS_KEY_Right: u32 = 65363;
pub const IBUS_KEY_Down: u32 = 65364;
pub const IBUS_KEY_Prior: u32 = 65365;
pub const IBUS_KEY_Page_Up: u32 = 65365;
pub const IBUS_KEY_Next: u32 = 65366;
pub const IBUS_KEY_Page_Down: u32 = 65366;
pub const IBUS_KEY_End: u32 = 65367;
pub const IBUS_KEY_Begin: u32 = 65368;
// Miscellaneous function keys.
pub const IBUS_KEY_Select: u32 = 65376;
pub const IBUS_KEY_Print: u32 = 65377;
pub const IBUS_KEY_Execute: u32 = 65378;
pub const IBUS_KEY_Insert: u32 = 65379;
pub const IBUS_KEY_Undo: u32 = 65381;
pub const IBUS_KEY_Redo: u32 = 65382;
pub const IBUS_KEY_Menu: u32 = 65383;
pub const IBUS_KEY_Find: u32 = 65384;
pub const IBUS_KEY_Cancel: u32 = 65385;
pub const IBUS_KEY_Help: u32 = 65386;
pub const IBUS_KEY_Break: u32 = 65387;
pub const IBUS_KEY_Mode_switch: u32 = 65406;
pub const IBUS_KEY_script_switch: u32 = 65406;
pub const IBUS_KEY_Num_Lock: u32 = 65407;
// Keypad (KP_*) keys.
pub const IBUS_KEY_KP_Space: u32 = 65408;
pub const IBUS_KEY_KP_Tab: u32 = 65417;
pub const IBUS_KEY_KP_Enter: u32 = 65421;
pub const IBUS_KEY_KP_F1: u32 = 65425;
pub const IBUS_KEY_KP_F2: u32 = 65426;
pub const IBUS_KEY_KP_F3: u32 = 65427;
pub const IBUS_KEY_KP_F4: u32 = 65428;
pub const IBUS_KEY_KP_Home: u32 = 65429;
pub const IBUS_KEY_KP_Left: u32 = 65430;
pub const IBUS_KEY_KP_Up: u32 = 65431;
pub const IBUS_KEY_KP_Right: u32 = 65432;
pub const IBUS_KEY_KP_Down: u32 = 65433;
pub const IBUS_KEY_KP_Prior: u32 = 65434;
pub const IBUS_KEY_KP_Page_Up: u32 = 65434;
pub const IBUS_KEY_KP_Next: u32 = 65435;
pub const IBUS_KEY_KP_Page_Down: u32 = 65435;
pub const IBUS_KEY_KP_End: u32 = 65436;
pub const IBUS_KEY_KP_Begin: u32 = 65437;
pub const IBUS_KEY_KP_Insert: u32 = 65438;
pub const IBUS_KEY_KP_Delete: u32 = 65439;
pub const IBUS_KEY_KP_Equal: u32 = 65469;
pub const IBUS_KEY_KP_Multiply: u32 = 65450;
pub const IBUS_KEY_KP_Add: u32 = 65451;
pub const IBUS_KEY_KP_Separator: u32 = 65452;
pub const IBUS_KEY_KP_Subtract: u32 = 65453;
pub const IBUS_KEY_KP_Decimal: u32 = 65454;
pub const IBUS_KEY_KP_Divide: u32 = 65455;
pub const IBUS_KEY_KP_0: u32 = 65456;
pub const IBUS_KEY_KP_1: u32 = 65457;
pub const IBUS_KEY_KP_2: u32 = 65458;
pub const IBUS_KEY_KP_3: u32 = 65459;
pub const IBUS_KEY_KP_4: u32 = 65460;
pub const IBUS_KEY_KP_5: u32 = 65461;
pub const IBUS_KEY_KP_6: u32 = 65462;
pub const IBUS_KEY_KP_7: u32 = 65463;
pub const IBUS_KEY_KP_8: u32 = 65464;
pub const IBUS_KEY_KP_9: u32 = 65465;
// Auxiliary function keys F1..F35; F11.. also carry the historical
// left/right function-key aliases (L1..L10, R1..R15).
pub const IBUS_KEY_F1: u32 = 65470;
pub const IBUS_KEY_F2: u32 = 65471;
pub const IBUS_KEY_F3: u32 = 65472;
pub const IBUS_KEY_F4: u32 = 65473;
pub const IBUS_KEY_F5: u32 = 65474;
pub const IBUS_KEY_F6: u32 = 65475;
pub const IBUS_KEY_F7: u32 = 65476;
pub const IBUS_KEY_F8: u32 = 65477;
pub const IBUS_KEY_F9: u32 = 65478;
pub const IBUS_KEY_F10: u32 = 65479;
pub const IBUS_KEY_F11: u32 = 65480;
pub const IBUS_KEY_L1: u32 = 65480;
pub const IBUS_KEY_F12: u32 = 65481;
pub const IBUS_KEY_L2: u32 = 65481;
pub const IBUS_KEY_F13: u32 = 65482;
pub const IBUS_KEY_L3: u32 = 65482;
pub const IBUS_KEY_F14: u32 = 65483;
pub const IBUS_KEY_L4: u32 = 65483;
pub const IBUS_KEY_F15: u32 = 65484;
pub const IBUS_KEY_L5: u32 = 65484;
pub const IBUS_KEY_F16: u32 = 65485;
pub const IBUS_KEY_L6: u32 = 65485;
pub const IBUS_KEY_F17: u32 = 65486;
pub const IBUS_KEY_L7: u32 = 65486;
pub const IBUS_KEY_F18: u32 = 65487;
pub const IBUS_KEY_L8: u32 = 65487;
pub const IBUS_KEY_F19: u32 = 65488;
pub const IBUS_KEY_L9: u32 = 65488;
pub const IBUS_KEY_F20: u32 = 65489;
pub const IBUS_KEY_L10: u32 = 65489;
pub const IBUS_KEY_F21: u32 = 65490;
pub const IBUS_KEY_R1: u32 = 65490;
pub const IBUS_KEY_F22: u32 = 65491;
pub const IBUS_KEY_R2: u32 = 65491;
pub const IBUS_KEY_F23: u32 = 65492;
pub const IBUS_KEY_R3: u32 = 65492;
pub const IBUS_KEY_F24: u32 = 65493;
pub const IBUS_KEY_R4: u32 = 65493;
pub const IBUS_KEY_F25: u32 = 65494;
pub const IBUS_KEY_R5: u32 = 65494;
pub const IBUS_KEY_F26: u32 = 65495;
pub const IBUS_KEY_R6: u32 = 65495;
pub const IBUS_KEY_F27: u32 = 65496;
pub const IBUS_KEY_R7: u32 = 65496;
pub const IBUS_KEY_F28: u32 = 65497;
pub const IBUS_KEY_R8: u32 = 65497;
pub const IBUS_KEY_F29: u32 = 65498;
pub const IBUS_KEY_R9: u32 = 65498;
pub const IBUS_KEY_F30: u32 = 65499;
pub const IBUS_KEY_R10: u32 = 65499;
pub const IBUS_KEY_F31: u32 = 65500;
pub const IBUS_KEY_R11: u32 = 65500;
pub const IBUS_KEY_F32: u32 = 65501;
pub const IBUS_KEY_R12: u32 = 65501;
pub const IBUS_KEY_F33: u32 = 65502;
pub const IBUS_KEY_R13: u32 = 65502;
pub const IBUS_KEY_F34: u32 = 65503;
pub const IBUS_KEY_R14: u32 = 65503;
pub const IBUS_KEY_F35: u32 = 65504;
pub const IBUS_KEY_R15: u32 = 65504;
// Modifier keys.
pub const IBUS_KEY_Shift_L: u32 = 65505;
pub const IBUS_KEY_Shift_R: u32 = 65506;
pub const IBUS_KEY_Control_L: u32 = 65507;
pub const IBUS_KEY_Control_R: u32 = 65508;
pub const IBUS_KEY_Caps_Lock: u32 = 65509;
pub const IBUS_KEY_Shift_Lock: u32 = 65510;
pub const IBUS_KEY_Meta_L: u32 = 65511;
pub const IBUS_KEY_Meta_R: u32 = 65512;
pub const IBUS_KEY_Alt_L: u32 = 65513;
pub const IBUS_KEY_Alt_R: u32 = 65514;
pub const IBUS_KEY_Super_L: u32 = 65515;
pub const IBUS_KEY_Super_R: u32 = 65516;
pub const IBUS_KEY_Hyper_L: u32 = 65517;
pub const IBUS_KEY_Hyper_R: u32 = 65518;
// ISO 9995 function and modifier keys.
pub const IBUS_KEY_ISO_Lock: u32 = 65025;
pub const IBUS_KEY_ISO_Level2_Latch: u32 = 65026;
pub const IBUS_KEY_ISO_Level3_Shift: u32 = 65027;
pub const IBUS_KEY_ISO_Level3_Latch: u32 = 65028;
pub const IBUS_KEY_ISO_Level3_Lock: u32 = 65029;
pub const IBUS_KEY_ISO_Level5_Shift: u32 = 65041;
pub const IBUS_KEY_ISO_Level5_Latch: u32 = 65042;
pub const IBUS_KEY_ISO_Level5_Lock: u32 = 65043;
pub const IBUS_KEY_ISO_Group_Shift: u32 = 65406;
pub const IBUS_KEY_ISO_Group_Latch: u32 = 65030;
pub const IBUS_KEY_ISO_Group_Lock: u32 = 65031;
pub const IBUS_KEY_ISO_Next_Group: u32 = 65032;
pub const IBUS_KEY_ISO_Next_Group_Lock: u32 = 65033;
pub const IBUS_KEY_ISO_Prev_Group: u32 = 65034;
pub const IBUS_KEY_ISO_Prev_Group_Lock: u32 = 65035;
pub const IBUS_KEY_ISO_First_Group: u32 = 65036;
pub const IBUS_KEY_ISO_First_Group_Lock: u32 = 65037;
pub const IBUS_KEY_ISO_Last_Group: u32 = 65038;
pub const IBUS_KEY_ISO_Last_Group_Lock: u32 = 65039;
pub const IBUS_KEY_ISO_Left_Tab: u32 = 65056;
pub const IBUS_KEY_ISO_Move_Line_Up: u32 = 65057;
pub const IBUS_KEY_ISO_Move_Line_Down: u32 = 65058;
pub const IBUS_KEY_ISO_Partial_Line_Up: u32 = 65059;
pub const IBUS_KEY_ISO_Partial_Line_Down: u32 = 65060;
pub const IBUS_KEY_ISO_Partial_Space_Left: u32 = 65061;
pub const IBUS_KEY_ISO_Partial_Space_Right: u32 = 65062;
pub const IBUS_KEY_ISO_Set_Margin_Left: u32 = 65063;
pub const IBUS_KEY_ISO_Set_Margin_Right: u32 = 65064;
pub const IBUS_KEY_ISO_Release_Margin_Left: u32 = 65065;
pub const IBUS_KEY_ISO_Release_Margin_Right: u32 = 65066;
pub const IBUS_KEY_ISO_Release_Both_Margins: u32 = 65067;
pub const IBUS_KEY_ISO_Fast_Cursor_Left: u32 = 65068;
pub const IBUS_KEY_ISO_Fast_Cursor_Right: u32 = 65069;
pub const IBUS_KEY_ISO_Fast_Cursor_Up: u32 = 65070;
pub const IBUS_KEY_ISO_Fast_Cursor_Down: u32 = 65071;
pub const IBUS_KEY_ISO_Continuous_Underline: u32 = 65072;
pub const IBUS_KEY_ISO_Discontinuous_Underline: u32 = 65073;
pub const IBUS_KEY_ISO_Emphasize: u32 = 65074;
pub const IBUS_KEY_ISO_Center_Object: u32 = 65075;
pub const IBUS_KEY_ISO_Enter: u32 = 65076;
// Dead keys (accent composition).
pub const IBUS_KEY_dead_grave: u32 = 65104;
pub const IBUS_KEY_dead_acute: u32 = 65105;
pub const IBUS_KEY_dead_circumflex: u32 = 65106;
pub const IBUS_KEY_dead_tilde: u32 = 65107;
pub const IBUS_KEY_dead_perispomeni: u32 = 65107;
pub const IBUS_KEY_dead_macron: u32 = 65108;
pub const IBUS_KEY_dead_breve: u32 = 65109;
pub const IBUS_KEY_dead_abovedot: u32 = 65110;
pub const IBUS_KEY_dead_diaeresis: u32 = 65111;
pub const IBUS_KEY_dead_abovering: u32 = 65112;
pub const IBUS_KEY_dead_doubleacute: u32 = 65113;
pub const IBUS_KEY_dead_caron: u32 = 65114;
pub const IBUS_KEY_dead_cedilla: u32 = 65115;
pub const IBUS_KEY_dead_ogonek: u32 = 65116;
pub const IBUS_KEY_dead_iota: u32 = 65117;
pub const IBUS_KEY_dead_voiced_sound: u32 = 65118;
pub const IBUS_KEY_dead_semivoiced_sound: u32 = 65119;
pub const IBUS_KEY_dead_belowdot: u32 = 65120;
pub const IBUS_KEY_dead_hook: u32 = 65121;
pub const IBUS_KEY_dead_horn: u32 = 65122;
pub const IBUS_KEY_dead_stroke: u32 = 65123;
pub const IBUS_KEY_dead_abovecomma: u32 = 65124;
pub const IBUS_KEY_dead_psili: u32 = 65124;
pub const IBUS_KEY_dead_abovereversedcomma: u32 = 65125;
pub const IBUS_KEY_dead_dasia: u32 = 65125;
pub const IBUS_KEY_dead_doublegrave: u32 = 65126;
pub const IBUS_KEY_dead_belowring: u32 = 65127;
pub const IBUS_KEY_dead_belowmacron: u32 = 65128;
pub const IBUS_KEY_dead_belowcircumflex: u32 = 65129;
pub const IBUS_KEY_dead_belowtilde: u32 = 65130;
pub const IBUS_KEY_dead_belowbreve: u32 = 65131;
pub const IBUS_KEY_dead_belowdiaeresis: u32 = 65132;
pub const IBUS_KEY_dead_invertedbreve: u32 = 65133;
pub const IBUS_KEY_dead_belowcomma: u32 = 65134;
pub const IBUS_KEY_dead_currency: u32 = 65135;
pub const IBUS_KEY_dead_lowline: u32 = 65168;
pub const IBUS_KEY_dead_aboveverticalline: u32 = 65169;
pub const IBUS_KEY_dead_belowverticalline: u32 = 65170;
pub const IBUS_KEY_dead_longsolidusoverlay: u32 = 65171;
pub const IBUS_KEY_dead_a: u32 = 65152;
pub const IBUS_KEY_dead_A: u32 = 65153;
pub const IBUS_KEY_dead_e: u32 = 65154;
pub const IBUS_KEY_dead_E: u32 = 65155;
pub const IBUS_KEY_dead_i: u32 = 65156;
pub const IBUS_KEY_dead_I: u32 = 65157;
pub const IBUS_KEY_dead_o: u32 = 65158;
pub const IBUS_KEY_dead_O: u32 = 65159;
pub const IBUS_KEY_dead_u: u32 = 65160;
pub const IBUS_KEY_dead_U: u32 = 65161;
pub const IBUS_KEY_dead_small_schwa: u32 = 65162;
pub const IBUS_KEY_dead_capital_schwa: u32 = 65163;
pub const IBUS_KEY_dead_greek: u32 = 65164;
// Virtual-screen / server control keys.
pub const IBUS_KEY_First_Virtual_Screen: u32 = 65232;
pub const IBUS_KEY_Prev_Virtual_Screen: u32 = 65233;
pub const IBUS_KEY_Next_Virtual_Screen: u32 = 65234;
pub const IBUS_KEY_Last_Virtual_Screen: u32 = 65236;
pub const IBUS_KEY_Terminate_Server: u32 = 65237;
// AccessX keyboard-accessibility toggles.
pub const IBUS_KEY_AccessX_Enable: u32 = 65136;
pub const IBUS_KEY_AccessX_Feedback_Enable: u32 = 65137;
pub const IBUS_KEY_RepeatKeys_Enable: u32 = 65138;
pub const IBUS_KEY_SlowKeys_Enable: u32 = 65139;
pub const IBUS_KEY_BounceKeys_Enable: u32 = 65140;
pub const IBUS_KEY_StickyKeys_Enable: u32 = 65141;
pub const IBUS_KEY_MouseKeys_Enable: u32 = 65142;
pub const IBUS_KEY_MouseKeys_Accel_Enable: u32 = 65143;
pub const IBUS_KEY_Overlay1_Enable: u32 = 65144;
pub const IBUS_KEY_Overlay2_Enable: u32 = 65145;
pub const IBUS_KEY_AudibleBell_Enable: u32 = 65146;
// Pointer-emulation (MouseKeys) keys.
pub const IBUS_KEY_Pointer_Left: u32 = 65248;
pub const IBUS_KEY_Pointer_Right: u32 = 65249;
pub const IBUS_KEY_Pointer_Up: u32 = 65250;
pub const IBUS_KEY_Pointer_Down: u32 = 65251;
pub const IBUS_KEY_Pointer_UpLeft: u32 = 65252;
pub const IBUS_KEY_Pointer_UpRight: u32 = 65253;
pub const IBUS_KEY_Pointer_DownLeft: u32 = 65254;
pub const IBUS_KEY_Pointer_DownRight: u32 = 65255;
pub const IBUS_KEY_Pointer_Button_Dflt: u32 = 65256;
pub const IBUS_KEY_Pointer_Button1: u32 = 65257;
pub const IBUS_KEY_Pointer_Button2: u32 = 65258;
pub const IBUS_KEY_Pointer_Button3: u32 = 65259;
pub const IBUS_KEY_Pointer_Button4: u32 = 65260;
pub const IBUS_KEY_Pointer_Button5: u32 = 65261;
pub const IBUS_KEY_Pointer_DblClick_Dflt: u32 = 65262;
pub const IBUS_KEY_Pointer_DblClick1: u32 = 65263;
pub const IBUS_KEY_Pointer_DblClick2: u32 = 65264;
pub const IBUS_KEY_Pointer_DblClick3: u32 = 65265;
pub const IBUS_KEY_Pointer_DblClick4: u32 = 65266;
pub const IBUS_KEY_Pointer_DblClick5: u32 = 65267;
pub const IBUS_KEY_Pointer_Drag_Dflt: u32 = 65268;
pub const IBUS_KEY_Pointer_Drag1: u32 = 65269;
pub const IBUS_KEY_Pointer_Drag2: u32 = 65270;
pub const IBUS_KEY_Pointer_Drag3: u32 = 65271;
pub const IBUS_KEY_Pointer_Drag4: u32 = 65272;
pub const IBUS_KEY_Pointer_Drag5: u32 = 65277;
pub const IBUS_KEY_Pointer_EnableKeys: u32 = 65273;
pub const IBUS_KEY_Pointer_Accelerate: u32 = 65274;
pub const IBUS_KEY_Pointer_DfltBtnNext: u32 = 65275;
pub const IBUS_KEY_Pointer_DfltBtnPrev: u32 = 65276;
// Latin digraph keys.
pub const IBUS_KEY_ch: u32 = 65184;
pub const IBUS_KEY_Ch: u32 = 65185;
pub const IBUS_KEY_CH: u32 = 65186;
pub const IBUS_KEY_c_h: u32 = 65187;
pub const IBUS_KEY_C_h: u32 = 65188;
pub const IBUS_KEY_C_H: u32 = 65189;
// IBM 3270 terminal keys.
pub const IBUS_KEY_3270_Duplicate: u32 = 64769;
pub const IBUS_KEY_3270_FieldMark: u32 = 64770;
pub const IBUS_KEY_3270_Right2: u32 = 64771;
pub const IBUS_KEY_3270_Left2: u32 = 64772;
pub const IBUS_KEY_3270_BackTab: u32 = 64773;
pub const IBUS_KEY_3270_EraseEOF: u32 = 64774;
pub const IBUS_KEY_3270_EraseInput: u32 = 64775;
pub const IBUS_KEY_3270_Reset: u32 = 64776;
pub const IBUS_KEY_3270_Quit: u32 = 64777;
pub const IBUS_KEY_3270_PA1: u32 = 64778;
pub const IBUS_KEY_3270_PA2: u32 = 64779;
pub const IBUS_KEY_3270_PA3: u32 = 64780;
pub const IBUS_KEY_3270_Test: u32 = 64781;
pub const IBUS_KEY_3270_Attn: u32 = 64782;
pub const IBUS_KEY_3270_CursorBlink: u32 = 64783;
pub const IBUS_KEY_3270_AltCursor: u32 = 64784;
pub const IBUS_KEY_3270_KeyClick: u32 = 64785;
pub const IBUS_KEY_3270_Jump: u32 = 64786;
pub const IBUS_KEY_3270_Ident: u32 = 64787;
pub const IBUS_KEY_3270_Rule: u32 = 64788;
pub const IBUS_KEY_3270_Copy: u32 = 64789;
pub const IBUS_KEY_3270_Play: u32 = 64790;
pub const IBUS_KEY_3270_Setup: u32 = 64791;
pub const IBUS_KEY_3270_Record: u32 = 64792;
pub const IBUS_KEY_3270_ChangeScreen: u32 = 64793;
pub const IBUS_KEY_3270_DeleteWord: u32 = 64794;
pub const IBUS_KEY_3270_ExSelect: u32 = 64795;
pub const IBUS_KEY_3270_CursorSelect: u32 = 64796;
pub const IBUS_KEY_3270_PrintScreen: u32 = 64797;
pub const IBUS_KEY_3270_Enter: u32 = 64798;
// Latin-1: printable ASCII (values equal the character codes).
pub const IBUS_KEY_space: u32 = 32;
pub const IBUS_KEY_exclam: u32 = 33;
pub const IBUS_KEY_quotedbl: u32 = 34;
pub const IBUS_KEY_numbersign: u32 = 35;
pub const IBUS_KEY_dollar: u32 = 36;
pub const IBUS_KEY_percent: u32 = 37;
pub const IBUS_KEY_ampersand: u32 = 38;
pub const IBUS_KEY_apostrophe: u32 = 39;
pub const IBUS_KEY_quoteright: u32 = 39;
pub const IBUS_KEY_parenleft: u32 = 40;
pub const IBUS_KEY_parenright: u32 = 41;
pub const IBUS_KEY_asterisk: u32 = 42;
pub const IBUS_KEY_plus: u32 = 43;
pub const IBUS_KEY_comma: u32 = 44;
pub const IBUS_KEY_minus: u32 = 45;
pub const IBUS_KEY_period: u32 = 46;
pub const IBUS_KEY_slash: u32 = 47;
pub const IBUS_KEY_0: u32 = 48;
pub const IBUS_KEY_1: u32 = 49;
pub const IBUS_KEY_2: u32 = 50;
pub const IBUS_KEY_3: u32 = 51;
pub const IBUS_KEY_4: u32 = 52;
pub const IBUS_KEY_5: u32 = 53;
pub const IBUS_KEY_6: u32 = 54;
pub const IBUS_KEY_7: u32 = 55;
pub const IBUS_KEY_8: u32 = 56;
pub const IBUS_KEY_9: u32 = 57;
pub const IBUS_KEY_colon: u32 = 58;
pub const IBUS_KEY_semicolon: u32 = 59;
pub const IBUS_KEY_less: u32 = 60;
pub const IBUS_KEY_equal: u32 = 61;
pub const IBUS_KEY_greater: u32 = 62;
pub const IBUS_KEY_question: u32 = 63;
pub const IBUS_KEY_at: u32 = 64;
pub const IBUS_KEY_A: u32 = 65;
pub const IBUS_KEY_B: u32 = 66;
pub const IBUS_KEY_C: u32 = 67;
pub const IBUS_KEY_D: u32 = 68;
pub const IBUS_KEY_E: u32 = 69;
pub const IBUS_KEY_F: u32 = 70;
pub const IBUS_KEY_G: u32 = 71;
pub const IBUS_KEY_H: u32 = 72;
pub const IBUS_KEY_I: u32 = 73;
pub const IBUS_KEY_J: u32 = 74;
pub const IBUS_KEY_K: u32 = 75;
pub const IBUS_KEY_L: u32 = 76;
pub const IBUS_KEY_M: u32 = 77;
pub const IBUS_KEY_N: u32 = 78;
pub const IBUS_KEY_O: u32 = 79;
pub const IBUS_KEY_P: u32 = 80;
pub const IBUS_KEY_Q: u32 = 81;
pub const IBUS_KEY_R: u32 = 82;
pub const IBUS_KEY_S: u32 = 83;
pub const IBUS_KEY_T: u32 = 84;
pub const IBUS_KEY_U: u32 = 85;
pub const IBUS_KEY_V: u32 = 86;
pub const IBUS_KEY_W: u32 = 87;
pub const IBUS_KEY_X: u32 = 88;
pub const IBUS_KEY_Y: u32 = 89;
pub const IBUS_KEY_Z: u32 = 90;
pub const IBUS_KEY_bracketleft: u32 = 91;
pub const IBUS_KEY_backslash: u32 = 92;
pub const IBUS_KEY_bracketright: u32 = 93;
pub const IBUS_KEY_asciicircum: u32 = 94;
pub const IBUS_KEY_underscore: u32 = 95;
pub const IBUS_KEY_grave: u32 = 96;
pub const IBUS_KEY_quoteleft: u32 = 96;
pub const IBUS_KEY_a: u32 = 97;
pub const IBUS_KEY_b: u32 = 98;
pub const IBUS_KEY_c: u32 = 99;
pub const IBUS_KEY_d: u32 = 100;
pub const IBUS_KEY_e: u32 = 101;
pub const IBUS_KEY_f: u32 = 102;
pub const IBUS_KEY_g: u32 = 103;
pub const IBUS_KEY_h: u32 = 104;
pub const IBUS_KEY_i: u32 = 105;
pub const IBUS_KEY_j: u32 = 106;
pub const IBUS_KEY_k: u32 = 107;
pub const IBUS_KEY_l: u32 = 108;
pub const IBUS_KEY_m: u32 = 109;
pub const IBUS_KEY_n: u32 = 110;
pub const IBUS_KEY_o: u32 = 111;
pub const IBUS_KEY_p: u32 = 112;
pub const IBUS_KEY_q: u32 = 113;
pub const IBUS_KEY_r: u32 = 114;
pub const IBUS_KEY_s: u32 = 115;
pub const IBUS_KEY_t: u32 = 116;
pub const IBUS_KEY_u: u32 = 117;
pub const IBUS_KEY_v: u32 = 118;
pub const IBUS_KEY_w: u32 = 119;
pub const IBUS_KEY_x: u32 = 120;
pub const IBUS_KEY_y: u32 = 121;
pub const IBUS_KEY_z: u32 = 122;
pub const IBUS_KEY_braceleft: u32 = 123;
pub const IBUS_KEY_bar: u32 = 124;
pub const IBUS_KEY_braceright: u32 = 125;
pub const IBUS_KEY_asciitilde: u32 = 126;
// Latin-1 supplement (values equal the ISO 8859-1 character codes).
pub const IBUS_KEY_nobreakspace: u32 = 160;
pub const IBUS_KEY_exclamdown: u32 = 161;
pub const IBUS_KEY_cent: u32 = 162;
pub const IBUS_KEY_sterling: u32 = 163;
pub const IBUS_KEY_currency: u32 = 164;
pub const IBUS_KEY_yen: u32 = 165;
pub const IBUS_KEY_brokenbar: u32 = 166;
pub const IBUS_KEY_section: u32 = 167;
pub const IBUS_KEY_diaeresis: u32 = 168;
pub const IBUS_KEY_copyright: u32 = 169;
pub const IBUS_KEY_ordfeminine: u32 = 170;
pub const IBUS_KEY_guillemotleft: u32 = 171;
pub const IBUS_KEY_notsign: u32 = 172;
pub const IBUS_KEY_hyphen: u32 = 173;
pub const IBUS_KEY_registered: u32 = 174;
pub const IBUS_KEY_macron: u32 = 175;
pub const IBUS_KEY_degree: u32 = 176;
pub const IBUS_KEY_plusminus: u32 = 177;
pub const IBUS_KEY_twosuperior: u32 = 178;
pub const IBUS_KEY_threesuperior: u32 = 179;
pub const IBUS_KEY_acute: u32 = 180;
pub const IBUS_KEY_mu: u32 = 181;
pub const IBUS_KEY_paragraph: u32 = 182;
pub const IBUS_KEY_periodcentered: u32 = 183;
pub const IBUS_KEY_cedilla: u32 = 184;
pub const IBUS_KEY_onesuperior: u32 = 185;
pub const IBUS_KEY_masculine: u32 = 186;
pub const IBUS_KEY_guillemotright: u32 = 187;
pub const IBUS_KEY_onequarter: u32 = 188;
pub const IBUS_KEY_onehalf: u32 = 189;
pub const IBUS_KEY_threequarters: u32 = 190;
pub const IBUS_KEY_questiondown: u32 = 191;
pub const IBUS_KEY_Agrave: u32 = 192;
pub const IBUS_KEY_Aacute: u32 = 193;
pub const IBUS_KEY_Acircumflex: u32 = 194;
pub const IBUS_KEY_Atilde: u32 = 195;
pub const IBUS_KEY_Adiaeresis: u32 = 196;
pub const IBUS_KEY_Aring: u32 = 197;
pub const IBUS_KEY_AE: u32 = 198;
pub const IBUS_KEY_Ccedilla: u32 = 199;
pub const IBUS_KEY_Egrave: u32 = 200;
pub const IBUS_KEY_Eacute: u32 = 201;
pub const IBUS_KEY_Ecircumflex: u32 = 202;
pub const IBUS_KEY_Ediaeresis: u32 = 203;
pub const IBUS_KEY_Igrave: u32 = 204;
pub const IBUS_KEY_Iacute: u32 = 205;
pub const IBUS_KEY_Icircumflex: u32 = 206;
pub const IBUS_KEY_Idiaeresis: u32 = 207;
pub const IBUS_KEY_ETH: u32 = 208;
pub const IBUS_KEY_Eth: u32 = 208;
pub const IBUS_KEY_Ntilde: u32 = 209;
pub const IBUS_KEY_Ograve: u32 = 210;
pub const IBUS_KEY_Oacute: u32 = 211;
pub const IBUS_KEY_Ocircumflex: u32 = 212;
pub const IBUS_KEY_Otilde: u32 = 213;
pub const IBUS_KEY_Odiaeresis: u32 = 214;
pub const IBUS_KEY_multiply: u32 = 215;
pub const IBUS_KEY_Oslash: u32 = 216;
pub const IBUS_KEY_Ooblique: u32 = 216;
pub const IBUS_KEY_Ugrave: u32 = 217;
pub const IBUS_KEY_Uacute: u32 = 218;
pub const IBUS_KEY_Ucircumflex: u32 = 219;
pub const IBUS_KEY_Udiaeresis: u32 = 220;
pub const IBUS_KEY_Yacute: u32 = 221;
pub const IBUS_KEY_THORN: u32 = 222;
pub const IBUS_KEY_Thorn: u32 = 222;
pub const IBUS_KEY_ssharp: u32 = 223;
pub const IBUS_KEY_agrave: u32 = 224;
pub const IBUS_KEY_aacute: u32 = 225;
pub const IBUS_KEY_acircumflex: u32 = 226;
pub const IBUS_KEY_atilde: u32 = 227;
pub const IBUS_KEY_adiaeresis: u32 = 228;
pub const IBUS_KEY_aring: u32 = 229;
pub const IBUS_KEY_ae: u32 = 230;
pub const IBUS_KEY_ccedilla: u32 = 231;
pub const IBUS_KEY_egrave: u32 = 232;
pub const IBUS_KEY_eacute: u32 = 233;
pub const IBUS_KEY_ecircumflex: u32 = 234;
pub const IBUS_KEY_ediaeresis: u32 = 235;
pub const IBUS_KEY_igrave: u32 = 236;
pub const IBUS_KEY_iacute: u32 = 237;
pub const IBUS_KEY_icircumflex: u32 = 238;
pub const IBUS_KEY_idiaeresis: u32 = 239;
pub const IBUS_KEY_eth: u32 = 240;
pub const IBUS_KEY_ntilde: u32 = 241;
pub const IBUS_KEY_ograve: u32 = 242;
pub const IBUS_KEY_oacute: u32 = 243;
pub const IBUS_KEY_ocircumflex: u32 = 244;
pub const IBUS_KEY_otilde: u32 = 245;
pub const IBUS_KEY_odiaeresis: u32 = 246;
pub const IBUS_KEY_division: u32 = 247;
pub const IBUS_KEY_oslash: u32 = 248;
pub const IBUS_KEY_ooblique: u32 = 248;
pub const IBUS_KEY_ugrave: u32 = 249;
pub const IBUS_KEY_uacute: u32 = 250;
pub const IBUS_KEY_ucircumflex: u32 = 251;
pub const IBUS_KEY_udiaeresis: u32 = 252;
pub const IBUS_KEY_yacute: u32 = 253;
pub const IBUS_KEY_thorn: u32 = 254;
pub const IBUS_KEY_ydiaeresis: u32 = 255;
// Latin-2 keysyms.
pub const IBUS_KEY_Aogonek: u32 = 417;
pub const IBUS_KEY_breve: u32 = 418;
pub const IBUS_KEY_Lstroke: u32 = 419;
pub const IBUS_KEY_Lcaron: u32 = 421;
pub const IBUS_KEY_Sacute: u32 = 422;
pub const IBUS_KEY_Scaron: u32 = 425;
pub const IBUS_KEY_Scedilla: u32 = 426;
pub const IBUS_KEY_Tcaron: u32 = 427;
pub const IBUS_KEY_Zacute: u32 = 428;
pub const IBUS_KEY_Zcaron: u32 = 430;
pub const IBUS_KEY_Zabovedot: u32 = 431;
pub const IBUS_KEY_aogonek: u32 = 433;
pub const IBUS_KEY_ogonek: u32 = 434;
pub const IBUS_KEY_lstroke: u32 = 435;
pub const IBUS_KEY_lcaron: u32 = 437;
pub const IBUS_KEY_sacute: u32 = 438;
pub const IBUS_KEY_caron: u32 = 439;
pub const IBUS_KEY_scaron: u32 = 441;
pub const IBUS_KEY_scedilla: u32 = 442;
pub const IBUS_KEY_tcaron: u32 = 443;
pub const IBUS_KEY_zacute: u32 = 444;
pub const IBUS_KEY_doubleacute: u32 = 445;
pub const IBUS_KEY_zcaron: u32 = 446;
pub const IBUS_KEY_zabovedot: u32 = 447;
pub const IBUS_KEY_Racute: u32 = 448;
pub const IBUS_KEY_Abreve: u32 = 451;
pub const IBUS_KEY_Lacute: u32 = 453;
pub const IBUS_KEY_Cacute: u32 = 454;
pub const IBUS_KEY_Ccaron: u32 = 456;
pub const IBUS_KEY_Eogonek: u32 = 458;
pub const IBUS_KEY_Ecaron: u32 = 460;
pub const IBUS_KEY_Dcaron: u32 = 463;
pub const IBUS_KEY_Dstroke: u32 = 464;
pub const IBUS_KEY_Nacute: u32 = 465;
pub const IBUS_KEY_Ncaron: u32 = 466;
pub const IBUS_KEY_Odoubleacute: u32 = 469;
pub const IBUS_KEY_Rcaron: u32 = 472;
pub const IBUS_KEY_Uring: u32 = 473;
pub const IBUS_KEY_Udoubleacute: u32 = 475;
pub const IBUS_KEY_Tcedilla: u32 = 478;
pub const IBUS_KEY_racute: u32 = 480;
pub const IBUS_KEY_abreve: u32 = 483;
pub const IBUS_KEY_lacute: u32 = 485;
pub const IBUS_KEY_cacute: u32 = 486;
pub const IBUS_KEY_ccaron: u32 = 488;
pub const IBUS_KEY_eogonek: u32 = 490;
pub const IBUS_KEY_ecaron: u32 = 492;
pub const IBUS_KEY_dcaron: u32 = 495;
pub const IBUS_KEY_dstroke: u32 = 496;
pub const IBUS_KEY_nacute: u32 = 497;
pub const IBUS_KEY_ncaron: u32 = 498;
pub const IBUS_KEY_odoubleacute: u32 = 501;
pub const IBUS_KEY_rcaron: u32 = 504;
pub const IBUS_KEY_uring: u32 = 505;
pub const IBUS_KEY_udoubleacute: u32 = 507;
pub const IBUS_KEY_tcedilla: u32 = 510;
pub const IBUS_KEY_abovedot: u32 = 511;
pub const IBUS_KEY_Hstroke: u32 = 673;
pub const IBUS_KEY_Hcircumflex: u32 = 678;
pub const IBUS_KEY_Iabovedot: u32 = 681;
pub const IBUS_KEY_Gbreve: u32 = 683;
pub const IBUS_KEY_Jcircumflex: u32 = 684;
pub const IBUS_KEY_hstroke: u32 = 689;
pub const IBUS_KEY_hcircumflex: u32 = 694;
pub const IBUS_KEY_idotless: u32 = 697;
pub const IBUS_KEY_gbreve: u32 = 699;
pub const IBUS_KEY_jcircumflex: u32 = 700;
pub const IBUS_KEY_Cabovedot: u32 = 709;
pub const IBUS_KEY_Ccircumflex: u32 = 710;
pub const IBUS_KEY_Gabovedot: u32 = 725;
pub const IBUS_KEY_Gcircumflex: u32 = 728;
pub const IBUS_KEY_Ubreve: u32 = 733;
pub const IBUS_KEY_Scircumflex: u32 = 734;
pub const IBUS_KEY_cabovedot: u32 = 741;
pub const IBUS_KEY_ccircumflex: u32 = 742;
pub const IBUS_KEY_gabovedot: u32 = 757;
pub const IBUS_KEY_gcircumflex: u32 = 760;
pub const IBUS_KEY_ubreve: u32 = 765;
pub const IBUS_KEY_scircumflex: u32 = 766;
pub const IBUS_KEY_kra: u32 = 930;
pub const IBUS_KEY_kappa: u32 = 930;
pub const IBUS_KEY_Rcedilla: u32 = 931;
pub const IBUS_KEY_Itilde: u32 = 933;
pub const IBUS_KEY_Lcedilla: u32 = 934;
pub const IBUS_KEY_Emacron: u32 = 938;
pub const IBUS_KEY_Gcedilla: u32 = 939;
pub const IBUS_KEY_Tslash: u32 = 940;
pub const IBUS_KEY_rcedilla: u32 = 947;
pub const IBUS_KEY_itilde: u32 = 949;
pub const IBUS_KEY_lcedilla: u32 = 950;
pub const IBUS_KEY_emacron: u32 = 954;
pub const IBUS_KEY_gcedilla: u32 = 955;
pub const IBUS_KEY_tslash: u32 = 956;
pub const IBUS_KEY_ENG: u32 = 957;
pub const IBUS_KEY_eng: u32 = 959;
pub const IBUS_KEY_Amacron: u32 = 960;
pub const IBUS_KEY_Iogonek: u32 = 967;
pub const IBUS_KEY_Eabovedot: u32 = 972;
pub const IBUS_KEY_Imacron: u32 = 975;
pub const IBUS_KEY_Ncedilla: u32 = 977;
pub const IBUS_KEY_Omacron: u32 = 978;
pub const IBUS_KEY_Kcedilla: u32 = 979;
pub const IBUS_KEY_Uogonek: u32 = 985;
pub const IBUS_KEY_Utilde: u32 = 989;
pub const IBUS_KEY_Umacron: u32 = 990;
pub const IBUS_KEY_amacron: u32 = 992;
pub const IBUS_KEY_iogonek: u32 = 999;
pub const IBUS_KEY_eabovedot: u32 = 1004;
pub const IBUS_KEY_imacron: u32 = 1007;
pub const IBUS_KEY_ncedilla: u32 = 1009;
pub const IBUS_KEY_omacron: u32 = 1010;
pub const IBUS_KEY_kcedilla: u32 = 1011;
pub const IBUS_KEY_uogonek: u32 = 1017;
pub const IBUS_KEY_utilde: u32 = 1021;
pub const IBUS_KEY_umacron: u32 = 1022;
pub const IBUS_KEY_Wcircumflex: u32 = 16777588;
pub const IBUS_KEY_wcircumflex: u32 = 16777589;
pub const IBUS_KEY_Ycircumflex: u32 = 16777590;
pub const IBUS_KEY_ycircumflex: u32 = 16777591;
pub const IBUS_KEY_Babovedot: u32 = 16784898;
pub const IBUS_KEY_babovedot: u32 = 16784899;
pub const IBUS_KEY_Dabovedot: u32 = 16784906;
pub const IBUS_KEY_dabovedot: u32 = 16784907;
pub const IBUS_KEY_Fabovedot: u32 = 16784926;
pub const IBUS_KEY_fabovedot: u32 = 16784927;
pub const IBUS_KEY_Mabovedot: u32 = 16784960;
pub const IBUS_KEY_mabovedot: u32 = 16784961;
pub const IBUS_KEY_Pabovedot: u32 = 16784982;
pub const IBUS_KEY_pabovedot: u32 = 16784983;
pub const IBUS_KEY_Sabovedot: u32 = 16784992;
pub const IBUS_KEY_sabovedot: u32 = 16784993;
pub const IBUS_KEY_Tabovedot: u32 = 16785002;
pub const IBUS_KEY_tabovedot: u32 = 16785003;
pub const IBUS_KEY_Wgrave: u32 = 16785024;
pub const IBUS_KEY_wgrave: u32 = 16785025;
pub const IBUS_KEY_Wacute: u32 = 16785026;
pub const IBUS_KEY_wacute: u32 = 16785027;
pub const IBUS_KEY_Wdiaeresis: u32 = 16785028;
pub const IBUS_KEY_wdiaeresis: u32 = 16785029;
pub const IBUS_KEY_Ygrave: u32 = 16785138;
pub const IBUS_KEY_ygrave: u32 = 16785139;
pub const IBUS_KEY_OE: u32 = 5052;
pub const IBUS_KEY_oe: u32 = 5053;
pub const IBUS_KEY_Ydiaeresis: u32 = 5054;
pub const IBUS_KEY_overline: u32 = 1150;
pub const IBUS_KEY_kana_fullstop: u32 = 1185;
pub const IBUS_KEY_kana_openingbracket: u32 = 1186;
pub const IBUS_KEY_kana_closingbracket: u32 = 1187;
pub const IBUS_KEY_kana_comma: u32 = 1188;
pub const IBUS_KEY_kana_conjunctive: u32 = 1189;
pub const IBUS_KEY_kana_middledot: u32 = 1189;
pub const IBUS_KEY_kana_WO: u32 = 1190;
pub const IBUS_KEY_kana_a: u32 = 1191;
pub const IBUS_KEY_kana_i: u32 = 1192;
pub const IBUS_KEY_kana_u: u32 = 1193;
pub const IBUS_KEY_kana_e: u32 = 1194;
pub const IBUS_KEY_kana_o: u32 = 1195;
pub const IBUS_KEY_kana_ya: u32 = 1196;
pub const IBUS_KEY_kana_yu: u32 = 1197;
pub const IBUS_KEY_kana_yo: u32 = 1198;
pub const IBUS_KEY_kana_tsu: u32 = 1199;
pub const IBUS_KEY_kana_tu: u32 = 1199;
pub const IBUS_KEY_prolongedsound: u32 = 1200;
pub const IBUS_KEY_kana_A: u32 = 1201;
pub const IBUS_KEY_kana_I: u32 = 1202;
pub const IBUS_KEY_kana_U: u32 = 1203;
pub const IBUS_KEY_kana_E: u32 = 1204;
pub const IBUS_KEY_kana_O: u32 = 1205;
pub const IBUS_KEY_kana_KA: u32 = 1206;
pub const IBUS_KEY_kana_KI: u32 = 1207;
pub const IBUS_KEY_kana_KU: u32 = 1208;
pub const IBUS_KEY_kana_KE: u32 = 1209;
pub const IBUS_KEY_kana_KO: u32 = 1210;
pub const IBUS_KEY_kana_SA: u32 = 1211;
pub const IBUS_KEY_kana_SHI: u32 = 1212;
pub const IBUS_KEY_kana_SU: u32 = 1213;
pub const IBUS_KEY_kana_SE: u32 = 1214;
pub const IBUS_KEY_kana_SO: u32 = 1215;
pub const IBUS_KEY_kana_TA: u32 = 1216;
pub const IBUS_KEY_kana_CHI: u32 = 1217;
pub const IBUS_KEY_kana_TI: u32 = 1217;
pub const IBUS_KEY_kana_TSU: u32 = 1218;
pub const IBUS_KEY_kana_TU: u32 = 1218;
pub const IBUS_KEY_kana_TE: u32 = 1219;
pub const IBUS_KEY_kana_TO: u32 = 1220;
pub const IBUS_KEY_kana_NA: u32 = 1221;
pub const IBUS_KEY_kana_NI: u32 = 1222;
pub const IBUS_KEY_kana_NU: u32 = 1223;
pub const IBUS_KEY_kana_NE: u32 = 1224;
pub const IBUS_KEY_kana_NO: u32 = 1225;
pub const IBUS_KEY_kana_HA: u32 = 1226;
pub const IBUS_KEY_kana_HI: u32 = 1227;
pub const IBUS_KEY_kana_FU: u32 = 1228;
pub const IBUS_KEY_kana_HU: u32 = 1228;
pub const IBUS_KEY_kana_HE: u32 = 1229;
pub const IBUS_KEY_kana_HO: u32 = 1230;
pub const IBUS_KEY_kana_MA: u32 = 1231;
pub const IBUS_KEY_kana_MI: u32 = 1232;
pub const IBUS_KEY_kana_MU: u32 = 1233;
pub const IBUS_KEY_kana_ME: u32 = 1234;
pub const IBUS_KEY_kana_MO: u32 = 1235;
pub const IBUS_KEY_kana_YA: u32 = 1236;
pub const IBUS_KEY_kana_YU: u32 = 1237;
pub const IBUS_KEY_kana_YO: u32 = 1238;
pub const IBUS_KEY_kana_RA: u32 = 1239;
pub const IBUS_KEY_kana_RI: u32 = 1240;
pub const IBUS_KEY_kana_RU: u32 = 1241;
pub const IBUS_KEY_kana_RE: u32 = 1242;
pub const IBUS_KEY_kana_RO: u32 = 1243;
pub const IBUS_KEY_kana_WA: u32 = 1244;
pub const IBUS_KEY_kana_N: u32 = 1245;
pub const IBUS_KEY_voicedsound: u32 = 1246;
pub const IBUS_KEY_semivoicedsound: u32 = 1247;
pub const IBUS_KEY_kana_switch: u32 = 65406;
pub const IBUS_KEY_Farsi_0: u32 = 16778992;
pub const IBUS_KEY_Farsi_1: u32 = 16778993;
pub const IBUS_KEY_Farsi_2: u32 = 16778994;
pub const IBUS_KEY_Farsi_3: u32 = 16778995;
pub const IBUS_KEY_Farsi_4: u32 = 16778996;
pub const IBUS_KEY_Farsi_5: u32 = 16778997;
pub const IBUS_KEY_Farsi_6: u32 = 16778998;
pub const IBUS_KEY_Farsi_7: u32 = 16778999;
pub const IBUS_KEY_Farsi_8: u32 = 16779000;
pub const IBUS_KEY_Farsi_9: u32 = 16779001;
pub const IBUS_KEY_Arabic_percent: u32 = 16778858;
pub const IBUS_KEY_Arabic_superscript_alef: u32 = 16778864;
pub const IBUS_KEY_Arabic_tteh: u32 = 16778873;
pub const IBUS_KEY_Arabic_peh: u32 = 16778878;
pub const IBUS_KEY_Arabic_tcheh: u32 = 16778886;
pub const IBUS_KEY_Arabic_ddal: u32 = 16778888;
pub const IBUS_KEY_Arabic_rreh: u32 = 16778897;
pub const IBUS_KEY_Arabic_comma: u32 = 1452;
pub const IBUS_KEY_Arabic_fullstop: u32 = 16778964;
pub const IBUS_KEY_Arabic_0: u32 = 16778848;
pub const IBUS_KEY_Arabic_1: u32 = 16778849;
pub const IBUS_KEY_Arabic_2: u32 = 16778850;
pub const IBUS_KEY_Arabic_3: u32 = 16778851;
pub const IBUS_KEY_Arabic_4: u32 = 16778852;
pub const IBUS_KEY_Arabic_5: u32 = 16778853;
pub const IBUS_KEY_Arabic_6: u32 = 16778854;
pub const IBUS_KEY_Arabic_7: u32 = 16778855;
pub const IBUS_KEY_Arabic_8: u32 = 16778856;
pub const IBUS_KEY_Arabic_9: u32 = 16778857;
pub const IBUS_KEY_Arabic_semicolon: u32 = 1467;
pub const IBUS_KEY_Arabic_question_mark: u32 = 1471;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | true |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/attr_list.rs | ibus-sys/src/attr_list.rs | use crate::attribute::IBusAttribute;
extern "C" {
#[doc = " ibus_attr_list_new:\n\n Creates an new #IBusAttrList.\n\n Returns: A newly allocated #IBusAttrList."]
pub fn ibus_attr_list_new() -> *mut IBusAttrList;
#[doc = " ibus_attr_list_append:\n @attr_list: An IBusAttrList instance.\n @attr: The IBusAttribute instance to be appended.\n\n Append an IBusAttribute to IBusAttrList, and increase reference."]
pub fn ibus_attr_list_append(attr_list: *mut IBusAttrList, attr: *mut IBusAttribute);
}
pub type IBusAttrList = [u64; 7usize];
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/glib.rs | ibus-sys/src/glib.rs | extern "C" {
    // Increments the object's reference count and clears its floating flag
    // (GObject "ref sink" semantics).
pub fn g_object_ref_sink(object: gpointer) -> gpointer;
}
// C-compatible primitive aliases mirroring GLib's type names.
pub type gchar = ::std::os::raw::c_char;
pub type guint = ::std::os::raw::c_uint;
pub type gboolean = ::std::os::raw::c_int;
pub type gsize = ::std::os::raw::c_ulong;
pub type gssize = ::std::os::raw::c_long;
pub type gint = ::std::os::raw::c_int;
pub type gpointer = *mut ::std::os::raw::c_void;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
// Mirror of GLib's `GArray` header: a raw data pointer plus element count.
// `#[repr(C)]` keeps the field order/layout identical to the C definition,
// which must stay in sync with glib.
pub struct GArray {
    pub data: *mut gchar,
    pub len: guint,
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/core.rs | ibus-sys/src/core.rs | #![allow(dead_code)]
// See bindgen.sh's output to improve this file.
// TODO: maybe this file can be updated to expose a more Rust-native interface...
// ibus wrapper functions.
use crate::glib::gboolean;
pub type IBusSerializable = [u64; 6usize];
pub const IBusModifierType_IBUS_SHIFT_MASK: IBusModifierType = 1;
pub const IBusModifierType_IBUS_LOCK_MASK: IBusModifierType = 2;
pub const IBusModifierType_IBUS_CONTROL_MASK: IBusModifierType = 4;
pub const IBusModifierType_IBUS_MOD1_MASK: IBusModifierType = 8;
pub const IBusModifierType_IBUS_MOD2_MASK: IBusModifierType = 16;
pub const IBusModifierType_IBUS_MOD3_MASK: IBusModifierType = 32;
pub const IBusModifierType_IBUS_MOD4_MASK: IBusModifierType = 64;
pub const IBusModifierType_IBUS_MOD5_MASK: IBusModifierType = 128;
pub const IBusModifierType_IBUS_BUTTON1_MASK: IBusModifierType = 256;
pub const IBusModifierType_IBUS_BUTTON2_MASK: IBusModifierType = 512;
pub const IBusModifierType_IBUS_BUTTON3_MASK: IBusModifierType = 1024;
pub const IBusModifierType_IBUS_BUTTON4_MASK: IBusModifierType = 2048;
pub const IBusModifierType_IBUS_BUTTON5_MASK: IBusModifierType = 4096;
pub const IBusModifierType_IBUS_HANDLED_MASK: IBusModifierType = 16777216;
pub const IBusModifierType_IBUS_FORWARD_MASK: IBusModifierType = 33554432;
pub const IBusModifierType_IBUS_IGNORED_MASK: IBusModifierType = 33554432;
pub const IBusModifierType_IBUS_SUPER_MASK: IBusModifierType = 67108864;
pub const IBusModifierType_IBUS_HYPER_MASK: IBusModifierType = 134217728;
pub const IBusModifierType_IBUS_META_MASK: IBusModifierType = 268435456;
pub const IBusModifierType_IBUS_RELEASE_MASK: IBusModifierType = 1073741824;
pub const IBusModifierType_IBUS_MODIFIER_MASK: IBusModifierType = 1593843711;
#[doc = " IBusModifierType:\n @IBUS_SHIFT_MASK: Shift is activated.\n @IBUS_LOCK_MASK: Cap Lock is locked.\n @IBUS_CONTROL_MASK: Control key is activated.\n @IBUS_MOD1_MASK: Modifier 1 (Usually Alt_L (0x40), Alt_R (0x6c), Meta_L (0xcd)) activated.\n @IBUS_MOD2_MASK: Modifier 2 (Usually Num_Lock (0x4d)) activated.\n @IBUS_MOD3_MASK: Modifier 3 activated.\n @IBUS_MOD4_MASK: Modifier 4 (Usually Super_L (0xce), Hyper_L (0xcf)) activated.\n @IBUS_MOD5_MASK: Modifier 5 (ISO_Level3_Shift (0x5c), Mode_switch (0xcb)) activated.\n @IBUS_BUTTON1_MASK: Mouse button 1 (left) is activated.\n @IBUS_BUTTON2_MASK: Mouse button 2 (middle) is activated.\n @IBUS_BUTTON3_MASK: Mouse button 3 (right) is activated.\n @IBUS_BUTTON4_MASK: Mouse button 4 (scroll up) is activated.\n @IBUS_BUTTON5_MASK: Mouse button 5 (scroll down) is activated.\n @IBUS_HANDLED_MASK: Handled mask indicates the event has been handled by ibus.\n @IBUS_FORWARD_MASK: Forward mask indicates the event has been forward from ibus.\n @IBUS_IGNORED_MASK: It is an alias of IBUS_FORWARD_MASK.\n @IBUS_SUPER_MASK: Super (Usually Win) key is activated.\n @IBUS_HYPER_MASK: Hyper key is activated.\n @IBUS_META_MASK: Meta key is activated.\n @IBUS_RELEASE_MASK: Key is released.\n @IBUS_MODIFIER_MASK: Modifier mask for the all the masks above.\n\n Handles key modifier such as control, shift and alt and release event.\n Note that nits 15 - 25 are currently unused, while bit 29 is used internally."]
pub type IBusModifierType = ::std::os::raw::c_uint;
pub type IBusBus = [u64; 6usize];
extern "C" {
    // Allocates a new IBusBus (connection object to the ibus daemon).
    pub fn ibus_bus_new() -> *mut IBusBus;
    // Library initialization; per ibus convention this is called before any
    // other ibus function. NOTE(review): confirm against the ibus manual.
    pub fn ibus_init();
    // Enters the main event loop (blocks the calling thread).
    pub fn ibus_main();
}
/// Converts a Rust `bool` into a GLib `gboolean` (`1` for true, `0` for false).
pub fn to_gboolean(b: bool) -> gboolean {
    if b { 1 } else { 0 }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/text.rs | ibus-sys/src/text.rs | use std::ffi::CString;
use crate::attr_list::IBusAttrList;
use crate::glib::gchar;
extern "C" {
pub fn ibus_text_new_from_string(str_: *const gchar) -> *mut IBusText;
#[doc = " ibus_text_set_attributes:\n @text: An IBusText.\n @attrs: An IBusAttrList"]
pub fn ibus_text_set_attributes(text: *mut IBusText, attrs: *mut IBusAttrList);
}
pub type IBusText = [u64; 9usize];
/// Extension trait for converting Rust strings into IBus text objects.
pub trait StringExt {
    /// Builds a newly allocated `IBusText` from `self`.
    ///
    /// The caller is responsible for managing the returned pointer's
    /// reference count per the usual GObject rules.
    fn to_ibus_text(&self) -> *mut IBusText;
}
impl StringExt for str {
    fn to_ibus_text(&self) -> *mut IBusText {
        unsafe {
            // Panics if `self` contains an interior NUL byte, since a C
            // string cannot represent it (`CString::new` returns Err).
            let text_c_str = CString::new(self).unwrap();
            // `text_c_str` is dropped when this block ends; this is sound
            // assuming ibus_text_new_from_string copies the bytes.
            // NOTE(review): the copy is an IBus API convention — confirm.
            ibus_text_new_from_string(text_c_str.as_ptr() as *const gchar)
        }
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/prop_list.rs | ibus-sys/src/prop_list.rs | use crate::core::IBusSerializable;
use crate::glib::GArray;
use crate::property::IBusProperty;
extern "C" {
pub fn ibus_prop_list_new() -> *mut IBusPropList;
pub fn ibus_prop_list_append(prop_list: *mut IBusPropList, prop: *mut IBusProperty);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
/// Mirror of the C `IBusPropList` struct; `#[repr(C)]` keeps the field
/// order and layout ABI-compatible with the ibus library.
pub struct IBusPropList {
    // Embedded parent instance, GObject-style.
    parent: IBusSerializable,
    // NOTE(review): named `candidates` here, but per the C definition this
    // should hold the property entries (a GArray of IBusProperty*) — confirm.
    candidates: *mut GArray,
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/property.rs | ibus-sys/src/property.rs | use crate::core::IBusSerializable;
use crate::glib::{gboolean, gchar, gpointer};
use crate::prop_list::IBusPropList;
use crate::text::IBusText;
pub const IBusPropState_PROP_STATE_UNCHECKED: IBusPropState = 0;
pub const IBusPropState_PROP_STATE_CHECKED: IBusPropState = 1;
pub const IBusPropState_PROP_STATE_INCONSISTENT: IBusPropState = 2;
pub type IBusPropState = ::std::os::raw::c_uint;
pub const IBusPropType_PROP_TYPE_NORMAL: IBusPropType = 0;
pub const IBusPropType_PROP_TYPE_TOGGLE: IBusPropType = 1;
pub const IBusPropType_PROP_TYPE_RADIO: IBusPropType = 2;
pub const IBusPropType_PROP_TYPE_MENU: IBusPropType = 3;
pub const IBusPropType_PROP_TYPE_SEPARATOR: IBusPropType = 4;
pub type IBusPropType = ::std::os::raw::c_uint;
extern "C" {
pub fn ibus_property_new(
key: *const gchar,
type_: IBusPropType,
label: *mut IBusText,
icon: *const gchar,
tooltip: *mut IBusText,
sensitive: gboolean,
visible: gboolean,
state: IBusPropState,
prop_list: *mut IBusPropList,
) -> *mut IBusProperty;
pub fn ibus_property_set_sub_props(prop: *mut IBusProperty, prop_list: *mut IBusPropList);
pub fn ibus_property_set_state(prop: *mut IBusProperty, state: IBusPropState);
pub fn ibus_property_set_label(prop: *mut IBusProperty, label: *mut IBusText);
pub fn ibus_property_set_symbol(prop: *mut IBusProperty, symbol: *mut IBusText);
pub fn ibus_property_set_icon(prop: *mut IBusProperty, icon: *const gchar);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
// Opaque private data of IBusProperty; only ever used behind a pointer,
// hence the zero-sized marker field.
pub struct IBusPropertyPrivate {
    _unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
// Mirror of the C IBusProperty struct: GObject-style embedded parent, a
// private-data pointer, and padding pointers reserved by ibus for ABI
// stability.
pub struct IBusProperty {
    parent: IBusSerializable,
    priv_: *mut IBusPropertyPrivate,
    pdummy: [gpointer; 7usize],
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-sys/src/keys.rs | ibus-sys/src/keys.rs | use crate::glib::{gchar, guint};
extern "C" {
    // Returns the canonical ibus name for a key value (e.g. "Return").
    // NOTE(review): failure behavior (NULL?) not shown here — see ibus docs.
    pub fn ibus_keyval_name(keyval: guint) -> *const gchar;
    // Parses a key name back into its key value (inverse of the above).
    pub fn ibus_keyval_from_name(keyval_name: *const gchar) -> guint;
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-conf/src/lib.rs | akaza-conf/src/lib.rs | pub mod conf;
mod pane;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-conf/src/conf.rs | akaza-conf/src/conf.rs | use std::process::Command;
use std::sync::{Arc, Mutex};
use anyhow::Result;
use gtk::glib::signal::Inhibit;
use gtk::prelude::*;
use gtk::{Application, ApplicationWindow, Button, Label, Notebook};
use gtk4 as gtk;
use gtk4::gio::ApplicationFlags;
use gtk4::Grid;
use log::{error, info};
use libakaza::config::{Config, EngineConfig};
use crate::pane::{about_pane, core_pane, dict_pane};
/// Launches the Akaza configuration GUI.
///
/// Loads the current configuration, shares it behind an `Arc<Mutex<_>>`,
/// wires it into a GTK application and blocks until the app exits.
///
/// # Errors
///
/// Returns an error when the configuration file cannot be loaded.
pub fn open_configuration_window() -> Result<()> {
    let shared_config = Arc::new(Mutex::new(Config::load()?));
    let app = Application::new(Some("com.github.akaza.config"), ApplicationFlags::empty());
    app.connect_activate(move |application| {
        connect_activate(application, shared_config.clone()).unwrap();
    });
    // Run without forwarding the process arguments to GTK.
    let no_args: Vec<String> = Vec::new();
    app.run_with_args(&no_args);
    Ok(())
}
/// Builds and shows the configuration window inside the GTK application.
///
/// Layout: a notebook with three tabs (core settings, dictionaries, about)
/// above an OK/Cancel button row. OK serializes the shared `Config` to disk
/// and restarts ibus so the engine picks up the new settings; Cancel simply
/// closes the window.
fn connect_activate(app: &Application, config: Arc<Mutex<Config>>) -> Result<()> {
    let window = ApplicationWindow::builder()
        .application(app)
        .default_width(520)
        .default_height(500)
        .title("Akaza ใฎ่จญๅฎ")
        .build();
    let notebook = Notebook::builder().vexpand(true).hexpand(true).build();
    notebook.append_page(
        &core_pane::build_core_pane(config.clone())?,
        Some(&Label::new(Some("ๅบๆฌ่จญๅฎ"))),
    );
    notebook.append_page(
        &dict_pane::build_dict_pane(config.clone())?,
        Some(&Label::new(Some("่พๆธ"))),
    );
    notebook.append_page(
        &about_pane::build_about_pane(),
        Some(&Label::new(Some("ใขใใฆใ"))),
    );
    let grid = Grid::builder().build();
    grid.attach(&notebook, 0, 0, 6, 1);
    let ok_button = Button::with_label("OK");
    // Rebind so the Arc is moved into the click handler below.
    let config = config;
    ok_button.connect_clicked(move |_| {
        eprintln!("Save the configuration...");
        // TODO: saving logic
        let config = config.lock().unwrap();
        // Copy the current state into a fresh Config for serialization.
        // NOTE(review): dict_cache is unconditionally forced to true when
        // saving — confirm that is intended.
        let config = Config {
            keymap: config.keymap.to_string(),
            romkan: config.romkan.to_string(),
            live_conversion: config.live_conversion,
            engine: EngineConfig {
                model: config.engine.model.to_string(),
                dicts: config.engine.dicts.clone(),
                dict_cache: true,
            },
        };
        info!("Saving config: {}", serde_yaml::to_string(&config).unwrap());
        config.save().unwrap();
        // Finally run `ibus restart`; implementing in-place config reload
        // would be a lot of work.
        let output = Command::new("ibus").arg("restart").output().unwrap();
        if !output.status.success() {
            error!(
                "Cannot run `ibus restart`: out={}, err={}",
                String::from_utf8(output.stdout).unwrap(),
                String::from_utf8(output.stderr).unwrap()
            );
        } else {
            info!(
                "Ran `ibus restart`: out={}, err={}",
                String::from_utf8(output.stdout).unwrap(),
                String::from_utf8(output.stderr).unwrap()
            );
        }
    });
    let cancel_button = Button::with_label("Cancel");
    {
        let window_clone = window.clone();
        cancel_button.connect_clicked(move |_| {
            eprintln!("Close the configuration window!");
            window_clone.close();
        });
    }
    grid.attach(&ok_button, 4, 1, 1, 1);
    grid.attach(&cancel_button, 5, 1, 1, 1);
    window.set_child(Some(&grid));
    window.connect_close_request(move |window| {
        if let Some(application) = window.application() {
            application.remove_window(window);
        }
        // Inhibit(false) lets GTK proceed with closing the window.
        Inhibit(false)
    });
    window.show();
    Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-conf/src/pane/dict_pane.rs | akaza-conf/src/pane/dict_pane.rs | use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use gtk4::builders::MessageDialogBuilder;
use gtk4::prelude::ButtonExt;
use gtk4::prelude::ComboBoxExt;
use gtk4::prelude::DialogExt;
use gtk4::prelude::EntryBufferExt;
use gtk4::prelude::EntryBufferExtManual;
use gtk4::prelude::FileChooserExt;
use gtk4::prelude::FileExt;
use gtk4::prelude::GridExt;
use gtk4::prelude::GtkWindowExt;
use gtk4::prelude::WidgetExt;
use gtk4::{
Button, ComboBoxText, FileChooserAction, FileChooserDialog, Grid, Label, MessageType,
ResponseType, ScrolledWindow, Text, TextBuffer, TextView, Window,
};
use log::info;
use libakaza::config::{Config, DictConfig, DictEncoding, DictType, DictUsage};
use libakaza::dict::skk::write::write_skk_dict;
/// Builds the dictionary-settings tab.
///
/// Shows one grid row per configured dictionary (path, usage, type,
/// encoding, delete button) followed by buttons for adding a system
/// dictionary or a user dictionary.
pub fn build_dict_pane(config: Arc<Mutex<Config>>) -> anyhow::Result<ScrolledWindow> {
    let scroll = ScrolledWindow::new();
    let parent_grid = Grid::builder().column_spacing(10).build();
    let grid = Grid::builder().column_spacing(10).build();
    // Clone the list so the lock is not held while the rows are built.
    let dicts = config.lock().unwrap().engine.dicts.clone();
    for (i, dict_config) in dicts.iter().enumerate() {
        add_row(&grid, dict_config, &config.clone(), i);
    }
    parent_grid.attach(&grid, 0, 0, 1, 1);
    {
        let add_system_dict_btn = build_add_system_dict_btn(config.clone(), grid.clone());
        parent_grid.attach(&add_system_dict_btn, 0, 1, 1, 1);
    }
    {
        let add_user_dict_btn = build_add_user_dict_btn(grid, config);
        parent_grid.attach(&add_user_dict_btn, 0, 2, 1, 1);
    }
    scroll.set_child(Some(&parent_grid));
    Ok(scroll)
}
// TODO: this should probably have been written with a TreeView, but old
// GTK+ habits got in the way. Hoping someone rewrites it nicely.
/// Appends one dictionary row to `grid` at row index `i`.
///
/// Columns: 0 = path label, 1 = usage combo box, 2 = dictionary type,
/// 3 = encoding combo box, 4 = delete button. Combo-box changes are written
/// back into the shared `Config`, with entries matched by dictionary path.
fn add_row(grid: &Grid, dict_config: &DictConfig, config: &Arc<Mutex<Config>>, i: usize) {
    grid.attach(
        &Label::builder()
            .xalign(0_f32)
            .label(dict_config.path.as_str())
            .build(),
        0,
        i as i32,
        1,
        1,
    );
    {
        // Usage selector (normal / single-term / disabled).
        let cbt = ComboBoxText::builder().build();
        for usage in vec![
            DictUsage::Normal,
            DictUsage::SingleTerm,
            DictUsage::Disabled,
        ] {
            cbt.append(Some(usage.as_str()), usage.text_jp());
        }
        cbt.set_active_id(Some(dict_config.usage.as_str()));
        {
            let config = config.clone();
            let path = dict_config.path.clone();
            cbt.connect_changed(move |f| {
                if let Some(id) = f.active_id() {
                    let mut config = config.lock().unwrap();
                    // Update the matching entry in place...
                    for mut dict in &mut config.engine.dicts {
                        if dict.path == path {
                            dict.usage = DictUsage::from(&id).unwrap();
                            return;
                        }
                    }
                    // ...or append a new entry if the path is unknown.
                    config.engine.dicts.push(DictConfig {
                        dict_type: DictType::SKK,
                        encoding: DictEncoding::EucJp,
                        path: path.to_string(),
                        usage: DictUsage::from(&id).unwrap(),
                    })
                }
            });
        }
        grid.attach(&cbt, 1, i as i32, 1, 1);
    }
    grid.attach(
        &Label::new(Some(dict_config.dict_type.as_str())),
        2,
        i as i32,
        1,
        1,
    );
    {
        // Encoding selector (EUC-JP / UTF-8).
        let cbt = ComboBoxText::builder().build();
        for encoding in vec![DictEncoding::EucJp, DictEncoding::Utf8] {
            cbt.append(
                Some(&encoding.to_string()),
                encoding.as_str().replace('_', "-").as_str(),
            );
        }
        cbt.set_active_id(Some(dict_config.encoding.as_str()));
        {
            let config = config.clone();
            let path = dict_config.path.clone();
            cbt.connect_changed(move |f| {
                if let Some(id) = f.active_id() {
                    let mut config = config.lock().unwrap();
                    for mut dict in &mut config.engine.dicts {
                        if dict.path == path {
                            dict.encoding = DictEncoding::from(&id).unwrap();
                            break;
                        }
                    }
                }
            });
        }
        grid.attach(&cbt, 3, i as i32, 1, 1);
    }
    {
        let delete_btn = {
            let path = dict_config.path.clone();
            let config = config.clone();
            let delete_btn = Button::with_label("ๅ้ค");
            let grid = grid.clone();
            delete_btn.connect_clicked(move |_| {
                let mut config = config.lock().unwrap();
                // NOTE(review): this removes grid row `i` where `i` is the
                // entry's position in the dicts list at click time; rows
                // added or removed since construction can make the list
                // index and the grid row disagree — worth confirming.
                for (i, dict) in &mut config.engine.dicts.iter().enumerate() {
                    if dict.path == path {
                        config.engine.dicts.remove(i);
                        grid.remove_row(i as i32);
                        break;
                    }
                }
            });
            delete_btn
        };
        grid.attach(&delete_btn, 4, i as i32, 1, 1);
    }
}
/// Builds the "add system dictionary" button.
///
/// Clicking it opens a file chooser; the selected file is appended to the
/// dictionary list — both in the shared `Config` and as a new row in the
/// visible grid — as a UTF-8 SKK dictionary with normal usage.
fn build_add_system_dict_btn(config: Arc<Mutex<Config>>, grid: Grid) -> Button {
    let add_btn = Button::with_label("ใทในใใ ่พๆธใฎ่ฟฝๅ ");
    add_btn.connect_clicked(move |_| {
        let dialog = FileChooserDialog::new(
            Some("่พๆธใฎ้ธๆ"),
            None::<&Window>,
            FileChooserAction::Open,
            &[
                ("้ใ", ResponseType::Accept),
                ("ใญใฃใณใปใซ", ResponseType::None),
            ],
        );
        let config = config.clone();
        let grid = grid.clone();
        dialog.connect_response(move |dialog, resp| match resp {
            ResponseType::Accept => {
                let file = dialog.file().unwrap();
                let path = file.path().unwrap();
                info!("File: {:?}", path);
                let dict_config = &DictConfig {
                    path: path.to_string_lossy().to_string(),
                    encoding: DictEncoding::Utf8,
                    usage: DictUsage::Normal,
                    dict_type: DictType::SKK,
                };
                // Existing entries occupy grid rows 0..n-1, so the new row
                // index is the list length *before* the push (len() - 1 after
                // it). The previous code used the post-push len(), which left
                // an empty grid row between the old entries and the new one.
                let row_index = {
                    let mut locked = config.lock().unwrap();
                    locked.engine.dicts.push(dict_config.clone());
                    locked.engine.dicts.len() - 1
                };
                add_row(&grid, dict_config, &config, row_index);
                dialog.close();
            }
            ResponseType::Close
            | ResponseType::Reject
            | ResponseType::Yes
            | ResponseType::No
            | ResponseType::None
            | ResponseType::DeleteEvent => {
                dialog.close();
            }
            _ => {}
        });
        dialog.show();
    });
    add_btn
}
/// Builds the "add user dictionary" button.
///
/// Clicking it opens a modal window asking for a dictionary name; on OK an
/// empty SKK dictionary file is created under the user's XDG data directory
/// and registered in the shared `Config` and the dictionary grid.
fn build_add_user_dict_btn(dict_list_grid: Grid, config: Arc<Mutex<Config>>) -> Button {
    let add_btn = Button::with_label("ใฆใผใถใผ่พๆธใฎ่ฟฝๅ ");
    // Rebind so both values are moved into the click handler below.
    let config = config;
    let dict_list_grid = dict_list_grid;
    add_btn.connect_clicked(move |_| {
        let window = Window::builder()
            .title("ใฆใผใถใผ่พๆธใฎ่ฟฝๅ ")
            .default_width(300)
            .default_height(100)
            .destroy_with_parent(true)
            .modal(true)
            .build();
        let grid = Grid::builder().build();
        let label = TextView::builder()
            .buffer(&TextBuffer::builder().text("่พๆธๅ: ").build())
            .build();
        grid.attach(&label, 0, 0, 1, 1);
        let text = Text::builder().build();
        grid.attach(&text, 1, 0, 2, 1);
        let ok_btn = {
            let window = window.clone();
            let ok_btn = Button::with_label("OK");
            let text = text.clone();
            let config = config.clone();
            let dict_list_grid = dict_list_grid.clone();
            ok_btn.set_sensitive(false);
            ok_btn.connect_clicked(move |_| match create_user_dict(&text.buffer().text()) {
                Ok(path) => {
                    let dict_config = DictConfig {
                        path: path.to_string_lossy().to_string(),
                        encoding: DictEncoding::Utf8,
                        dict_type: DictType::SKK,
                        usage: DictUsage::Normal,
                    };
                    let mut locked_conf = config.lock().unwrap();
                    // Row index = current list length (rows 0..n-1 are taken),
                    // so the row is added before the new entry is pushed.
                    add_row(
                        &dict_list_grid,
                        &dict_config,
                        &config,
                        locked_conf.engine.dicts.len(),
                    );
                    locked_conf.engine.dicts.push(dict_config);
                    window.close();
                }
                Err(err) => {
                    let dialog = MessageDialogBuilder::new()
                        .message_type(MessageType::Error)
                        .text(&format!("Error: {err}"))
                        .build();
                    dialog.show();
                }
            });
            grid.attach(&ok_btn, 1, 1, 1, 1);
            ok_btn
        };
        {
            let window = window.clone();
            let cancel_btn = Button::with_label("Cancel");
            cancel_btn.connect_clicked(move |_| {
                window.close();
            });
            grid.attach(&cancel_btn, 2, 1, 1, 1);
        }
        // Keep the OK button disabled while the dictionary-name entry is empty.
        text.buffer().connect_length_notify(move |t| {
            ok_btn.set_sensitive(!t.text().is_empty());
        });
        window.set_child(Some(&grid));
        window.show();
    });
    add_btn
}
/// Ensures a user dictionary file called `name` exists under the XDG data
/// directory (`…/akaza/userdict/<name>`) and returns its full path.
///
/// # Errors
///
/// Fails when the XDG base directory cannot be resolved, the directory
/// cannot be created, or the empty dictionary cannot be written.
fn create_user_dict(name: &str) -> anyhow::Result<PathBuf> {
    let dict_dir = xdg::BaseDirectories::with_prefix("akaza")?.create_data_directory("userdict")?;
    let dict_path = dict_dir.join(name);
    // When the file does not exist yet, seed it with an empty SKK dictionary.
    if !dict_path.exists() {
        write_skk_dict(&dict_path.to_string_lossy(), vec![])?;
    }
    Ok(dict_path)
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-conf/src/pane/core_pane.rs | akaza-conf/src/pane/core_pane.rs | use gtk4::prelude::{CheckButtonExt, ComboBoxExt, GridExt};
use gtk4::{CheckButton, ComboBoxText, Grid, Label};
use libakaza::config::Config;
use log::info;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
/// Builds the core-settings tab: keymap, romaji-kana table, language model
/// and the live-conversion toggle, each wired to the shared `Config`.
pub fn build_core_pane(config: Arc<Mutex<Config>>) -> anyhow::Result<Grid> {
    // Allow configuring the keymap, the romaji-kana table and the model.
    let grid = Grid::new();
    // xalign 0.0 left-aligns the label text.
    grid.attach(
        &Label::builder().label("ใญใผใใใ").xalign(0_f32).build(),
        0,
        0,
        1,
        1,
    );
    grid.attach(
        &{
            // Keymap selector: id = file path, label = file stem.
            let cbt = ComboBoxText::new();
            let keymap = get_list("keymap", |f| f.to_string_lossy().ends_with(".yml"));
            for item in keymap {
                cbt.append(Some(&item.path), &item.name);
            }
            cbt.set_active_id(Some(&config.lock().unwrap().keymap));
            {
                let config = config.clone();
                cbt.connect_changed(move |f| {
                    if let Some(id) = f.active_id() {
                        config.lock().unwrap().keymap = id.to_string();
                    }
                });
            }
            cbt
        },
        1,
        0,
        1,
        1,
    );
    grid.attach(
        &Label::builder()
            .label("ใญใผใๅญใใผใใซ")
            .xalign(0_f32)
            .build(),
        0,
        1,
        1,
        1,
    );
    grid.attach(
        &{
            // Romaji-kana table selector.
            let cbt = ComboBoxText::new();
            let romkan = get_list("romkan", |f| f.to_string_lossy().ends_with(".yml"));
            info!("romkan: {:?}", romkan);
            for item in romkan {
                cbt.append(Some(&item.path), &item.name);
            }
            cbt.set_active_id(Some(&config.lock().unwrap().romkan));
            let config = config.clone();
            cbt.connect_changed(move |f| {
                if let Some(id) = f.active_id() {
                    config.lock().unwrap().romkan = id.to_string();
                }
            });
            cbt
        },
        1,
        1,
        1,
        1,
    );
    grid.attach(
        &Label::builder().label("่จ่ชใขใใซ").xalign(0_f32).build(),
        0,
        2,
        1,
        1,
    );
    grid.attach(
        &{
            // Language-model selector; hidden files (dot-prefixed) excluded.
            let cbt = ComboBoxText::new();
            let model = get_list("model", {
                |f| !f.file_name().unwrap().to_string_lossy().starts_with('.')
            });
            info!("model: {:?}", model);
            for item in model {
                cbt.append(Some(&item.path), &item.name);
            }
            cbt.set_active_id(Some(&config.lock().unwrap().engine.model));
            let config = config.clone();
            cbt.connect_changed(move |f| {
                if let Some(id) = f.active_id() {
                    config.lock().unwrap().engine.model = id.to_string();
                }
            });
            cbt
        },
        1,
        2,
        1,
        1,
    );
    {
        // Live-conversion on/off toggle.
        let check_box = CheckButton::builder()
            .label("ใฉใคใๅคๆ")
            .active(config.lock().unwrap().live_conversion)
            .build();
        grid.attach(&check_box, 0, 3, 1, 1);
        check_box.connect_toggled(move |f| {
            config.lock().unwrap().live_conversion = f.is_active();
        });
    }
    Ok(grid)
}
/// Collect selectable configuration entries from the given XDG data
/// subdirectory under "akaza" (e.g. "keymap", "romkan", "model").
///
/// `filter` decides which files are listed. Each entry's `name` is the
/// file name with a trailing ".yml" extension removed, and `path` is the
/// file's full path as a string.
pub(crate) fn get_list<P>(path: &str, filter: P) -> Vec<PathConfItem>
where
    P: FnMut(&&PathBuf) -> bool,
{
    let files = xdg::BaseDirectories::with_prefix("akaza")
        .unwrap()
        .list_data_files(path);
    files
        .iter()
        .filter(filter)
        .map(|f| {
            let file_name = f.as_path().file_name().unwrap().to_str().unwrap();
            PathConfItem {
                // Strip only a trailing ".yml"; the previous
                // `replace(".yml", "")` would also mangle a ".yml"
                // occurring in the middle of a file name.
                name: file_name
                    .strip_suffix(".yml")
                    .unwrap_or(file_name)
                    .to_string(),
                path: f.to_string_lossy().to_string(),
            }
        })
        .collect::<Vec<_>>()
}
/// One selectable entry shown in the configuration combo boxes.
#[derive(Debug)]
pub(crate) struct PathConfItem {
    /// Display name: the file name with its ".yml" extension stripped.
    name: String,
    /// Full path to the file; used as the combo-box item id.
    path: String,
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-conf/src/pane/mod.rs | akaza-conf/src/pane/mod.rs | pub mod about_pane;
pub mod core_pane;
pub mod dict_pane;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-conf/src/pane/about_pane.rs | akaza-conf/src/pane/about_pane.rs | use gtk4::Label;
/// Build the label shown on the "About" tab: the application name plus
/// the crate version baked in at compile time.
pub fn build_about_pane() -> Label {
    let caption = format!("Akaza version {}", env!("CARGO_PKG_VERSION"));
    Label::new(Some(caption.as_str()))
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-conf/src/bin/akaza-conf.rs | akaza-conf/src/bin/akaza-conf.rs | use akaza_conf::conf::open_configuration_window;
use anyhow::Result;
use log::LevelFilter;
/// Debug entry point: opens the configuration window directly, without
/// going through ibus.
fn main() -> Result<()> {
    // Ignore the error if a global logger is already installed.
    let _ = env_logger::builder()
        .filter_level(LevelFilter::Info)
        .try_init();
    open_configuration_window()?;
    Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/marisa-sys/build.rs | marisa-sys/build.rs | extern crate cc;
/// Build script: compile the C++ shim around libmarisa and link against
/// the system-installed marisa library.
fn main() {
    let mut build = cc::Build::new();
    build.cpp(true);
    build.file("wrapper.cc");
    build.include("wrapper");
    build.compile("wrapper");

    println!("cargo:rustc-link-lib=marisa");
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/marisa-sys/src/bindings.rs | marisa-sys/src/bindings.rs | // ---------------------------------------------------
// low level C wrappers
// ---------------------------------------------------
use std::ffi::{c_char, CString};
use std::os::raw::c_void;
use anyhow::{anyhow, Result};
/// Opaque handle to a `marisa::Trie` allocated on the C++ side.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
struct marisa_obj {
    trie: *mut c_void,
}
// FIXME: asserting Send for a struct holding a raw C pointer is an
// unchecked claim; the original author marked this for removal.
unsafe impl Send for marisa_obj {}
/// Opaque handle to a `marisa::Keyset` allocated on the C++ side.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
struct marisa_keyset {
    keyset: *mut c_void,
}

/// Error information returned by the wrapper when a marisa call fails.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
struct marisa_exception {
    /// C string describing the error, allocated by the wrapper.
    error_message: *mut c_char,
}

/// Callback invoked once per matched key during a search.
/// The bool result presumably stops the walk when false — confirm
/// against wrapper.cc.
pub type marisa_callback =
    unsafe extern "C" fn(user_data: *mut c_void, key: *const u8, len: usize, id: usize) -> bool;
// Raw FFI into the C wrapper. Every function is unsafe to call; the
// handle arguments must come from the matching `*_new` constructors.
extern "C" {
    fn marisa_new() -> *mut marisa_obj;
    fn marisa_release(self_: *mut marisa_obj);
    fn marisa_build(self_: *mut marisa_obj, keyset: *mut marisa_keyset);
    // load/save return null on success, or a heap-allocated exception
    // carrying an error message on failure (see Marisa::load / ::save).
    fn marisa_load(self_: *mut marisa_obj, filename: *const c_char) -> *mut marisa_exception;
    fn marisa_save(self_: *mut marisa_obj, filename: *const c_char) -> *mut marisa_exception;
    fn marisa_predictive_search(
        self_: *mut marisa_obj,
        query: *const u8,
        query_len: usize,
        user_data: *mut c_void,
        cb: marisa_callback,
    );
    fn marisa_common_prefix_search(
        self_: *mut marisa_obj,
        query: *const u8,
        query_len: usize,
        user_data: *mut c_void,
        cb: marisa_callback,
    );
    fn marisa_num_keys(self_: *mut marisa_obj) -> usize;
    fn marisa_keyset_new() -> *mut marisa_keyset;
    fn marisa_keyset_push_back(self_: *mut marisa_keyset, ptr: *const u8, len: usize);
    fn marisa_keyset_release(self_: *mut marisa_keyset);
    fn marisa_exception_release(self_: *mut marisa_exception);
}
// ---------------------------------------------------
// high level API
// ---------------------------------------------------

/// Signature of the closure handed to the search methods: it receives
/// the matched key's bytes and its trie id.
pub type PredictiveSearchCallback = dyn FnMut(&[u8], usize) -> bool;

/// High-level wrapper around a C++ `marisa::Trie` handle.
pub struct Marisa {
    marisa: *mut marisa_obj,
}
impl Default for Marisa {
    /// Allocate a fresh, empty trie handle via the C wrapper.
    fn default() -> Marisa {
        let handle = unsafe { marisa_new() };
        Marisa { marisa: handle }
    }
}
// NOTE(review): `Marisa` has no Drop impl, so the underlying trie is
// never freed (`marisa_release` is declared but unused). That is fine
// if every trie lives for the process lifetime, but worth confirming.
impl Marisa {
    /// Load a trie from `filename`.
    ///
    /// # Errors
    /// Returns the wrapper's error message (e.g. a `MARISA_IO_*` code)
    /// when the file cannot be read.
    pub fn load(&mut self, filename: &str) -> Result<()> {
        unsafe {
            let cstring = CString::new(filename).unwrap();
            let exc = marisa_load(self.marisa, cstring.as_ptr());
            if exc.is_null() {
                Ok(())
            } else {
                // CString::from_raw takes ownership of error_message and
                // frees it on drop.
                // NOTE(review): the exception struct itself is never
                // released (`marisa_exception_release` is unused) — looks
                // like a small leak on the error path; confirm in the
                // wrapper code.
                Err(anyhow!(
                    "Cannot load file: {}, file={}",
                    CString::from_raw((*exc).error_message)
                        .into_string()
                        .unwrap(),
                    filename
                ))
            }
        }
    }

    /// Build the trie from the given keyset.
    pub fn build(&mut self, keyset: &Keyset) {
        unsafe {
            marisa_build(self.marisa, keyset.keyset);
        }
    }

    /// Write the trie to `filename`.
    ///
    /// # Errors
    /// Returns the wrapper's error message when the file cannot be written.
    pub fn save(&self, filename: &str) -> Result<()> {
        unsafe {
            let cstring = CString::new(filename).unwrap();
            let exc = marisa_save(self.marisa, cstring.as_ptr());
            if exc.is_null() {
                Ok(())
            } else {
                Err(anyhow!(
                    "Cannot save marisa file: {}, filename={}",
                    CString::from_raw((*exc).error_message)
                        .into_string()
                        .unwrap(),
                    filename
                ))
            }
        }
    }

    /// Number of keys stored in the trie.
    pub fn num_keys(&self) -> usize {
        unsafe { marisa_num_keys(self.marisa) }
    }

    /// C-callable shim that forwards each search hit to the Rust closure
    /// smuggled through `cookie`.
    ///
    /// Internal safety requirement: `cookie` must point at a live `F`,
    /// and `s`/`len` must describe a valid byte slice for the call.
    unsafe extern "C" fn trampoline<F>(
        cookie: *mut c_void,
        s: *const u8,
        len: usize,
        id: usize,
    ) -> bool
    where
        F: FnMut(&[u8], usize) -> bool,
    {
        let cookie = &mut *(cookie as *mut F);
        let cs = std::slice::from_raw_parts(s, len);
        cookie(cs, id)
    }

    /// Pick the monomorphised trampoline matching the closure's type.
    fn get_trampoline<F>(_closure: &F) -> marisa_callback
    where
        F: FnMut(&[u8], usize) -> bool,
    {
        Marisa::trampoline::<F>
    }

    /// Invoke `callback(key_bytes, id)` for every key that starts with
    /// `query`.
    pub fn predictive_search<F>(&self, query: &[u8], callback: F)
    where
        F: FnMut(&[u8], usize) -> bool,
    {
        let mut closure = callback;
        let cb = Marisa::get_trampoline(&closure);
        unsafe {
            marisa_predictive_search(
                self.marisa,
                query.as_ptr(),
                query.len(),
                &mut closure as *mut _ as *mut c_void,
                cb,
            );
        }
    }

    /// Invoke `callback(key_bytes, id)` for every stored key that is a
    /// prefix of `query`.
    pub fn common_prefix_search<F>(&self, query: &str, callback: F)
    where
        F: FnMut(&[u8], usize) -> bool,
    {
        let mut closure = callback;
        let cb = Marisa::get_trampoline(&closure);
        unsafe {
            marisa_common_prefix_search(
                self.marisa,
                query.as_ptr(),
                query.len(),
                &mut closure as *mut _ as *mut c_void,
                cb,
            );
        }
    }
}
/// Owning wrapper around a C++ `marisa::Keyset`; released on Drop.
pub struct Keyset {
    keyset: *mut marisa_keyset,
}
impl Default for Keyset {
    /// Allocate an empty keyset via the C wrapper.
    fn default() -> Self {
        let keyset = unsafe { marisa_keyset_new() };
        Keyset { keyset }
    }
}
impl Keyset {
    /// Append one key (arbitrary bytes) to the keyset.
    pub fn push_back(&mut self, key: &[u8]) {
        unsafe {
            marisa_keyset_push_back(self.keyset, key.as_ptr(), key.len());
        }
    }
}
impl Drop for Keyset {
    // Release the C++-side keyset allocation.
    fn drop(&mut self) {
        unsafe {
            marisa_keyset_release(self.keyset);
        }
    }
}
#[cfg(test)]
mod tests {
    use tempfile::NamedTempFile;

    use super::{Keyset, Marisa};

    /// Round-trip: build a three-key trie, save it, reload it, then walk
    /// it with predictive_search.
    #[test]
    fn test() {
        let tmpfile = NamedTempFile::new().unwrap();
        let tmpfile = tmpfile.path().to_str().unwrap().to_string();
        // let tmpfile = "/tmp/test.trie".to_string();
        {
            let mut keyset = Keyset::default();
            keyset.push_back("apple".as_bytes());
            keyset.push_back("age".as_bytes());
            keyset.push_back("hola".as_bytes());
            let mut marisa = Marisa::default();
            marisa.build(&keyset);
            marisa.save(&tmpfile).unwrap();
            assert_eq!(marisa.num_keys(), 3)
        }
        // read it
        {
            let mut marisa = Marisa::default();
            marisa.load(&tmpfile).unwrap();
            assert_eq!(marisa.num_keys(), 3);

            let mut i = 0;
            let mut got: Vec<(String, usize)> = Vec::new();
            assert_eq!("a".as_bytes().len(), 1);
            marisa.predictive_search("a".as_bytes(), |bytes, id| {
                i += 1;
                let key = String::from_utf8(bytes.to_vec()).unwrap();
                got.push((key, id));
                true
            });
            // Only "age" and "apple" start with "a"; they come back in
            // lexicographic order.
            assert_eq!(i, 2);
            assert_eq!(got.len(), 2);
            assert_eq!(got[0].0, "age");
            assert_eq!(got[1].0, "apple");
        }
    }

    /// Loading a nonexistent path must surface a MARISA_IO_* error.
    #[test]
    fn test_exc() {
        {
            let mut marisa = Marisa::default();
            let result = marisa.load("UNKNOWN_PATH");
            if let Err(err) = result {
                assert!(err.to_string().contains("MARISA_IO_"));
            } else {
                panic!() // unreachable
            }
        }
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/marisa-sys/src/lib.rs | marisa-sys/src/lib.rs | #![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(dead_code)]
// Lints are relaxed because the FFI declarations use C-style names.

extern crate alloc;

// Pull the FFI declarations and the safe wrapper into the crate root.
include!("bindings.rs");
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/build.rs | ibus-akaza/build.rs | extern crate cc;
use std::io::Read;
use std::process::{Command, Stdio};
/// Run `pkg-config <module> <flag>` and return the whitespace-separated
/// tokens from its stdout (the individual `-I...` / `-l...` flags).
///
/// Uses `Command::output`, which waits for the child and collects its
/// stdout (the previous spawn-and-read never reaped the child).
/// `split_whitespace` also guarantees that empty output yields an empty
/// vector instead of a single empty-string flag, and `from_utf8_lossy`
/// avoids panicking on non-UTF-8 output.
///
/// # Panics
/// Panics if the pkg-config binary cannot be executed at all.
fn pkgconfig(module: &str, flag: &str) -> Vec<String> {
    let output = Command::new("pkg-config")
        .arg(module)
        .arg(flag)
        .output()
        .expect("Failed to run pkg-config");
    String::from_utf8_lossy(&output.stdout)
        .split_whitespace()
        .map(str::to_string)
        .collect()
}
/// Build script: compile wrapper.c against the ibus-1.0 headers and
/// forward pkg-config's linker flags to cargo.
fn main() {
    println!("cargo:rustc-link-lib=ibus-1.0");
    println!("cargo:rerun-if-changed=wrapper.c");
    println!("cargo:rerun-if-changed=wrapper.h");

    let mut build = cc::Build::new();
    build.file("wrapper.c");
    build.include("wrapper");

    // Normally, I dislike following options.
    // But, it's a temporary code.
    // TODO: remove following options.
    build.flag("-Wno-unused-parameter");
    build.flag("-Wno-sign-compare");
    build.flag("-Wno-incompatible-pointer-types");

    let module = "ibus-1.0";
    for cflag in pkgconfig(module, "--cflags") {
        build.flag(cflag.as_str());
    }
    for libflag in pkgconfig(module, "--libs") {
        println!("cargo:rustc-link-arg={libflag}");
    }

    build.compile("wrapper");
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/src/commands.rs | ibus-akaza/src/commands.rs | use std::collections::HashMap;
use ibus_sys::engine::IBusEngine;
use crate::input_mode::{
INPUT_MODE_ALNUM, INPUT_MODE_FULLWIDTH_ALNUM, INPUT_MODE_HALFWIDTH_KATAKANA,
INPUT_MODE_HIRAGANA, INPUT_MODE_KATAKANA,
};
use crate::AkazaContext;
/// A function that can be bound to a (configurable) shortcut key.
/// The boolean result tells the dispatcher whether the key event was
/// handled (see `process_key_event`).
pub type IbusAkazaCommand = fn(&mut AkazaContext, *mut IBusEngine) -> bool;
/// Build the table mapping keymap command names to their handlers.
/// These names are what user keymap files refer to.
pub(crate) fn ibus_akaza_commands_map() -> HashMap<&'static str, IbusAkazaCommand> {
    let mut function_map: HashMap<&'static str, IbusAkazaCommand> = HashMap::new();

    // shorthand
    let mut register = |name: &'static str, cmd: IbusAkazaCommand| function_map.insert(name, cmd);

    register("commit_candidate", |context, engine| {
        context.commit_candidate(engine);
        true
    });
    // In the unconverted state this converts the preedit to hiragana
    // and commits it.
    register("commit_preedit", |context, engine| {
        context.commit_preedit(engine);
        true
    });
    register("escape", |context, engine| {
        context.escape(engine);
        true
    });
    register("page_up", |context, engine| {
        context.page_up(engine);
        true
    });
    register("page_down", |context, engine| {
        context.page_down(engine);
        true
    });
    register("set_input_mode_hiragana", |context, engine| {
        context.set_input_mode(engine, &INPUT_MODE_HIRAGANA);
        true
    });
    register("set_input_mode_alnum", |context, engine| {
        context.set_input_mode(engine, &INPUT_MODE_ALNUM);
        true
    });
    register("set_input_mode_fullwidth_alnum", |context, engine| {
        context.set_input_mode(engine, &INPUT_MODE_FULLWIDTH_ALNUM);
        true
    });
    register("set_input_mode_katakana", |context, engine| {
        context.set_input_mode(engine, &INPUT_MODE_KATAKANA);
        true
    });
    register("set_input_mode_halfwidth_katakana", |context, engine| {
        context.set_input_mode(engine, &INPUT_MODE_HALFWIDTH_KATAKANA);
        true
    });
    register("update_candidates", |context, engine| {
        context.update_candidates(engine)
    });
    register("erase_character_before_cursor", |context, engine| {
        context.erase_character_before_cursor(engine);
        true
    });
    register("cursor_up", |context, engine| {
        context.cursor_up(engine);
        true
    });
    register("cursor_down", |context, engine| {
        context.cursor_down(engine);
        true
    });
    register("cursor_right", |context, engine| {
        context.cursor_right(engine);
        true
    });
    register("cursor_left", |context, engine| {
        context.cursor_left(engine);
        true
    });
    register("extend_clause_right", |context, engine| {
        context.extend_clause_right(engine).unwrap();
        true
    });
    register("extend_clause_left", |context, engine| {
        context.extend_clause_left(engine).unwrap();
        true
    });
    register("convert_to_full_hiragana", |context, engine| {
        context.convert_to_full_hiragana(engine).unwrap();
        true
    });
    register("convert_to_full_katakana", |context, engine| {
        context.convert_to_full_katakana(engine).unwrap();
        true
    });
    register("convert_to_half_katakana", |context, engine| {
        context.convert_to_half_katakana(engine).unwrap();
        true
    });
    register("convert_to_full_romaji", |context, engine| {
        context.convert_to_full_romaji(engine).unwrap();
        true
    });
    register("convert_to_half_romaji", |context, engine| {
        context.convert_to_half_romaji(engine).unwrap();
        true
    });
    {
        // TODO: find a way to register the digit handlers (and friends)
        // in bulk instead of one closure per key.
        register("press_number_1", |context, engine| {
            context.process_num_key(1, engine)
        });
        register("press_number_2", |context, engine| {
            context.process_num_key(2, engine)
        });
        register("press_number_3", |context, engine| {
            context.process_num_key(3, engine)
        });
        register("press_number_4", |context, engine| {
            context.process_num_key(4, engine)
        });
        register("press_number_5", |context, engine| {
            context.process_num_key(5, engine)
        });
        register("press_number_6", |context, engine| {
            context.process_num_key(6, engine)
        });
        register("press_number_7", |context, engine| {
            context.process_num_key(7, engine)
        });
        register("press_number_8", |context, engine| {
            context.process_num_key(8, engine)
        });
        register("press_number_9", |context, engine| {
            context.process_num_key(9, engine)
        });
        register("press_number_0", |context, engine| {
            context.process_num_key(0, engine)
        });
    }
    function_map
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/src/wrapper_bindings.rs | ibus-akaza/src/wrapper_bindings.rs | #![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
#![allow(dead_code)]
use std::ffi::c_void;
use ibus_sys::engine::IBusEngine;
use ibus_sys::glib::{gchar, guint};
// FFI for the wrapper.c

/// Key press/release hook; the result reports whether akaza consumed
/// the event.
pub(crate) type ibus_akaza_callback_key_event = unsafe extern "C" fn(
    context: *mut c_void,
    engine: *mut IBusEngine,
    keyval: guint,
    keycode: guint,
    modifiers: guint,
) -> bool;

/// Invoked when the user clicks an entry in the candidate lookup table.
pub(crate) type ibus_akaza_callback_candidate_clicked = unsafe extern "C" fn(
    context: *mut c_void,
    engine: *mut IBusEngine,
    index: guint,
    button: guint,
    state: guint,
);

/// Invoked when an ibus property (menu item) is activated.
pub(crate) type ibus_akaza_callback_property_activate = unsafe extern "C" fn(
    context: *mut c_void,
    engine: *mut IBusEngine,
    prop_name: *mut gchar,
    prop_state: guint,
);

/// Invoked when the engine gains input focus.
pub(crate) type ibus_akaza_callback_focus_in =
    unsafe extern "C" fn(context: *mut c_void, engine: *mut IBusEngine);

extern "C" {
    /// is_ibus: true if the project run with `--ibus` option.
    pub fn ibus_akaza_init(is_ibus: bool);

    /// Register the Rust callbacks together with the opaque `context`
    /// pointer that the C side passes back into every callback.
    pub(crate) fn ibus_akaza_set_callback(
        context: *mut c_void,
        key_event_cb: ibus_akaza_callback_key_event,
        candidate_cb: ibus_akaza_callback_candidate_clicked,
        focus_in_cb: ibus_akaza_callback_focus_in,
        property_activate: ibus_akaza_callback_property_activate,
    );
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/src/keymap.rs | ibus-akaza/src/keymap.rs | use alloc::ffi::CString;
use std::collections::HashMap;
use log::{error, trace};
use ibus_sys::core::{IBusModifierType_IBUS_CONTROL_MASK, IBusModifierType_IBUS_SHIFT_MASK};
use ibus_sys::glib::guint;
use ibus_sys::ibus_key::IBUS_KEY_VoidSymbol;
use ibus_sys::keys::ibus_keyval_from_name;
use libakaza::keymap::{KeyPattern, KeyState};
/// Hash key for one concrete binding: the editor state it applies to,
/// the ibus keyval and the modifier bitmask.
#[derive(Hash, PartialEq)]
struct IBusKeyPattern {
    key_state: KeyState,
    keyval: u32,
    modifier: u32,
}

// Manual empty impl instead of derive(Eq), which would require Eq on
// the field types as well.
impl Eq for IBusKeyPattern {}

impl IBusKeyPattern {
    fn new(key_state: KeyState, keyval: u32, modifier: u32) -> Self {
        IBusKeyPattern {
            key_state,
            keyval,
            modifier,
        }
    }
}
/// Keymap resolved into ibus terms:
/// (state, keyval, modifiers) -> command name.
pub struct IBusKeyMap {
    keymap: HashMap<IBusKeyPattern, String>,
}
impl IBusKeyMap {
    /// Resolve a key-name string to an ibus keyval.
    fn to_ibus_key(s: &str) -> guint {
        let cs = CString::new(s.to_string()).unwrap();
        unsafe { ibus_keyval_from_name(cs.as_ptr()) }
    }

    /// Translate the parsed libakaza keymap into an ibus lookup table.
    /// Bindings whose key name cannot be resolved are logged and skipped.
    pub(crate) fn new(keymap: HashMap<KeyPattern, String>) -> anyhow::Result<Self> {
        let mut mapping: HashMap<IBusKeyPattern, String> = HashMap::new();

        for (key_pattern, command) in keymap {
            let key = &key_pattern.key;
            // Fold the ctrl/shift flags into an ibus modifier bitmask.
            let mut modifier = 0_u32;
            if key_pattern.ctrl {
                modifier |= IBusModifierType_IBUS_CONTROL_MASK;
            }
            if key_pattern.shift {
                modifier |= IBusModifierType_IBUS_SHIFT_MASK;
            }
            let keyval = Self::to_ibus_key(key.as_str());
            if keyval == IBUS_KEY_VoidSymbol {
                // VoidSymbol marks an unresolvable key name.
                error!("Unknown key symbol: {} {:?}", key, key_pattern);
                continue;
            }
            trace!("Insert: {} {} {} {:?}", modifier, keyval, key, key_pattern);
            // One binding may apply in several editor states.
            for state in &key_pattern.states {
                mapping.insert(
                    IBusKeyPattern::new(*state, keyval, modifier),
                    command.clone(),
                );
            }
        }
        Ok(IBusKeyMap { keymap: mapping })
    }

    /// Look up the command bound to (state, keyval, modifier), if any.
    pub fn get(&self, key_state: &KeyState, keyval: u32, modifier: u32) -> Option<&String> {
        trace!("MODIFIER: {}", modifier);
        self.keymap
            .get(&IBusKeyPattern::new(*key_state, keyval, modifier))
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/src/main.rs | ibus-akaza/src/main.rs | #![allow(non_upper_case_globals)]
extern crate alloc;
use std::ffi::{c_char, c_void, CStr};
use std::sync::{Arc, Mutex};
use std::time::SystemTime;
use std::{thread, time};
use anyhow::Result;
use clap::Parser;
use log::{error, info, warn};
use ibus_sys::core::ibus_main;
use ibus_sys::engine::IBusEngine;
use ibus_sys::glib::{gchar, guint};
use libakaza::config::Config;
use libakaza::engine::bigram_word_viterbi_engine::BigramWordViterbiEngineBuilder;
use libakaza::user_side_data::user_data::UserData;
use crate::context::AkazaContext;
use crate::wrapper_bindings::{ibus_akaza_init, ibus_akaza_set_callback};
mod commands;
mod context;
mod current_state;
mod input_mode;
mod keymap;
mod ui;
mod wrapper_bindings;
/// C trampoline: recover the `AkazaContext` from the void pointer
/// registered via `ibus_akaza_set_callback` and forward the key event.
///
/// # Safety
/// `context` must point at a live `AkazaContext`.
unsafe extern "C" fn process_key_event(
    context: *mut c_void,
    engine: *mut IBusEngine,
    keyval: guint,
    keycode: guint,
    modifiers: guint,
) -> bool {
    let context_ref = &mut *(context as *mut AkazaContext);
    context_ref.process_key_event(engine, keyval, keycode, modifiers)
}

/// C trampoline for clicks on the candidate lookup table.
///
/// # Safety
/// `context` must point at a live `AkazaContext`.
unsafe extern "C" fn candidate_clicked(
    context: *mut c_void,
    engine: *mut IBusEngine,
    index: guint,
    button: guint,
    state: guint,
) {
    let context_ref = &mut *(context as *mut AkazaContext);
    context_ref.do_candidate_clicked(engine, index, button, state);
}

/// C trampoline for the focus-in event.
///
/// # Safety
/// `context` must point at a live `AkazaContext`.
unsafe extern "C" fn focus_in(context: *mut c_void, engine: *mut IBusEngine) {
    let context_ref = &mut *(context as *mut AkazaContext);
    context_ref.do_focus_in(engine);
}

/// C trampoline for property (menu item) activation; converts the C
/// string property name into an owned Rust `String`.
///
/// # Safety
/// `context` must point at a live `AkazaContext` and `prop_name` must
/// be a valid NUL-terminated C string.
unsafe extern "C" fn property_activate(
    context: *mut c_void,
    engine: *mut IBusEngine,
    prop_name: *mut gchar,
    prop_state: guint,
) {
    let context_ref = &mut *(context as *mut AkazaContext);
    context_ref.do_property_activate(
        engine,
        CStr::from_ptr(prop_name as *mut c_char)
            .to_string_lossy()
            .to_string(),
        prop_state,
    );
}
/// Load the user's learning data, falling back to an empty default
/// (with an error log) when loading fails.
fn load_user_data() -> Arc<Mutex<UserData>> {
    let user_data = UserData::load_from_default_path().unwrap_or_else(|err| {
        error!("Cannot load user data: {}", err);
        UserData::default()
    });
    Arc::new(Mutex::new(user_data))
}
// Command-line arguments for the ibus-akaza engine binary.
// (Plain `//` comments on purpose: doc comments on clap-derive items
// would change the generated --help text.)
#[derive(Debug, clap::Parser)]
#[command(author, version, about, long_about = None)]
struct IBusAkazaArgs {
    // Set when launched by the ibus daemon (see ibus_akaza_init).
    #[clap(long)]
    ibus: bool,
    // -v / -q flags controlling the log verbosity.
    #[clap(flatten)]
    verbose: clap_verbosity_flag::Verbosity,
}
fn main() -> Result<()> {
    let arg: IBusAkazaArgs = IBusAkazaArgs::parse();

    let logpath = xdg::BaseDirectories::with_prefix("akaza")?
        .create_cache_directory("logs")?
        .join("ibus-akaza.log");
    println!("log file path: {}", logpath.to_string_lossy());

    // Write the log to a file: ~/.cache/akaza/logs/ibus-akaza.log.
    // https://superuser.com/questions/1293842/where-should-userspecific-application-log-files-be-stored-in-gnu-linux
    fern::Dispatch::new()
        .format(|out, message, record| {
            out.finish(format_args!(
                "{}[{}][{}] {}",
                chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
                record.target(),
                record.level(),
                message
            ))
        })
        .level(arg.verbose.log_level_filter())
        .chain(std::io::stdout())
        .chain(fern::log_file(logpath)?)
        .apply()?;

    info!("Starting ibus-akaza(rust version)");

    unsafe {
        let sys_time = SystemTime::now();
        let user_data = load_user_data();
        // NOTE(review): Config::load() runs twice here; the second load
        // could disagree with `config` if the file changes in between.
        let config = Config::load()?;
        let akaza = BigramWordViterbiEngineBuilder::new(Config::load()?.engine)
            .user_data(user_data.clone())
            .build()?;
        // NOTE(review): AkazaContext::new in context.rs returns
        // anyhow::Result<Self>, yet no `?`/unwrap appears here — if that
        // holds for this revision, the raw pointer below would point at
        // the Result wrapper rather than the context. Confirm.
        let mut ac = AkazaContext::new(akaza, config);
        let new_sys_time = SystemTime::now();
        let difference = new_sys_time.duration_since(sys_time)?;
        info!(
            "Initialized ibus-akaza engine in {} milliseconds.",
            difference.as_millis()
        );

        // Background thread that periodically saves the user dictionary.
        thread::Builder::new()
            .name("user-data-save-thread".to_string())
            .spawn(move || {
                let interval = time::Duration::from_secs(3);
                // A panic inside this thread would kill it outright, so
                // handle each failure path explicitly instead.
                loop {
                    if let Ok(mut data) = user_data.lock() {
                        if let Err(e) = data.write_user_files() {
                            warn!("Cannot save user stats file: {}", e);
                        }
                    } else {
                        warn!("Cannot get mutex for saving user data")
                    };
                    thread::sleep(interval);
                }
            })?;

        ibus_akaza_set_callback(
            &mut ac as *mut _ as *mut c_void,
            process_key_event,
            candidate_clicked,
            focus_in,
            property_activate,
        );
        ibus_akaza_init(arg.ibus);
        info!("Enter the ibus_main()");
        // run main loop
        ibus_main();
        warn!("Should not reach here.");
    }
    Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/src/input_mode.rs | ibus-akaza/src/input_mode.rs | use anyhow::bail;
/// One input mode (hiragana, katakana, ...) selectable from the ibus
/// property menu.
#[derive(Copy, Clone, Debug)]
pub struct InputMode {
    /// ibus property name, e.g. "InputMode.Hiragana".
    pub prop_name: &'static str,
    /// Stable numeric id; also the basis of equality (see PartialEq).
    pub mode_code: i32,
    /// Short symbol string for this mode.
    pub symbol: &'static str,
    /// Display label, including the shortcut hint.
    pub label: &'static str,
}
impl PartialEq for InputMode {
    // Two modes are equal iff their mode_code matches; the display
    // strings are deliberately ignored.
    fn eq(&self, other: &Self) -> bool {
        self.mode_code == other.mode_code
    }
}
impl InputMode {
    /// Const constructor so the modes below can be plain `pub const`s.
    const fn new(
        prop_name: &'static str,
        mode_code: i32,
        symbol: &'static str,
        label: &'static str,
    ) -> InputMode {
        InputMode {
            prop_name,
            mode_code,
            symbol,
            label,
        }
    }
}
// The selectable input modes. Labels are Japanese display strings that
// include the shortcut hint shown in the mode menu.
pub const INPUT_MODE_ALNUM: InputMode =
    InputMode::new("InputMode.Alphanumeric", 0, "_A", "ใขใซใใกใใใ (C-S-;)");
pub const INPUT_MODE_HIRAGANA: InputMode =
    InputMode::new("InputMode.Hiragana", 1, "ใ", "ใฒใใใช (C-S-j)");
pub const INPUT_MODE_KATAKANA: InputMode =
    InputMode::new("InputMode.Katakana", 2, "ใข", "ใซใฟใซใ (C-S-K)");
pub const INPUT_MODE_HALFWIDTH_KATAKANA: InputMode =
    InputMode::new("InputMode.HalfWidthKatakana", 3, "_๏ฝฑ", "ๅ่งใซใฟใซใ");
pub const INPUT_MODE_FULLWIDTH_ALNUM: InputMode = InputMode::new(
    "InputMode.FullWidthAlnum",
    4,
    "๏ผก",
    "ๅ
จ่งใขใซใใกใใใ (C-S-l)",
);

// Backing storage for get_all_input_modes().
const _ALL_INPUT_MODE: [InputMode; 5] = [
    INPUT_MODE_ALNUM,
    INPUT_MODE_HIRAGANA,
    INPUT_MODE_KATAKANA,
    INPUT_MODE_HALFWIDTH_KATAKANA,
    INPUT_MODE_FULLWIDTH_ALNUM,
];
/// All input modes, in the order they are declared above.
pub fn get_all_input_modes() -> &'static [InputMode; 5] {
    &_ALL_INPUT_MODE
}
/// Look up an input mode by its ibus property name
/// (e.g. "InputMode.Hiragana").
///
/// # Errors
/// Returns an error for an unknown property name.
pub fn get_input_mode_from_prop_name(prop_code: &str) -> anyhow::Result<InputMode> {
    _ALL_INPUT_MODE
        .iter()
        .find(|mode| mode.prop_name == prop_code)
        .copied()
        .ok_or_else(|| anyhow::anyhow!("Unknown prop_code: {}", prop_code))
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/src/context.rs | ibus-akaza/src/context.rs | use std::collections::HashMap;
use anyhow::Result;
use kelp::{h2z, hira2kata, z2h, ConvOption};
use log::{error, info, trace, warn};
use akaza_conf::conf::open_configuration_window;
use akaza_dict::conf::open_userdict_window;
use ibus_sys::core::{
IBusModifierType_IBUS_CONTROL_MASK, IBusModifierType_IBUS_HYPER_MASK,
IBusModifierType_IBUS_META_MASK, IBusModifierType_IBUS_MOD1_MASK,
IBusModifierType_IBUS_MOD2_MASK, IBusModifierType_IBUS_MOD3_MASK,
IBusModifierType_IBUS_MOD4_MASK, IBusModifierType_IBUS_MOD5_MASK,
IBusModifierType_IBUS_RELEASE_MASK, IBusModifierType_IBUS_SHIFT_MASK,
};
use ibus_sys::engine::ibus_engine_commit_text;
use ibus_sys::engine::IBusEngine;
use ibus_sys::glib::guint;
use ibus_sys::property::IBusPropState_PROP_STATE_CHECKED;
use ibus_sys::text::StringExt;
use libakaza::config::Config;
use libakaza::engine::base::HenkanEngine;
use libakaza::engine::bigram_word_viterbi_engine::BigramWordViterbiEngine;
use libakaza::graph::candidate::Candidate;
use libakaza::kana_kanji::marisa_kana_kanji_dict::MarisaKanaKanjiDict;
use libakaza::keymap::Keymap;
use libakaza::lm::system_bigram::MarisaSystemBigramLM;
use libakaza::lm::system_unigram_lm::MarisaSystemUnigramLM;
use libakaza::romkan::RomKanConverter;
use crate::commands::{ibus_akaza_commands_map, IbusAkazaCommand};
use crate::current_state::CurrentState;
use crate::input_mode::get_input_mode_from_prop_name;
use crate::input_mode::InputMode;
use crate::input_mode::INPUT_MODE_HIRAGANA;
use crate::keymap::IBusKeyMap;
use crate::ui::prop_controller::PropController;
/// Per-engine state for ibus-akaza; a raw pointer to this is handed to
/// the C side and passed back into every callback.
#[repr(C)]
pub struct AkazaContext {
    // ==== Configuration ====
    keymap: IBusKeyMap,
    command_map: HashMap<&'static str, IbusAkazaCommand>,
    // ==== Current input state ====
    current_state: CurrentState,
    // ==== UI ====
    prop_controller: PropController,
}
impl AkazaContext {
    /// Wire up a fresh context from the loaded config: romkan converter,
    /// keymap, command table and the property (menu) controller.
    /// Always starts in hiragana mode.
    pub(crate) fn new(
        engine: BigramWordViterbiEngine<
            MarisaSystemUnigramLM,
            MarisaSystemBigramLM,
            MarisaKanaKanjiDict,
        >,
        config: Config,
    ) -> Result<Self> {
        let input_mode = INPUT_MODE_HIRAGANA;
        let romkan = RomKanConverter::new(config.romkan.as_str())?;
        let keymap = Keymap::load(config.keymap.as_str())?;
        Ok(AkazaContext {
            current_state: CurrentState::new(input_mode, config.live_conversion, romkan, engine),
            command_map: ibus_akaza_commands_map(),
            keymap: IBusKeyMap::new(keymap)?,
            prop_controller: PropController::new(input_mode, config)?,
        })
    }

    /// Handle activation of one of our ibus properties: the preference
    /// pane, an "InputMode.*" radio item, or a "UserDict.*" entry.
    pub(crate) fn do_property_activate(
        &mut self,
        engine: *mut IBusEngine,
        prop_name: String,
        prop_state: guint,
    ) {
        info!("do_property_activate: {}, {}", prop_name, prop_state);
        if prop_name == "PrefPane" {
            match open_configuration_window() {
                Ok(_) => {}
                Err(e) => info!("Err: {}", e),
            }
        } else if prop_state == IBusPropState_PROP_STATE_CHECKED
            && prop_name.starts_with("InputMode.")
        {
            self.input_mode_activate(engine, prop_name, prop_state);
        } else if prop_name.starts_with("UserDict.") {
            // "UserDict.<path>" opens the edit window for that dictionary.
            let dict_path = prop_name.replace("UserDict.", "");
            info!("Edit the {}", dict_path);
            match open_userdict_window(&dict_path) {
                Ok(_) => {}
                Err(e) => error!("Err: {}", e),
            }
        }
    }

    /// Switch to the input mode named by an "InputMode.*" property.
    pub fn input_mode_activate(
        &mut self,
        engine: *mut IBusEngine,
        prop_name: String,
        _prop_state: guint,
    ) {
        if let Ok(input_mode) = get_input_mode_from_prop_name(prop_name.as_str()) {
            self.set_input_mode(engine, &input_mode);
        } else {
            warn!("Unknown prop_name: {}", prop_name);
        }
    }
}
impl AkazaContext {
    /// Handle the digit keys while the lookup table is shown: select the
    /// corresponding candidate on the current page. Key "0" selects the
    /// tenth entry.
    pub(crate) fn process_num_key(&mut self, nn: i32, engine: *mut IBusEngine) -> bool {
        let idx = if nn == 0 { 9 } else { nn - 1 };

        if self.current_state.lookup_table_visible {
            self.set_lookup_table_cursor_pos_in_current_page(engine, idx)
        } else {
            info!("ignore process_num_key. lookup table is not enabled.");
            false
        }
    }

    /// Sets the cursor in the lookup table to index in the current page
    /// Returns True if successful, False if not.
    // NOTE(review): the guard below uses `idx > page_size`; an idx equal
    // to page_size would slip through onto the next page. Unreachable
    // with the 0-9 callers if the page size is at least 10 — confirm.
    fn set_lookup_table_cursor_pos_in_current_page(
        &mut self,
        engine: *mut IBusEngine,
        idx: i32,
    ) -> bool {
        trace!("set_lookup_table_cursor_pos_in_current_page: {}", idx);
        let page_size = self.current_state.lookup_table.get_page_size();
        if idx > (page_size as i32) {
            info!("Index too big: {} > {}", idx, page_size);
            return false;
        }
        // Absolute position = start of the current page + offset in page.
        let page = self.current_state.lookup_table.get_cursor_pos() / page_size;
        // let pos_in_page = self.lookup_table.get_cursor_pos() % page_size;
        let new_pos = page * page_size + (idx as u32);

        if new_pos >= self.current_state.lookup_table.get_number_of_candidates() {
            info!(
                "new_pos too big: {} > {}",
                new_pos,
                self.current_state.lookup_table.get_number_of_candidates()
            );
            return false;
        }
        self.current_state.lookup_table.set_cursor_pos(new_pos);
        let cursor_pos = self.current_state.lookup_table.get_cursor_pos() as usize;
        self.current_state.select_candidate(engine, cursor_pos);
        true
    }
}
impl AkazaContext {
    /// Main key-event dispatcher, called from the C trampoline for every
    /// key event (release events are filtered out below).
    ///
    /// Returns true when akaza consumed the event.
    pub fn process_key_event(
        &mut self,
        engine: *mut IBusEngine,
        keyval: guint,
        keycode: guint,
        modifiers: guint,
    ) -> bool {
        trace!(
            "process_key_event: keyval={}, keycode={}, modifiers={}",
            keyval,
            keycode,
            modifiers
        );

        // ignore key release event
        if modifiers & IBusModifierType_IBUS_RELEASE_MASK != 0 {
            return false;
        }
        let key_state = self.current_state.get_key_state();

        trace!("KeyState={:?}", key_state);
        // First give the user keymap a chance: mask the modifiers down
        // to the bits the keymap cares about and look up a command.
        if let Some(callback) = self
            .keymap
            .get(
                &key_state,
                keyval,
                modifiers
                    & (IBusModifierType_IBUS_CONTROL_MASK
                        | IBusModifierType_IBUS_SHIFT_MASK
                        | IBusModifierType_IBUS_META_MASK
                        | IBusModifierType_IBUS_HYPER_MASK
                        | IBusModifierType_IBUS_MOD1_MASK
                        | IBusModifierType_IBUS_MOD2_MASK
                        | IBusModifierType_IBUS_MOD3_MASK
                        | IBusModifierType_IBUS_MOD4_MASK
                        | IBusModifierType_IBUS_MOD5_MASK),
            )
            .cloned()
        {
            if self.run_callback_by_name(engine, callback.as_str()) {
                return true;
            }
        }

        match self.current_state.input_mode.prop_name {
            "InputMode.Hiragana" | "InputMode.Katakana" | "InputMode.HalfWidthKatakana" => {
                // Leave Ctrl-/Mod1- chords to the application.
                if modifiers
                    & (IBusModifierType_IBUS_CONTROL_MASK | IBusModifierType_IBUS_MOD1_MASK)
                    != 0
                {
                    return false;
                }

                // Printable ASCII ('!'..='~') feeds the preedit.
                if ('!' as u32) <= keyval && keyval <= ('~' as u32) {
                    trace!(
                        "Insert new character to preedit: '{}'",
                        self.current_state.get_raw_input()
                    );
                    if self.current_state.lookup_table_visible {
                        // Another character arrived mid-conversion, so
                        // commit the current preedit string first.
                        self.commit_candidate(engine);
                    }

                    // Append the character to the raw input.
                    let ch = char::from_u32(keyval).unwrap();
                    self.current_state.append_raw_input(engine, ch);
                    return true;
                }
            }
            "InputMode.Alphanumeric" => return false,
            "InputMode.FullWidthAlnum" => {
                // Convert printable ASCII to its full-width form and
                // commit it immediately.
                if ('!' as u32) <= keyval
                    && keyval <= ('~' as u32)
                    && (modifiers
                        & (IBusModifierType_IBUS_CONTROL_MASK | IBusModifierType_IBUS_MOD1_MASK))
                        == 0
                {
                    let option = ConvOption {
                        ascii: true,
                        digit: true,
                        ..Default::default()
                    };
                    let text = h2z(char::from_u32(keyval).unwrap().to_string().as_str(), option);
                    unsafe { ibus_engine_commit_text(engine, text.to_ibus_text()) };
                    return true;
                }
            }
            _ => {
                warn!("Unknown prop: {}", self.current_state.input_mode.prop_name);
                return false;
            }
        }

        false // not proceeded
    }

    /// Backspace handling.
    pub(crate) fn erase_character_before_cursor(&mut self, engine: *mut IBusEngine) {
        if !self.current_state.live_conversion && !self.current_state.clauses.is_empty() {
            // Outside live conversion, deleting during the conversion
            // phase drops back to the pre-conversion state: clear the
            // candidate clauses instead of deleting a character.
            self.current_state.clear_clauses(engine);
            return;
        }

        // Remove the last character of the raw input; the romkan
        // converter also takes along a leading lone consonant.
        self.current_state.set_raw_input(
            engine,
            self.current_state
                .romkan
                .remove_last_char(self.current_state.get_raw_input()),
        )
    }
}
impl Drop for AkazaContext {
    // Logged to make engine-lifetime problems visible while debugging.
    fn drop(&mut self) {
        warn!("Dropping AkazaContext");
    }
}
impl AkazaContext {
/// Switch the active input mode.
pub(crate) fn set_input_mode(&mut self, engine: *mut IBusEngine, input_mode: &InputMode) {
    info!("Changing input mode to : {:?}", input_mode);
    // Commit any pending candidate first.
    self.commit_candidate(engine);
    self.prop_controller.set_input_mode(input_mode, engine);
    // Then actually record the new input mode.
    self.current_state.set_input_mode(engine, input_mode);
}
    /// Look up `function_name` in the command map and invoke the callback.
    ///
    /// Returns the callback's result, or `false` when the name is unknown.
    pub(crate) fn run_callback_by_name(
        &mut self,
        engine: *mut IBusEngine,
        function_name: &str,
    ) -> bool {
        if let Some(function) = self.command_map.get(function_name) {
            info!("Calling function '{}'", function_name);
            function(self, engine)
        } else {
            error!("Unknown function '{}'", function_name);
            false
        }
    }
    /// Commit `text` to the client and reset composition state.
    pub fn commit_string(&mut self, engine: *mut IBusEngine, text: &str) {
        if !self.current_state.clauses.is_empty() {
            // Run learning only when committing from conversion mode.
            self.current_state
                .engine
                .learn(self.current_state.get_first_candidates().as_slice());
        }
        unsafe {
            ibus_engine_commit_text(engine, text.to_ibus_text());
        }
        self.current_state.clear_raw_input(engine);
        self.current_state.update_lookup_table(engine, false);
        self.current_state.set_auxiliary_text(engine, "");
    }
pub fn commit_candidate(&mut self, engine: *mut IBusEngine) {
self.commit_string(engine, self.current_state.build_string().as_str());
}
// space key ใๆผใใฆใๆๅใซๅคๆใซๅ
ฅใๆใฎๅฆ็ใ
pub(crate) fn update_candidates(&mut self, engine: *mut IBusEngine) -> bool {
if self.current_state.get_raw_input().is_empty() {
return false;
}
self.current_state.henkan(engine).unwrap();
if self.current_state.clauses.is_empty() {
// ใใถใๅฐ้ใใชใใฏใ
return true;
}
// -- auxiliary text(ใใใใขใใใใฆใใใคใฎใปใ)
let current_yomi = self.current_state.clauses[self.current_state.current_clause][0]
.yomi
.clone();
self.current_state.set_auxiliary_text(engine, ¤t_yomi);
// ๆ็คบ็ใซๅคๆใใฆใใใฎใงใlookup table ใ่กจ็คบใใใ
self.current_state.update_lookup_table(engine, true);
true
}
/// ๅใฎๅคๆๅ่ฃใ้ธๆใใใ
pub(crate) fn cursor_up(&mut self, engine: *mut IBusEngine) {
if self.current_state.lookup_table.cursor_up() {
let cursor_pos = self.current_state.lookup_table.get_cursor_pos() as usize;
self.current_state.select_candidate(engine, cursor_pos);
// lookup table ใฎ่กจ็คบใๆดๆฐใใ
self.current_state.update_lookup_table(engine, true);
}
}
/// ๆฌกใฎๅคๆๅ่ฃใ้ธๆใใใ
pub fn cursor_down(&mut self, engine: *mut IBusEngine) {
if self.current_state.lookup_table.cursor_down() {
let cursor_pos = self.current_state.lookup_table.get_cursor_pos() as usize;
self.current_state.select_candidate(engine, cursor_pos);
// lookup table ใฎ่กจ็คบใๆดๆฐใใ
self.current_state.update_lookup_table(engine, true);
}
}
pub fn page_up(&mut self, engine: *mut IBusEngine) -> bool {
if self.current_state.lookup_table.page_up() {
let cursor_pos = self.current_state.lookup_table.get_cursor_pos() as usize;
self.current_state.select_candidate(engine, cursor_pos);
// lookup table ใฎ่กจ็คบใๆดๆฐใใ
self.current_state.update_lookup_table(engine, true);
true
} else {
false
}
}
pub fn page_down(&mut self, engine: *mut IBusEngine) -> bool {
if self.current_state.lookup_table.page_up() {
let cursor_pos = self.current_state.lookup_table.get_cursor_pos() as usize;
self.current_state.select_candidate(engine, cursor_pos);
// lookup table ใฎ่กจ็คบใๆดๆฐใใ
self.current_state.update_lookup_table(engine, true);
true
} else {
false
}
}
/// ้ธๆใใๅ็ฏใๅณใซใใใใ
pub(crate) fn cursor_right(&mut self, engine: *mut IBusEngine) {
// ๅ็ฏใใชใๅ ดๅใฏใไฝใใใชใใ
if self.current_state.clauses.is_empty() {
return;
}
self.current_state.select_right_clause(engine);
}
/// ้ธๆใใๅ็ฏใๅทฆใซใใใใ
pub(crate) fn cursor_left(&mut self, engine: *mut IBusEngine) {
// ๅ็ฏใใชใใใฐไฝใใใชใ
if self.current_state.clauses.is_empty() {
return;
}
self.current_state.select_left_clause(engine);
}
/// ๆ็ฏใฎ้ธๆ็ฏๅฒใๅณๆนๅใซๅบใใ
pub fn extend_clause_right(&mut self, engine: *mut IBusEngine) -> Result<()> {
self.current_state.extend_right(engine);
// -- auxiliary text(ใใใใขใใใใฆใใใคใฎใปใ)
let current_yomi = self.current_state.clauses[self.current_state.current_clause][0]
.yomi
.clone();
self.current_state.set_auxiliary_text(engine, ¤t_yomi);
Ok(())
}
/// ๆ็ฏใฎ้ธๆ็ฏๅฒใๅทฆๆนๅใซๅบใใ
pub fn extend_clause_left(&mut self, engine: *mut IBusEngine) -> Result<()> {
self.current_state.extend_left(engine);
// -- auxiliary text(ใใใใขใใใใฆใใใคใฎใปใ)
let current_yomi = self.current_state.clauses[self.current_state.current_clause][0]
.yomi
.clone();
self.current_state.set_auxiliary_text(engine, ¤t_yomi);
Ok(())
}
pub fn do_candidate_clicked(
&mut self,
engine: *mut IBusEngine,
index: guint,
_button: guint,
_state: guint,
) {
info!("do_candidate_clicked");
if self.set_lookup_table_cursor_pos_in_current_page(engine, index as i32) {
self.commit_candidate(engine)
}
}
    /// ibus callback: the engine gained focus; re-register the properties.
    pub fn do_focus_in(&mut self, engine: *mut IBusEngine) {
        trace!("do_focus_in");
        self.prop_controller.do_focus_in(engine);
    }
/// convert selected word/characters to full-width hiragana (standard hiragana): ใใฏใคใ โ ใปใใใจ
pub fn convert_to_full_hiragana(&mut self, engine: *mut IBusEngine) -> Result<()> {
info!("Convert to full hiragana");
let hira = self
.current_state
.romkan
.to_hiragana(self.current_state.get_raw_input());
self.convert_to_single(engine, hira.as_str(), hira.as_str())
}
/// convert to full-width katakana (standard katakana): ใปใใใจ โ ใใฏใคใ
pub fn convert_to_full_katakana(&mut self, engine: *mut IBusEngine) -> Result<()> {
let hira = self
.current_state
.romkan
.to_hiragana(self.current_state.get_raw_input());
let kata = hira2kata(hira.as_str(), ConvOption::default());
self.convert_to_single(engine, hira.as_str(), kata.as_str())
}
    /// convert to half-width katakana (standard katakana): ほわいと → ﾎﾜｲﾄ
    pub fn convert_to_half_katakana(&mut self, engine: *mut IBusEngine) -> Result<()> {
        let hira = self
            .current_state
            .romkan
            .to_hiragana(self.current_state.get_raw_input());
        // Full-width katakana first, then narrow it to half-width with z2h.
        let kata = z2h(
            hira2kata(hira.as_str(), ConvOption::default()).as_str(),
            ConvOption::default(),
        );
        self.convert_to_single(engine, hira.as_str(), kata.as_str())
    }
    /// convert to full-width romaji, all-capitals, proper noun capitalization (latin script inside
    /// Japanese text): ホワイト → ｈｏｗａｉｔｏ → ＨＯＷＡＩＴＯ → Ｈｏｗａｉｔｏ
    pub fn convert_to_full_romaji(&mut self, engine: *mut IBusEngine) -> Result<()> {
        // Keep the hiragana reading as the candidate's yomi.
        let hira = self
            .current_state
            .romkan
            .to_hiragana(self.current_state.get_raw_input());
        // Widen the raw ASCII input to full-width characters.
        let romaji = h2z(
            self.current_state.get_raw_input(),
            ConvOption {
                kana: true,
                digit: true,
                ascii: true,
                ..Default::default()
            },
        );
        self.convert_to_single(engine, hira.as_str(), romaji.as_str())
    }
    /// convert to half-width romaji, all-capitals, proper noun capitalization (latin script like
    /// standard English): ホワイト → howaito → HOWAITO → Howaito
    pub fn convert_to_half_romaji(&mut self, engine: *mut IBusEngine) -> Result<()> {
        // Keep the hiragana reading as the candidate's yomi.
        let hira = self
            .current_state
            .romkan
            .to_hiragana(self.current_state.get_raw_input());
        // Narrow the raw input to half-width characters.
        let romaji = z2h(
            self.current_state.get_raw_input(),
            ConvOption {
                kana: true,
                digit: true,
                ascii: true,
                ..Default::default()
            },
        );
        self.convert_to_single(engine, hira.as_str(), romaji.as_str())
    }
    /// Present a single fixed candidate for the whole input as one clause.
    /// Used for the function-key conversions (F6-style commands).
    fn convert_to_single(
        &mut self,
        engine: *mut IBusEngine,
        yomi: &str,
        surface: &str,
    ) -> Result<()> {
        // Install the single candidate.
        let candidate = Candidate::new(yomi, surface, 0_f32);
        self.current_state.clear_force_selected_clause(engine);
        self.current_state
            .set_clauses(engine, vec![Vec::from([candidate.clone()])]);
        // Show the reading in the auxiliary text of the lookup table.
        self.current_state
            .set_auxiliary_text(engine, &candidate.yomi);
        // Make the lookup table visible.
        self.current_state.update_lookup_table(engine, true);
        Ok(())
    }
pub(crate) fn commit_preedit(&mut self, engine: *mut IBusEngine) {
let (_, surface) = self.current_state.make_preedit_word_for_precomposition();
self.commit_string(engine, surface.as_str());
}
pub fn escape(&mut self, engine: *mut IBusEngine) {
trace!("escape");
if self.current_state.live_conversion {
self.current_state.clear_raw_input(engine);
} else {
// ๅคๆๅ่ฃใฎๅ็ฏใใฏใชใขใใใ
self.current_state.clear_clauses(engine);
}
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/src/current_state.rs | ibus-akaza/src/current_state.rs | use std::collections::HashMap;
use std::ops::Range;
use kelp::{hira2kata, z2h, ConvOption};
use log::info;
use ibus_sys::attr_list::{ibus_attr_list_append, ibus_attr_list_new};
use ibus_sys::attribute::{
ibus_attribute_new, IBusAttrType_IBUS_ATTR_TYPE_BACKGROUND,
IBusAttrType_IBUS_ATTR_TYPE_UNDERLINE, IBusAttrUnderline_IBUS_ATTR_UNDERLINE_SINGLE,
};
use ibus_sys::core::to_gboolean;
use ibus_sys::engine::{
ibus_engine_hide_auxiliary_text, ibus_engine_hide_preedit_text,
ibus_engine_update_auxiliary_text, ibus_engine_update_lookup_table,
ibus_engine_update_preedit_text, IBusEngine,
};
use ibus_sys::glib::guint;
use ibus_sys::lookup_table::IBusLookupTable;
use ibus_sys::text::{ibus_text_set_attributes, StringExt};
use libakaza::consonant::ConsonantSuffixExtractor;
use libakaza::engine::base::HenkanEngine;
use libakaza::engine::bigram_word_viterbi_engine::BigramWordViterbiEngine;
use libakaza::extend_clause::{extend_left, extend_right};
use libakaza::graph::candidate::Candidate;
use libakaza::kana_kanji::marisa_kana_kanji_dict::MarisaKanaKanjiDict;
use libakaza::keymap::KeyState;
use libakaza::lm::system_bigram::MarisaSystemBigramLM;
use libakaza::lm::system_unigram_lm::MarisaSystemUnigramLM;
use libakaza::romkan::RomKanConverter;
use crate::input_mode::{InputMode, INPUT_MODE_HALFWIDTH_KATAKANA, INPUT_MODE_KATAKANA};
#[derive(Debug)]
pub struct CurrentState {
pub(crate) input_mode: InputMode,
raw_input: String,
preedit: String,
auxiliary_text: String,
pub(crate) clauses: Vec<Vec<Candidate>>,
/// ็พๅจ้ธๆใใใฆใใๆ็ฏ
pub(crate) current_clause: usize,
// key ใฏใclause ็ชๅทใvalue ใฏใnode ใฎ indexใ
node_selected: HashMap<usize, usize>,
/// ใทใใ+ๅณ or ใทใใ+ๅทฆใงๅผทๅถๆๅฎใใใ็ฏๅฒ
pub(crate) force_selected_clause: Vec<Range<usize>>,
/// ใฉใคใใณใณใใผใธใงใณ
pub live_conversion: bool,
pub(crate) lookup_table_visible: bool,
pub lookup_table: IBusLookupTable,
pub romkan: RomKanConverter,
pub(crate) engine:
BigramWordViterbiEngine<MarisaSystemUnigramLM, MarisaSystemBigramLM, MarisaKanaKanjiDict>,
consonant_suffix_extractor: ConsonantSuffixExtractor,
}
impl CurrentState {
pub fn new(
input_mode: InputMode,
live_conversion: bool,
romkan: RomKanConverter,
engine: BigramWordViterbiEngine<
MarisaSystemUnigramLM,
MarisaSystemBigramLM,
MarisaKanaKanjiDict,
>,
) -> Self {
CurrentState {
input_mode,
raw_input: String::new(),
preedit: String::new(),
auxiliary_text: String::new(),
clauses: vec![],
current_clause: 0,
node_selected: HashMap::new(),
force_selected_clause: Vec::new(),
live_conversion,
lookup_table_visible: false,
lookup_table: IBusLookupTable::new(10, 0, 1, 1),
romkan,
engine,
consonant_suffix_extractor: ConsonantSuffixExtractor::default(),
}
}
pub(crate) fn set_input_mode(&mut self, engine: *mut IBusEngine, input_mode: &InputMode) {
self.clear_raw_input(engine);
self.clear_clauses(engine);
self.input_mode = *input_mode;
}
pub fn select_candidate(&mut self, engine: *mut IBusEngine, candidate_pos: usize) {
self.node_selected
.insert(self.current_clause, candidate_pos);
self.on_node_selected_change(engine);
}
pub fn clear_raw_input(&mut self, engine: *mut IBusEngine) {
if !self.raw_input.is_empty() {
self.raw_input.clear();
self.on_raw_input_change(engine);
}
}
pub fn get_raw_input(&self) -> &str {
&self.raw_input
}
pub fn clear_force_selected_clause(&mut self, engine: *mut IBusEngine) {
if !self.force_selected_clause.is_empty() {
self.force_selected_clause.clear();
self.on_force_selected_clause_change(engine);
}
}
pub fn clear_current_clause(&mut self, engine: *mut IBusEngine) {
if self.current_clause != 0 {
self.current_clause = 0;
self.on_current_clause_change(engine);
}
}
pub(crate) fn append_raw_input(&mut self, engine: *mut IBusEngine, ch: char) {
self.raw_input.push(ch);
self.on_raw_input_change(engine);
}
/// ใใใฏในใใผในใงไธๆๅญๅ้คใใๅ ดๅใชใฉใซๅผใฐใใใ
pub(crate) fn set_raw_input(&mut self, engine: *mut IBusEngine, raw_input: String) {
if self.raw_input != raw_input {
info!("set_raw_input: {:?}", raw_input);
self.raw_input = raw_input;
self.on_raw_input_change(engine);
}
}
    /// Run kana-kanji conversion on the current raw input and store the
    /// resulting clauses.
    pub(crate) fn henkan(&mut self, engine: *mut IBusEngine) -> anyhow::Result<()> {
        if self.get_raw_input().is_empty() {
            self.set_clauses(engine, vec![]);
        } else {
            let yomi = self.get_raw_input().to_string();
            // Skip conversion for inputs that start with an uppercase letter
            // (likely literal text such as "IME") or that look like URLs.
            let clauses = if (!yomi.is_empty()
                && yomi.chars().next().unwrap().is_ascii_uppercase()
                && self.force_selected_clause.is_empty())
                || yomi.starts_with("https://")
                || yomi.starts_with("http://")
            {
                vec![Vec::from([Candidate::new(
                    yomi.as_str(),
                    yomi.as_str(),
                    0_f32,
                )])]
            } else {
                self.engine.convert(
                    self.romkan.to_hiragana(&yomi).as_str(),
                    Some(&self.force_selected_clause),
                )?
            };
            self.set_clauses(engine, clauses);
            self.adjust_current_clause(engine);
        }
        Ok(())
    }
pub fn set_auxiliary_text(&mut self, engine: *mut IBusEngine, auxiliary_text: &str) {
if self.auxiliary_text != auxiliary_text {
self.auxiliary_text = auxiliary_text.to_string();
self.on_auxiliary_text_change(engine);
}
}
pub fn set_clauses(&mut self, engine: *mut IBusEngine, clause: Vec<Vec<Candidate>>) {
if self.clauses != clause {
self.clauses = clause;
self.clear_node_selected(engine);
self.clear_current_clause(engine);
self.on_clauses_change(engine);
}
}
pub fn clear_node_selected(&mut self, engine: *mut IBusEngine) {
if !self.node_selected.is_empty() {
self.node_selected.clear();
self.on_node_selected_change(engine);
}
}
/// ๅคๆใใฆใใใจใใซ backspace ใๅ
ฅๅใใๅ ดๅใ
/// ๅคๆๅ่ฃใใฏใชใขใใฆใConversion ใใ Composition ็ถๆ
ใซๆปใใ
pub fn clear_clauses(&mut self, engine: *mut IBusEngine) {
if !self.clauses.is_empty() {
self.clauses.clear();
self.on_clauses_change(engine);
// lookup table ใ้ ใ
self.update_lookup_table(engine, false);
}
self.clear_current_clause(engine);
self.clear_node_selected(engine);
self.clear_force_selected_clause(engine);
}
/**
* ็พๅจใฎๅ่ฃ้ธๆ็ถๆ
ใใใ lookup table ใๆง็ฏใใใ
*/
fn render_lookup_table(&mut self) {
info!("render_lookup_table");
// ไธๆฆใใซใใฏใขใใใใผใใซใใฏใชใขใใ
self.lookup_table.clear();
// ็พๅจใฎๆชๅคๆๆ
ๅ ฑใๅ
ใซใๅ่ฃใ็ฎๅบใใฆใใใ
if !self.clauses.is_empty() {
// lookup table ใซๅ่ฃใ่ฉฐใ่พผใใงใใใ
for node in &self.clauses[self.current_clause] {
let candidate = &node.surface_with_dynamic();
self.lookup_table.append_candidate(candidate.to_ibus_text());
}
}
}
pub fn get_first_candidates(&self) -> Vec<Candidate> {
let mut targets: Vec<Candidate> = Vec::new();
for (i, candidates) in self.clauses.iter().enumerate() {
let idx = self.node_selected.get(&i).unwrap_or(&0);
targets.push(candidates[*idx].clone());
}
targets
}
/// ไธๅๅณใฎๆ็ฏใ้ธๆใใ
pub fn select_right_clause(&mut self, engine: *mut IBusEngine) {
if self.current_clause == self.clauses.len() - 1 {
// ๆขใซไธ็ชๅณใ ใฃใๅ ดๅใไธ็ชๅทฆใซใใใ
if self.current_clause != 0 {
self.current_clause = 0;
self.on_current_clause_change(engine);
}
} else {
self.current_clause += 1;
self.on_current_clause_change(engine);
}
}
/// ไธๅๅทฆใฎๆ็ฏใ้ธๆใใ
pub fn select_left_clause(&mut self, engine: *mut IBusEngine) {
if self.current_clause == 0 {
// ๆขใซไธ็ชๅทฆใ ใฃใๅ ดๅใไธ็ชๅณใซใใ
self.current_clause = self.clauses.len() - 1;
self.on_current_clause_change(engine);
} else {
self.current_clause -= 1;
self.on_current_clause_change(engine);
}
}
pub fn adjust_current_clause(&mut self, engine: *mut IBusEngine) {
// [a][bc]
// ^^^^
// ไธ่จใฎๆงใซใใฉใผใซในใๅฝใใฃใฆใใๆใซ extend_clause_left ใใๅ ดๅ
// ๆ็ฏใฎๆฐใใใจใใๆธใใใจใใใใใใฎๅ ดๅใฏ index error ใซใชใฃใฆใใพใใฎใงใ
// current_clause ใๅใใใ
if self.current_clause >= self.clauses.len() {
self.current_clause = self.clauses.len() - 1;
self.on_current_clause_change(engine);
}
}
pub fn build_string(&self) -> String {
let mut result = String::new();
for (clauseid, nodes) in self.clauses.iter().enumerate() {
let idex = if let Some(i) = self.node_selected.get(&clauseid) {
*i
} else {
0
};
if idex >= nodes.len() {
// ็บ็ใใชใใฏใใ ใใ็บ็ใใฆใใใใใชใใ ใใ?
panic!("[BUG] self.node_selected and self.clauses missmatch")
}
result += &nodes[idex].surface_with_dynamic();
}
result
}
pub fn extend_right(&mut self, engine: *mut IBusEngine) {
self.force_selected_clause = extend_right(&self.clauses, self.current_clause);
self.on_force_selected_clause_change(engine);
}
pub fn extend_left(&mut self, engine: *mut IBusEngine) {
self.force_selected_clause = extend_left(&self.clauses, self.current_clause);
self.on_force_selected_clause_change(engine);
}
pub fn on_force_selected_clause_change(&mut self, engine: *mut IBusEngine) {
self.henkan(engine).unwrap();
}
pub fn on_clauses_change(&mut self, engine: *mut IBusEngine) {
self.update_preedit(engine);
self.update_auxiliary_text(engine);
self.render_lookup_table();
}
pub fn on_raw_input_change(&mut self, engine: *mut IBusEngine) {
// unicode character ใฎๅข็ใใใชใใจใใใซ force_selected ใๅ
ฅใฃใ็ถๆ
ใง hanken
// ใใใจ่ฝใกใใ
// ใชใฎใงใๅ
ใซใฏใชใขใใๅฟ
่ฆใใใใ
self.clear_force_selected_clause(engine);
if self.live_conversion {
self.henkan(engine).unwrap();
} else if !self.clauses.is_empty() {
self.clauses.clear();
self.on_clauses_change(engine);
}
self.clear_current_clause(engine);
self.clear_node_selected(engine);
self.update_preedit(engine);
let visible = if self.live_conversion {
false
} else {
self.lookup_table.get_number_of_candidates() > 0
};
self.update_lookup_table(engine, visible);
}
pub fn on_current_clause_change(&mut self, engine: *mut IBusEngine) {
self.update_preedit(engine);
self.render_lookup_table();
self.update_auxiliary_text(engine);
// ๅ่ฃใใใใฐใ้ธๆ่ขใ่กจ็คบใใใใ
let visible = self.lookup_table.get_number_of_candidates() > 0;
self.update_lookup_table(engine, visible);
}
pub fn update_auxiliary_text(&mut self, engine: *mut IBusEngine) {
// -- auxiliary text(ใใใใขใใใใฆใใใคใฎใปใ)
if !self.clauses.is_empty() {
let current_yomi = self.clauses[self.current_clause][0].yomi.clone();
self.set_auxiliary_text(engine, ¤t_yomi);
} else {
self.set_auxiliary_text(engine, "");
}
}
fn on_auxiliary_text_change(&self, engine: *mut IBusEngine) {
self.render_auxiliary_text(engine);
}
pub fn update_preedit(&mut self, engine: *mut IBusEngine) {
if self.live_conversion {
if self.clauses.is_empty() {
unsafe { ibus_engine_hide_preedit_text(engine) }
} else {
self.preedit = self.build_string();
self.render_preedit(engine);
}
} else if self.clauses.is_empty() {
// live conversion ใใใชใใฆใๅคๆไธญใใใชใใจใใ
let (_yomi, surface) = self.make_preedit_word_for_precomposition();
self.preedit = surface;
self.render_preedit(engine);
} else {
// live conversion ใใใชใใฆใๅคๆไธญใฎใจใใ
self.preedit = self.build_string();
self.render_preedit(engine);
}
}
pub fn render_preedit(&self, engine: *mut IBusEngine) {
unsafe {
let preedit_attrs = ibus_attr_list_new();
// ๅ
จ้จใซไธ็ทใใฒใใ
ibus_attr_list_append(
preedit_attrs,
ibus_attribute_new(
IBusAttrType_IBUS_ATTR_TYPE_UNDERLINE,
IBusAttrUnderline_IBUS_ATTR_UNDERLINE_SINGLE,
0,
self.preedit.len() as guint,
),
);
let bgstart: u32 = self
.clauses
.iter()
.map(|c| (c[0].surface).len() as u32)
.sum();
// ่ๆฏ่ฒใ่จญๅฎใใใ
ibus_attr_list_append(
preedit_attrs,
ibus_attribute_new(
IBusAttrType_IBUS_ATTR_TYPE_BACKGROUND,
0x00333333,
bgstart,
bgstart + (self.preedit.len() as u32),
),
);
let preedit_text = self.preedit.to_ibus_text();
ibus_text_set_attributes(preedit_text, preedit_attrs);
ibus_engine_update_preedit_text(
engine,
preedit_text,
self.preedit.len() as guint,
to_gboolean(!self.preedit.is_empty()),
);
}
}
pub(crate) fn get_key_state(&self) -> KeyState {
// ใญใผๅ
ฅๅ็ถๆ
ใ่ฟใใ
if self.raw_input.is_empty() {
// ๆชๅ
ฅๅ็ถๆ
ใ
KeyState::PreComposition
} else if !self.clauses.is_empty() {
// ๅคๆใใฆใใ็ถๆ
ใlookup table ใ่กจ็คบใใใฆใใ็ถๆ
KeyState::Conversion
} else {
// preedit ใซใชใซใๅ
ฅใฃใฆใใฆใใพใ ๅคๆใๅฎๆฝใใฆใใชใ็ถๆ
KeyState::Composition
}
}
fn render_auxiliary_text(&self, engine: *mut IBusEngine) {
unsafe {
if self.lookup_table_visible {
if self.auxiliary_text.is_empty() {
ibus_engine_hide_auxiliary_text(engine);
} else {
let auxiliary_text = self.auxiliary_text.to_ibus_text();
ibus_text_set_attributes(auxiliary_text, ibus_attr_list_new());
ibus_engine_update_auxiliary_text(
engine,
auxiliary_text,
to_gboolean(!self.raw_input.is_empty()),
);
}
} else {
ibus_engine_hide_auxiliary_text(engine);
}
}
}
/// lookup table ใฎ่กจ็คบใๆดๆฐใใ
pub fn update_lookup_table(&mut self, engine: *mut IBusEngine, visible: bool) {
self.lookup_table_visible = visible;
unsafe {
ibus_engine_update_lookup_table(
engine,
&mut self.lookup_table as *mut IBusLookupTable,
to_gboolean(visible),
);
}
}
fn on_node_selected_change(&mut self, engine: *mut IBusEngine) {
self.update_preedit(engine);
self.update_auxiliary_text(engine);
}
/// (yomi, surface)
pub fn make_preedit_word_for_precomposition(&self) -> (String, String) {
let preedit = self.get_raw_input().to_string();
// ๅ
้ ญๆๅญใๅคงๆๅญใชๅ ดๅใฏใใใฎใพใพ่ฟใใ
// "IME" ใชใฉใจๅ
ฅๅใใใๅ ดๅใฏใใใใใใฎใพใพ่ฟใใใใซใใใ
if !preedit.is_empty() && preedit.chars().next().unwrap().is_ascii_uppercase() {
return (preedit.clone(), preedit);
}
// hogen ใจๅ
ฅๅใใใๅ ดๅใ"ใปใn" ใจ่กจ็คบใใใ
// hogena ใจใชใฃใใ "ใปใใช"
// hogenn ใจใชใฃใใ "ใปใใ" ใจ่กจ็คบใใๅฟ
่ฆใใใใใใ
// ใใใใจไธๆฆ่กจ็คบใใใๅพใซใใชใใซๅคๅใใใใใใจๆฐๆใกๆชใๆใใใ
let (preedit, suffix) = if self.romkan.mapping_name == "default" {
self.consonant_suffix_extractor.extract(preedit.as_str())
} else {
(preedit, "".to_string())
};
let yomi = self.romkan.to_hiragana(preedit.as_str());
let surface = yomi.clone();
if self.input_mode == INPUT_MODE_KATAKANA {
(
yomi.to_string() + suffix.as_str(),
hira2kata(yomi.as_str(), ConvOption::default()) + suffix.as_str(),
)
} else if self.input_mode == INPUT_MODE_HALFWIDTH_KATAKANA {
(
yomi.to_string() + suffix.as_str(),
z2h(
hira2kata(yomi.as_str(), ConvOption::default()).as_str(),
ConvOption::default(),
) + suffix.as_str(),
)
} else {
(yomi + suffix.as_str(), surface + suffix.as_str())
}
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/src/ui/prop_controller.rs | ibus-akaza/src/ui/prop_controller.rs | use std::collections::HashMap;
use std::path::Path;
use anyhow::Result;
use ibus_sys::core::to_gboolean;
use ibus_sys::engine::{ibus_engine_register_properties, ibus_engine_update_property, IBusEngine};
use ibus_sys::glib::{g_object_ref_sink, gchar, gpointer};
use ibus_sys::prop_list::{ibus_prop_list_append, ibus_prop_list_new, IBusPropList};
use ibus_sys::property::{
ibus_property_new, ibus_property_set_label, ibus_property_set_state,
ibus_property_set_sub_props, ibus_property_set_symbol, IBusPropState_PROP_STATE_CHECKED,
IBusPropState_PROP_STATE_UNCHECKED, IBusPropType_PROP_TYPE_MENU, IBusPropType_PROP_TYPE_RADIO,
IBusProperty,
};
use ibus_sys::text::{IBusText, StringExt};
use libakaza::config::{Config, DictConfig};
use crate::input_mode::{get_all_input_modes, InputMode};
pub struct PropController {
prop_list: *mut IBusPropList,
/// input mode ใฎใกใใฅใผใฎ่ฆชใใญใใใฃใ
input_mode_prop: *mut IBusProperty,
/// ใกใใฅใผใฎ input mode ใใจใฎใกใใฅใผใใญใใใฃใใกใ
prop_dict: HashMap<String, *mut IBusProperty>,
}
impl PropController {
pub fn new(initial_input_mode: InputMode, config: Config) -> Result<Self> {
let (input_mode_prop, prop_list, prop_dict) = Self::init_props(initial_input_mode, config)?;
Ok(PropController {
prop_list,
input_mode_prop,
prop_dict,
})
}
/// ibus ใฎ do_focus_in ใฎใจใใซๅผใฐใใใ
pub fn do_focus_in(&self, engine: *mut IBusEngine) {
unsafe {
ibus_engine_register_properties(engine, self.prop_list);
}
}
/// ใฟในใฏใกใใฅใผใใใใใใขใใใใฆ้ธในใใกใใฅใผใๆง็ฏใใใ
///
/// * `initial_input_mode`: ๅๆ็ถๆ
ใฎ input_mode
fn init_props(
initial_input_mode: InputMode,
config: Config,
) -> Result<(
*mut IBusProperty,
*mut IBusPropList,
HashMap<String, *mut IBusProperty>,
)> {
unsafe {
let prop_list =
g_object_ref_sink(ibus_prop_list_new() as gpointer) as *mut IBusPropList;
let input_mode_prop = g_object_ref_sink(ibus_property_new(
"InputMode\0".as_ptr() as *const gchar,
IBusPropType_PROP_TYPE_MENU,
format!("ๅ
ฅๅใขใผใ: {}", initial_input_mode.symbol).to_ibus_text(),
"\0".as_ptr() as *const gchar,
"Switch input mode".to_ibus_text(),
to_gboolean(true),
to_gboolean(true),
IBusPropState_PROP_STATE_UNCHECKED,
std::ptr::null_mut() as *mut IBusPropList,
) as gpointer) as *mut IBusProperty;
ibus_prop_list_append(prop_list, input_mode_prop);
let props = g_object_ref_sink(ibus_prop_list_new() as gpointer) as *mut IBusPropList;
let mut prop_map: HashMap<String, *mut IBusProperty> = HashMap::new();
for input_mode in get_all_input_modes() {
let prop = g_object_ref_sink(ibus_property_new(
(input_mode.prop_name.to_string() + "\0").as_ptr() as *const gchar,
IBusPropType_PROP_TYPE_RADIO,
input_mode.label.to_ibus_text(),
"\0".as_ptr() as *const gchar,
std::ptr::null_mut() as *mut IBusText,
to_gboolean(true),
to_gboolean(true),
if input_mode.mode_code == initial_input_mode.mode_code {
IBusPropState_PROP_STATE_CHECKED
} else {
IBusPropState_PROP_STATE_UNCHECKED
},
std::ptr::null_mut() as *mut IBusPropList,
) as gpointer) as *mut IBusProperty;
prop_map.insert(input_mode.prop_name.to_string(), prop);
ibus_prop_list_append(props, prop);
}
ibus_property_set_sub_props(input_mode_prop, props);
// ใฆใผใถใผ่พๆธ
Self::build_user_dict(prop_list, config)?;
// ่จญๅฎใใกใคใซใ้ใใจใใใใค
Self::build_preference_menu(prop_list);
Ok((input_mode_prop, prop_list, prop_map))
}
}
unsafe fn build_user_dict(prop_list: *mut IBusPropList, config: Config) -> Result<()> {
let user_dict_prop = g_object_ref_sink(ibus_property_new(
"UserDict\0".as_ptr() as *const gchar,
IBusPropType_PROP_TYPE_MENU,
"ใฆใผใถใผ่พๆธ".to_ibus_text(),
"\0".as_ptr() as *const gchar,
"User dict".to_ibus_text(),
to_gboolean(true),
to_gboolean(true),
IBusPropState_PROP_STATE_UNCHECKED,
std::ptr::null_mut() as *mut IBusPropList,
) as gpointer) as *mut IBusProperty;
ibus_prop_list_append(prop_list, user_dict_prop);
let props = g_object_ref_sink(ibus_prop_list_new() as gpointer) as *mut IBusPropList;
for dict in Self::find_user_dicts(config)? {
let prop = g_object_ref_sink(ibus_property_new(
("UserDict.".to_string() + dict.path.as_str() + "\0").as_ptr() as *const gchar,
IBusPropType_PROP_TYPE_MENU,
Path::new(&dict.path)
.file_name()
.unwrap()
.to_string_lossy()
.to_ibus_text(),
"\0".as_ptr() as *const gchar,
std::ptr::null_mut() as *mut IBusText,
to_gboolean(true),
to_gboolean(true),
IBusPropState_PROP_STATE_UNCHECKED,
std::ptr::null_mut() as *mut IBusPropList,
) as gpointer) as *mut IBusProperty;
// prop_map.insert(input_mode.prop_name.to_string(), prop);
ibus_prop_list_append(props, prop);
}
ibus_property_set_sub_props(user_dict_prop, props);
Ok(())
}
fn find_user_dicts(config: Config) -> anyhow::Result<Vec<DictConfig>> {
let dir = xdg::BaseDirectories::with_prefix("akaza")?;
let dir = dir.create_data_directory("userdict")?;
let dicts = config
.engine
.dicts
.iter()
.filter(|f| f.path.contains(&dir.to_string_lossy().to_string()))
.cloned()
.collect::<Vec<_>>();
Ok(dicts)
}
unsafe fn build_preference_menu(prop_list: *mut IBusPropList) {
let preference_prop = g_object_ref_sink(ibus_property_new(
"PrefPane\0".as_ptr() as *const gchar,
IBusPropType_PROP_TYPE_MENU,
"่จญๅฎ".to_ibus_text(),
"\0".as_ptr() as *const gchar,
"Preference".to_ibus_text(),
to_gboolean(true),
to_gboolean(true),
IBusPropState_PROP_STATE_UNCHECKED,
std::ptr::null_mut() as *mut IBusPropList,
) as gpointer) as *mut IBusProperty;
ibus_prop_list_append(prop_list, preference_prop);
}
/// input_mode ใฎๅใๆฟใๆใซๅฎ่กใใใๅฆ็
pub fn set_input_mode(&self, input_mode: &InputMode, engine: *mut IBusEngine) {
// ใกใใฅใผใฎ่ฆช้
็ฎใฎใฉใใซใๅคๆดใใใใ
unsafe {
ibus_property_set_symbol(self.input_mode_prop, input_mode.symbol.to_ibus_text());
ibus_property_set_label(
self.input_mode_prop,
format!("ๅ
ฅๅใขใผใ: {}", input_mode.symbol).to_ibus_text(),
);
ibus_engine_update_property(engine, self.input_mode_prop);
}
// ๆๅนๅใใ input mode ใฎใกใใฅใผ้
็ฎใซใใงใใฏใๅ
ฅใใใ
let Some(property) = self.prop_dict.get(input_mode.prop_name) else {
panic!("Unknown input mode: {input_mode:?}");
};
unsafe {
ibus_property_set_state(*property, IBusPropState_PROP_STATE_CHECKED);
ibus_engine_update_property(engine, *property);
}
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/ibus-akaza/src/ui/mod.rs | ibus-akaza/src/ui/mod.rs | pub mod prop_controller;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/utils.rs | akaza-data/src/utils.rs | use chrono::Local;
use std::fs;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
/// Recursively collect all regular files under `src_dir`.
///
/// Unreadable directory entries are silently skipped instead of aborting
/// the walk.
pub fn get_file_list(src_dir: &Path) -> anyhow::Result<Vec<PathBuf>> {
    let result = WalkDir::new(src_dir)
        .into_iter()
        .filter_map(|entry| entry.ok())
        // `file_type()` reuses data gathered during traversal; the previous
        // `metadata().unwrap()` re-stat'ed each entry and panicked on a
        // transient filesystem error.
        .filter(|entry| entry.file_type().is_file())
        .map(|entry| entry.path().to_path_buf())
        .collect();
    Ok(result)
}
/// Copy `path` into `work/dump/`, prefixing the file name with a timestamp
/// (e.g. `20230131-235959foo.trie`), for debugging snapshots.
///
/// NOTE(review): like the original, this panics when `path` has no UTF-8
/// file name component.
pub fn copy_snapshot(path: &Path) -> anyhow::Result<()> {
    fs::create_dir_all("work/dump/")?;
    let timestamp = Local::now().format("%Y%m%d-%H%M%S").to_string();
    // The previous code did `.to_str().unwrap().to_string().as_str()`,
    // allocating an owned String just to borrow it again; `&str` suffices.
    let file_name = path.file_name().unwrap().to_str().unwrap();
    fs::copy(path, Path::new("work/dump/").join(timestamp + file_name))?;
    Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/main.rs | akaza-data/src/main.rs | extern crate core;
use std::io::Write;
use clap::{Parser, Subcommand};
use crate::subcmd::check::check;
use crate::subcmd::dump_bigram_dict::dump_bigram_dict;
use crate::subcmd::dump_unigram_dict::dump_unigram_dict;
use crate::subcmd::evaluate::evaluate;
use crate::subcmd::learn_corpus::learn_corpus;
use crate::subcmd::make_dict::make_system_dict;
use crate::subcmd::make_stats_system_bigram_lm::make_stats_system_bigram_lm;
use crate::subcmd::make_stats_system_unigram_lm::make_stats_system_unigram_lm;
use crate::subcmd::tokenize::tokenize;
use crate::subcmd::vocab::vocab;
use crate::subcmd::wfreq::wfreq;
mod corpus_reader;
mod subcmd;
mod tokenizer;
mod utils;
mod wordcnt;
#[derive(Debug, Parser)]
#[clap(
name = env ! ("CARGO_PKG_NAME"),
version = env ! ("CARGO_PKG_VERSION"),
author = env ! ("CARGO_PKG_AUTHORS"),
about = env ! ("CARGO_PKG_DESCRIPTION"),
arg_required_else_help = true,
)]
struct Args {
#[clap(flatten)]
verbose: clap_verbosity_flag::Verbosity,
#[clap(subcommand)]
command: Commands,
}
#[derive(Debug, Subcommand)]
enum Commands {
Tokenize(TokenizeArgs),
Wfreq(WfreqArgs),
Vocab(VocabArgs),
#[clap(arg_required_else_help = true)]
MakeDict(MakeDictArgs),
WordcntUnigram(WordcntUnigramArgs),
#[clap(arg_required_else_help = true)]
WordcntBigram(WordcntBigramArgs),
LearnCorpus(LearnCorpusArgs),
#[clap(arg_required_else_help = true)]
Check(CheckArgs),
#[clap(arg_required_else_help = true)]
Evaluate(EvaluateArgs),
DumpUnigramDict(DumpUnigramDictArgs),
DumpBigramDict(DumpBigramDictArgs),
}
/// ใณใผใในใๅฝขๆ
็ด ่งฃๆๆฉใงใใผใซใใคใบใใ
#[derive(Debug, clap::Args)]
struct TokenizeArgs {
#[arg(short, long)]
reader: String,
#[arg(short, long)]
user_dict: Option<String>,
#[arg(short, long)]
system_dict: String,
#[arg(long)]
kana_preferred: bool,
src_dir: String,
dst_dir: String,
}
#[derive(Debug, clap::Args)]
struct WfreqArgs {
#[arg(long)]
src_dir: Vec<String>,
dst_file: String,
}
#[derive(Debug, clap::Args)]
struct VocabArgs {
/// ่ชๅฝใใกใคใซใซๅ้ฒใใๅ่ชๆฐใฎใใใใใฉใคใณใ
/// ๅขใใใจ่พๆธใใกใคใซใตใคใบใๅคงใใใชใใๅฎ่กๆใฎใกใขใชไฝฟ็จ้ใๅขๅคงใใใ
/// ๅขใใใจๅคๆๅฏ่ฝใช่ชๅฝใๅขใใใ
#[arg(short, long)]
threshold: u32,
src_file: String,
dst_file: String,
}
#[derive(Debug, clap::Args)]
/// Create the system dictionary file.
struct MakeDictArgs {
    /// Corpus files to take vocabulary from (repeatable).
    #[arg(short, long)]
    corpus: Vec<String>,
    /// Path to the unidic CSV file.
    #[arg(short, long)]
    unidic: String,
    /// Path to the vocab file.
    #[arg(long)]
    vocab: String,
    /// Intermediate text file, kept for debugging.
    txt_file: String,
}

/// Create the unigram language model.
#[derive(Debug, clap::Args)]
struct WordcntUnigramArgs {
    /// Input word-frequency file.
    src_file: String,
    /// Output unigram trie file.
    dst_file: String,
}

/// Generate the system (bigram) language model.
#[derive(Debug, clap::Args)]
struct WordcntBigramArgs {
    /// Bigrams whose count is <= this threshold are dropped.
    #[arg(short, long)]
    threshold: u32,
    /// Tokenized-corpus directories to scan (repeatable).
    #[arg(long)]
    corpus_dirs: Vec<String>,
    /// Existing unigram trie file (provides the word ids).
    unigram_trie_file: String,
    /// Output bigram trie file.
    bigram_trie_file: String,
}
/// Train the language model from annotated corpora.
/// (The original doc comment was a copy-paste of CheckArgs' "check the
/// behavior" — corrected here.)
#[derive(Debug, clap::Args)]
struct LearnCorpusArgs {
    /// Count adjustment applied per learning step.
    #[arg(short, long)]
    delta: u32,
    /// Epoch budget for the "may" corpus.
    #[arg(long, default_value_t = 10)]
    may_epochs: i32,
    /// Epoch budget for the "should" corpus.
    #[arg(long, default_value_t = 100)]
    should_epochs: i32,
    /// Epoch budget for the "must" corpus.
    #[arg(long, default_value_t = 1000)]
    must_epochs: i32,
    may_corpus: String,
    should_corpus: String,
    must_corpus: String,
    /// Source unigram model file.
    src_unigram: String,
    /// Source bigram model file.
    src_bigram: String,
    /// Destination unigram model file.
    dst_unigram: String,
    /// Destination bigram model file.
    dst_bigram: String,
}

/// Check conversion behavior for a single reading.
#[derive(Debug, clap::Args)]
struct CheckArgs {
    /// Also load user data.
    #[arg(short, long, default_value_t = false)]
    user_data: bool,
    /// The hiragana reading to convert.
    yomi: String,
    /// Optional expected conversion result.
    expected: Option<String>,
    /// UTF-8 encoded SKK dictionaries (repeatable).
    #[arg(long)]
    utf8_dict: Vec<String>,
    /// EUC-JP encoded SKK dictionaries (repeatable).
    #[arg(long)]
    eucjp_dict: Vec<String>,
    /// Model directory.
    #[arg(long)]
    model_dir: String,
}

/// Evaluate conversion accuracy.
#[derive(Debug, clap::Args)]
struct EvaluateArgs {
    /// Corpus files to evaluate against (repeatable).
    #[arg(long)]
    corpus: Vec<String>,
    /// UTF-8 encoded SKK dictionaries (repeatable).
    #[arg(long)]
    utf8_dict: Vec<String>,
    /// EUC-JP encoded SKK dictionaries (repeatable).
    #[arg(long)]
    eucjp_dict: Vec<String>,
    /// Model directory.
    #[arg(long)]
    model_dir: String,
}

/// Dump the unigram dictionary file as text.
#[derive(Debug, clap::Args)]
struct DumpUnigramDictArgs {
    dict: String,
}

/// Dump the bigram dictionary file as text.
#[derive(Debug, clap::Args)]
struct DumpBigramDictArgs {
    unigram_file: String,
    bigram_file: String,
}
/// Entry point: parse the CLI, set up logging, and dispatch to the
/// selected subcommand. Every subcommand returns `anyhow::Result<()>`,
/// which propagates straight out of `main`.
fn main() -> anyhow::Result<()> {
    let args = Args::parse();

    // Configure env_logger from the -v/-q flags; the custom format adds a
    // microsecond timestamp and the thread id to every record.
    env_logger::Builder::new()
        .filter_level(args.verbose.log_level_filter())
        .format(|buf, record| {
            let ts = buf.timestamp_micros();
            // show thread id
            writeln!(
                buf,
                "{}: {:?}: {}: {}",
                ts,
                std::thread::current().id(),
                buf.default_level_style(record.level())
                    .value(record.level()),
                record.args()
            )
        })
        .init();

    match args.command {
        Commands::Tokenize(opt) => tokenize(
            opt.reader,
            // NOTE(review): argument order differs from the struct's field
            // order (system_dict before user_dict) — presumed to match
            // `tokenize`'s signature; verify against its definition.
            opt.system_dict,
            opt.user_dict,
            opt.kana_preferred,
            opt.src_dir.as_str(),
            opt.dst_dir.as_str(),
        ),
        Commands::Wfreq(opt) => wfreq(&opt.src_dir, opt.dst_file.as_str()),
        Commands::Vocab(opt) => vocab(opt.src_file.as_str(), opt.dst_file.as_str(), opt.threshold),
        Commands::MakeDict(opt) => make_system_dict(
            &opt.txt_file,
            Some(opt.vocab.as_str()),
            opt.corpus,
            opt.unidic,
        ),
        Commands::WordcntBigram(opt) => make_stats_system_bigram_lm(
            opt.threshold,
            &opt.corpus_dirs,
            &opt.unigram_trie_file,
            &opt.bigram_trie_file,
        ),
        Commands::WordcntUnigram(opt) => {
            make_stats_system_unigram_lm(opt.src_file.as_str(), opt.dst_file.as_str())
        }
        Commands::LearnCorpus(opts) => learn_corpus(
            opts.delta,
            opts.may_epochs,
            opts.should_epochs,
            opts.must_epochs,
            opts.may_corpus.as_str(),
            opts.should_corpus.as_str(),
            opts.must_corpus.as_str(),
            opts.src_unigram.as_str(),
            opts.src_bigram.as_str(),
            opts.dst_unigram.as_str(),
            opts.dst_bigram.as_str(),
        ),
        Commands::Check(opt) => check(
            &opt.yomi,
            opt.expected,
            opt.user_data,
            &opt.eucjp_dict,
            &opt.utf8_dict,
            &opt.model_dir,
        ),
        Commands::Evaluate(opt) => {
            evaluate(&opt.corpus, &opt.eucjp_dict, &opt.utf8_dict, opt.model_dir)
        }
        Commands::DumpUnigramDict(opt) => dump_unigram_dict(opt.dict.as_str()),
        Commands::DumpBigramDict(opt) => {
            dump_bigram_dict(opt.unigram_file.as_str(), opt.bigram_file.as_str())
        }
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/wordcnt/wordcnt_bigram.rs | akaza-data/src/wordcnt/wordcnt_bigram.rs | use std::collections::HashMap;
use anyhow::Result;
use log::info;
use libakaza::cost::calc_cost;
use libakaza::lm::base::SystemBigramLM;
use libakaza::search_result::SearchResult;
use marisa_sys::{Keyset, Marisa};
/**
 * Bigram language model.
 * Compressed by reusing the word ids assigned when the unigram model is
 * generated.
 */
/// Builder that accumulates (word_id1, word_id2, count) triples in a
/// marisa keyset; `save` serializes them into a trie file.
#[derive(Default)]
pub struct WordcntBigramBuilder {
    // Encoded entries: 3 bytes of id1 + 3 bytes of id2 + 4-byte LE count.
    keyset: Keyset,
}
impl WordcntBigramBuilder {
    /// Register one (word_id1, word_id2) -> cnt entry.
    /// Both ids must fit in 24 bits: only the low three little-endian
    /// bytes of each id are stored, followed by the 4-byte count.
    pub fn add(&mut self, word_id1: i32, word_id2: i32, cnt: u32) {
        let left = word_id1.to_le_bytes();
        let right = word_id2.to_le_bytes();
        // Guard the 24-bit assumption: the top byte must be zero.
        assert_eq!(left[3], 0);
        assert_eq!(right[3], 0);

        let mut entry: Vec<u8> = Vec::with_capacity(10);
        entry.extend_from_slice(&left[0..3]);
        entry.extend_from_slice(&right[0..3]);
        entry.extend_from_slice(&cnt.to_le_bytes());
        self.keyset.push_back(entry.as_slice());
    }

    /// Build a trie from all registered entries and write it to `ofname`.
    pub fn save(&self, ofname: &str) -> anyhow::Result<()> {
        let mut trie = Marisa::default();
        trie.build(&self.keyset);
        trie.save(ofname)?;
        Ok(())
    }
}
/// Read side of the bigram count model, backed by a marisa trie.
pub struct WordcntBigram {
    marisa: Marisa,
    // Cost assigned to word pairs with no entry (computed from count 0).
    default_edge_cost: f32,
    // Sum of all bigram counts in the trie.
    pub total_words: u32,
    // Number of distinct bigram entries.
    pub unique_words: u32,
}
impl WordcntBigram {
    /// Decode every trie entry into a ((word_id1, word_id2) -> count) map.
    pub fn to_cnt_map(&self) -> HashMap<(i32, i32), u32> {
        Self::_to_map(&self.marisa)
    }

    fn _to_map(marisa: &Marisa) -> HashMap<(i32, i32), u32> {
        let mut pairs: HashMap<(i32, i32), u32> = HashMap::new();
        marisa.predictive_search("".as_bytes(), |entry, _id| {
            // Valid entries are exactly 10 bytes: 3 + 3 bytes of word ids
            // followed by a 4-byte little-endian count.
            if entry.len() == 10 {
                let left = i32::from_le_bytes([entry[0], entry[1], entry[2], 0]);
                let right = i32::from_le_bytes([entry[3], entry[4], entry[5], 0]);
                let count = u32::from_le_bytes([entry[6], entry[7], entry[8], entry[9]]);
                pairs.insert((left, right), count);
            }
            true
        });
        pairs
    }

    /// Load the trie from `filename` and precompute the totals used for
    /// cost calculation.
    pub fn load(filename: &str) -> Result<WordcntBigram> {
        info!("Loading system-bigram: {}", filename);
        let mut marisa = Marisa::default();
        marisa.load(filename)?;
        let counts = Self::_to_map(&marisa);
        // Total number of bigram occurrences.
        let total_words: u32 = counts.values().sum();
        // Number of distinct bigrams.
        let unique_words = counts.len() as u32;
        let default_edge_cost = calc_cost(0, total_words, unique_words);
        Ok(WordcntBigram {
            marisa,
            default_edge_cost,
            total_words,
            unique_words,
        })
    }
}
impl SystemBigramLM for WordcntBigram {
    fn get_default_edge_cost(&self) -> f32 {
        self.default_edge_cost
    }

    /// Look up the cost of the (word_id1 -> word_id2) transition.
    /// The ids are those assigned by the unigram trie.
    fn get_edge_cost(&self, word_id1: i32, word_id2: i32) -> Option<f32> {
        // Search key: the low three bytes of each word id (6 bytes total).
        let mut prefix: Vec<u8> = Vec::new();
        prefix.extend(word_id1.to_le_bytes()[0..3].iter());
        prefix.extend(word_id2.to_le_bytes()[0..3].iter());

        let mut hits: Vec<SearchResult> = Vec::new();
        self.marisa.predictive_search(prefix.as_slice(), |key, id| {
            hits.push(SearchResult {
                keyword: key.to_vec(),
                id,
            });
            true
        });
        let hit = hits.first()?;
        // The trailing four bytes of the entry hold the LE count.
        let tail: [u8; 4] = hit.keyword[hit.keyword.len() - 4..hit.keyword.len()]
            .try_into()
            .unwrap();
        let count = u32::from_le_bytes(tail);
        Some(calc_cost(count, self.total_words, self.unique_words))
    }

    /// Materialize the whole model as (pair -> cost).
    fn as_hash_map(&self) -> HashMap<(i32, i32), f32> {
        self.to_cnt_map()
            .into_iter()
            .map(|(pair, cnt)| (pair, calc_cost(cnt, self.total_words, self.unique_words)))
            .collect()
    }
}
#[cfg(test)]
mod tests {
    use tempfile::NamedTempFile;

    use super::*;

    /// Round-trip: entries written through the builder must be readable
    /// back with identical (pair -> count) contents.
    #[test]
    fn test_build() -> Result<()> {
        let named_tmpfile = NamedTempFile::new().unwrap();
        let tmpfile = named_tmpfile.path().to_str().unwrap().to_string();

        let mut builder = WordcntBigramBuilder::default();
        builder.add(4, 5, 29);
        builder.add(8, 9, 32);
        builder.save(tmpfile.as_str())?;

        let bigram = WordcntBigram::load(tmpfile.as_str())?;
        assert_eq!(
            bigram.to_cnt_map(),
            HashMap::from([((4, 5), 29), ((8, 9), 32),])
        );
        Ok(())
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/wordcnt/wordcnt_unigram.rs | akaza-data/src/wordcnt/wordcnt_unigram.rs | use std::collections::HashMap;
use anyhow::Result;
use log::info;
use libakaza::cost::calc_cost;
use libakaza::lm::base::SystemUnigramLM;
use marisa_sys::{Keyset, Marisa};
/**
 * Unigram language model.
 * Holds an occurrence-count score for each "kanji/yomi" key.
 */
/// Builder for the unigram count trie; collects (surface, count) pairs
/// and serializes them with marisa.
#[derive(Default)]
pub struct WordcntUnigramBuilder {
    // (word surface, occurrence count) pairs in insertion order.
    data: Vec<(String, u32)>,
}
impl WordcntUnigramBuilder {
    /// Queue one (word, count) pair for inclusion in the trie.
    pub fn add(&mut self, word: &str, cnt: u32) {
        self.data.push((word.to_string(), cnt));
    }

    /// Encode all queued pairs into a marisa keyset.
    ///
    /// Entry layout: `<surface bytes> 0xff <4-byte LE count>`. The 0xff
    /// separator can never occur inside valid UTF-8, so it unambiguously
    /// terminates the surface; the count is stored in binary to keep the
    /// file small (trading a little lookup convenience for space).
    pub fn keyset(&self) -> Keyset {
        let mut keyset = Keyset::default();
        for (surface, count) in &self.data {
            let mut key: Vec<u8> = Vec::with_capacity(surface.len() + 5);
            key.extend_from_slice(surface.as_bytes());
            key.push(0xff);
            key.extend_from_slice(&count.to_le_bytes());
            keyset.push_back(key.as_slice());
        }
        keyset
    }

    /// Build the trie and write it to `fname`.
    pub fn save(&self, fname: &str) -> Result<()> {
        let mut trie = Marisa::default();
        trie.build(&self.keyset());
        trie.save(fname)?;
        Ok(())
    }
}
/// Read side of the unigram count model, backed by a marisa trie.
pub struct WordcntUnigram {
    marisa: Marisa,
    // Sum of all word counts in the trie.
    pub(crate) total_words: u32,
    // Number of distinct words.
    pub(crate) unique_words: u32,
}
impl WordcntUnigram {
    /// Number of entries stored in the trie.
    pub fn num_keys(&self) -> usize {
        self.marisa.num_keys()
    }

    /// Decode every trie entry into surface -> (word_id, count).
    pub fn to_count_hashmap(&self) -> HashMap<String, (i32, u32)> {
        Self::_to_count_hashmap(&self.marisa)
    }

    fn _to_count_hashmap(marisa: &Marisa) -> HashMap<String, (i32, u32)> {
        let mut entries: HashMap<String, (i32, u32)> = HashMap::new();
        marisa.predictive_search("".as_bytes(), |raw, id| {
            // Entries are "<surface bytes> 0xff <4-byte LE count>".
            let sep = raw.iter().position(|b| *b == b'\xff').unwrap();
            let count_bytes: [u8; 4] = raw[sep + 1..sep + 5].try_into().unwrap();
            let surface = String::from_utf8_lossy(&raw[0..sep]);
            entries.insert(surface.to_string(), (id as i32, u32::from_le_bytes(count_bytes)));
            true
        });
        entries
    }

    /// Load the trie from `fname` and precompute the totals used for
    /// cost calculation.
    pub fn load(fname: &str) -> Result<WordcntUnigram> {
        info!("Reading {}", fname);
        let mut marisa = Marisa::default();
        marisa.load(fname)?;
        let entries = Self::_to_count_hashmap(&marisa);
        // Total number of word occurrences.
        let total_words: u32 = entries.values().map(|(_, cnt)| *cnt).sum();
        // Number of distinct words.
        let unique_words = entries.len() as u32;
        Ok(WordcntUnigram {
            marisa,
            total_words,
            unique_words,
        })
    }
}
impl SystemUnigramLM for WordcntUnigram {
    fn get_cost(&self, wordcnt: u32) -> f32 {
        calc_cost(wordcnt, self.total_words, self.unique_words)
    }

    /// Returns (word_id, cost) for the given surface, or None if absent.
    fn find(&self, word: &str) -> Option<(i32, f32)> {
        assert_ne!(word.len(), 0);
        // Appending the 0xff separator turns the prefix search into an
        // exact-surface match.
        let prefix = [word.as_bytes(), b"\xff"].concat();
        let mut hit: Option<(usize, u32)> = None;
        self.marisa.predictive_search(prefix.as_slice(), |entry, id| {
            let sep = entry.iter().position(|b| *b == b'\xff').unwrap();
            let raw: [u8; 4] = entry[sep + 1..sep + 5].try_into().unwrap();
            hit = Some((id, u32::from_le_bytes(raw)));
            false // the first match is enough
        });
        hit.map(|(id, cnt)| (id as i32, calc_cost(cnt, self.total_words, self.unique_words)))
    }

    /// Materialize the whole model as surface -> (word_id, cost).
    fn as_hash_map(&self) -> HashMap<String, (i32, f32)> {
        self.to_count_hashmap()
            .into_iter()
            .map(|(surface, (id, cnt))| {
                (surface, (id, calc_cost(cnt, self.total_words, self.unique_words)))
            })
            .collect()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::NamedTempFile;

    /// Round-trip plus cost math: write two words, reload, and verify the
    /// ids, raw counts, and the derived costs (floats pinned to the exact
    /// values produced by `calc_cost`).
    #[test]
    fn test() -> Result<()> {
        let named_tmpfile = NamedTempFile::new().unwrap();
        let tmpfile = named_tmpfile.path().to_str().unwrap().to_string();

        let mut builder = WordcntUnigramBuilder::default();
        builder.add("็ง/ใใใ", 3);
        builder.add("ๅฝผ/ใใ", 42);
        builder.save(tmpfile.as_str())?;

        let wordcnt = WordcntUnigram::load(tmpfile.as_str())?;
        assert_eq!(
            wordcnt.to_count_hashmap(),
            HashMap::from([
                ("็ง/ใใใ".to_string(), (1_i32, 3_u32)),
                ("ๅฝผ/ใใ".to_string(), (0_i32, 42_u32)),
            ])
        );
        assert_eq!(wordcnt.total_words, 45); // total occurrences (3 + 42)
        assert_eq!(wordcnt.unique_words, 2); // distinct words
        assert_eq!(wordcnt.get_cost(0), 6.672098);
        assert_eq!(wordcnt.get_cost(1), 1.6720936);
        assert_eq!(wordcnt.find("็ง/ใใใ"), Some((1_i32, 1.1949753)));
        assert_eq!(wordcnt.find("ๅฝผ/ใใ"), Some((0_i32, 0.048848562)));
        assert_eq!(
            wordcnt.as_hash_map(),
            HashMap::from([
                ("็ง/ใใใ".to_string(), (1_i32, 1.1949753)),
                ("ๅฝผ/ใใ".to_string(), (0_i32, 0.048848562)),
            ])
        );
        Ok(())
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/wordcnt/mod.rs | akaza-data/src/wordcnt/mod.rs | pub mod wordcnt_bigram;
pub mod wordcnt_unigram;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/learn_corpus.rs | akaza-data/src/subcmd/learn_corpus.rs | use std::cell::RefCell;
use std::collections::HashMap;
use std::path::Path;
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use encoding_rs::UTF_8;
use log::{debug, info};
use crate::wordcnt::wordcnt_bigram::WordcntBigram;
use crate::wordcnt::wordcnt_unigram::WordcntUnigram;
use libakaza::corpus::{read_corpus_file, FullAnnotationCorpus};
use libakaza::dict::skk::read::read_skkdict;
use libakaza::graph::graph_builder::GraphBuilder;
use libakaza::graph::graph_resolver::GraphResolver;
use libakaza::graph::segmenter::Segmenter;
use libakaza::kana_kanji::hashmap_vec::HashmapVecKanaKanjiDict;
use libakaza::kana_trie::cedarwood_kana_trie::CedarwoodKanaTrie;
use libakaza::lm::base::{SystemBigramLM, SystemUnigramLM};
use libakaza::lm::on_memory::on_memory_system_bigram_lm::OnMemorySystemBigramLM;
use libakaza::lm::on_memory::on_memory_system_unigram_lm::OnMemorySystemUnigramLM;
use libakaza::lm::system_bigram::MarisaSystemBigramLMBuilder;
use libakaza::lm::system_unigram_lm::{MarisaSystemUnigramLM, MarisaSystemUnigramLMBuilder};
use libakaza::user_side_data::user_data::UserData;
/// Holds everything needed to run kana-kanji conversion over the training
/// corpus and to adjust the in-memory unigram/bigram counts whenever the
/// converter's output disagrees with the teacher data.
struct LearningService {
    graph_builder:
        GraphBuilder<OnMemorySystemUnigramLM, OnMemorySystemBigramLM, HashmapVecKanaKanjiDict>,
    segmenter: Segmenter,
    system_unigram_lm: Rc<OnMemorySystemUnigramLM>,
    system_bigram_lm: Rc<OnMemorySystemBigramLM>,
}
impl LearningService {
    /// Load the source unigram/bigram count models and assemble the
    /// conversion pipeline (segmenter + graph builder) used for training.
    /// Corpus words missing from the unigram model are inserted with a
    /// fresh word id and a count of 1 so they can be scored at all.
    ///
    /// NOTE(review): reads the hard-coded relative path
    /// "data/SKK-JISYO.akaza", so this must run from the project root.
    pub fn new(src_unigram: &str, src_bigram: &str, corpuses: &[&str]) -> anyhow::Result<Self> {
        let system_kana_kanji_dict = read_skkdict(Path::new("data/SKK-JISYO.akaza"), UTF_8)?;
        let all_yomis = system_kana_kanji_dict.keys().cloned().collect::<Vec<_>>();
        let system_kana_trie = CedarwoodKanaTrie::build(all_yomis);
        let segmenter = Segmenter::new(vec![Arc::new(Mutex::new(system_kana_trie))]);

        info!("unigram source file: {}", src_unigram);
        let src_system_unigram_lm = WordcntUnigram::load(src_unigram)?;
        let mut unigram_map = src_system_unigram_lm.to_count_hashmap();
        // Register corpus words that are not yet in the unigram trie.
        {
            let mut max_id = *unigram_map
                .iter()
                .map(|(_, (id, _))| id)
                .max()
                .unwrap_or(&0);
            for fname in corpuses {
                let corpuses = read_corpus_file(Path::new(fname))?;
                for corpus in corpuses {
                    for node in corpus.nodes {
                        if !unigram_map.contains_key(node.key().as_str()) {
                            info!(
                                "Insert missing element: {} max_id={}",
                                node.key(),
                                max_id + 1
                            );
                            unigram_map.insert(node.key(), (max_id + 1, 1));
                            max_id += 1;
                        }
                    }
                }
            }
        }
        // NOTE(review): total_words/unique_words are carried over from the
        // source model and NOT adjusted for the entries inserted above.
        let system_unigram_lm = Rc::new(OnMemorySystemUnigramLM::new(
            Rc::new(RefCell::new(unigram_map)),
            src_system_unigram_lm.total_words,
            src_system_unigram_lm.unique_words,
        ));

        info!("bigram source file: {}", src_bigram);
        let src_system_bigram_lm = WordcntBigram::load(src_bigram)?;
        let system_bigram_lm = Rc::new(OnMemorySystemBigramLM::new(
            Rc::new(RefCell::new(src_system_bigram_lm.to_cnt_map())),
            src_system_bigram_lm.get_default_edge_cost(),
            src_system_bigram_lm.total_words,
            src_system_bigram_lm.unique_words,
        ));

        let graph_builder = GraphBuilder::new(
            HashmapVecKanaKanjiDict::new(system_kana_kanji_dict),
            HashmapVecKanaKanjiDict::new(HashMap::default()),
            Arc::new(Mutex::new(UserData::default())),
            system_unigram_lm.clone(),
            system_bigram_lm.clone(),
        );

        Ok(LearningService {
            graph_builder,
            segmenter,
            system_unigram_lm,
            system_bigram_lm,
        })
    }

    /// Run `learn` over the whole corpus repeatedly, stopping early once
    /// every sentence converts correctly.
    ///
    /// NOTE(review): `1..epochs` runs `epochs - 1` iterations — confirm
    /// whether `1..=epochs` (or `0..epochs`) was intended.
    pub fn try_learn(&self, epochs: i32, delta: u32, corpus: &str) -> anyhow::Result<()> {
        let corpuses = read_corpus_file(Path::new(corpus))?;
        for _ in 1..epochs {
            let mut ok_cnt = 0;
            for teacher in corpuses.iter() {
                let succeeded = self.learn(delta, teacher)?;
                if succeeded {
                    ok_cnt += 1;
                }
            }
            info!("ok_cnt={} corpuses.len()={}", ok_cnt, corpuses.len());
            if ok_cnt == corpuses.len() {
                info!("Learning process finished.");
                break;
            }
        }
        Ok(())
    }

    /// Convert one teacher sentence with the current model. When the
    /// output differs from the teacher surface, adjust the counts of the
    /// teacher's unigrams and bigrams by `delta` and return Ok(false);
    /// return Ok(true) when the conversion already matches.
    pub fn learn(&self, delta: u32, teacher: &FullAnnotationCorpus) -> anyhow::Result<bool> {
        let yomi = teacher.yomi();
        let surface = teacher.surface();
        let segmentation_result = self.segmenter.build(&yomi, None);
        let graph_resolver = GraphResolver::default();

        let lattice = self
            .graph_builder
            .construct(yomi.as_str(), &segmentation_result);
        let got = graph_resolver.resolve(&lattice)?;
        let terms: Vec<String> = got.iter().map(|f| f[0].surface.clone()).collect();
        let result = terms.join("");
        println!("{result}");

        // When the conversion is wrong, the occurrence counts are assumed
        // to be off, so nudge them by delta.
        if result != surface {
            // learn unigram
            if !teacher.nodes.is_empty() {
                for i in 0..teacher.nodes.len() {
                    let key = teacher.nodes[i].key();
                    // NOTE(review): despite its name, `cost` holds a raw
                    // count; with the 0 fallback below, `cost - delta` can
                    // underflow u32 (panic in debug, wrap in release) —
                    // consider saturating_sub. Also `&key.to_string()`
                    // allocates; check whether find_cnt(&key) suffices.
                    let (_, cost) = self
                        .system_unigram_lm
                        .find_cnt(&key.to_string())
                        .unwrap_or((-1, 0_u32));
                    self.system_unigram_lm.update(key.as_str(), cost - delta);
                }
            }
            // learn bigram
            if teacher.nodes.len() > 1 {
                for i in 1..teacher.nodes.len() {
                    let key1 = teacher.nodes[i - 1].key();
                    let key2 = teacher.nodes[i].key();
                    // Skip pairs whose words are not in the unigram LM.
                    let Some((word_id1, _)) = self.system_unigram_lm.find(key1.as_str()) else {
                        // info!("{} is not registered in the real system unigram LM.",word1);
                        continue;
                    };
                    let Some((word_id2, _)) = self.system_unigram_lm.find(key2.as_str()) else {
                        // info!("{} is not registered in the real system unigram LM.",word1);
                        continue;
                    };
                    let v = self
                        .system_bigram_lm
                        .get_edge_cnt(word_id1, word_id2)
                        .unwrap_or(0_u32);
                    info!(
                        "Update bigram cost: {}={},{}={}, v={}",
                        key1, word_id1, key2, word_id2, v
                    );
                    // NOTE(review): same u32 underflow risk as above when
                    // v < delta.
                    self.system_bigram_lm.update(word_id1, word_id2, v - delta);
                }
            }
            debug!("BAD! result={}, surface={}", result, surface);
            Ok(false)
        } else {
            debug!("ๅญฆ็ฟๅฎไบ! result={}", result);
            Ok(true)
        }
    }

    /// Serialize the adjusted unigram counts into a marisa trie file.
    pub fn save_unigram(&self, dst_unigram: &str) -> anyhow::Result<()> {
        // unigram
        let mut unigram_builder = MarisaSystemUnigramLMBuilder::default();
        for (key, (_, cost)) in self.system_unigram_lm.as_hash_map() {
            unigram_builder.add(key.as_str(), cost);
        }
        // Ideally these totals would be recomputed from the adjusted data;
        // for now the source model's values are reused. TODO: clean up.
        unigram_builder.set_unique_words(self.system_unigram_lm.unique_words);
        unigram_builder.set_total_words(self.system_unigram_lm.total_words);
        info!("Save unigram to {}", dst_unigram);
        unigram_builder.save(dst_unigram)?;
        Ok(())
    }

    /// Serialize the adjusted bigram model, remapping word ids onto those
    /// of the freshly written unigram file (`dst_unigram` must already
    /// exist — see `save_unigram`).
    pub fn save_bigram(&self, dst_unigram: &str, dst_bigram: &str) -> anyhow::Result<()> {
        // Save the bigram model.
        let new_unigram = MarisaSystemUnigramLM::load(dst_unigram)?;
        let mut bigram_builder = MarisaSystemBigramLMBuilder::default();
        let srcmap = self.system_unigram_lm.as_hash_map();
        let src_wordid2key = srcmap
            .iter()
            .map(|(key, (word_id, _))| (*word_id, key.to_string()))
            .collect::<HashMap<i32, String>>();
        // info!("src_wordid2key: {:?}", src_wordid2key);
        for ((word_id1, word_id2), cost) in self.system_bigram_lm.as_hash_map() {
            // If this area fails, the data under work/ is usually
            // inconsistent; regenerating it all tends to fix things.
            // KNOWN BUG: a handful of unknown word_ids sometimes appear and
            // the cause is not identified yet; dumping the data to text
            // shows nothing wrong, so the offending entries are simply
            // skipped below and investigation is deferred.
            let Some(word1) = src_wordid2key
                .get(&word_id1) else {
                info!("Unknown word_id: {}", word_id1);
                continue;
            };
            let Some((new_word_id1, _)) = new_unigram
                .find(word1) else {
                info!("Unknown word: {}", word1);
                continue;
            };
            let Some(word2) = src_wordid2key
                .get(&word_id2) else {
                info!("Unknown word_id: {}", word_id2);
                continue;
            };
            let Some((new_word_id2, _)) = new_unigram.find(word2) else {
                info!("Unknown word: {}", word2);
                continue;
            };
            bigram_builder.add(new_word_id1, new_word_id2, cost);
        }
        // Ideally recomputed from the current data; the source model's
        // value is reused for now. TODO: clean up.
        bigram_builder.set_default_edge_cost(self.system_bigram_lm.get_default_edge_cost());
        info!("Save bigram to {}", dst_bigram);
        bigram_builder.save(dst_bigram)?;
        Ok(())
    }
}
/// ใณใผใในใๅ
ใซใใๅญฆ็ฟใ่กใใพใใ
#[allow(clippy::too_many_arguments)]
pub fn learn_corpus(
delta: u32,
may_epochs: i32,
should_epochs: i32,
must_epochs: i32,
may_corpus: &str,
should_corpus: &str,
must_corpus: &str,
src_unigram: &str,
src_bigram: &str,
dst_unigram: &str,
dst_bigram: &str,
) -> anyhow::Result<()> {
let service = LearningService::new(
src_unigram,
src_bigram,
&[may_corpus, should_corpus, must_corpus],
)?;
// ๅฎ้ใฎๅญฆ็ฟใใใใ
for (epoch, corpus) in [
(may_epochs, may_corpus),
(should_epochs, should_corpus),
(must_epochs, must_corpus),
] {
service.try_learn(epoch, delta, corpus)?;
}
// ไฟๅญใใฆใใ
service.save_unigram(dst_unigram)?;
service.save_bigram(dst_unigram, dst_bigram)?;
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/make_stats_system_bigram_lm.rs | akaza-data/src/subcmd/make_stats_system_bigram_lm.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::{prelude::*, BufReader};
use std::path::{Path, PathBuf};
use anyhow::anyhow;
use anyhow::Context;
use anyhow::Result;
use chrono::Local;
use log::info;
use rayon::prelude::*;
use libakaza::lm::base::{SystemBigramLM, SystemUnigramLM};
use crate::utils::get_file_list;
use crate::wordcnt::wordcnt_bigram::{WordcntBigram, WordcntBigramBuilder};
use crate::wordcnt::wordcnt_unigram::WordcntUnigram;
/// Build the bigram count language model from tokenized corpora.
///
/// Word ids come from the already-built unigram trie. Bigrams whose count
/// is <= `threshold` are dropped from the final trie; a plain-text dump of
/// the more frequent pairs is also written under work/dump/ for debugging.
pub fn make_stats_system_bigram_lm(
    threshold: u32,
    corpus_dirs: &Vec<String>,
    unigram_trie_file: &str,
    bigram_trie_file: &str,
) -> Result<()> {
    // First, load the unigram language model (provides word -> id).
    let unigram_lm = WordcntUnigram::load(unigram_trie_file)?;
    info!(
        "Unigram system lm: {} threshold={}",
        unigram_lm.num_keys(),
        threshold
    );
    let unigram_map = unigram_lm
        .as_hash_map()
        .iter()
        .map(|(key, (word_id, _))| (key.clone(), *word_id))
        .collect::<HashMap<_, _>>();
    // Reverse map (id -> word), used only for the debug dump below.
    let reverse_unigram_map = unigram_map
        .iter()
        .map(|(key, word_id)| (*word_id, key.to_string()))
        .collect::<HashMap<_, _>>();

    // Next, scan the corpora and count bigrams (one rayon task per file).
    let mut file_list: Vec<PathBuf> = Vec::new();
    for corpus_dir in corpus_dirs {
        let list = get_file_list(Path::new(corpus_dir))?;
        for x in list {
            file_list.push(x)
        }
    }
    let results = file_list
        .par_iter()
        .map(|src| count_bigram(src, &unigram_map))
        .collect::<Vec<_>>();

    // Merge the per-file counts.
    info!("Merging");
    let mut merged: HashMap<(i32, i32), u32> = HashMap::new();
    for result in results {
        let result = result?;
        for (word_ids, cnt) in result {
            *merged.entry(word_ids).or_insert(0) += cnt;
        }
    }

    // Keep only pairs strictly above the threshold.
    let wordcnt = merged
        .iter()
        .filter(|(_, cnt)| **cnt > threshold)
        .map(|((id1, id2), cnt)| ((*id1, *id2), *cnt))
        .collect::<HashMap<(i32, i32), u32>>();

    // Dump a human-readable snapshot for debugging.
    // NOTE(review): File::create fails unless work/dump/ already exists.
    let dumpfname = format!(
        "work/dump/bigram-{}.txt",
        Local::now().format("%Y%m%d-%H%M%S")
    );
    println!("Dump to text file: {dumpfname}");
    let mut file = File::create(dumpfname)?;
    for ((word_id1, word_id2), cnt) in &merged {
        let Some(word1) = reverse_unigram_map.get(word_id1) else {
            continue;
        };
        let Some(word2) = reverse_unigram_map.get(word_id2) else {
            continue;
        };
        // Magic number: only dump reasonably frequent pairs.
        if *cnt > 16 {
            file.write_fmt(format_args!("{cnt}\t{word1}\t{word2}\n"))?;
        }
    }

    // Write the final trie.
    info!("Generating trie file");
    let mut builder = WordcntBigramBuilder::default();
    for ((word_id1, word_id2), cnt) in wordcnt {
        builder.add(word_id1, word_id2, cnt);
    }
    info!("Writing {}", bigram_trie_file);
    builder.save(bigram_trie_file)?;

    validation(unigram_trie_file, bigram_trie_file)?;

    println!("DONE");

    Ok(())
}
fn count_bigram(
src: &PathBuf,
unigram_lm: &HashMap<String, i32>,
) -> Result<HashMap<(i32, i32), u32>> {
info!("Counting {}", src.to_string_lossy());
let file = File::open(src)?;
let mut map: HashMap<(i32, i32), u32> = HashMap::new();
for line in BufReader::new(file).lines() {
let line = line?;
let line = line.trim();
let words = line.split(' ').collect::<Vec<_>>();
if words.len() < 2 {
continue;
}
// ในใฉใคใใใชใใใใใงใใใฎใงใๅใๅ่ชใไบๅใฒใใชใใฆใใใใใซ
// ่ชฟๆดใใ
let word_ids = words
.iter()
.map(|word| unigram_lm.get(&word.to_string()))
.collect::<Vec<_>>();
for i in 0..(word_ids.len() - 1) {
let Some(word_id1) = word_ids[i] else {
continue;
};
let Some(word_id2) = word_ids[i + 1] else {
continue;
};
// info!(
// "Register {}={}/{}={}",
// words[i],
// word_id1,
// words[i + 1],
// word_id2
// );
*map.entry((*word_id1, *word_id2)).or_insert(0) += 1;
}
}
Ok(map)
}
// ่จ่ชใขใใซใใกใคใซใๆญฃ็ขบใซ็ๆใใใใ็ขบ่ชใๅฎๆฝใใ
fn validation(unigram_dst: &str, bigram_dst: &str) -> Result<()> {
let unigram = WordcntUnigram::load(unigram_dst).unwrap();
let bigram = WordcntBigram::load(bigram_dst).unwrap();
let word1 = "็ง/ใใใ";
let (word1_id, watshi_cost) = unigram
.find(word1)
.ok_or_else(|| anyhow!("Cannot find '{}' in unigram dict.", word1))?;
println!("word1_id={word1_id} word1_cost={watshi_cost}");
let word2 = "ใใ/ใใ";
let (word2_id, word2_cost) = unigram
.find(word2)
.ok_or_else(|| anyhow!("Cannot find '{}' in unigram dict.", word1))?;
println!("word2_id={word2_id} word2_cost={word2_cost}");
bigram
.get_edge_cost(word1_id, word2_id)
.with_context(|| format!("Get bigram entry: '{word1} -> {word2}' {word1_id},{word2_id}"))?;
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/make_dict.rs | akaza-data/src/subcmd/make_dict.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use anyhow::{bail, Result};
use encoding_rs::UTF_8;
use log::info;
use crate::utils::copy_snapshot;
/// Build the system dictionary in text form.
/// Thin public wrapper around `system_dict::make_system_dict`.
pub fn make_system_dict(
    txt_file: &str,
    vocab_file_path: Option<&str>,
    corpus_files: Vec<String>,
    unidic_file: String,
) -> Result<()> {
    system_dict::make_system_dict(txt_file, vocab_file_path, corpus_files, unidic_file)
}
mod system_dict {
use std::io::BufReader;
use anyhow::{bail, Context};
use kelp::{kata2hira, ConvOption};
use log::trace;
use regex::Regex;
use libakaza::corpus::read_corpus_file;
use libakaza::dict::skk::read::read_skkdict;
use libakaza::dict::skk::write::write_skk_dict;
use super::*;
pub fn make_system_dict(
txt_file: &str,
vocab_file_path: Option<&str>,
corpus_files: Vec<String>,
unidic_file: String,
) -> Result<()> {
// vocab, corpus, dict/SKK-JISYO.akaza ใใ่พๆธใ็ๆใใ
let mut dicts = Vec::new();
// SKK-JISYO.akaza ใ่ชญใ
dicts.push(
validate_dict(cleanup_dict(&read_skkdict(
Path::new("dict/SKK-JISYO.akaza"),
UTF_8,
)?))
.with_context(|| "dict/SKK-JISYO.akaza".to_string())?,
);
// vocab ใใกใคใซใ่ชญใ
if let Some(vocab_file_path) = vocab_file_path {
info!("Using vocab file: {}", vocab_file_path);
dicts.push(
validate_dict(make_vocab_dict(vocab_file_path)?)
.with_context(|| "make_vocab_dict".to_string())?,
);
}
// ใณใผใในใใใ่ชๅฝใ่ฟฝๅ ใใ
dicts.push(
validate_dict(make_corpus_dict(corpus_files)?)
.with_context(|| "make_corpus_dict".to_string())?,
);
// unidic ใใใ่ชๅฝใ่ฟฝๅ ใใ
dicts.push(
validate_dict(make_unidic_dict(unidic_file)?)
.with_context(|| "make_corpus_dict".to_string())?,
);
write_skk_dict(txt_file, dicts)?;
copy_snapshot(Path::new(txt_file))?;
post_validate(txt_file)?;
Ok(())
}
/// ๅบๆฅไธใใฃใ่พๆธใๅ้กใชใๅ่ณชใใ็ขบ่ชใใ
fn post_validate(path: &str) -> Result<()> {
let dict = read_skkdict(Path::new(path), UTF_8)?;
for key in ["ใใใใใผใใใ"] {
if !dict.contains_key(key) {
bail!("Missing key in dict: {}", key);
}
}
Ok(())
}
fn cleanup_dict(dict: &HashMap<String, Vec<String>>) -> HashMap<String, Vec<String>> {
// ๅ
จ่ง็ฉบ็ฝใๅ
ฅใฃใฆใใใจใใญในใๅฆ็ๆใซใใใใใชใใใกใชใฎใง่ชฟๆดใ
dict.iter()
.map(|(k, vs)| {
(
k.to_string(),
vs.iter()
.filter(|m| m.as_str() != "\u{3000}")
.map(|s| s.to_string())
.collect(),
)
})
.collect::<HashMap<String, Vec<String>>>()
}
fn make_corpus_dict(corpus_files: Vec<String>) -> Result<HashMap<String, Vec<String>>> {
let mut words: Vec<(String, String)> = Vec::new();
for corpus_file in corpus_files {
let corpus_vec = read_corpus_file(Path::new(corpus_file.as_str()))?;
for corpus in corpus_vec {
for node in corpus.nodes {
// info!("Add {}/{}", node.yomi, node.kanji);
words.push((node.yomi.to_string(), node.surface.to_string()));
}
}
}
Ok(grouping_words(words))
}
fn grouping_words(words: Vec<(String, String)>) -> HashMap<String, Vec<String>> {
words.iter().fold(
HashMap::new(),
|mut acc: HashMap<String, Vec<String>>, t: &(String, String)| {
let (p, q) = t;
acc.entry(p.to_string())
.or_insert_with(Vec::new)
.push(q.to_string());
acc
},
)
}
fn make_vocab_dict(vocab_file_path: &str) -> Result<HashMap<String, Vec<String>>> {
let rfp = File::open(vocab_file_path)?;
let mut words: Vec<(String, String)> = Vec::new();
for line in BufReader::new(rfp).lines() {
let line = line?;
let Some((surface, yomi)) = line.split_once('/') else {
bail!("Cannot parse vocab file: {:?} in {}", line, vocab_file_path);
};
if yomi == "UNK" {
// ใชใใฎใจใใซ็บ็ใใใใฏใใใใชใใใใชใซใๆๅณใใใใใใชๅฆ็ใ
// Python ็ใซใใฃใใฎใงๆฎใใฆใใใใใถใใใใชใๅฆ็ใ
continue;
}
if yomi.contains('\u{3000}') || surface.contains('\u{3000}') {
// ๅ
จ่ง็ฉบ็ฝใฏใใฃใฆใใฎใฏใใใใ
continue;
}
if yomi.is_empty() {
// ใใฟใใชใใฎใฏใใใใใ
continue;
}
words.push((yomi.to_string(), surface.to_string()));
}
Ok(grouping_words(words))
}
// ใใใใใผใใใใใชใฉใฎใซใฟใซใ่ชใ unidic ใใๆพใใ
fn make_unidic_dict(path: String) -> anyhow::Result<HashMap<String, Vec<String>>> {
let file = File::open(path)?;
let mut dict = HashMap::new();
let katakana_pattern = Regex::new(r#"^\p{wb=Katakana}+"#)?;
for line in BufReader::new(file).lines() {
let line = line?;
let csv = line.split(',').collect::<Vec<_>>();
if csv.len() < 10 {
trace!("Incomplete line: {:?}", line);
continue;
}
// ใณในใใฏไฝใๆนใใใๅบใฆใใใใฎใ
let surface = csv[0];
let _cost = csv[2];
let _hinshi = csv[4];
let _subhinshi = csv[5];
let yomi = csv[10];
if katakana_pattern.is_match(surface)
&& katakana_pattern.is_match(yomi)
&& surface == yomi
{
dict.insert(
kata2hira(surface, ConvOption::default()),
vec![yomi.to_string()],
);
}
}
info!("Got {} entries from unidic", dict.len());
Ok(dict)
}
}
/// Reject dictionaries with obviously broken entries: empty readings or
/// surfaces, full-width spaces, and a couple of specific regression
/// checks. Returns the dictionary unchanged on success.
fn validate_dict(dict: HashMap<String, Vec<String>>) -> Result<HashMap<String, Vec<String>>> {
    for (kana, surfaces) in dict.iter() {
        if kana.is_empty() {
            bail!("Kana must not be empty: {:?}", surfaces);
        }
        let kana_len = kana.chars().count();
        for surface in surfaces {
            if surface.is_empty() {
                bail!("Empty surface: {:?}", kana);
            }
            if kana_len == 1 && kana_len < surface.chars().count() {
                // Single-kana readings with longer surfaces are suspicious
                // but currently tolerated (deliberate no-op check).
            }
            if kana == "ใ" && kana_len < surface.chars().count() {
                bail!("XXX Missing surface: {:?}<{:?}", kana, surface);
            }
            if kana == "ใ" && surface == "ๅฅฝใ" {
                bail!("Missing surface: {}<{}", kana, surface);
            }
            if kana.contains('\u{3000}') {
                bail!("Full width space: {}<{}", kana, surface);
            }
        }
    }
    Ok(dict)
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/evaluate.rs | akaza-data/src/subcmd/evaluate.rs | use std::fs::File;
use std::io::{BufRead, BufReader};
use std::time::SystemTime;
use anyhow::Context;
use log::info;
use libakaza::config::{DictConfig, DictEncoding, DictType, DictUsage, EngineConfig};
use libakaza::engine::base::HenkanEngine;
use libakaza::engine::bigram_word_viterbi_engine::BigramWordViterbiEngineBuilder;
/// Accumulator for the "reproduction rate" metric: total LCS length over
/// total system-output length.
/// https://www.anlp.jp/proceedings/annual_meeting/2011/pdf_dir/C4-6.pdf
#[derive(Default)]
struct SaigenRitsu {
    // N_{LCS}: summed character length of the longest common
    // subsequences between teacher and system output.
    total_lcs: usize,
    // N_{sys}: summed character length of the system's outputs.
    total_sys: usize,
}
impl SaigenRitsu {
    /// Accumulate one sentence pair.
    /// `teacher` is the corpus answer; `my_candidate` is the output of
    /// the model under evaluation.
    fn add(&mut self, teacher: &str, my_candidate: &str) {
        let expected: Vec<char> = teacher.chars().collect();
        let produced: Vec<char> = my_candidate.chars().collect();
        let table = lcs::LcsTable::new(&expected, &produced);
        let common = table.longest_common_subsequence();
        self.total_lcs += common.len();
        self.total_sys += produced.len();
    }

    /// Reproduction rate in percent: 100 * N_LCS / N_sys.
    fn rate(&self) -> f32 {
        100.0 * (self.total_lcs as f32) / (self.total_sys as f32)
    }
}
/// ใขใใซ/ๅคๆใขใซใดใชใบใ ใ่ฉไพกใใใ
///
/// ๆฅๆฌ่ชใใชๆผขๅญๅคๆใซใใใ่ญๅฅใขใใซใฎ้ฉ็จใจใใฎ่ๅฏ
/// https://www.anlp.jp/proceedings/annual_meeting/2011/pdf_dir/C4-6.pdf
///
/// ใซใฎใฃใฆใใ่ฉไพกๆนๆณใๆก็จใ
///
/// ใชใใใใใฆใใใใจใใใจใmozc ใฎ่ซๆใซใฎใฃใฆใใ BLEU ใไฝฟ็จใใๆนๅผใใๅฎ่ฃ
ใๆฅฝใ ใใใงใ!
pub fn evaluate(
corpus: &Vec<String>,
eucjp_dict: &Vec<String>,
utf8_dict: &Vec<String>,
model_dir: String,
) -> anyhow::Result<()> {
let mut dicts: Vec<DictConfig> = Vec::new();
for path in eucjp_dict {
dicts.push(DictConfig {
dict_type: DictType::SKK,
encoding: DictEncoding::EucJp,
path: path.clone(),
usage: DictUsage::Normal,
})
}
for path in utf8_dict {
dicts.push(DictConfig {
dict_type: DictType::SKK,
encoding: DictEncoding::Utf8,
path: path.clone(),
usage: DictUsage::Normal,
})
}
let akaza = BigramWordViterbiEngineBuilder::new(EngineConfig {
dicts,
model: model_dir,
dict_cache: false,
})
.build()?;
let mut good_cnt = 0;
let mut bad_cnt = 0;
let force_ranges = Vec::new();
let total_t1 = SystemTime::now();
let mut saigen_ritsu = SaigenRitsu::default();
for file in corpus {
let fp = File::open(file).with_context(|| format!("File: {file}"))?;
for line in BufReader::new(fp).lines() {
let line = line?;
let line = line.trim();
if line.starts_with('#') {
continue; // comment่ก
}
let (yomi, surface) = line
.split_once(' ')
.with_context(|| format!("source: {line}"))
.unwrap();
let yomi = yomi.replace('|', "");
let surface = surface.replace('|', "");
let t1 = SystemTime::now();
let result = akaza.convert(yomi.as_str(), Some(&force_ranges))?;
let t2 = SystemTime::now();
let elapsed = t2.duration_since(t1)?;
let terms: Vec<String> = result.iter().map(|f| f[0].surface.clone()).collect();
let got = terms.join("");
// ๆ้ทๅ
ฑ้้จๅๅใ็ฎๅบใ
saigen_ritsu.add(&surface, &got);
if surface == got {
info!("{} => (teacher={}, akaza={})", yomi, surface, got);
good_cnt += 1;
} else {
println!(
"{} =>\n\
| corpus={}\n\
| akaza ={}\n\
Good count={} bad count={} elapsed={}ms saigen={}",
yomi,
surface,
got,
good_cnt,
bad_cnt,
elapsed.as_millis(),
saigen_ritsu.rate()
);
// ้
ใใชใจๆใฃใใ cargo run --release ใซใชใฃใฆใใ็ขบ่ชใในใ
// https://codom.hatenablog.com/entry/2017/06/03/221318
bad_cnt += 1;
}
}
}
let total_t2 = SystemTime::now();
let total_elapsed = total_t2.duration_since(total_t1)?;
info!(
"Good count={} bad count={}, elapsed={}ms, ๅ็พ็={}",
good_cnt,
bad_cnt,
total_elapsed.as_millis(),
saigen_ritsu.rate(),
);
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/check.rs | akaza-data/src/subcmd/check.rs | use std::fs::File;
use std::io::Write;
use std::sync::{Arc, Mutex};
use log::info;
use libakaza::config::{DictConfig, DictEncoding, DictType, DictUsage, EngineConfig};
use libakaza::engine::bigram_word_viterbi_engine::BigramWordViterbiEngineBuilder;
use libakaza::user_side_data::user_data::UserData;
pub fn check(
yomi: &str,
expected: Option<String>,
user_data: bool,
eucjp_dict: &Vec<String>,
utf8_dict: &Vec<String>,
model_dir: &str,
) -> anyhow::Result<()> {
let mut dicts: Vec<DictConfig> = Vec::new();
for path in eucjp_dict {
dicts.push(DictConfig {
dict_type: DictType::SKK,
encoding: DictEncoding::EucJp,
path: path.clone(),
usage: DictUsage::Normal,
})
}
for path in utf8_dict {
dicts.push(DictConfig {
dict_type: DictType::SKK,
encoding: DictEncoding::Utf8,
path: path.clone(),
usage: DictUsage::Normal,
})
}
let mut builder = BigramWordViterbiEngineBuilder::new(EngineConfig {
dicts,
model: model_dir.to_string(),
dict_cache: false,
});
if user_data {
info!("Enabled user data");
let user_data = UserData::load_from_default_path()?;
builder.user_data(Arc::new(Mutex::new(user_data)));
}
let engine = builder.build()?;
let lattice = engine.to_lattice(yomi, None)?;
if let Some(expected) = expected {
let _dot = lattice.dump_cost_dot(expected.as_str());
println!("{_dot}");
let mut file = File::create("/tmp/dump.dot")?;
file.write_all(_dot.as_bytes())?;
}
let got = engine.resolve(&lattice)?;
let terms: Vec<String> = got.iter().map(|f| f[0].surface.clone()).collect();
let result = terms.join("/");
println!("{result}");
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/wfreq.rs | akaza-data/src/subcmd/wfreq.rs | use std::collections::{BTreeMap, HashMap};
use std::fs;
use std::fs::File;
use std::io::{BufRead, BufReader, Write};
use std::path::{Path, PathBuf};
use log::{info, trace, warn};
use rayon::prelude::*;
use regex::Regex;
use crate::utils::get_file_list;
/// ๅ่ชใฎ็บ็็ขบ็ใๆฐใไธใใใ
pub fn wfreq(src_dirs: &Vec<String>, dst_file: &str) -> anyhow::Result<()> {
info!("wfreq: {:?} => {}", src_dirs, dst_file);
let mut file_list: Vec<PathBuf> = Vec::new();
for src_dir in src_dirs {
let list = get_file_list(Path::new(src_dir))?;
for x in list {
file_list.push(x)
}
}
let results = file_list
.par_iter()
.map(|path_buf| -> anyhow::Result<HashMap<String, u32>> {
// ใใกใคใซใ่ชญใฟ่พผใใงใHashSet ใซๅ่ชๆฐใๆฐใไธใใใ
info!("Processing {} for wfreq", path_buf.to_string_lossy());
let file = File::open(path_buf)?;
let mut stats: HashMap<String, u32> = HashMap::new();
for line in BufReader::new(file).lines() {
let line = line?;
let line = line.trim();
let words = line.split(' ').collect::<Vec<_>>();
for word in words {
if word.is_empty() {
continue;
}
if word.contains('\u{200f}') {
warn!("The document contains RTL character");
continue;
}
if word.starts_with('/') {
trace!("Invalid word: {}", word);
continue;
}
if word.starts_with(' ') {
trace!("Invalid word: {}", word);
continue;
}
*stats.entry(word.to_string()).or_insert(0) += 1;
}
}
Ok(stats)
})
.collect::<Vec<_>>();
// ๆ็ต็ตๆใใกใคใซใฏ้ ็ชใๅฎๅฎใชๆนใใใใฎใง BTreeMap ใๆก็จใ
info!("Merging");
let mut retval: BTreeMap<String, u32> = BTreeMap::new();
for result in results {
// ใใฎใธใใงใใผใธใ่กใใ
let result = result?;
for (word, cnt) in result {
*retval.entry(word.to_string()).or_insert(0) += cnt;
}
}
// ็ตๆใใใกใคใซใซๆธใใฆใใ
info!("Write to {}", dst_file);
// ๆใใใซไธ่ฆใชใฏใผใใ็ป้ฒใใใฆใใใฎใ้คๅคใใใ
// ใซใฟใซใไบๆๅญ็ณปใฏๅ
จ่ฌ็ใซใใคใบใซใชใใใกใ ใใWikipedia/้็ฉบๆๅบซใซใใใฆใฏ
// ๆถ็ฉบใฎไบบ็ฉใๅฎๅจใฎไบบ็ฉใฎๅๅใจใใฆไฝฟใใใใกใชใฎใงใๆถใใ
let re = Regex::new("^[\u{30A0}-\u{30FF}]{2}/[\u{3040}-\u{309F}]{2}$")?;
// let ignore_files = HashSet::from(["ใใซ/ใฆใ", "ใใ/ใซใช", "ใฌใ/ใใก"]);
let mut ofp = File::create(dst_file.to_string() + ".tmp")?;
for (word, cnt) in retval {
if re.is_match(word.as_str()) {
info!("Skip 2 character katakana entry: {}", word);
continue;
}
ofp.write_fmt(format_args!("{word}\t{cnt}\n"))?;
}
fs::rename(dst_file.to_owned() + ".tmp", dst_file)?;
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/dump_bigram_dict.rs | akaza-data/src/subcmd/dump_bigram_dict.rs | use libakaza::lm::base::{SystemBigramLM, SystemUnigramLM};
use libakaza::lm::system_bigram::MarisaSystemBigramLM;
use libakaza::lm::system_unigram_lm::MarisaSystemUnigramLM;
use std::collections::HashMap;
pub fn dump_bigram_dict(unigram_file: &str, bigram_file: &str) -> anyhow::Result<()> {
let unigram = MarisaSystemUnigramLM::load(unigram_file)?;
let unigram_map = unigram
.as_hash_map()
.iter()
.map(|(key, (id, _))| (*id, key.to_string()))
.collect::<HashMap<i32, String>>();
let bigram = MarisaSystemBigramLM::load(bigram_file)?;
for ((word_id1, word_id2), cost) in bigram.as_hash_map() {
let key1 = unigram_map.get(&word_id1).unwrap();
let key2 = unigram_map.get(&word_id2).unwrap();
println!("{cost} {key1} {key2}");
}
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/dump_unigram_dict.rs | akaza-data/src/subcmd/dump_unigram_dict.rs | use libakaza::lm::base::SystemUnigramLM;
use libakaza::lm::system_unigram_lm::MarisaSystemUnigramLM;
pub fn dump_unigram_dict(filename: &str) -> anyhow::Result<()> {
let dict = MarisaSystemUnigramLM::load(filename)?;
let dict_map = dict.as_hash_map();
for yomi in dict_map.keys() {
let (word_id, score) = dict.find(yomi.as_str()).unwrap();
println!("{yomi} {word_id} {score}");
}
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/make_stats_system_unigram_lm.rs | akaza-data/src/subcmd/make_stats_system_unigram_lm.rs | use std::cmp::max;
use std::collections::HashMap;
use std::fs::File;
use std::io::{prelude::*, BufReader};
use crate::wordcnt::wordcnt_unigram::WordcntUnigramBuilder;
/// ็ตฑ่จ็ใใชๆผขๅญๅคๆใฎใใใฎใฆใใฐใฉใ ใทในใใ ่จ่ชใขใใซใฎไฝๆ
///
/// wfreq ใใกใคใซใ้ใใฆใใผในใใใฆใใฐใฉใ ่จ่ชใขใใซใใกใคใซใไฝๆใใฆไฟๅญใใใ
pub fn make_stats_system_unigram_lm(srcpath: &str, dstpath: &str) -> anyhow::Result<()> {
// 16 ใฏใใฅใผใชในใใฃใใฏใชๅคใ่ชฟๆดใฎไฝๅฐใ
let threshold = 16_u32;
let mut wordcnt = parse_wfreq(srcpath, threshold)?;
if wordcnt.len() >= 8388608 {
// edge cost ่จ่ชใขใใซใใกใคใซใฎๅฎน้ใๅฐใใไฟใคใใใซ
// 3 byte ใซ ID ใๅใใใใใซใใใ
// ใใฃใฆใๆๅคงใงใ 8,388,608 ๅ่ชใพใงใซใชใใใใซ vocab ใๅถ้ใใใ
// ็พๅฎ็ใช็ทใงๅใฃใฆใใ500ไธๅ่ชใใใใงๅๅใ
panic!("too much words in wfreq file: {srcpath}");
}
homograph_hack(&mut wordcnt);
score_hack(&mut wordcnt);
let mut builder = WordcntUnigramBuilder::default();
for (word, score) in &wordcnt {
builder.add(word.as_str(), *score);
}
println!("Writing {dstpath}");
builder.save(dstpath)?;
Ok(())
}
fn homograph_hack(wordcnt: &mut HashMap<String, u32>) {
// ๅๅฝข็ฐ้ณๅญใฎๅฆ็
// mecab ใงใฏ "ๆฅๆฌ" ใฏ "ๆฅๆฌ/ใซใปใ" ใซๅฆ็ใใใใใใๆฅๆฌ/ใซใฃใฝใ ใ่กจๅบใใชใใ
// ใใชๆผขๅญๅคๆไธใฏใๅไธ็จๅบฆใฎ็ขบ็ใงๅบใใ ใใใจไบๆณใใใใใจใใใใใฎ2ใคใฎ็ขบ็ใๅใใซ่จญๅฎใใใ
{
let (src, dst) = ("ๆฅๆฌ/ใซใปใ", "ๆฅๆฌ/ใซใฃใฝใ");
try_copy_cost(src, dst, wordcnt);
try_copy_cost(dst, src, wordcnt);
}
}
fn try_copy_cost(word1: &str, word2: &str, wordcnt: &mut HashMap<String, u32>) {
if !wordcnt.contains_key(word2) {
if let Some(cost) = wordcnt.get(word1) {
wordcnt.insert(word2.to_string(), *cost);
}
}
}
// Wikipedia ็นๆใงใๆฅๆฌ่ชใฎไธ่ฌ็ใชๅๅธใใใๅฐใใใใในใณใขใใคใใฆใใๆใใใใฎใง
// ใใฅใผใชในใใฃใใฏใซ่ชฟๆดใใใ
fn score_hack(wordcnt: &mut HashMap<String, u32>) {
// a ใฎๆนใฎในใณใขใ b ใใใ้ซใใชใใใใซ่ชฟๆดใใพใใ
// https://github.com/tokuhirom/akaza/wiki/%E5%A4%A7%E5%AD%97
// https://github.com/tokuhirom/akaza/wiki/%E5%8D%BF
for (a, b) in [("ไปๆฅ/ใใใ", "ๅฟ/ใใใ"), ("ๅคงไบ/ใ ใใ", "ๅคงๅญ/ใ ใใ")]
{
let Some(a_score) = wordcnt.get(a) else {
return;
};
let Some(b_score) = wordcnt.get(b) else {
return;
};
wordcnt.insert(a.to_string(), max(*a_score, b_score + 1));
}
}
fn parse_wfreq(src_file: &str, threshold: u32) -> anyhow::Result<HashMap<String, u32>> {
let file = File::open(src_file)?;
let mut map: HashMap<String, u32> = HashMap::new();
for line in BufReader::new(file).lines() {
let line = line.unwrap();
let (word, cnt) = line.trim().split_once('\t').unwrap();
let cnt: u32 = cnt.parse().unwrap();
if cnt > threshold {
map.insert(word.to_string(), cnt);
}
}
Ok(map)
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/mod.rs | akaza-data/src/subcmd/mod.rs | pub mod check;
pub mod dump_bigram_dict;
pub mod dump_unigram_dict;
pub mod evaluate;
pub mod learn_corpus;
pub mod make_dict;
pub mod make_stats_system_bigram_lm;
pub mod make_stats_system_unigram_lm;
pub mod tokenize;
pub mod vocab;
pub mod wfreq;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/tokenize.rs | akaza-data/src/subcmd/tokenize.rs | use std::fs;
use std::path::Path;
use anyhow::bail;
use log::info;
use rayon::prelude::*;
use walkdir::WalkDir;
use crate::corpus_reader::aozora_bunko::AozoraBunkoProcessor;
use crate::corpus_reader::base::{write_success_file, CorpusReader};
use crate::corpus_reader::wikipedia_extracted::ExtractedWikipediaProcessor;
use crate::tokenizer::base::AkazaTokenizer;
use crate::tokenizer::vibrato::VibratoTokenizer;
pub fn tokenize(
reader: String,
system_dict: String,
user_dict: Option<String>,
kana_preferred: bool,
src_dir: &str,
dst_dir: &str,
) -> anyhow::Result<()> {
info!("tokenize: {} => {}", src_dir, dst_dir);
let tokenizer = VibratoTokenizer::new(system_dict.as_str(), user_dict)?;
let file_list = get_file_list(Path::new(src_dir), Path::new(dst_dir))?;
match reader.as_str() {
"jawiki" => {
let processor = ExtractedWikipediaProcessor::new()?;
let result = file_list
.par_iter()
.map(|(src, dst)| {
info!("GOT: {:?} {:?}", src, dst);
processor.process_file(
Path::new(src),
Path::new(dst),
&mut (|f| tokenizer.tokenize(f, kana_preferred)),
)
})
.collect::<Vec<_>>();
for r in result {
r.unwrap();
}
}
"aozora_bunko" => {
let processor = AozoraBunkoProcessor::new()?;
let result = file_list
.par_iter()
.map(|(src, dst)| {
info!("GOT: {:?} {:?}", src, dst);
processor.process_file(
Path::new(src),
Path::new(dst),
&mut (|f| tokenizer.tokenize(f, kana_preferred)),
)
})
.collect::<Vec<_>>();
for r in result {
r.unwrap();
}
}
_ => bail!("Unknown reader :{}", reader),
}
write_success_file(Path::new(dst_dir))?;
Ok(())
}
fn get_file_list(src_dir: &Path, dst_dir: &Path) -> anyhow::Result<Vec<(String, String)>> {
let mut result: Vec<(String, String)> = Vec::new();
for src_file in WalkDir::new(src_dir)
.into_iter()
.filter_map(|file| file.ok())
.filter(|file| file.metadata().unwrap().is_file())
{
let src_path = src_file.path();
let dirname = src_path.parent().unwrap().file_name().unwrap();
fs::create_dir_all(dst_dir.join(dirname))?;
let output_file = dst_dir.join(dirname).join(src_path.file_name().unwrap());
result.push((
src_file.path().to_string_lossy().to_string(),
output_file.as_path().to_string_lossy().to_string(),
));
}
Ok(result)
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/subcmd/vocab.rs | akaza-data/src/subcmd/vocab.rs | use std::fs;
use std::fs::File;
use std::io::{BufRead, BufReader, Write};
use log::{info, warn};
/// wfreq (ๅ่ชใฎ็บ็้ ปๅบฆ่กจ)ใใ vocab (่ชๅฝใใกใคใซ)ใไฝๆใใใ
pub fn vocab(src_file: &str, dst_file: &str, threshold: u32) -> anyhow::Result<()> {
info!(
"vocab: {} => {}, threshold={}",
src_file, dst_file, threshold
);
let ifp = File::open(src_file)?;
let mut ofp = File::create(dst_file.to_string() + ".tmp")?;
for line in BufReader::new(ifp).lines() {
let line = line?;
let line = line.trim();
let (word, cnt) = line.split_once('\t').unwrap();
if word.starts_with(' ') || word.starts_with('/') {
warn!("Invalid word: {:?}", line);
continue;
}
if !word.contains('/') {
warn!("Invalid word: {:?}", line);
continue;
}
let cnt: u32 = cnt.parse()?;
if cnt > threshold {
ofp.write_fmt(format_args!("{word}\n"))?;
}
}
fs::rename(dst_file.to_owned() + ".tmp", dst_file)?;
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/tokenizer/base.rs | akaza-data/src/tokenizer/base.rs | pub trait AkazaTokenizer {
fn tokenize(&self, src: &str, kana_preferred: bool) -> anyhow::Result<String>;
}
/// ใใผใธๅฆ็ใซๅฉ็จใใ็บใฎไธญ้่กจ็พ
#[derive(Debug)]
pub(crate) struct IntermediateToken {
surface: String,
yomi: String,
hinshi: String,
subhinshi: String,
subsubhinshi: String,
}
impl IntermediateToken {
pub(crate) fn new(
surface: String,
yomi: String,
hinshi: String,
subhinshi: String,
subsubhinshi: String,
) -> IntermediateToken {
IntermediateToken {
surface,
yomi,
hinshi,
subhinshi,
subsubhinshi,
}
}
}
/// ็นๅฎใฎๅ่ฉใใใผใธใใ
/// ipadic ใฎๅ่ฉไฝ็ณปใๅฏพ่ฑกใจใใใ
pub(crate) fn merge_terms_ipadic(intermediates: Vec<IntermediateToken>) -> String {
let mut buf = String::new();
let mut i = 0;
while i < intermediates.len() {
let token = &intermediates[i];
let mut surface = token.surface.clone();
let mut yomi = token.yomi.clone();
let mut prev_token = token;
let mut j = i + 1;
while j < intermediates.len() {
/*
ๅฎๆฝ/ๅ่ฉ/ใตๅคๆฅ็ถ/ใใฃใ
ใ/ๅ่ฉ/่ช็ซ/ใ
ใ/ๅ่ฉ/ๆฅๅฐพ/ใ
ใ/ๅฉๅ่ฉ/_/ใ
ใฎใใใชๅ ดๅใ"ๅฎๆฝ,ใใใ"ใซ้ฃ็ตใใใใ
ๆธใ/ๅ่ฉ/่ช็ซ/ใใ
ใฆ/ๅฉ่ฉ/ๆฅ็ถๅฉ่ฉ/ใฆ
ใ/ๅ่ฉ/้่ช็ซ/ใ
ใ/ๅฉๅ่ฉ/_/ใ
ใใฎ/ๅ่ฉ/้่ช็ซ/ใใฎ
ใง/ๅฉๅ่ฉ/_/ใง
ใใ/ๅฉๅ่ฉ/_/ใใ
ใใ"ๆธใใฆใใใใใใฎใงใใ" ใใใใพใง้ฃ็ตใใใ
ๅฉๅ่ฉใจใใฎๅใฎใใผใฏใณใๅ็ดใซๆฅ็ถใใใจไปฅไธใฎๆงใชใฑใผในใงๅฐใใ
้ด้นฟๅป็็งๅญฆๆ่กๅคงๅญฆ/ๅ่ฉ/ๅบๆๅ่ฉ/ใใใใใใใใใใใใใ
ใคใ ใใใ
ใง/ๅฉๅ่ฉ/_/ใง
ใใฃ/ๅฉๅ่ฉ/_/ใใฃ
ใ/ๅฉๅ่ฉ/_/ใ
ใ/ๅฉ่ฉ/ๆฅ็ถๅฉ่ฉ/ใ
*/
let token = &intermediates[j];
if (token.hinshi == "ๅฉๅ่ฉ"
&& (prev_token.hinshi == "ๅ่ฉ" || prev_token.hinshi == "ๅฉๅ่ฉ"))
|| token.subhinshi == "ๆฅ็ถๅฉ่ฉ"
|| token.subhinshi == "ๆฅๅฐพ"
{
// println!("PREV_TOKEN: {:?}", prev_token);
// println!("TOKEN: {:?}", token);
surface += token.surface.as_str();
yomi += if token.surface == "ๅฎถ"
&& token.yomi == "ใ"
&& prev_token.subsubhinshi == "ไบบๅ"
{
// ไบบๅ + ๅฎถ ใฎใฑใผในใซ ipadic ใ ใจใใใใจ่ชญใใงใใพใ
// ๅ้กใใใใฎใงใใใฎๅ ดๅใฏใๅฎถ/ใใใซ่ชญใฟๆฟใใใ
"ใ"
} else {
token.yomi.as_str()
};
j += 1;
prev_token = token;
} else {
break;
}
}
buf += format!("{surface}/{yomi} ").as_str();
i = j;
}
buf.trim_end().to_string()
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/tokenizer/vibrato.rs | akaza-data/src/tokenizer/vibrato.rs | use std::fs::File;
use std::time::SystemTime;
use anyhow::Context;
use kelp::{kata2hira, ConvOption};
use log::info;
use vibrato::{Dictionary, Tokenizer};
use crate::tokenizer::base::{merge_terms_ipadic, AkazaTokenizer, IntermediateToken};
pub struct VibratoTokenizer {
tokenizer: Tokenizer,
}
impl VibratoTokenizer {
pub fn new(dictpath: &str, user_dict: Option<String>) -> anyhow::Result<VibratoTokenizer> {
// ใทในใใ ่พๆธใฎใญใผใใซใฏ14็งใใใใใใใพใใ
let t1 = SystemTime::now();
let mut dict = Dictionary::read(File::open(dictpath)?)?;
let t2 = SystemTime::now();
println!(
"Loaded {} in {}msec",
dictpath,
t2.duration_since(t1)?.as_millis()
);
// ใฆใผใถใผ่พๆธใจใใฆ jawiki-kana-kanji-dict ใไฝฟใใจ
// ๅคใชๅ่ชใ้้ใฃใฆ่ฆใใใใจใใใใฎใงใ
// ใใผใซใใคใบใใงใผใบใซใฏๅ
ฅใใชใใใจใ
if let Some(user_dict) = user_dict {
info!("Loading user dictionary: {}", user_dict);
dict = dict
.reset_user_lexicon_from_reader(Some(File::open(user_dict)?))
.with_context(|| "Opening userdic")?;
}
let tokenizer = vibrato::Tokenizer::new(dict);
Ok(VibratoTokenizer { tokenizer })
}
}
impl AkazaTokenizer for VibratoTokenizer {
/// Vibrato ใๅฉ็จใใฆใใกใคใซใใขใใใผใทใงใณใใพใใ
fn tokenize(&self, src: &str, kana_preferred: bool) -> anyhow::Result<String> {
let mut worker = self.tokenizer.new_worker();
worker.reset_sentence(src);
worker.tokenize();
let mut intermediates: Vec<IntermediateToken> = Vec::new();
// Vibrato/mecab ใฎๅ ดๅใๆฅๅฐพ่พใชใฉใ็ดฐใใๅใใใใใจใฏๅฐใชใใใ
// ไธๆนใงใๅฉ่ฉ/ๅฉๅ่ฉใชใฉใ็ดฐใใใใใใใกใ
for i in 0..worker.num_tokens() {
let token = worker.token(i);
let feature: Vec<&str> = token.feature().split(',').collect();
// if feature.len() <= 7 {
// println!("Too few features: {}/{}", token.surface(), token.feature())
// }
let hinshi = feature[0];
let subhinshi = if feature.len() > 2 { feature[1] } else { "UNK" };
let subsubhinshi = if feature.len() > 3 { feature[2] } else { "UNK" };
let yomi = if feature.len() > 7 {
feature[7]
} else {
// ่ชญใฟใใชไธๆใชใใฎใๅบๆๅ่ฉใชใฉใ
// ใตใณใใใฃใจใซใใปใใฉใใใซ/ๅ่ฉ,ๅบๆๅ่ฉ,็ต็น,*,*,*,*
token.surface()
};
let yomi = kata2hira(yomi, ConvOption::default());
let surface = if should_be_kana(kana_preferred, hinshi, subhinshi) {
yomi.to_string()
} else {
token.surface().to_string()
};
let intermediate = IntermediateToken::new(
surface,
yomi.to_string(),
hinshi.to_string(),
subhinshi.to_string(),
subsubhinshi.to_string(),
);
intermediates.push(intermediate);
// println!("{}/{}/{}", token.surface(), hinshi, yomi);
}
Ok(merge_terms_ipadic(intermediates))
}
}
/// ใใชๅชๅ
ใขใผใใฎๅฆ็
fn should_be_kana(kana_preferred: bool, hinshi: &str, subhinshi: &str) -> bool {
if !kana_preferred {
return false;
}
// ่ฒดๆน ๅ่ฉ,ไปฃๅ่ฉ,ไธ่ฌ,*,*,*,่ฒดๆน,ใขใใฟ,ใขใใฟ
subhinshi == "ไปฃๅ่ฉ"
// ็พใใ ๅฝขๅฎน่ฉ,่ช็ซ,*,*,ๅฝขๅฎน่ฉใปใคๆฎต,ๅบๆฌๅฝข,็พใใ,ใฆใใฏใทใค,ใฆใ ใฏใทใค
|| hinshi == "ๅฝขๅฎน่ฉ"
// ๅฐๅบ ๅฏ่ฉ,ไธ่ฌ,*,*,*,*,ๅฐๅบ,ใใฆใใค,ใใผใใค
|| hinshi == "ๅฏ่ฉ"
// ๅใณ ๆฅ็ถ่ฉ,*,*,*,*,*,ๅใณ,ใชใจใ,ใชใจใ
|| hinshi == "ๆฅ็ถ่ฉ"
// ๅๅผ ๆๅ่ฉ,*,*,*,*,*,ๅๅผ,ใขใข,ใขใผ
|| hinshi == "ๆๅ่ฉ"
// ไปฐใ ๅ่ฉ,่ช็ซ,*,*,ไบๆฎตใปใฌ่ก,ๅบๆฌๅฝข,ไปฐใ,ใขใชใฐ,ใขใชใฐ
|| hinshi == "ๅ่ฉ"
}
#[cfg(test)]
mod tests {
use log::LevelFilter;
use super::*;
#[test]
fn test_should_be_kana() -> anyhow::Result<()> {
assert!(!should_be_kana(false, "ๅฝขๅฎน่ฉ", "่ช็ซ"));
assert!(should_be_kana(true, "ๅฝขๅฎน่ฉ", "่ช็ซ"));
Ok(())
}
#[test]
fn test_with_kana() -> anyhow::Result<()> {
let runner = VibratoTokenizer::new("work/vibrato/ipadic-mecab-2_7_0/system.dic", None)?;
let got = runner.tokenize("็งใฎๅๅใฏไธญ้ใงใใ", true)?;
assert_eq!(
got,
"ใใใ/ใใใ ใฎ/ใฎ ๅๅ/ใชใพใ ใฏ/ใฏ ไธญ้/ใชใใฎ ใงใ/ใงใ ใ/ใ"
);
Ok(())
}
#[test]
fn test() -> anyhow::Result<()> {
let runner = VibratoTokenizer::new("work/vibrato/ipadic-mecab-2_7_0/system.dic", None)?;
runner.tokenize("็งใฎๅๅใฏไธญ้ใงใใ", false)?;
Ok(())
}
#[test]
fn test_merge() -> anyhow::Result<()> {
/*
ๆธใใฆใใใใฎใงใใ
ๆธใ ๅ่ฉ,่ช็ซ,*,*,ไบๆฎตใปใซ่กใค้ณไพฟ,้ฃ็จใฟๆฅ็ถ,ๆธใ,ใซใค,ใซใค
ใฆ ๅฉ่ฉ,ๆฅ็ถๅฉ่ฉ,*,*,*,*,ใฆ,ใ,ใ
ใ ๅ่ฉ,้่ช็ซ,*,*,ไธๆฎต,้ฃ็จๅฝข,ใใ,ใค,ใค
ใ ๅฉๅ่ฉ,*,*,*,็นๆฎใปใฟ,ๅบๆฌๅฝข,ใ,ใฟ,ใฟ
ใใฎ ๅ่ฉ,้่ช็ซ,ไธ่ฌ,*,*,*,ใใฎ,ใขใ,ใขใ
ใง ๅฉๅ่ฉ,*,*,*,็นๆฎใปใ,้ฃ็จๅฝข,ใ ,ใ,ใ
ใใ ๅฉๅ่ฉ,*,*,*,ไบๆฎตใปใฉ่กใขใซ,ๅบๆฌๅฝข,ใใ,ใขใซ,ใขใซ
EOS
*/
let _ = env_logger::builder()
.filter_level(LevelFilter::Info)
.is_test(true)
.try_init();
let runner = VibratoTokenizer::new("work/vibrato/ipadic-mecab-2_7_0/system.dic", None)?;
assert_eq!(
runner.tokenize("ๆธใใฆใใใใฎใงใใ", false)?,
"ๆธใใฆ/ใใใฆ ใใ/ใใ ใใฎ/ใใฎ ใงใใ/ใงใใ"
);
Ok(())
}
#[test]
fn test_iika() -> anyhow::Result<()> {
// ไบไผๅฎถใ ipadic ใ ใจ ใใ/ใ ใซใชใใใใใใใๆฅๅฐพใชใฎใงใ
// ่คๅ่ชๅใใใฆใใพใใใใใฏใฒใใใใซๅ้กใงใใใ
// ใใใใใใจใใใฎใฏใใๅบใฆๆฅใ่กจ็พใชใฎใงใใ
// ใใใใไบไผๅฎถใใฏ wikipedia ใงใใใๅบใฆใใใฎใงใๅฒใจใณในใใไฝใใชใฃใฆใใพใใ
// ไบไผๅฎถใ ใใซ้ใฃใๅ้กใงใฏใชใใฎใงใmecab ใธใฎ่พๆธ็ป้ฒใงใฏใซใใผใ้ฃใใใ
// ใใฃใฆใๆฅๅฐพใฎใๅฎถใใฏ็นๅฅๆฑใใใฆใๅบๆๅ่ฉ,ไบบๅใฎๅ ดๅใฎใใจใซใใใๅฎถใใฏใใใใจ่ชญใใใใซใใใ
/*
ไบไผๅฎถ
ไบไผ ๅ่ฉ,ๅบๆๅ่ฉ,ไบบๅ,ๅง,*,*,ไบไผ,ใคใค,ใคใค
ๅฎถ ๅ่ฉ,ๆฅๅฐพ,ไธ่ฌ,*,*,*,ๅฎถ,ใซ,ใซ
EOS
*/
let _ = env_logger::builder()
.filter_level(LevelFilter::Info)
.is_test(true)
.try_init();
let runner = VibratoTokenizer::new("work/vibrato/ipadic-mecab-2_7_0/system.dic", None)?;
assert_eq!(runner.tokenize("ไบไผๅฎถ", false)?, "ไบไผๅฎถ/ใใใ");
Ok(())
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/tokenizer/mod.rs | akaza-data/src/tokenizer/mod.rs | #![allow(dead_code)]
pub mod base;
pub mod vibrato;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/corpus_reader/wikipedia_extracted.rs | akaza-data/src/corpus_reader/wikipedia_extracted.rs | use std::fs::File;
use std::io::{BufRead, BufReader, Write};
use std::path::Path;
use anyhow::Context;
use regex::Regex;
use crate::corpus_reader::base::CorpusReader;
/// wikiextractor ใงๅฆ็ใใใใผใฟใๅใๆฑใใใใฎๅฆ็
pub struct ExtractedWikipediaProcessor {
alnum_pattern: Regex,
yomigana_pattern: Regex,
}
impl ExtractedWikipediaProcessor {
pub fn new() -> anyhow::Result<ExtractedWikipediaProcessor> {
// ่ฑๆฐ/่จๅทใฎใฟใฎ่กใ็ก่ฆใใใใใฎๆญฃ่ฆ่กจ็พใ
// 75||19||colspan=2|-||1||0||76||19
let alnum_pattern = Regex::new("^[a-zA-Z0-9|=-]+")?;
// ไธ็ดๅไบบๆ
ๅ ฑไฟ่ญทๅฃซ๏ผใใใใใ
ใใใใใใใใปใใปใใ๏ผใฏใ่ฒกๅฃๆณไบบๅ
จๆฅๆฌๆ
ๅ ฑๅญฆ็ฟๆฏ่ๅไผใ่จญใใฆใใๆฐ้่ณๆ ผใฎ็งฐๅทใ
// โ ไธ็ดๅไบบๆ
ๅ ฑไฟ่ญทๅฃซใฏใ่ฒกๅฃๆณไบบๅ
จๆฅๆฌๆ
ๅ ฑๅญฆ็ฟๆฏ่ๅไผใ่จญใใฆใใๆฐ้่ณๆ ผใฎ็งฐๅทใ
let yomigana_pattern = Regex::new(r#"[๏ผ\(][\u3041-\u309Fใ]+[๏ผ)]"#)?;
Ok(ExtractedWikipediaProcessor {
alnum_pattern,
yomigana_pattern,
})
}
fn remove_yomigana(&self, src: &str) -> String {
self.yomigana_pattern.replace_all(src, "").to_string()
}
}
impl CorpusReader for ExtractedWikipediaProcessor {
fn process_file<F>(&self, ifname: &Path, ofname: &Path, annotate: &mut F) -> anyhow::Result<()>
where
F: FnMut(&str) -> anyhow::Result<String>,
{
let file = File::open(ifname)?;
let mut buf = String::new();
for line in BufReader::new(file).lines() {
let line = line?;
let line = line.trim();
if line.starts_with('<') {
// <doc id="3697757" url="https://ja.wikipedia.org/wiki?curid=3697757"
// title="New Sunrise">
// ใฎใใใชใใฟใฐใใๅงใพใ่กใ็ก่ฆใใใ
continue;
}
// <section begin="WEST้ ไฝ" />
// class="wikitable sortable" style="text-align: center;"
// ! !!class="unsortable"| ใใผใ !!!!!!!!!!!!!!!!!!!!ๅบๅ ดๆจฉใพใใฏ้ๆ ผ
// <section end="WEST้ ไฝ" />
// !colspan="4"|ๆฅๆฌ!!colspan="2"|ใชใผใฐๆฆ!!colspan="2"|!!colspan="2"|ๅคฉ็ๆฏ!!colspan="2"|ๆ้้็ฎ
if line.starts_with(" class=") || line.starts_with("<") || line.starts_with('!') {
continue;
}
if line.is_empty() {
// ็ฉบ่กใ็ก่ฆใใ
continue;
}
if self.alnum_pattern.is_match(line) {
// ่ฑๆฐๅญใฎใฟใฎ่กใฏ็ก่ฆใใ
continue;
}
let line = self.remove_yomigana(line);
buf += (annotate(line.as_str()).with_context(|| line)? + "\n").as_str();
}
let mut ofile = File::create(ofname)?;
ofile.write_all(buf.as_bytes())?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_remove_yomigana() -> anyhow::Result<()> {
// ไธ็ดๅไบบๆ
ๅ ฑไฟ่ญทๅฃซ๏ผใใใใใ
ใใใใใใใใปใใปใใ๏ผใฏใ่ฒกๅฃๆณไบบๅ
จๆฅๆฌๆ
ๅ ฑๅญฆ็ฟๆฏ่ๅไผใ่จญใใฆใใๆฐ้่ณๆ ผใฎ็งฐๅทใ
// โ ไธ็ดๅไบบๆ
ๅ ฑไฟ่ญทๅฃซใฏใ่ฒกๅฃๆณไบบๅ
จๆฅๆฌๆ
ๅ ฑๅญฆ็ฟๆฏ่ๅไผใ่จญใใฆใใๆฐ้่ณๆ ผใฎ็งฐๅทใ
let runner = ExtractedWikipediaProcessor::new()?;
let got =
runner.remove_yomigana("ไธ็ดๅไบบๆ
ๅ ฑไฟ่ญทๅฃซ๏ผใใใใใ
ใใใใใใใใปใใปใใ๏ผใฏ");
assert_eq!(got, "ไธ็ดๅไบบๆ
ๅ ฑไฟ่ญทๅฃซใฏ");
Ok(())
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/corpus_reader/base.rs | akaza-data/src/corpus_reader/base.rs | use std::fs::File;
use std::io::Write;
use std::path::Path;
pub trait CorpusReader {
fn process_file<F>(&self, ifname: &Path, ofname: &Path, annotate: &mut F) -> anyhow::Result<()>
where
F: FnMut(&str) -> anyhow::Result<String>;
}
/// _SUCCESS ใใกใคใซใๆธใ
pub fn write_success_file(dst_dir: &Path) -> anyhow::Result<()> {
let mut success = File::create(dst_dir.join("_SUCCESS"))?;
success.write_all("DONE".as_bytes())?;
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/corpus_reader/aozora_bunko.rs | akaza-data/src/corpus_reader/aozora_bunko.rs | use std::fs::File;
use std::io::{Read, Write};
use std::path::Path;
use anyhow::Context;
use encoding_rs::SHIFT_JIS;
use log::info;
use regex::{Regex, RegexBuilder};
use crate::corpus_reader::base::CorpusReader;
/// wikiextractor ใงๅฆ็ใใใใผใฟใๅใๆฑใใใใฎๅฆ็
pub struct AozoraBunkoProcessor {
alnum_pattern: Regex,
yomigana_pattern: Regex,
comment_pattern: Regex,
kyukana_pattern: Regex,
meta_separator_pattern: Regex,
sokohon_pattern: Regex,
}
impl AozoraBunkoProcessor {
pub fn new() -> anyhow::Result<AozoraBunkoProcessor> {
// ่ฑๆฐ/่จๅทใฎใฟใฎ่กใ็ก่ฆใใใใใฎๆญฃ่ฆ่กจ็พใ
// 75||19||colspan=2|-||1||0||76||19
let alnum_pattern = Regex::new("^[a-zA-Z0-9|=-]+")?;
// ๅฐใใกใฒใใใช
// ใฎใใใชใใฟใใชใ็ก่ฆใใใ
let yomigana_pattern = Regex::new(r#"ใ.*?ใ"#)?;
// ใณใกใณใใฎใใฟใผใณใ
let comment_pattern = Regex::new("๏ผป๏ผ.*๏ผฝ")?;
// ๆงไปฎๅ้ฃใใฎใใฟใผใณใ
let kyukana_pattern = Regex::new("[ใใฐใใฑ]")?;
let meta_separator_pattern = RegexBuilder::new(".*-{10,}\r?\n")
.dot_matches_new_line(true)
.build()?;
let sokohon_pattern = RegexBuilder::new("ๅบๆฌ๏ผ.*")
.dot_matches_new_line(true)
.build()?;
Ok(AozoraBunkoProcessor {
alnum_pattern,
yomigana_pattern,
comment_pattern,
kyukana_pattern,
meta_separator_pattern,
sokohon_pattern,
})
}
fn is_kyukana(&self, src: &str) -> bool {
self.kyukana_pattern.is_match(src)
}
fn remove_yomigana(&self, src: &str) -> String {
self.yomigana_pattern.replace_all(src, "").to_string()
}
fn remove_comment(&self, src: &str) -> String {
self.comment_pattern.replace_all(src, "").to_string()
}
fn strip_meta(&self, src: &str) -> String {
self.sokohon_pattern
.replace_all(
self.meta_separator_pattern
.replace_all(src, "")
.to_string()
.as_str(),
"",
)
.to_string()
}
}
impl CorpusReader for AozoraBunkoProcessor {
fn process_file<F>(&self, ifname: &Path, ofname: &Path, annotate: &mut F) -> anyhow::Result<()>
where
F: FnMut(&str) -> anyhow::Result<String>,
{
let mut file = File::open(ifname)?;
let mut vec_buf: Vec<u8> = Vec::new();
file.read_to_end(&mut vec_buf)?;
let (src, _, _) = SHIFT_JIS.decode(&vec_buf);
let src = src.replace('\r', "");
// _ruby_ ใใในใซๅซใพใใฆใใๅ ดๅใฏใใซใใๆฏใใใฆใใใฎใงๅคใๆๆธใ ใจๆใใ
if ifname.to_string_lossy().contains("_ruby_") {
info!("Skipping {} due to _ruby_", ifname.to_string_lossy());
return Ok(());
}
// ไบๅใฎ่ธใๅญ ใซใคใใฆใฎ่กจ่จใใใๅ ดๅใๆงไปฎๅ้ฃใใงใใใใจใๅคใใ
// ใฎใงใใใชๆผขๅญๅคๆ็จใฎใณใผใในใจใใฆใฏไธ้ฉๅใชใฎใง็ก่ฆใใใ
if src.contains("ไบๅใฎ่ธใๅญ") {
info!("Skipping {} due to ไบๅใฎ่ธใๅญ", ifname.to_string_lossy());
return Ok(());
}
// ็ฌฌ3ๆฐดๆบใฎๆๅญใๅซใพใใฆใใๆๆธใฎๅ ดๅใๆๆธใจใใฆ็ฌ็นใใใใฑใผในใๅคใใฎใง
// ็ฌฌ3ๆฐดๆบใฎๆๅญใๅซใพใใใใกใคใซใฏ็ก่ฆใใใ
//
// https://www.aozora.gr.jp/cards/000712/files/52341_42513.html
//๏ผไพ๏ผโป๏ผป๏ผใใใใใใใ๏ผๅญใใ็ฌฌ3ๆฐดๆบ1-90-90๏ผฝ
if src.contains("็ฌฌ3ๆฐดๆบ") {
info!("Skipping {} due to ็ฌฌ3ๆฐดๆบ", ifname.to_string_lossy());
return Ok(());
}
if src.contains("creativecommons.org") {
info!(
"Skipping {} due to creativecommons.org",
ifname.to_string_lossy()
);
return Ok(());
}
// ใๆงๅญใๆงไปฎๅใงๆธใใใไฝๅใใ็พไปฃ่กจ่จใซใใใใใ้ใฎไฝๆฅญๆ้ใ
// ใซใคใใฆ่จๅใใฆใใๆๆธใฏในใญใใใใใ
//
// https://www.aozora.gr.jp/cards/000712/files/52341_42513.html
if src.contains("ๆงๅญใๆงไปฎๅใงๆธใใใไฝๅใใ็พไปฃ่กจ่จใซใใใใใ้ใฎไฝๆฅญๆ้")
{
info!(
"Skipping {} due to ๆงๅญใๆงไปฎๅใงๆธใใใไฝๅใใ็พไปฃ่กจ่จใซใใใใใ้ใฎไฝๆฅญๆ้",
ifname.to_string_lossy()
);
return Ok(());
}
// ๆใใใชๆงไปฎๅ้ฃใใๆคๅบใใ
if src.contains("ๆงๅญใๆงไปฎๅใงๆธใใใไฝๅใใ็พไปฃ่กจ่จใซใใใใใ้ใฎไฝๆฅญๆ้")
{
info!(
"Skipping {} due to ๆงๅญใๆงไปฎๅใงๆธใใใไฝๅใใ็พไปฃ่กจ่จใซใใใใใ้ใฎไฝๆฅญๆ้",
ifname.to_string_lossy()
);
return Ok(());
}
if self.is_kyukana(src.as_str()) {
info!("Skipping {} due to ๆงไปฎๅ", ifname.to_string_lossy());
return Ok(());
}
let src = self.strip_meta(src.as_str());
let mut buf = String::new();
for line in src.lines() {
let line = line.trim();
if line.starts_with("ๅบๆฌ๏ผ") {
// ๅบๆฌใซใคใใฆใฎ่กจ่จใใใฃใใใใไปฅๅพใฏใกใฟใใผใฟใชใฎใง็ก่ฆใใใ
break;
}
if line.is_empty() {
// ็ฉบ่กใ็ก่ฆใใ
continue;
}
if self.alnum_pattern.is_match(line) {
// ่ฑๆฐๅญใฎใฟใฎ่กใฏ็ก่ฆใใ
continue;
}
let line = self.remove_yomigana(line);
let line = self.remove_comment(line.as_str());
buf += (annotate(line.as_str()).with_context(|| line)? + "\n").as_str();
}
info!("Writing {}", ofname.to_string_lossy());
let mut ofile = File::create(ofname)?;
ofile.write_all(buf.as_bytes())?;
Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;

    // remove_yomigana should strip the reading given between ใใ after a kanji run.
    #[test]
    fn test_remove_yomigana() -> anyhow::Result<()> {
        let runner = AozoraBunkoProcessor::new()?;
        let got = runner.remove_yomigana("ๅฐใใกใฒใใใช");
        assert_eq!(got, "ๅฐใใช");
        Ok(())
    }

    // is_kyukana detects historical kana orthography (e.g. "ใฐ").
    #[test]
    fn test_is_kyukana() -> anyhow::Result<()> {
        let runner = AozoraBunkoProcessor::new()?;
        assert!(!runner.is_kyukana("ๅฐใใช"));
        assert!(runner.is_kyukana("ใฐ"));
        Ok(())
    }

    // strip_meta should drop the Aozora Bunko header block (delimited by
    // dashed lines) and leave the body text.
    #[test]
    fn test_strip_meta() -> anyhow::Result<()> {
        let runner = AozoraBunkoProcessor::new()?;
        assert_eq!(runner.strip_meta("fuga\nMETA\n-------------------------------------------------------\nageage\n-------------------------------------------------------\nDOOO"), "DOOO");
        assert_eq!(
            runner
                .strip_meta("META\n-------------------------------------------------------\nDOOO"),
            "DOOO"
        );
        assert_eq!(runner.strip_meta("HOGE\nๅบๆฌ๏ผใใใใ"), "HOGE\n");
        assert_eq!(runner.strip_meta("HELLO"), "HELLO");
        assert!(!runner
            .strip_meta(
                r#"็พๅๅญ
-------------------------------------------------------
ใใใญในใไธญใซ็พใใ่จๅทใซใคใใฆใ
๏ผป๏ผ๏ผฝ๏ผๅ
ฅๅ่
ๆณจใไธปใซๅคๅญใฎ่ชฌๆใใๅ็นใฎไฝ็ฝฎใฎๆๅฎ
๏ผไพ๏ผ๏ผป๏ผๅฐไปใ๏ผฝใไธไนไบไธๅนดไธๆใ
-------------------------------------------------------
"#
            )
            .contains("็พๅๅญ"),);
        Ok(())
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/akaza-data/src/corpus_reader/mod.rs | akaza-data/src/corpus_reader/mod.rs | pub mod aozora_bunko;
pub mod base;
pub mod wikipedia_extracted;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/config.rs | libakaza/src/config.rs | /*
---
dicts:
- path: /usr/share/skk/SKK-JISYO.okinawa
encoding: euc-jp
dict_type: skk
*/
use std::fmt::Display;
use std::fmt::Formatter;
use std::fs::File;
use std::io::{BufReader, Write};
use std::path::PathBuf;
use anyhow::{bail, Result};
use log::{info, warn};
use serde::{Deserialize, Serialize};
use DictEncoding::Utf8;
use crate::config::DictUsage::{Normal, SingleTerm};
use crate::resource::detect_resource_path;
/// Top-level user configuration, loaded from `~/.config/akaza/config.yml`.
#[derive(Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Config {
    /// Romaji-to-kana conversion table to use.
    /// e.g. "default", "kana", etc.
    #[serde(default = "default_romkan")]
    pub romkan: String,
    /// Keymap table to use.
    /// e.g. "default", "atok", etc.
    #[serde(default = "default_keymap")]
    pub keymap: String,
    /// Conversion-engine settings (dictionaries, cache, model).
    #[serde(default = "default_engine_config")]
    pub engine: EngineConfig,
    /// Whether live conversion (convert-as-you-type) is enabled.
    #[serde(default = "default_live_conversion")]
    pub live_conversion: bool,
}
/// Default romkan table: the bundled `romkan/default.yml` resource.
fn default_romkan() -> String {
    detect_resource_path("romkan", "default.yml").unwrap()
}

/// Default keymap: the bundled `keymap/default.yml` resource.
fn default_keymap() -> String {
    detect_resource_path("keymap", "default.yml").unwrap()
}

/// Default engine settings: auto-detected SKK dictionaries, cache enabled,
/// and the bundled default language model.
fn default_engine_config() -> EngineConfig {
    EngineConfig {
        dicts: find_default_dicts(),
        dict_cache: true,
        model: default_model(),
    }
}
/// Locate `SKK-JISYO.L` via the XDG data directories (under the `skk`
/// prefix) and, when found, return it as the single default dictionary.
/// Any XDG lookup failure silently yields an empty list.
fn find_default_dicts() -> Vec<DictConfig> {
    let found = xdg::BaseDirectories::with_prefix("skk")
        .ok()
        .and_then(|dir| dir.find_data_files("SKK-JISYO.L").next());
    let dicts: Vec<DictConfig> = found
        .map(|file| DictConfig {
            path: file.to_string_lossy().to_string(),
            encoding: DictEncoding::EucJp,
            dict_type: DictType::SKK,
            usage: Normal,
        })
        .into_iter()
        .collect();
    info!("default dictionaries: {:?}", dicts);
    dicts
}
/// Live conversion is off by default.
fn default_live_conversion() -> bool {
    false
}

impl Config {
    /// Load the configuration from a YAML file at `path`.
    pub fn load_from_file(path: &str) -> Result<Self> {
        let file = File::open(path)?;
        let reader = BufReader::new(file);
        let config: Config = serde_yaml::from_reader(reader)?;
        Ok(config)
    }

    /// Path of the user's config file: `$XDG_CONFIG_HOME/akaza/config.yml`.
    pub fn file_name() -> Result<PathBuf> {
        let basedir = xdg::BaseDirectories::with_prefix("akaza")?;
        Ok(basedir.get_config_file("config.yml"))
    }

    /// Serialize this configuration to YAML and write it to the user's
    /// config file, overwriting any existing content.
    pub fn save(&self) -> Result<()> {
        let file_name = Self::file_name()?;
        let yml = serde_yaml::to_string(self)?;
        info!("Write to file: {}", file_name.to_str().unwrap());
        let mut fp = File::create(file_name)?;
        fp.write_all(yml.as_bytes())?;
        Ok(())
    }

    /// Load the user's configuration, falling back to defaults when the
    /// file is missing or unparsable (the error is logged, not propagated).
    pub fn load() -> Result<Self> {
        let configfile = Self::file_name()?;
        let config = match Config::load_from_file(configfile.to_str().unwrap()) {
            Ok(config) => config,
            Err(err) => {
                warn!(
                    "Cannot load configuration file: {} {}",
                    configfile.to_string_lossy(),
                    err
                );
                // Deserializing the empty document applies every
                // `#[serde(default = ...)]` attribute, unlike the derived
                // `Config::default()`, which would leave the strings empty.
                let config: Config = serde_yaml::from_str("").unwrap();
                info!("Loaded default configuration: {:?}", config);
                return Ok(config);
            }
        };
        info!(
            "Loaded config file: {}, {:?}",
            configfile.to_string_lossy(),
            config
        );
        Ok(config)
    }
}
/// Conversion-engine settings.
#[derive(Debug, PartialEq, Serialize, Deserialize, Default, Clone)]
pub struct EngineConfig {
    /// Dictionaries to load.
    pub dicts: Vec<DictConfig>,
    /// Whether the on-disk dictionary cache is enabled.
    #[serde(default = "default_dict_cache")]
    pub dict_cache: bool,
    /// Language model to use, e.g. "default".
    #[serde(default = "default_model")]
    pub model: String,
}

/// Dictionary caching is on by default.
fn default_dict_cache() -> bool {
    true
}

/// Default model: the bundled `model/default` resource.
fn default_model() -> String {
    detect_resource_path("model", "default").unwrap()
}
/// Configuration of a single dictionary entry.
#[derive(Debug, PartialEq, Serialize, Deserialize, Default, Clone)]
pub struct DictConfig {
    /// Path to the dictionary file.
    #[serde(default = "default_path")]
    pub path: String,
    /// Encoding of the dictionary.
    /// Default: UTF-8
    #[serde(default = "default_encoding")]
    pub encoding: DictEncoding,
    /// File format of the dictionary (currently only SKK).
    #[serde(default = "default_dict_type")]
    pub dict_type: DictType,
    /// How the dictionary participates in conversion.
    #[serde(default = "default_dict_usage")]
    pub usage: DictUsage,
}

// Serde defaults for the fields above.
fn default_path() -> String {
    "".to_string()
}
fn default_encoding() -> DictEncoding {
    Utf8
}
fn default_dict_type() -> DictType {
    DictType::SKK
}
fn default_dict_usage() -> DictUsage {
    DictUsage::Disabled
}
/// Character encoding of a dictionary file.
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]
pub enum DictEncoding {
    EucJp,
    Utf8,
}

impl Default for DictEncoding {
    fn default() -> Self {
        Utf8
    }
}

impl Display for DictEncoding {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl DictEncoding {
    /// Parse an encoding name; accepts both the IANA-style name ("EUC-JP")
    /// and the Rust variant name ("EucJp").
    pub fn from(s: &str) -> Result<DictEncoding> {
        match s {
            "EUC-JP" | "EucJp" => Ok(DictEncoding::EucJp),
            "UTF-8" | "Utf8" => Ok(DictEncoding::Utf8),
            _ => bail!("Unknown encoding: {:?}", s),
        }
    }

    /// Canonical (IANA-style) name of the encoding.
    pub fn as_str(&self) -> &'static str {
        match self {
            Utf8 => "UTF-8",
            DictEncoding::EucJp => "EUC-JP",
        }
    }
}
/// File format of a dictionary. Only the SKK format is supported for now.
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]
pub enum DictType {
    SKK,
}

impl Display for DictType {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl Default for DictType {
    fn default() -> Self {
        DictType::SKK
    }
}

impl DictType {
    /// Stable name of the dictionary format.
    pub fn as_str(&self) -> &'static str {
        match self {
            DictType::SKK => "SKK",
        }
    }
}
/// How a dictionary participates in conversion.
#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]
pub enum DictUsage {
    /// Used for ordinary kana-kanji conversion.
    Normal,
    /// Used for single-term (whole-input) candidates only.
    SingleTerm,
    /// Not used.
    Disabled,
}

impl Default for DictUsage {
    fn default() -> Self {
        Normal
    }
}

impl DictUsage {
    /// Parse a usage name as produced by `as_str`.
    pub fn from(s: &str) -> Result<DictUsage> {
        match s {
            "Normal" => Ok(Normal),
            "SingleTerm" => Ok(SingleTerm),
            "Disabled" => Ok(DictUsage::Disabled),
            _ => bail!("Unknown name: {:?}", s),
        }
    }

    /// Stable identifier used for (de)serialization.
    pub fn as_str(&self) -> &'static str {
        match self {
            Normal => "Normal",
            SingleTerm => "SingleTerm",
            DictUsage::Disabled => "Disabled",
        }
    }

    /// Japanese label for display in the UI.
    pub fn text_jp(&self) -> &'static str {
        match self {
            Normal => "้ๅธธ่พๆธ",
            SingleTerm => "ๅ้
",
            DictUsage::Disabled => "็กๅน",
        }
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/resource.rs | libakaza/src/resource.rs | use std::env;
use anyhow::{bail, Context};
/// Resolve the path of a bundled resource (`{base}/{name}`).
///
/// Under `cfg!(test)` the resource is taken from the source tree relative to
/// the crate root; otherwise it is looked up in the XDG data directories
/// under the `akaza` prefix. Fails with a descriptive error when the
/// resource cannot be found.
pub fn detect_resource_path(base: &str, name: &str) -> anyhow::Result<String> {
    let pathstr: String = if cfg!(test) {
        // In unit tests, use the in-repository resource directory.
        format!("{}/../{}/{}", env!("CARGO_MANIFEST_DIR"), base, name)
    } else {
        let target_path = format!("{base}/{name}");
        let basedirs = xdg::BaseDirectories::with_prefix("akaza")
            .with_context(|| "Opening xdg directory with 'akaza' prefix")?;
        let pathbuf = basedirs.find_data_file(&target_path);
        let Some(pathbuf) = pathbuf else {
            // Include the searched directories in the error to ease debugging.
            bail!("Cannot find {:?} in XDG_DATA_HOME or XDG_DATA_DIRS(XDG_DATA_HOME={:?}, XDG_DATA_DIRS={:?}, base={:?}, name={:?})",
                target_path,
                basedirs.get_data_home().to_string_lossy().to_string(),
                basedirs.get_data_dirs().iter().map(|x| x.to_string_lossy().to_string()).collect::<Vec<_>>(),
                base,
                name
            )
        };
        pathbuf.to_string_lossy().to_string()
    };
    Ok(pathstr)
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/kansuji.rs | libakaza/src/kansuji.rs | use std::cmp::min;
const NUMS: [&str; 10] = ["", "ไธ", "ไบ", "ไธ", "ๅ", "ไบ", "ๅ
ญ", "ไธ", "ๅ
ซ", "ไน"];
const SUBS: [&str; 4] = ["", "ๅ", "็พ", "ๅ"];
const PARTS: [&str; 18] = [
"",
"ไธ",
"ๅ",
"ๅ
",
"ไบฌ",
"ๅ",
"๐ฅฑ",
"็ฉฃ",
"ๆบ",
"ๆพ",
"ๆญฃ",
"่ผ",
"ๆฅต",
"ๆๆฒณๆฒ",
"้ฟๅง็ฅ",
"้ฃ็ฑไป",
"ไธๅฏๆ่ญฐ",
"็ก้ๅคงๆฐ",
];
pub fn int2kanji(i: i64) -> String {
if i == 0 {
return "้ถ".to_string();
}
let p = i
.to_string()
.bytes()
.map(|b| (b - b'0') as usize)
.rev()
.enumerate()
.collect::<Vec<_>>();
let mut buf: Vec<&'static str> = Vec::new();
for &(i, c) in &p {
if i % 4 == 0 && i > 0 && (i..min(i + 4, p.len())).any(|i| p[i].1 != 0) {
buf.push(PARTS[i / 4]);
}
if c != 0 {
// ใใฎๆกใ 0 ใฎใจใใฏๅบๅใใ่ฟฝๅ ใใชใ
buf.push(SUBS[i % 4]);
}
if !(i % 4 != 0 && c == 1) {
// ๅ็พๅใ่กจ็คบใใใจใใงใไธใฎใจใใฏ่ฟฝๅ ใใชใใ
buf.push(NUMS[c]);
}
}
buf.reverse();
buf.join("")
}
#[cfg(test)]
mod tests {
    use super::*;

    // Spot-checks covering single digits, tens/hundreds/thousands markers,
    // and the 4-digit group boundaries (ไธ/ๅ).
    #[test]
    fn test_int2kanji() {
        assert_eq!(int2kanji(0), "้ถ");
        assert_eq!(int2kanji(1), "ไธ");
        assert_eq!(int2kanji(9), "ไน");
        assert_eq!(int2kanji(10), "ๅ");
        assert_eq!(int2kanji(11), "ๅไธ");
        assert_eq!(int2kanji(21), "ไบๅไธ");
        assert_eq!(int2kanji(99), "ไนๅไน");
        assert_eq!(int2kanji(100), "็พ");
        assert_eq!(int2kanji(999), "ไน็พไนๅไน");
        assert_eq!(int2kanji(1000), "ๅ");
        assert_eq!(int2kanji(9999), "ไนๅไน็พไนๅไน");
        assert_eq!(int2kanji(10000), "ไธไธ");
        assert_eq!(int2kanji(10020), "ไธไธไบๅ");
        assert_eq!(int2kanji(1_000_020), "็พไธไบๅ");
        assert_eq!(int2kanji(100_000_020), "ไธๅไบๅ");
        assert_eq!(int2kanji(1_0000_4423), "ไธๅๅๅๅ็พไบๅไธ");
        assert_eq!(int2kanji(1_8000_4423), "ไธๅๅ
ซๅไธๅๅๅ็พไบๅไธ");
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/lib.rs | libakaza/src/lib.rs | #![allow(dead_code)]
extern crate core;
pub mod config;
pub mod consonant;
pub mod corpus;
pub mod cost;
pub mod dict;
pub mod engine;
pub mod extend_clause;
pub mod graph;
pub mod kana_kanji;
pub mod kana_trie;
pub mod kansuji;
pub mod keymap;
pub mod lm;
mod resource;
pub mod romkan;
pub mod search_result;
pub mod user_side_data;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/romkan.rs | libakaza/src/romkan.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use crate::resource::detect_resource_path;
use anyhow::Context;
use log::info;
use regex::{Captures, Regex};
use serde::{Deserialize, Serialize};
/// On-disk schema of a romkan YAML table.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
pub struct RomKanConfig {
    // Romaji -> kana. A `null` value deletes the entry inherited via `extends`.
    mapping: HashMap<String, Option<String>>,
    // Name of a parent table to inherit from (e.g. "default").
    extends: Option<String>,
}
/// Load a romkan mapping table from a YAML file.
///
/// When the table declares `extends: <name>`, the parent table is loaded
/// first (recursively) and then patched with this table's entries: a value
/// overrides the parent's, and an explicit `null` removes the parent entry.
fn load_romkan_map(file_path: &str) -> anyhow::Result<HashMap<String, String>> {
    info!("Loading romkan map: {}", file_path);
    let got: RomKanConfig = serde_yaml::from_reader(BufReader::new(
        File::open(file_path).with_context(|| file_path.to_string())?,
    ))?;
    if let Some(parent) = got.extends {
        // Inherits from a parent table: load it (recursively), then apply
        // this table's overrides and removals on top.
        let path = detect_resource_path("romkan", &format!("{parent}.yml"))?;
        let mut parent = load_romkan_map(&path)?;
        for (k, v) in got.mapping {
            if let Some(v) = v {
                parent.insert(k, v);
            } else {
                parent.remove(&k);
            }
        }
        Ok(parent)
    } else {
        // No parent: keep only the non-null entries. Consuming the map lets
        // us move the strings instead of cloning every key and value.
        Ok(got
            .mapping
            .into_iter()
            .filter_map(|(k, v)| v.map(|kana| (k, kana)))
            .collect())
    }
}
/// Romaji-to-hiragana converter built from a romkan table.
#[derive(Debug)]
pub struct RomKanConverter {
    /// Name (path) of the mapping file this converter was built from.
    pub mapping_name: String,
    // Matches the longest known romaji unit (or any single character).
    romkan_pattern: Regex,
    // Romaji unit -> kana.
    romkan_map: HashMap<String, String>,
    // Same as `romkan_pattern`, but anchored at the end of the string.
    last_char_pattern: Regex,
}
impl RomKanConverter {
pub fn new(mapping_name: &str) -> anyhow::Result<RomKanConverter> {
let romkan_map = load_romkan_map(mapping_name)?;
let mut romas = Vec::from_iter(romkan_map.keys());
// ้ทใใญใผใใไธ่ดใใใใใใซใใใ
romas.sort_by_key(|a| std::cmp::Reverse(a.len()));
let mut pattern = String::from("(");
for x in romas {
pattern += ®ex::escape(x);
pattern += "|";
}
pattern += ".)";
let romkan_pattern = Regex::new(&pattern).unwrap();
let last_char_pattern = Regex::new(&(pattern + "$")).unwrap();
Ok(RomKanConverter {
mapping_name: mapping_name.to_string(),
romkan_pattern,
romkan_map,
last_char_pattern,
})
}
pub fn default_mapping() -> anyhow::Result<RomKanConverter> {
Self::new(&detect_resource_path("romkan", "default.yml")?)
}
}
impl RomKanConverter {
    /// Convert a romaji string to hiragana. Unknown characters are kept
    /// as-is.
    pub fn to_hiragana(&self, src: &str) -> String {
        let lowered = src.to_ascii_lowercase();
        // Normalize "nn" to "n'" before applying the mapping.
        let normalized = lowered.replace("nn", "n'");
        self.romkan_pattern
            .replace_all(&normalized, |caps: &Captures| {
                let rom = caps.get(1).unwrap().as_str();
                self.romkan_map
                    .get(rom)
                    .cloned()
                    .unwrap_or_else(|| rom.to_string())
            })
            .into_owned()
    }

    /// Remove the trailing romaji unit (not merely the last byte) from `src`.
    pub fn remove_last_char(&self, src: &str) -> String {
        self.last_char_pattern.replace(src, "").into_owned()
    }
}
#[cfg(test)]
mod tests {
    use log::LevelFilter;

    use super::*;

    #[test]
    fn test_to_hiragana_simple() -> anyhow::Result<()> {
        let converter = RomKanConverter::default_mapping()?;
        assert_eq!(converter.to_hiragana("aiu"), "ใใใ");
        Ok(())
    }

    // Table-driven check of the default mapping, including symbol entries
    // ("z," etc.), geminates (ใฃ), and "nn" handling.
    #[test]
    fn test_to_hiragana() -> anyhow::Result<()> {
        let data = [
            ("a", "ใ"),
            ("ba", "ใฐ"),
            ("hi", "ใฒ"),
            ("wahaha", "ใใฏใฏ"),
            ("thi", "ใฆใ"),
            ("better", "ในใฃใฆr"),
            ("[", "ใ"),
            ("]", "ใ"),
            ("wo", "ใ"),
            ("du", "ใฅ"),
            ("we", "ใใ"),
            ("di", "ใข"),
            ("fu", "ใต"),
            ("ti", "ใก"),
            ("wi", "ใใ"),
            ("we", "ใใ"),
            ("wo", "ใ"),
            ("z,", "โฅ"),
            ("z.", "โฆ"),
            ("z/", "ใป"),
            ("z[", "ใ"),
            ("z]", "ใ"),
            ("du", "ใฅ"),
            ("di", "ใข"),
            ("fu", "ใต"),
            ("ti", "ใก"),
            ("wi", "ใใ"),
            ("we", "ใใ"),
            ("wo", "ใ"),
            ("sorenawww", "ใใใชwww"),
            ("komitthi", "ใใฟใฃใฆใ"),
            ("ddha", "ใฃใงใ"),
            ("zzye", "ใฃใใ"),
            ("tanni", "ใใใ"),
        ];
        let converter = RomKanConverter::default_mapping()?;
        for (rom, kana) in data {
            assert_eq!(converter.to_hiragana(rom), kana);
        }
        Ok(())
    }

    // remove_last_char must drop one romaji unit, not one byte.
    #[test]
    fn remove_last_char() -> anyhow::Result<()> {
        let _ = env_logger::builder()
            .filter_level(LevelFilter::Info)
            .try_init();
        let cases: Vec<(&str, &str)> = vec![
            ("aka", "a"),
            ("sona", "so"),
            ("son", "so"),
            ("sonn", "so"),
            ("sonnna", "sonn"),
            ("sozh", "so"),
        ];
        let romkan = RomKanConverter::default_mapping()?;
        for (src, expected) in cases {
            let got = romkan.remove_last_char(src);
            assert_eq!(got, expected);
        }
        Ok(())
    }

    // The ATOK table extends the default one.
    #[test]
    fn test_atok() -> anyhow::Result<()> {
        let _ = env_logger::builder()
            .filter_level(LevelFilter::Info)
            .try_init();
        let converter = RomKanConverter::new("../romkan/atok.yml")?;
        assert_eq!(converter.to_hiragana("aiu"), "ใใใ");
        // "zya" is overridden with null in the child table, so it is gone.
        assert_eq!(converter.to_hiragana("zya"), "zใ");
        // An entry added by the child table takes effect.
        assert_eq!(converter.to_hiragana("tso"), "ใคใ");
        Ok(())
    }

    #[test]
    fn test_azik() -> anyhow::Result<()> {
        let _ = env_logger::builder()
            .filter_level(LevelFilter::Info)
            .try_init();
        let converter = RomKanConverter::new("../romkan/azik.yml")?;
        assert_eq!(converter.to_hiragana("dn"), "ใ ใ");
        Ok(())
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/consonant.rs | libakaza/src/consonant.rs | /// hogen ใจๅ
ฅๅใใใๅ ดๅใ"ใปใn" ใจ่กจ็คบใใใ
/// hogena ใจใชใฃใใ "ใปใใช"
/// hogenn ใจใชใฃใใ "ใปใใ" ใจ่กจ็คบใใๅฟ
่ฆใใใใใใ
/// ใใใใจไธๆฆ่กจ็คบใใใๅพใซใใชใใซๅคๅใใใใใใจๆฐๆใกๆชใๆใใใ
/// "meny" ใฎใจใใฏ "ใny" ใจ่กจ็คบใในใใ
use regex::Regex;
/// Splits the pending trailing consonants off a romaji preedit string.
#[derive(Debug)]
pub struct ConsonantSuffixExtractor {
    // Captures (prefix, trailing consonant run). "nn" pairs stay in the
    // prefix because they already form a settled "ใ".
    pattern: Regex,
}

impl Default for ConsonantSuffixExtractor {
    fn default() -> ConsonantSuffixExtractor {
        let pattern = Regex::new("^(.*?(?:nn)*)([qwrtypsdfghjklzxcvbmn]+)$").unwrap();
        ConsonantSuffixExtractor { pattern }
    }
}
impl ConsonantSuffixExtractor {
    /// Split `src` into `(settled_part, pending_consonant_suffix)`,
    /// e.g. "meny" -> ("me", "ny").
    pub fn extract(&self, src: &str) -> (String, String) {
        // Endings that are already complete units and must not be treated
        // as a pending suffix: "nn" is a settled "ใ", and the "z"-prefixed
        // sequences produce symbols (arrows, punctuation) by themselves.
        const SETTLED_SUFFIXES: [&str; 10] =
            ["nn", "zh", "zj", "zk", "zl", "z[", "z]", "z-", "z.", "z/"];
        if SETTLED_SUFFIXES.iter().any(|s| src.ends_with(s)) {
            return (src.to_string(), "".to_string());
        }
        match self.pattern.captures(src) {
            Some(caps) => (
                caps.get(1).unwrap().as_str().to_string(),
                caps.get(2).unwrap().as_str().to_string(),
            ),
            None => (src.to_string(), "".to_string()),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_consonant() {
        let cse = ConsonantSuffixExtractor::default();
        assert_eq!(cse.extract("meny"), ("me".to_string(), "ny".to_string()));
        // "nn" is settled and must not be split off.
        assert_eq!(cse.extract("menn"), ("menn".to_string(), "".to_string()));
        // For "ใผใwwww"-style input the whole "wwww" run is the suffix.
        assert_eq!(
            cse.extract("u-nnwwww"),
            ("u-nn".to_string(), "wwww".to_string())
        );
        assert_eq!(
            cse.extract("u-nnnnwwww"),
            ("u-nnnn".to_string(), "wwww".to_string())
        );
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/search_result.rs | libakaza/src/search_result.rs | #[derive(Debug, Clone)]
pub struct SearchResult {
pub keyword: Vec<u8>,
pub id: usize,
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/cost.rs | libakaza/src/cost.rs | // ๅ ็ฎในใ ใผใธใณใฐ็จใฎๅฎๆฐใ
const ALPHA: f32 = 0.00001;
/// ๅ่ช/ใจใใธใฎใณในใใ่จ็ฎใใใ
/// ๅ ็ฎในใ ใผใธใณใฐใใฆใใใ
///
/// - `count`: ใใฎๅ่ชใฎๅบ็พๅๆฐ, n(w)
/// - `total_words`: ใณใผใในไธญใฎๅ่ชใฎ็ทๅบ็พๅๆฐ, `C`
/// - `unique_words`: ่ชๅฝๆฐ, `V`
pub fn calc_cost(count: u32, total_words: u32, unique_words: u32) -> f32 {
-f32::log10(
((count as f32) + ALPHA) // Alpha ใ่ถณใใ
/ // -------
((total_words as f32) + ALPHA + (unique_words as f32)),
)
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/keymap.rs | libakaza/src/keymap.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use crate::resource::detect_resource_path;
use anyhow::{bail, Context, Result};
use log::info;
use serde::{Deserialize, Serialize};
/// On-disk schema of a keymap YAML file.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct Keymap {
    /// Name of a parent keymap to inherit from (e.g. "default").
    pub extends: Option<String>,
    /// Key bindings declared by this keymap.
    pub keys: Vec<KeyConfig>,
}
impl Keymap {
    /// Expand the declared bindings into a table from concrete
    /// (states, modifiers, key) patterns to command names. A `None` command
    /// marks a binding that a child keymap uses to erase an inherited one.
    fn to_map(&self) -> Result<HashMap<KeyPattern, Option<String>>> {
        let mut retval = HashMap::new();
        for kc in &self.keys {
            for key in &kc.key {
                let (ctrl, shift, key) = Self::parse_key(key.as_str())?;
                let pattern = KeyPattern {
                    states: kc.states.clone(),
                    ctrl,
                    shift,
                    key,
                };
                retval.insert(pattern, kc.command.clone());
            }
        }
        Ok(retval)
    }

    /// Parse a key spec like "C-S-h" into (ctrl, shift, key-name).
    fn parse_key(key: &str) -> Result<(bool, bool, String)> {
        if !key.contains('-') {
            return Ok((false, false, key.to_string()));
        }
        let keys = key.split('-').collect::<Vec<_>>();
        let (modifiers, rest) = keys.split_at(keys.len() - 1);
        let mut ctrl = false;
        let mut shift = false;
        for m in modifiers {
            match *m {
                "C" => ctrl = true,
                "S" => shift = true,
                _ => bail!("Unknown modifier in keymap: {}", key),
            }
        }
        Ok((ctrl, shift, rest[0].to_string()))
    }
}
/// One binding entry in a keymap file.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct KeyConfig {
    /// Input-method states in which this binding is active.
    pub states: Vec<KeyState>,
    /// Key specs, e.g. "h", "C-h", "C-S-h".
    pub key: Vec<String>,
    /// Command to run; `null` erases a binding inherited via `extends`.
    pub command: Option<String>,
}

/// Fully-resolved key pattern used as the lookup key of the keymap table.
#[derive(PartialEq, Debug, Hash, Clone)]
pub struct KeyPattern {
    pub states: Vec<KeyState>,
    pub ctrl: bool,
    pub shift: bool,
    pub key: String,
}

impl Eq for KeyPattern {}

/// State of the input method a binding can apply to.
#[derive(Debug, Hash, PartialEq, Copy, Clone, Serialize, Deserialize)]
pub enum KeyState {
    /// Nothing has been typed yet.
    PreComposition,
    /// Kana input is in progress, before conversion starts.
    Composition,
    /// Conversion is in progress.
    Conversion,
}
impl Keymap {
    /// Load a keymap file and resolve its `extends` chain into a flat
    /// pattern -> command table.
    pub fn load(keymap_path: &str) -> Result<HashMap<KeyPattern, String>> {
        info!("Load {}", keymap_path);
        let got: Keymap = serde_yaml::from_reader(BufReader::new(
            File::open(keymap_path).with_context(|| keymap_path.to_string())?,
        ))?;
        if let Some(parent) = &got.extends {
            // Load the parent keymap (recursively), then apply this file's
            // entries on top of it.
            let path = detect_resource_path("keymap", &format!("{parent}.yml"))?;
            let mut map = Keymap::load(&path)?;
            for (kp, opts) in &got.to_map()? {
                if let Some(cmd) = opts {
                    // Override the parent's binding.
                    map.insert(kp.clone(), cmd.clone());
                } else {
                    // An explicit null erases the parent's binding.
                    map.remove(kp);
                }
            }
            Ok(map)
        } else {
            // Root keymap: every binding is expected to carry a command
            // (a null command here would panic on the unwrap).
            let got = got
                .to_map()?
                .iter()
                .map(|(a, b)| (a.clone(), b.clone().unwrap()))
                .collect::<HashMap<KeyPattern, String>>();
            Ok(got)
        }
    }
}
#[cfg(test)]
mod tests {
    use std::fs::File;
    use std::io::BufReader;

    use super::*;

    // The bundled default keymap must deserialize cleanly.
    #[test]
    fn test_keymap() -> anyhow::Result<()> {
        let keymap: Keymap =
            serde_yaml::from_reader(BufReader::new(File::open("../keymap/default.yml")?))?;
        for kc in keymap.keys {
            println!("{kc:?}");
        }
        Ok(())
    }

    #[test]
    fn test_c_h() -> Result<()> {
        let (ctrl, shift, key) = Keymap::parse_key("C-h")?;
        assert!(ctrl);
        assert!(!shift);
        assert_eq!(key, "h");
        Ok(())
    }

    #[test]
    fn test_c_s_h() -> Result<()> {
        let (ctrl, shift, key) = Keymap::parse_key("C-S-h")?;
        assert!(ctrl);
        assert!(shift);
        assert_eq!(key, "h");
        Ok(())
    }

    // A bare key has no modifiers.
    #[test]
    fn test_shift() -> Result<()> {
        let (ctrl, shift, key) = Keymap::parse_key("h")?;
        assert!(!ctrl);
        assert!(!shift);
        assert_eq!(key, "h");
        Ok(())
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/extend_clause.rs | libakaza/src/extend_clause.rs | use log::info;
use std::ops::Range;
use crate::graph::candidate::Candidate;
// Returns clause ranges that keep the current segmentation unchanged.
fn keep_current(clauses: &[Vec<Candidate>]) -> Vec<Range<usize>> {
    let mut offset = 0;
    clauses
        .iter()
        .map(|clause| {
            let yomi_len = clause[0].yomi.len();
            let range = offset..offset + yomi_len;
            offset += yomi_len;
            range
        })
        .collect()
}
/// Extend the selected clause one character to the right.
/// `current_clause` is the 0-origin index (from the left) of the clause
/// currently selected. Returns byte ranges of the forced segmentation.
pub fn extend_right(clauses: &Vec<Vec<Candidate>>, current_clause: usize) -> Vec<Range<usize>> {
    // Empty clause list: nothing to do.
    if clauses.is_empty() {
        return Vec::new();
    }
    // The rightmost clause has nothing to its right to extend into.
    info!(
        "Keep current? current={:?} len={}",
        current_clause,
        clauses.len()
    );
    if current_clause == clauses.len() - 1 {
        info!("Keep current");
        return keep_current(clauses);
    }
    // Note: Rust ranges are exclusive at the end.
    let mut force_selected_clause: Vec<Range<usize>> = Vec::new();
    let mut offset = 0;
    for (i, clause) in clauses.iter().enumerate() {
        let candidate = &clause[0];
        if current_clause == i {
            // The selected clause grows by the first character of the
            // following clause.
            let next_candidate = &clauses[i + 1][0];
            force_selected_clause.push(
                offset
                    ..offset
                        + candidate.yomi.len()
                        + next_candidate.yomi.chars().next().unwrap().len_utf8(),
            );
        } else if current_clause + 1 == i {
            // The clause right after the selection loses its first character.
            let c = candidate.yomi.chars().next().unwrap();
            let first_char_len = c.len_utf8();
            let start = offset + first_char_len;
            let end = offset + first_char_len + candidate.yomi.len() - first_char_len;
            if start < end {
                // Extending the previous clause may swallow this clause
                // entirely; emit it only if something remains.
                force_selected_clause.push(start..end);
            }
        } else {
            force_selected_clause.push(offset..offset + candidate.yomi.len())
        }
        offset += candidate.yomi.len();
    }
    force_selected_clause
}
/// Extend the selected clause one character to the **left**.
/// `current_clause` is the 0-origin index (from the left) of the clause
/// currently selected. Returns byte ranges of the forced segmentation.
pub fn extend_left(clauses: &Vec<Vec<Candidate>>, current_clause: usize) -> Vec<Range<usize>> {
    if clauses.is_empty() {
        return Vec::new();
    }
    if clauses.len() == 1 {
        // Only one clause.
        let yomi = &clauses[0][0].yomi;
        return if yomi.chars().count() > 1 {
            // Split the last character off into its own clause.
            let mut force_selected_clause: Vec<Range<usize>> = Vec::new();
            let last_char = yomi.chars().last().unwrap();
            force_selected_clause.push(0..yomi.len() - last_char.len_utf8());
            force_selected_clause.push(yomi.len() - last_char.len_utf8()..yomi.len());
            force_selected_clause
        } else {
            // Only 1 character.
            keep_current(clauses)
        };
    }
    if current_clause == 0 {
        // The leftmost clause is focused, so shorten it.
        if clauses[0][0].yomi.chars().count() == 1 {
            // A single-character leftmost clause cannot shrink further.
            return keep_current(clauses);
        }
        let mut force_selected_clause: Vec<Range<usize>> = Vec::new();
        let mut offset = 0;
        for (i, clause) in clauses.iter().enumerate() {
            // AS-IS: [ab][c]
            //        ^^ <- focused
            //
            // TO-BE: [a][bc]
            let yomi = &clause[0].yomi;
            if i == current_clause {
                let last_char = yomi.chars().last().unwrap();
                force_selected_clause.push(offset..offset + yomi.len() - last_char.len_utf8());
            } else if i == current_clause + 1 {
                let prev_last_char = clauses[i - 1][0].yomi.chars().last().unwrap().len_utf8();
                let start = offset - prev_last_char;
                let end = start + (yomi.len() + prev_last_char);
                // The clause may have disappeared entirely.
                if start < end {
                    force_selected_clause.push(start..end);
                }
            } else {
                force_selected_clause.push(offset..offset + yomi.len());
            }
            offset += yomi.len();
        }
        force_selected_clause
    } else {
        // A clause other than the first is focused: shorten the clause to
        // its left and grow the focused clause.
        let mut force_selected_clause: Vec<Range<usize>> = Vec::new();
        let mut offset = 0;
        for (i, clause) in clauses.iter().enumerate() {
            let yomi = &clause[0].yomi;
            let (start, end) = if i == current_clause {
                let prev_yomi = &clauses[i - 1][0].yomi;
                let prev_last_char = prev_yomi.chars().last().unwrap().len_utf8();
                let start = offset - prev_last_char;
                let end = start + yomi.len() + prev_last_char;
                (start, end)
            } else if i == current_clause - 1 {
                // The clause left of the focus loses its last character.
                let last_char = yomi.chars().last().unwrap().len_utf8();
                let start = offset;
                let end = offset + (yomi.len() - last_char);
                // It may disappear entirely (filtered by start < end below).
                (start, end)
            } else {
                let start = offset;
                let end = offset + yomi.len();
                (start, end)
            };
            if start < end {
                force_selected_clause.push(start..end);
            }
            offset += yomi.len();
        }
        force_selected_clause
    }
}
#[cfg(test)]
mod test_base {
    use super::*;

    // Build (joined yomi, clauses) from a list of clause strings; each
    // clause gets a single candidate whose surface equals its yomi.
    pub fn mk(src: &[&str]) -> (String, Vec<Vec<Candidate>>) {
        let mut clauses: Vec<Vec<Candidate>> = Vec::new();
        for x in src {
            clauses.push(vec![Candidate::new(x, x, 0_f32)]);
        }
        let yomi = src.join("");
        (yomi, clauses)
    }

    // Map byte ranges back to substrings of `yomi` for easy assertions.
    pub fn to_vec(yomi: String, got: Vec<Range<usize>>) -> Vec<String> {
        got.iter().map(|it| yomi[it.clone()].to_string()).collect()
    }
}
#[cfg(test)]
mod tests_right {
    use super::test_base::mk;
    use super::test_base::to_vec;
    use super::*;

    #[test]
    fn test_extend_right() {
        let (yomi, clauses) = mk(&["ใ"]);
        let got = extend_right(&clauses, 0);
        assert_eq!(to_vec(yomi, got), vec!("ใ"));
    }

    // Extending the first clause swallows the second one entirely.
    #[test]
    fn test_extend_right2() {
        let (yomi, clauses) = mk(&["ใ", "ใ"]);
        let got = extend_right(&clauses, 0);
        assert_eq!(to_vec(yomi, got), vec!("ใใ"));
    }

    // Normal extension: the following clause shrinks by one character.
    #[test]
    fn test_extend_right3() {
        let (yomi, clauses) = mk(&["ใ", "ใใ"]);
        let got = extend_right(&clauses, 0);
        assert_eq!(to_vec(yomi, got), vec!("ใใ", "ใ"));
    }

    // The rightmost clause cannot be extended; segmentation is unchanged.
    #[test]
    fn test_extend_right4() {
        let (yomi, clauses) = mk(&["ใใ", "ใงใใญ"]);
        // Focus on "ใงใใญ".
        let got = extend_right(&clauses, 1);
        assert_eq!(to_vec(yomi, got), vec!("ใใ", "ใงใใญ"));
    }
}
#[cfg(test)]
mod tests_left {
    use super::test_base::mk;
    use super::test_base::to_vec;
    use super::*;

    #[test]
    fn test_extend_left() {
        let (yomi, clauses) = mk(&["ใ"]);
        let got = extend_left(&clauses, 0);
        assert_eq!(to_vec(yomi, got), vec!("ใ"));
    }

    // First clause selected and it is a single character: no change.
    #[test]
    fn test_extend_left2() {
        let (yomi, clauses) = mk(&["ใ", "ใ"]);
        let got = extend_left(&clauses, 0);
        assert_eq!(to_vec(yomi, got), vec!("ใ", "ใ"));
    }

    // First clause selected with two or more characters: it shrinks.
    #[test]
    fn test_extend_left3() {
        let (yomi, clauses) = mk(&["ใใ ", "ใ", "ใ"]);
        let got = extend_left(&clauses, 0);
        assert_eq!(to_vec(yomi, got), vec!("ใ", "ใ ใ", "ใ"));
    }

    // Second clause selected: it grows into the clause on its left.
    #[test]
    fn test_extend_left4() {
        let (yomi, clauses) = mk(&["ใใ ", "ใ", "ใ"]);
        let got = extend_left(&clauses, 1);
        assert_eq!(to_vec(yomi, got), vec!("ใ", "ใ ใ", "ใ"));
    }

    // A new clause is split off the end of a single clause.
    #[test]
    fn test_extend_left5() {
        let (yomi, clauses) = mk(&["ใใใใใ"]);
        let got = extend_left(&clauses, 0);
        assert_eq!(to_vec(yomi, got), vec!("ใใใใ", "ใ"));
    }

    // Clauses merge when the left clause is fully consumed.
    #[test]
    fn test_extend_left6() {
        let (yomi, clauses) = mk(&["ใ", "ใพใจ"]);
        let got = extend_left(&clauses, 1);
        assert_eq!(to_vec(yomi, got), vec!("ใใพใจ"));
    }
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/corpus.rs | libakaza/src/corpus.rs | use std::collections::HashSet;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;
use anyhow::{bail, Result};
use log::info;
use crate::graph::word_node::WordNode;
/// Fully-annotated corpus.
/// Same line format as Kytea's fully annotated corpus:
///
/// ใณใผใใน/ใใผใฑใ ใฎ/ใฎ ๆ/ใถใ ใง/ใง ใ/ใ ใ/ใ
///
/// Handling part-of-speech tags properly is hard for non-experts, so this
/// format deliberately annotates only "which candidate should appear at
/// which position", not POS information.
///
/// http://www.phontron.com/kytea/io-ja.html
pub struct FullAnnotationCorpus {
    /// The annotated words, in sentence order.
    pub nodes: Vec<WordNode>,
}
impl FullAnnotationCorpus {
/// ใใซใขใใใผใทใงใณใณใผใในใใใผในใใใ
pub fn new(src: &str) -> Result<FullAnnotationCorpus> {
let src = src
.trim_start_matches("__BOS__/__BOS__/__BOS__/__BOS__ ")
.trim_end_matches(" __EOS__/__EOS__/__EOS__/__EOS__");
let p: Vec<&str> = src.split(' ').collect();
let mut start_pos = 0;
let mut nodes: Vec<WordNode> = Vec::new();
for word in p {
if word.is_empty() {
continue;
}
if let Some((surface, yomi)) = word.split_once('/') {
if surface.is_empty() {
bail!("Surface is empty: {}", src);
}
nodes.push(WordNode::new(start_pos, surface, yomi, None, false));
start_pos += yomi.len() as i32;
} else {
nodes.push(WordNode::new(start_pos, word, word, None, false));
start_pos += word.len() as i32;
}
}
Ok(FullAnnotationCorpus { nodes })
}
/// ใณใผใในใฎใใใฟใใ้ฃ็ตใใใใฎใ่ฟใใ
pub fn yomi(&self) -> String {
let mut buf = String::new();
for yomi in self.nodes.iter().map(|f| f.yomi.as_str()) {
buf += yomi;
}
buf
}
/// ใณใผใในใฎใ่กจ่ฃ
ใใ้ฃ็ตใใใใฎใ่ฟใใ
pub fn surface(&self) -> String {
let mut buf = String::new();
for yomi in self.nodes.iter().map(|f| f.surface.as_str()) {
buf += yomi;
}
buf
}
/// ๆญฃ่งฃใใผใใชใใธใงใฏใใฎใปใใใ่ฟใ
pub fn correct_node_set(&self) -> HashSet<WordNode> {
HashSet::from_iter(self.nodes.iter().cloned())
}
}
/// Read a corpus file, one annotated sentence per line.
///
/// Lines starting with ";;" are comments and blank lines are ignored.
/// Unparsable lines are logged and skipped rather than failing the file.
pub fn read_corpus_file(src: &Path) -> Result<Vec<FullAnnotationCorpus>> {
    let mut result = Vec::new();
    let file = File::open(src)?;
    for line in BufReader::new(file).lines() {
        let line = line?;
        // NOTE(review): the ";;" check runs before trimming, so indented
        // comment lines are NOT skipped — confirm this is intended.
        if line.starts_with(";;") {
            // Skip comment lines.
            continue;
        }
        if line.trim().is_empty() {
            // Skip blank lines.
            continue;
        }
        match FullAnnotationCorpus::new(line.trim()) {
            Ok(corpus) => result.push(corpus),
            Err(err) => {
                info!("Cannot parse corpus: {}", err);
                continue;
            }
        }
    }
    Ok(result)
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/dict/merge_dict.rs | libakaza/src/dict/merge_dict.rs | use std::collections::HashMap;
pub fn merge_dict(dicts: Vec<HashMap<String, Vec<String>>>) -> HashMap<String, Vec<String>> {
let mut result: HashMap<String, Vec<String>> = HashMap::new();
for dict in dicts {
for (yomi, kanjis) in dict {
let target = result.entry(yomi).or_default();
for kanji in kanjis {
if !target.contains(&kanji) {
target.push(kanji);
}
}
}
}
result
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_merge_dict() {
let got = merge_dict(vec![
HashMap::from([("ใ".to_string(), vec!["่ช".to_string()])]),
HashMap::from([
("ใ".to_string(), vec!["ๅพ".to_string(), "็ข".to_string()]),
("ใ".to_string(), vec!["็ท".to_string()]),
]),
]);
assert_eq!(
got,
HashMap::from([
(
"ใ".to_string(),
vec!["่ช".to_string(), "ๅพ".to_string(), "็ข".to_string()]
),
("ใ".to_string(), vec!["็ท".to_string()])
])
);
}
#[test]
fn test_merge_dict_dedup() {
let got = merge_dict(vec![
HashMap::from([("ใ".to_string(), vec!["่ช".to_string()])]),
HashMap::from([("ใ".to_string(), vec!["่ช".to_string(), "็ข".to_string()])]),
]);
assert_eq!(
got,
HashMap::from([("ใ".to_string(), vec!["่ช".to_string(), "็ข".to_string()]),])
);
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/dict/mod.rs | libakaza/src/dict/mod.rs | pub mod loader;
pub mod merge_dict;
pub mod skk;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/dict/loader.rs | libakaza/src/dict/loader.rs | use std::collections::HashMap;
use std::fs::File;
use std::path::Path;
use std::time::SystemTime;
use anyhow::Context;
use anyhow::Result;
use encoding_rs::{EUC_JP, UTF_8};
use log::{error, info};
use crate::config::{DictConfig, DictEncoding, DictType};
use crate::dict::merge_dict::merge_dict;
use crate::dict::skk::read::read_skkdict;
use crate::kana_kanji::marisa_kana_kanji_dict::MarisaKanaKanjiDict;
fn try_get_mtime(path: &str) -> Result<u128> {
let file = File::open(path)?;
let metadata = file.metadata()?;
let mtime = metadata.modified()?;
let t = mtime.duration_since(SystemTime::UNIX_EPOCH)?;
Ok(t.as_millis())
}
/// - `dict_configs`: ่พๆธใฎ่ชญใฟ่พผใฟ่จญๅฎ
/// - `cache_name`: ใญใฃใใทใฅใใกใคใซๅใ `~/.cache/akaza/kana_kanji_cache.marisa` ใจใใซใงใใ
pub fn load_dicts_with_cache(
dict_configs: &Vec<DictConfig>,
cache_name: &str,
) -> Result<MarisaKanaKanjiDict> {
// ใใฆใใใใงใๅ
จ้จใฎไพๅญๅ
ใใกใคใซใฎ mtime ใฎ max ใจใญใฃใใทใฅใใกใคใซใฎ mtime ใฎ max ใๆฏ่ผใใ
// ๆดๆฐใๅฟ
่ฆใ ใฃใใใๆดๆฐใใใ
let p = dict_configs
.iter()
.map(|it| try_get_mtime(&it.path).unwrap_or(0_u128))
.collect::<Vec<_>>();
info!("mtimes: {:?}", p);
let max_dict_mtime = dict_configs
.iter()
.map(|it| try_get_mtime(&it.path).unwrap_or(0_u128))
.max()
.unwrap_or(0_u128);
// cache file ใฎใในใๅพใ
let base_dirs = xdg::BaseDirectories::with_prefix("akaza")
.with_context(|| "xdg directory with 'akaza' prefix")?;
base_dirs.create_cache_directory("")?;
let cache_path = base_dirs
.get_cache_file(cache_name)
.to_string_lossy()
.to_string();
let cache_mtime = try_get_mtime(&cache_path).unwrap_or(0_u128);
// ็พๅจใฎ Config ใใทใชใขใฉใคใบใใใ
let config_serialized = serde_yaml::to_string(dict_configs)?;
info!("SERIALIZED: {:?}", config_serialized);
if cache_mtime >= max_dict_mtime {
match MarisaKanaKanjiDict::load(cache_path.as_str()) {
Ok(dict) => {
let dict_serialized = dict.cache_serialized();
if dict_serialized == config_serialized {
// ใญใฃใใทใฅใใกใคใซใๆธใใๆใฎ่จญๅฎใจๅใใใฉใใใ็ขบ่ชใใ
// ่จญๅฎใ้ใๅ ดๅใฏใใญใฃใใทใฅใไฝใ็ดใๅฟ
่ฆใใใใ
info!("Cache is fresh! {:?} => {}", dict_configs, cache_path);
return Ok(dict);
} else {
info!(
"DictConfig was modified...: {:?}!={:?}",
dict_serialized, config_serialized
);
}
}
Err(err) => {
info!("Cannot load {:?}: {:?}", cache_path, err)
}
}
}
info!("Cache is not fresh! {:?} => {}", dict_configs, cache_path);
let dicts = load_dicts(dict_configs)?;
MarisaKanaKanjiDict::build_with_cache(dicts, &cache_path, &config_serialized)
}
pub fn load_dicts(dict_configs: &Vec<DictConfig>) -> Result<HashMap<String, Vec<String>>> {
let mut dicts: Vec<HashMap<String, Vec<String>>> = Vec::new();
for dict_config in dict_configs {
match load_dict(dict_config) {
Ok(dict) => {
dicts.push(dict);
}
Err(err) => {
error!("Cannot load dictionary: {:?}. {}", dict_config, err);
// ไธ้กงใฎ่พๆธใฎ่ชญใฟ่พผใฟใซๅคฑๆใใฆใใไปใฎ่พๆธใฏ่ชญใฟ่พผใในใใชใฎใง
// ๅฆ็ใฏ็ถ่กใใ
}
}
}
Ok(merge_dict(dicts))
}
pub fn load_dict(dict: &DictConfig) -> Result<HashMap<String, Vec<String>>> {
info!(
"Loading dictionary: {} {:?} {}",
dict.path, dict.encoding, dict.dict_type
);
let encoding = match &dict.encoding {
DictEncoding::EucJp => EUC_JP,
DictEncoding::Utf8 => UTF_8,
};
match dict.dict_type {
DictType::SKK => {
let t1 = SystemTime::now();
let merged = read_skkdict(Path::new(dict.path.as_str()), encoding)?;
let t2 = SystemTime::now();
info!(
"Loaded {}: {} entries in {} msec",
dict.path,
merged.len(),
t2.duration_since(t1).unwrap().as_millis()
);
Ok(merged)
}
}
}
#[cfg(test)]
mod tests {
use std::collections::HashSet;
use std::io::Write;
use std::{env, thread, time};
use crate::config::DictUsage;
use anyhow::Result;
use log::LevelFilter;
use tempfile::{tempdir, NamedTempFile};
use super::*;
#[test]
fn test_load_dict_ex() -> Result<()> {
let _ = env_logger::builder()
.filter_level(LevelFilter::Info)
.is_test(true)
.try_init();
let dictfile = NamedTempFile::new().unwrap();
let cachedir = tempdir()?;
info!("tmpdir: {}", cachedir.path().to_str().unwrap());
env::set_var("XDG_CACHE_HOME", cachedir.path().to_str().unwrap());
{
let mut fp = File::create(dictfile.path())?;
fp.write_all(
";; okuri-ari entries.\n\
;; okuri-nasi entries.\n\
ใใ /ๅง/\n"
.as_bytes(),
)?;
}
let loaded = load_dicts_with_cache(
&vec![DictConfig {
path: dictfile.path().to_str().unwrap().to_string(),
encoding: DictEncoding::Utf8,
dict_type: DictType::SKK,
usage: DictUsage::Normal,
}],
"test",
)?;
assert_eq!(loaded.yomis(), vec!["ใใ"]);
// timestamp ใใใใใใใซ 10msec ไผใ
thread::sleep(time::Duration::from_millis(10));
{
let mut fp = File::create(dictfile.path())?;
fp.write_all(
";; okuri-ari entries.\n\
;; okuri-nasi entries.\n\
ใใ /ๅง/\n\
ใใ /็่ณ/\n"
.as_bytes(),
)?;
}
// ใใกใคใซใๆธใ็ดใใใใใญใฃใใทใฅใ่ชญใฟใชใใใฆใปใใใ
let loaded = load_dicts_with_cache(
&vec![DictConfig {
path: dictfile.path().to_str().unwrap().to_string(),
encoding: DictEncoding::Utf8,
dict_type: DictType::SKK,
usage: DictUsage::Normal,
}],
"test",
)?;
assert_eq!(
loaded
.yomis()
.iter()
.map(|s| s.to_string())
.collect::<HashSet<_>>(),
HashSet::from(["ใใ".to_string(), "ใใ".to_string()])
);
let loaded = load_dicts_with_cache(
&vec![DictConfig {
path: dictfile.path().to_str().unwrap().to_string(),
encoding: DictEncoding::Utf8,
dict_type: DictType::SKK,
usage: DictUsage::Normal,
}],
"test",
)?;
assert_eq!(
loaded
.yomis()
.iter()
.map(|s| s.to_string())
.collect::<HashSet<_>>(),
HashSet::from(["ใใ".to_string(), "ใใ".to_string()])
);
Ok(())
}
/// ่จญๅฎใใกใคใซใๆธใๆใใใใใ่ชญใฟ็ดใใ
/// ๆธใๆใใใใใ่ชญใฟ็ดใใ
#[test]
fn test_if_config_was_changed() -> Result<()> {
let _ = env_logger::builder()
.filter_level(LevelFilter::Info)
.is_test(true)
.try_init();
let dict1 = NamedTempFile::new().unwrap();
let dict2 = NamedTempFile::new().unwrap();
let cachedir = tempdir()?;
info!("tmpdir: {}", cachedir.path().to_str().unwrap());
env::set_var("XDG_CACHE_HOME", cachedir.path().to_str().unwrap());
{
let mut fp = File::create(dict1.path())?;
fp.write_all(
";; okuri-ari entries.\n\
;; okuri-nasi entries.\n\
ใใ /ๅง/\n"
.as_bytes(),
)?;
}
{
let mut fp = File::create(dict2.path())?;
fp.write_all(
";; okuri-ari entries.\n\
;; okuri-nasi entries.\n\
ใใณ /ๆตท่/\n"
.as_bytes(),
)?;
}
// dict1 ใฎใฟใ่ชญใใงใฟใใ
let loaded = load_dicts_with_cache(
&vec![DictConfig {
path: dict1.path().to_str().unwrap().to_string(),
encoding: DictEncoding::Utf8,
dict_type: DictType::SKK,
usage: DictUsage::Normal,
}],
"test",
)?;
assert_eq!(loaded.yomis(), vec!["ใใ"]);
// dict2 ใๆๅฎใใใใฟใผใณใ
let loaded = load_dicts_with_cache(
&vec![
DictConfig {
path: dict1.path().to_str().unwrap().to_string(),
encoding: DictEncoding::Utf8,
dict_type: DictType::SKK,
usage: DictUsage::Normal,
},
DictConfig {
path: dict2.path().to_str().unwrap().to_string(),
encoding: DictEncoding::Utf8,
dict_type: DictType::SKK,
usage: DictUsage::Normal,
},
],
"test",
)?;
assert_eq!(
loaded
.yomis()
.iter()
.map(|s| s.to_string())
.collect::<HashSet<_>>(),
HashSet::from(["ใใ".to_string(), "ใใณ".to_string()])
);
Ok(())
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/dict/skk/write.rs | libakaza/src/dict/skk/write.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::Write;
use log::info;
use crate::dict::merge_dict::merge_dict;
pub fn write_skk_dict(
ofname: &str,
dicts: Vec<HashMap<String, Vec<String>>>,
) -> anyhow::Result<()> {
info!("Writing {}", ofname);
let merged_dict = merge_dict(dicts);
{
let mut wfp = File::create(ofname)?;
wfp.write_all(";; okuri-ari entries.\n".as_bytes())?;
wfp.write_all(";; okuri-nasi entries.\n".as_bytes())?;
let mut keys = merged_dict.keys().collect::<Vec<_>>();
keys.sort();
for yomi in keys {
let kanjis = merged_dict.get(yomi).unwrap();
assert!(!yomi.is_empty(), "yomi must not be empty: {kanjis:?}");
let kanjis = kanjis.join("/");
wfp.write_fmt(format_args!("{yomi} /{kanjis}/\n"))?;
}
}
Ok(())
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/dict/skk/mod.rs | libakaza/src/dict/skk/mod.rs | pub mod ari2nasi;
pub mod read;
pub mod write;
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/dict/skk/read.rs | libakaza/src/dict/skk/read.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::{BufReader, Read};
use std::path::Path;
use anyhow::{Context, Result};
use encoding_rs::Encoding;
use log::info;
use regex::Regex;
use crate::dict::merge_dict::merge_dict;
use crate::dict::skk::ari2nasi::Ari2Nasi;
enum ParserState {
OkuriAri,
OkuriNasi,
}
pub fn read_skkdict(
path: &Path,
encoding: &'static Encoding,
) -> Result<HashMap<String, Vec<String>>> {
let file = File::open(path).with_context(|| path.to_string_lossy().to_string())?;
let mut buf: Vec<u8> = Vec::new();
BufReader::new(file).read_to_end(&mut buf)?;
let (decoded, _, _) = encoding.decode(buf.as_slice());
let decoded = decoded.to_string();
parse_skkdict(decoded.as_str())
}
/**
* SKK ่พๆธใใใผในใใพใใ
*/
pub fn parse_skkdict(src: &str) -> Result<HashMap<String, Vec<String>>> {
let mut ari: HashMap<String, Vec<String>> = HashMap::new();
let mut nasi: HashMap<String, Vec<String>> = HashMap::new();
let mut target = &mut ari;
let comment_regex = Regex::new(";.*")?;
for line in src.lines() {
if line.starts_with(";;") {
if line.contains(";; okuri-ari entries.") {
target = &mut ari;
continue;
} else if line.contains(";; okuri-nasi entries.") {
target = &mut nasi;
continue;
} else {
// skip comment
continue;
}
}
if line.is_empty() {
// skip empty line
continue;
}
let Some((yomi, surfaces)) = line.split_once(' ') else {
info!("Invalid line: {}", line);
continue;
};
// ่ชญใฟไปฎๅใใขใซใใกใใใใฎใใฎใฏ้คๅคใใใ
// `kk /ๆ ชๅผไผ็คพ/` ใฎใใใชใจใณใใชใผใใฉใคใใณใณใใผใธใงใณๆใซ้ช้ญใซใชใใใใ
// https://github.com/akaza-im/akaza/issues/260
if let Some(first_yomi_char) = yomi.chars().next() {
if first_yomi_char.is_ascii_alphabetic() {
continue;
}
}
// example:
// ใจใใฒใ /ๅพณๅฎ/ๅพณๅคง/ๅพณๅฏ/็ฃๅผ/
// ๆซๅฐพใฎ slash ใๆใใฆใๅ ดๅใใใใจใณใใชใผใ SKK-JISYO.L ใซๅ
ฅใฃใฆใใใใใใฎใงๆณจๆใ
let surfaces: Vec<String> = surfaces
.trim_start_matches('/')
.trim_end_matches('/')
.split('/')
.map(|s| comment_regex.replace(s, "").to_string())
.filter(|it| !it.is_empty())
.collect();
assert!(!yomi.is_empty(), "yomi must not empty: line={line}");
target.insert(yomi.to_string(), surfaces);
}
let ari2nasi = Ari2Nasi::default();
let ari = ari2nasi.ari2nasi(&ari)?;
Ok(merge_dict(vec![ari, nasi]))
}
#[cfg(test)]
mod tests {
use encoding_rs::EUC_JP;
use log::warn;
use super::*;
#[test]
fn test_skk_l() -> anyhow::Result<()> {
let dictpath = Path::new("/usr/share/skk/SKK-JISYO.L");
if !dictpath.exists() {
warn!("There's no SKK-JISYO.L... Skip this test case.");
return Ok(());
}
let dict = read_skkdict(dictpath, EUC_JP)?;
assert!(!dict.is_empty());
Ok(())
}
/// ๆซๅฐพใฎในใฉใใทใฅใ่ฝใกใฆใใฆใ่จฑๅฎนใใใ
// sars-cov /severe acute respiratory syndrome coronavirus/SARSใณใญใใฆใคใซใน
#[test]
fn missing_trailing_slash() -> anyhow::Result<()> {
let src = ";; okuri-nasi entries.\n\
ใใช /็ฒ";
let dict = parse_skkdict(src)?;
assert_eq!(*dict.get("ใใช").unwrap(), vec!["็ฒ".to_string()]);
Ok(())
}
/// ใใผใน็ตๆใ็ฉบใซใชใๅ ดๅใฏ็ก่ฆใใ
#[test]
fn empty() -> anyhow::Result<()> {
let src = ";; okuri-nasi entries.\n\
ใใฟใใใ /; [Semicolon]/\n\
ใ /ๅฐพ/\n";
let dict = parse_skkdict(src)?;
assert_eq!(*dict.get("ใใฟใใใ").unwrap(), Vec::<String>::new());
assert_eq!(*dict.get("ใ").unwrap(), vec!["ๅฐพ".to_string()]);
Ok(())
}
/// ใใผใน็ตๆใ็ฉบใซใชใๅ ดๅใฏ็ก่ฆใใ
#[test]
fn kk() -> anyhow::Result<()> {
let src = ";; okuri-nasi entries.\n\
kk /ๆ ชๅผไผ็คพ/\n";
let dict = parse_skkdict(src)?;
assert_eq!(dict.get("kk"), None);
Ok(())
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/dict/skk/ari2nasi.rs | libakaza/src/dict/skk/ari2nasi.rs | use std::collections::HashMap;
use anyhow::bail;
pub struct Ari2Nasi {
boin_map: HashMap<char, &'static str>,
roman_map: HashMap<&'static str, &'static str>,
}
impl Default for Ari2Nasi {
fn default() -> Ari2Nasi {
let boin_map = HashMap::from([
('a', "ใ"),
('i', "ใ"),
('u', "ใ"),
('e', "ใ"),
('o', "ใ"),
]);
let roman_map = HashMap::from([
("xa", "ใ"),
("a", "ใ"),
("xi", "ใ"),
("i", "ใ"),
("xu", "ใ
"),
("u", "ใ"),
("vu", "ใใ"),
("va", "ใใใ"),
("vi", "ใใใ"),
("ve", "ใใใ"),
("vo", "ใใใ"),
("xe", "ใ"),
("e", "ใ"),
("xo", "ใ"),
("o", "ใ"),
("ka", "ใ"),
("ga", "ใ"),
("ki", "ใ"),
("kya", "ใใ"),
("kyu", "ใใ
"),
("kyo", "ใใ"),
("gi", "ใ"),
("gya", "ใใ"),
("gyu", "ใใ
"),
("gyo", "ใใ"),
("ku", "ใ"),
("gu", "ใ"),
("ke", "ใ"),
("ge", "ใ"),
("ko", "ใ"),
("go", "ใ"),
("sa", "ใ"),
("za", "ใ"),
("shi", "ใ"),
("sha", "ใใ"),
("shu", "ใใ
"),
("si", "ใ"),
("sya", "ใใ"),
("syu", "ใใ
"),
("sho", "ใใ"),
("ji", "ใ"),
("ja", "ใใ"),
("ju", "ใใ
"),
("jo", "ใใ"),
("syo", "ใใ"),
("zi", "ใ"),
("zya", "ใใ"),
("zyu", "ใใ
"),
("zyo", "ใใ"),
("su", "ใ"),
("zu", "ใ"),
("se", "ใ"),
("ze", "ใ"),
("so", "ใ"),
("zo", "ใ"),
("ta", "ใ"),
("da", "ใ "),
("chi", "ใก"),
("cha", "ใกใ"),
("chu", "ใกใ
"),
("ti", "ใก"),
("tya", "ใกใ"),
("tyu", "ใกใ
"),
("cho", "ใกใ"),
("di", "ใข"),
("dya", "ใขใ"),
("dyu", "ใขใ
"),
("dyo", "ใขใ"),
("tyo", "ใกใ"),
("xtsu", "ใฃ"),
("xtu", "ใฃ"),
("vvu", "ใฃใใ"),
("vva", "ใฃใใใ"),
("vvi", "ใฃใใใ"),
("vve", "ใฃใใใ"),
("vvo", "ใฃใใใ"),
("kka", "ใฃใ"),
("gga", "ใฃใ"),
("kki", "ใฃใ"),
("kkya", "ใฃใใ"),
("kkyu", "ใฃใใ
"),
("kkyo", "ใฃใใ"),
("ggi", "ใฃใ"),
("ggya", "ใฃใใ"),
("ggyu", "ใฃใใ
"),
("ggyo", "ใฃใใ"),
("kku", "ใฃใ"),
("ggu", "ใฃใ"),
("kke", "ใฃใ"),
("gge", "ใฃใ"),
("kko", "ใฃใ"),
("ggo", "ใฃใ"),
("ssa", "ใฃใ"),
("zza", "ใฃใ"),
("sshi", "ใฃใ"),
("ssha", "ใฃใใ"),
("ssi", "ใฃใ"),
("ssya", "ใฃใใ"),
("sshu", "ใฃใใ
"),
("ssho", "ใฃใใ"),
("ssyu", "ใฃใใ
"),
("ssyo", "ใฃใใ"),
("jji", "ใฃใ"),
("jja", "ใฃใใ"),
("jju", "ใฃใใ
"),
("jjo", "ใฃใใ"),
("zzi", "ใฃใ"),
("zzya", "ใฃใใ"),
("zzyu", "ใฃใใ
"),
("zzyo", "ใฃใใ"),
("ssu", "ใฃใ"),
("zzu", "ใฃใ"),
("sse", "ใฃใ"),
("zze", "ใฃใ"),
("sso", "ใฃใ"),
("zzo", "ใฃใ"),
("tta", "ใฃใ"),
("dda", "ใฃใ "),
("cchi", "ใฃใก"),
("tti", "ใฃใก"),
("ccha", "ใฃใกใ"),
("cchu", "ใฃใกใ
"),
("ccho", "ใฃใกใ"),
("ddi", "ใฃใข"),
("ttya", "ใฃใกใ"),
("ttyu", "ใฃใกใ
"),
("ttyo", "ใฃใกใ"),
("ddya", "ใฃใขใ"),
("ddyu", "ใฃใขใ
"),
("ddyo", "ใฃใขใ"),
("ttsu", "ใฃใค"),
("ttu", "ใฃใค"),
("ddu", "ใฃใฅ"),
("tte", "ใฃใฆ"),
("dde", "ใฃใง"),
("tto", "ใฃใจ"),
("ddo", "ใฃใฉ"),
("hha", "ใฃใฏ"),
("bba", "ใฃใฐ"),
("ppa", "ใฃใฑ"),
("hhi", "ใฃใฒ"),
("hhya", "ใฃใฒใ"),
("hhyu", "ใฃใฒใ
"),
("hhyo", "ใฃใฒใ"),
("bbi", "ใฃใณ"),
("bbya", "ใฃใณใ"),
("bbyu", "ใฃใณใ
"),
("bbyo", "ใฃใณใ"),
("ppi", "ใฃใด"),
("ppya", "ใฃใดใ"),
("ppyu", "ใฃใดใ
"),
("ppyo", "ใฃใดใ"),
("ffu", "ใฃใต"),
("hhu", "ใฃใต"),
("ffa", "ใฃใตใ"),
("ffi", "ใฃใตใ"),
("ffe", "ใฃใตใ"),
("ffo", "ใฃใตใ"),
("bbu", "ใฃใถ"),
("ppu", "ใฃใท"),
("hhe", "ใฃใธ"),
("bbe", "ใฃใน"),
("ppe", "ใฃใบ"),
("hho", "ใฃใป"),
("bbo", "ใฃใผ"),
("ppo", "ใฃใฝ"),
("yya", "ใฃใ"),
("yyu", "ใฃใ"),
("yyo", "ใฃใ"),
("rra", "ใฃใ"),
("rri", "ใฃใ"),
("rrya", "ใฃใใ"),
("rryu", "ใฃใใ
"),
("rryo", "ใฃใใ"),
("rru", "ใฃใ"),
("rre", "ใฃใ"),
("rro", "ใฃใ"),
("tu", "ใค"),
("tsu", "ใค"),
("du", "ใฅ"),
("te", "ใฆ"),
("de", "ใง"),
("to", "ใจ"),
("do", "ใฉ"),
("na", "ใช"),
("ni", "ใซ"),
("nya", "ใซใ"),
("nyu", "ใซใ
"),
("nyo", "ใซใ"),
("nu", "ใฌ"),
("ne", "ใญ"),
("no", "ใฎ"),
("ha", "ใฏ"),
("ba", "ใฐ"),
("pa", "ใฑ"),
("hi", "ใฒ"),
("hya", "ใฒใ"),
("hyu", "ใฒใ
"),
("hyo", "ใฒใ"),
("bi", "ใณ"),
("bya", "ใณใ"),
("byu", "ใณใ
"),
("byo", "ใณใ"),
("pi", "ใด"),
("pya", "ใดใ"),
("pyu", "ใดใ
"),
("pyo", "ใดใ"),
("fu", "ใต"),
("fa", "ใตใ"),
("fi", "ใตใ"),
("fe", "ใตใ"),
("fo", "ใตใ"),
("hu", "ใต"),
("bu", "ใถ"),
("pu", "ใท"),
("he", "ใธ"),
("be", "ใน"),
("pe", "ใบ"),
("ho", "ใป"),
("bo", "ใผ"),
("po", "ใฝ"),
("ma", "ใพ"),
("mi", "ใฟ"),
("mya", "ใฟใ"),
("myu", "ใฟใ
"),
("myo", "ใฟใ"),
("mu", "ใ"),
("me", "ใ"),
("mo", "ใ"),
("xya", "ใ"),
("ya", "ใ"),
("xyu", "ใ
"),
("yu", "ใ"),
("xyo", "ใ"),
("yo", "ใ"),
("ra", "ใ"),
("ri", "ใ"),
("rya", "ใใ"),
("ryu", "ใใ
"),
("ryo", "ใใ"),
("ru", "ใ"),
("re", "ใ"),
("ro", "ใ"),
("xwa", "ใ"),
("wa", "ใ"),
("wi", "ใใ"),
("we", "ใใ"),
("wo", "ใ"),
("n", "ใ"),
("n'", "ใ"),
("nn", "ใ"),
("dyi", "ใงใ"),
("-", "ใผ"),
("che", "ใกใ"),
("tye", "ใกใ"),
("cche", "ใฃใกใ"),
("ttye", "ใฃใกใ"),
("je", "ใใ"),
("zye", "ใใ"),
("zzye", "ใฃใใ"),
("dha", "ใงใ"),
("dhi", "ใงใ"),
("dhu", "ใงใ
"),
("dhe", "ใงใ"),
("dho", "ใงใ"),
("ddha", "ใฃใงใ"),
("ddhi", "ใฃใงใ"),
("ddhu", "ใฃใงใ
"),
("ddhe", "ใฃใงใ"),
("ddho", "ใฃใงใ"),
("tha", "ใฆใ"),
("thi", "ใฆใ"),
("thu", "ใฆใ
"),
("the", "ใฆใ"),
("tho", "ใฆใ"),
("ttha", "ใฃใฆใ"),
("tthi", "ใฃใฆใ"),
("tthu", "ใฃใฆใ
"),
("tthe", "ใฃใฆใ"),
("ttho", "ใฃใฆใ"),
(".", "ใ"),
(",", "ใ"),
("[", "ใ"),
("]", "ใ"),
("z[", "ใ"),
("z-", "ใ"),
("z.", "โฆ"),
("z,", "โฅ"),
("zh", "โ"),
("zj", "โ"),
("zk", "โ"),
("zl", "โ"),
("z]", "ใ"),
("z/", "ใป"),
]);
Ari2Nasi {
boin_map,
roman_map,
}
}
}
impl Ari2Nasi {
fn expand_okuri(
&self,
kana: &str,
kanjis: &[String],
) -> anyhow::Result<Vec<(String, Vec<String>)>> {
let Some(last_char) = kana.chars().last() else {
bail!("kana is empty");
};
if last_char.is_ascii_alphabetic() {
if self.boin_map.contains_key(&last_char) {
// ๆฏ้ณใฎๅ ดๅใฏใใฎใพใพๅนณไปฎๅใซๅคๆใใใ
// e.g. "a" โ "ใ"
let okuri = self.boin_map.get(&last_char).unwrap();
let yomi = &kana[0..kana.len() - last_char.len_utf8()];
let kanjis = kanjis.iter().map(|f| f.to_string() + *okuri).collect();
Ok(vec![(yomi.to_string() + okuri, kanjis)])
} else {
// ๅญ้ณใฎๅ ดๅใฏๆฏ้ณใฎ็ตใฟๅใใใซใใฃใฆๅ
จใใฟใผใณใคใใฃใฆ่ฟใใ
let mut result: Vec<(String, Vec<String>)> = Vec::new();
let yomi_base = &kana[0..kana.len() - last_char.len_utf8()].to_string();
for boin in self.boin_map.keys() {
let Some(okuri) = self
.roman_map.get((last_char.to_string() + boin.to_string().as_str()).as_str()) else {
// "wu" ใฎใใใชใๅนณไปฎๅใซๅคๆใงใใชใไธๆญฃใชใญใผใๅญใใฟใผใณใ็ๆใใฆใใใฑใผในใใใใ
// ใใใใๅ ดๅใฏใในใญใใใ
continue;
};
let kanjis = kanjis.iter().map(|f| f.to_string() + okuri).collect();
result.push((yomi_base.to_string() + okuri.to_string().as_str(), kanjis));
}
Ok(result)
}
} else {
Ok(vec![(
kana.to_string(),
kanjis.iter().map(|f| f.to_string()).collect(),
)])
}
}
pub fn ari2nasi(
&self,
src: &HashMap<String, Vec<String>>,
) -> anyhow::Result<HashMap<String, Vec<String>>> {
let mut retval: HashMap<String, Vec<String>> = HashMap::new();
for (kana, kanjis) in src.iter() {
for (kkk, vvv) in self.expand_okuri(kana, kanjis)? {
retval.insert(kkk, vvv);
}
}
Ok(retval)
}
}
#[cfg(test)]
mod tests {
use std::collections::HashSet;
use super::*;
#[test]
fn test_expand_okuri() -> anyhow::Result<()> {
let ari2nasi = Ari2Nasi::default();
let got = ari2nasi.expand_okuri("ใใใใw", &["ๆใๅ".to_string()])?;
assert_eq!(
got.iter().collect::<HashSet<_>>(),
vec!(
("ใใใใใ".to_string(), vec!("ๆใๅใ".to_string())),
("ใใใใใใ".to_string(), vec!("ๆใๅใใ".to_string())),
("ใใใใใใ".to_string(), vec!("ๆใๅใใ".to_string())),
("ใใใใใ".to_string(), vec!("ๆใๅใ".to_string()))
)
.iter()
.collect::<HashSet<_>>(),
);
Ok(())
}
#[test]
fn test_expand_okuri_iu() -> anyhow::Result<()> {
let ari2nasi = Ari2Nasi::default();
let got = ari2nasi.expand_okuri("ใu", &["่จ".to_string()])?;
assert_eq!(got, vec!(("ใใ".to_string(), vec!("่จใ".to_string())),),);
Ok(())
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
akaza-im/akaza | https://github.com/akaza-im/akaza/blob/b7dace72e7ce2054a9daf6ba0e6aca008bb8926f/libakaza/src/graph/segmenter.rs | libakaza/src/graph/segmenter.rs | use std::collections::btree_map::{BTreeMap, Iter};
use std::collections::HashSet;
use std::ops::Range;
use std::sync::{Arc, Mutex};
use log::{debug, info, trace};
use regex::Regex;
use crate::kana_trie::base::KanaTrie;
#[derive(PartialEq, Debug)]
pub struct SegmentationResult {
base: BTreeMap<usize, Vec<String>>,
}
impl SegmentationResult {
pub(crate) fn new(base: BTreeMap<usize, Vec<String>>) -> SegmentationResult {
SegmentationResult { base }
}
pub(crate) fn iter(&self) -> Iter<'_, usize, Vec<String>> {
self.base.iter()
}
pub fn dump_dot(&self) -> String {
let mut buf = String::new();
buf += "digraph Lattice {\n";
// start ๅใณ end ใฏใbyte ๆฐๅไฝ
for (end_pos, yomis) in self.base.iter() {
for yomi in yomis {
buf += &*format!(r#" {} -> "{}"{}"#, end_pos - yomi.len(), yomi, "\n");
buf += &*format!(r#" {} -> "{}"{}"#, yomi, end_pos, "\n");
}
}
buf += &*"}\n".to_string();
buf
}
}
pub struct Segmenter {
tries: Vec<Arc<Mutex<dyn KanaTrie>>>,
number_pattern: Regex,
}
impl Segmenter {
pub fn new(tries: Vec<Arc<Mutex<dyn KanaTrie>>>) -> Segmenter {
info!("Registering tries for Segmenter: {}", tries.len());
let number_pattern = Regex::new(r#"^(?:0|[1-9][0-9]*)(\.[0-9]*)?"#).unwrap();
Segmenter {
tries,
number_pattern,
}
}
/**
* ใ่ชญใฟใใๅใๅใฃใฆ Lattice ใๆง็ฏใใใ
*
* force_ranges: ไธ่ฌ็ใช IME ใงใทใใใใใฆใใโใใใใฆใใฆใผใถใผใๆ็คบ็ใซ็ฏๅฒ้ธๆใใๅ ดๅ
* ใฎ้ธๆ็ฏๅฒใ
*/
// ใทใใใๆผใใฆ โ ใๆผใใใจใใฎใใใชๅฆ็ใฎๅ ดๅใ
// ใใฎใกใฝใใใซๅ
ฅใฃใฆใใๅใซๅฅใซๅฆ็ใใๅๆใ
pub fn build(&self, yomi: &str, force_ranges: Option<&[Range<usize>]>) -> SegmentationResult {
if let Some(force_ranges) = force_ranges {
if !force_ranges.is_empty() {
for force_range in force_ranges {
trace!(
"force_range detected: {}",
yomi[force_range.start..force_range.end].to_string()
);
}
}
}
let mut queue: Vec<usize> = Vec::new(); // ๆค็ดขๅฏพ่ฑกใจใชใ้ๅงไฝ็ฝฎ
queue.push(0);
let mut seen: HashSet<usize> = HashSet::new();
// ็ตไบไฝ็ฝฎใใจใฎๅ่ฃๅ่ชใชในใ
let mut words_ends_at: BTreeMap<usize, Vec<String>> = BTreeMap::new();
'queue_processing: while !queue.is_empty() {
let start_pos = queue.pop().unwrap();
if seen.contains(&start_pos) {
continue;
} else {
seen.insert(start_pos);
}
// start_pos ใ force ใฎ็ฏๅฒใซๅ
ฅใฃใฆใใใๅฆ็ใใชใใ
if let Some(force_ranges) = force_ranges {
for force_range in force_ranges {
if force_range.start == start_pos {
trace!("force_range detected.");
let vec = words_ends_at.entry(force_range.end).or_default();
vec.push(yomi[force_range.start..force_range.end].to_string());
queue.push(start_pos + force_range.len());
continue 'queue_processing;
}
if force_range.contains(&start_pos) {
continue 'queue_processing;
}
}
}
let yomi = &yomi[start_pos..];
if yomi.is_empty() {
continue;
}
let mut candidates: HashSet<String> = HashSet::new();
if let Some(captured) = self.number_pattern.captures(yomi) {
// ๆฐๅญใฏไธใคใฎๅ่ชใจใใฆๅฆ็ใใใ
let s = captured.get(0).unwrap().as_str();
candidates.insert(s.to_string());
} else {
for trie in &self.tries {
let got = trie.lock().unwrap().common_prefix_search(yomi);
debug!("Common prefix search: {:?}", got);
'insert: for word in got {
let ends_at = start_pos + word.len();
// end_pos ใ force ใฎ็ฏๅฒใซๅ
ฅใฃใฆใใใๅฆ็ใใชใใ
if let Some(force_ranges) = force_ranges {
for force_range in force_ranges {
// force_range ใฏ exclusive ใงใๅณใใใ
if force_range.contains(&ends_at) || force_range.end == ends_at {
trace!("Blocked candidate range: {}, {:?}", word, force_range);
continue 'insert;
} else {
trace!("Accepted candidate range: {}, {:?}", word, force_range);
}
}
}
candidates.insert(word);
}
}
}
if !candidates.is_empty() {
for candidate in &candidates {
let ends_at = start_pos + candidate.len();
let vec = words_ends_at.entry(ends_at).or_default();
trace!("Add candidate: {}", candidate);
vec.push(candidate.clone());
queue.push(start_pos + candidate.len());
}
} else {
// ่พๆธใซ1ๆๅญใๅ่ฃใใชใๅ ดๅใฏๅ
้ ญๆๅญใๅใๅบใใฆใฐใฉใใซๅ
ฅใใ
// ใใใฏๆนๅใฎไฝๅฐใใใใใใ
trace!("There's no candidates. '{}'", yomi);
let (_, c) = yomi.char_indices().next().unwrap();
let first = &yomi[0..c.len_utf8()];
let ends_at = start_pos + first.len();
let vec = words_ends_at.entry(ends_at).or_default();
vec.push(first.to_string());
queue.push(start_pos + first.len())
}
}
SegmentationResult {
base: words_ends_at,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::kana_trie::cedarwood_kana_trie::CedarwoodKanaTrie;
#[test]
fn test_simple() {
let kana_trie = CedarwoodKanaTrie::build(vec![
"ใใใ".to_string(),
"ใใ".to_string(),
"ใ".to_string(),
]);
let segmenter = Segmenter::new(vec![Arc::new(Mutex::new(kana_trie))]);
let graph = segmenter.build("ใใใ", None);
assert_eq!(
graph,
SegmentationResult::new(BTreeMap::from([
(6, vec!["ใใ".to_string()]),
(9, vec!["ใใใ".to_string(), "ใ".to_string()]),
]))
)
}
#[test]
fn test_without_kanatrie() {
let kana_trie = CedarwoodKanaTrie::build(vec![]);
let segmenter = Segmenter::new(vec![Arc::new(Mutex::new(kana_trie))]);
let graph = segmenter.build("ใใใ", None);
assert_eq!(
graph,
SegmentationResult::new(BTreeMap::from([
(3, vec!["ใ".to_string()]),
(6, vec!["ใ".to_string()]),
(9, vec!["ใ".to_string()]),
]))
)
}
#[test]
fn test_number() {
let kana_trie = CedarwoodKanaTrie::build(vec![]);
let segmenter = Segmenter::new(vec![Arc::new(Mutex::new(kana_trie))]);
let graph = segmenter.build("365", None);
assert_eq!(
graph,
SegmentationResult::new(BTreeMap::from([(3, vec!["365".to_string()]),]))
)
}
#[test]
fn test_force() -> anyhow::Result<()> {
// env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info");
// env_logger::builder().is_test(true).try_init()?;
let kana_trie = CedarwoodKanaTrie::build(Vec::from([
"ใใใ".to_string(),
"ใใ".to_string(),
"ใ".to_string(),
"ใ".to_string(),
]));
let segmenter = Segmenter::new(vec![Arc::new(Mutex::new(kana_trie))]);
let yomi = "ใใใ";
// force_range ใซ "ใใ" ใๆๅฎใใใ
let (i2, _) = yomi.char_indices().nth(1).unwrap();
let (i3, c3) = yomi.char_indices().nth(2).unwrap();
let graph = segmenter.build(yomi, Some(&[i2..(i3 + c3.len_utf8())]));
assert_eq!(
graph,
SegmentationResult::new(BTreeMap::from([
(3, vec!["ใ".to_string()]),
(9, vec!["ใใ".to_string()]),
]))
);
Ok(())
}
}
| rust | MIT | b7dace72e7ce2054a9daf6ba0e6aca008bb8926f | 2026-01-04T19:35:02.740723Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.