{ "@context": { "@language": "en", "@vocab": "https://schema.org/", "citeAs": "cr:citeAs", "column": "cr:column", "conformsTo": "dct:conformsTo", "cr": "http://mlcommons.org/croissant/", "data": { "@id": "cr:data", "@type": "@json" }, "dataType": { "@id": "cr:dataType", "@type": "@vocab" }, "dct": "http://purl.org/dc/terms/", "equivalentProperty": "cr:equivalentProperty", "examples": { "@id": "cr:examples", "@type": "@json" }, "extract": "cr:extract", "field": "cr:field", "fileProperty": "cr:fileProperty", "fileObject": "cr:fileObject", "fileSet": "cr:fileSet", "format": "cr:format", "includes": "cr:includes", "isLiveDataset": "cr:isLiveDataset", "jsonPath": "cr:jsonPath", "key": "cr:key", "md5": "cr:md5", "parentField": "cr:parentField", "path": "cr:path", "rai": "http://mlcommons.org/croissant/RAI/", "recordSet": "cr:recordSet", "references": "cr:references", "regex": "cr:regex", "repeated": "cr:repeated", "replace": "cr:replace", "samplingRate": "cr:samplingRate", "sc": "https://schema.org/", "separator": "cr:separator", "source": "cr:source", "subField": "cr:subField", "transform": "cr:transform", "prov": "http://www.w3.org/ns/prov#" }, "@type": "sc:Dataset", "@id": "https://huggingface.co/datasets/mysigner/MySign", "conformsTo": [ "http://mlcommons.org/croissant/1.0", "http://mlcommons.org/croissant/RAI/1.0" ], "name": "MySign", "alternateName": [ "mysigner/MySign", "MySign-2026" ], "description": "MySign is a 3D motion-capture dataset of Bahasa Isyarat Malaysia (Malaysian Sign Language, BIM) for fine-grained sign language generation and recognition. It comprises 5,000 isolated-sign instances (5 Deaf native signers x 1,000 BIM Sign Bank glosses, fully balanced with no missing entries), captured at 200 Hz with a six-camera OptiTrack system plus MANUS Prime 3 Data Gloves and retargeted to the SMPL-X body model, totaling approximately 15.57M synchronized frames and 36 hours of recording. The 1,000-gloss vocabulary spans nine main categories (conversation, culture, daily-life, general, health, nature, people, things, time) and 46 subcategories. Each instance is anchored to an authorized BIM Sign Bank entry at capture time, so the gloss label is community-sanctioned by construction rather than by post-hoc rating. The release uses Filmbox (.fbx) skeletal animation, organized as Signer001/ ... Signer005/. metadata.csv is an index over the .fbx files (file_name, gloss, signer_id, take). A signer-independent train/test split is provided (4 signers train, 1 test). All recordings are skeletal-only (no video, audio, or facial texture).", "citeAs": "@misc{mysign2026,\n title = {MySign: A High-Fidelity Motion-Capture Dataset for 3D Sign Generation in Bahasa Isyarat Malaysia},\n author = {{mysigner}},\n year = {2026},\n howpublished = {Hugging Face Datasets},\n url = {https://huggingface.co/datasets/mysigner/MySign},\n note = {CC BY-NC-SA 4.0}\n}", "url": "https://huggingface.co/datasets/mysigner/MySign", "sameAs": "https://huggingface.co/datasets/mysigner/MySign", "license": "https://creativecommons.org/licenses/by-nc-sa/4.0/", "version": "1.0.0", "datePublished": "2026-01-01", "inLanguage": [ "ms", "zsm" ], "keywords": [ "sign-language", "Malaysian Sign Language", "Bahasa Isyarat Malaysia", "BIM", "motion-capture", "3D", "FBX", "skeletal-animation", "gloss" ], "creator": { "@type": "Organization", "name": "mysigner", "url": "https://huggingface.co/mysigner" }, "publisher": { "@type": "Organization", "name": "mysigner", "url": "https://huggingface.co/mysigner" }, "isLiveDataset": false, "rai:dataCollection": "MySign is a 3D isolated-sign dataset for Bahasa Isyarat Malaysia (BIM), captured in a dedicated motion-capture laboratory. Five Deaf native BIM signers were each prompted, sign by sign, with 1,000 glosses drawn from the authorized BIM Sign Bank (drawn from the BIM Sign Bank developed by the Malaysian Federation of the Deaf and Guidewire Gives Back, used under non-commercial research authorization, and selected by the research team under the supervision of a Deaf advisor to cover everyday BIM across nine main categories and 46 subcategories). For each prompt the signer was shown a slide with the target gloss label and the BIM Sign Bank reference video, and signed once. Upper-body kinematics were recorded with a six-camera OptiTrack system at 200 Hz using 35 reflective markers placed per a standard clinical (ISB) protocol; finger motion was captured with MANUS Prime 3 Data Gloves, hardware-synchronized with the optical system. Marker trajectories were reconstructed in OptiTrack Motive 3.0.3 to yield 3D joint positions/orientations, then retargeted to the SMPL-X body model using the Rokoko Retarget plugin (v1.4.3) in Blender 4.5.2 to obtain a unified parametric motion representation. The released .fbx files contain the resulting per-take SMPL-X-aligned skeletal animation. Each gloss is recorded exactly once per signer, giving 5 x 1000 = 5,000 instances, ~15.57M synchronized frames, ~36 hours of recording.", "rai:dataCollectionType": [ "Manual Human Curated", "Sensor-recorded" ], "rai:dataCollectionMissingData": "The dataset is fully balanced: every (signer, gloss) pair from the 5 x 1,000 grid has exactly one recording, with no missing entries. Coverage of BIM beyond the 1,000-gloss vocabulary is intentionally out of scope.", "rai:dataCollectionRawData": "The raw data are skeletal kinematics: 200 Hz OptiTrack marker trajectories (35 reflective markers per signer, ISB clinical placement) and time-aligned MANUS Prime 3 Data Glove finger streams. No video, audio, or photographic recordings of the signers are retained or released. The released .fbx files are the post-processing output (SMPL-X retargeted skeletal animation), not the raw marker stream.", "rai:dataCollectionTimeframe": "2025-02/2026-03", "rai:dataImputationProtocol": "No imputation. Optical calibration was verified at the start of every session (wand calibration residual below 1 mm with full marker visibility throughout the capture volume), and the gloves were calibrated per signer in MANUS Core. Takes that did not meet these conditions were re-recorded rather than imputed.", "rai:dataPreprocessingProtocol": [ "OptiTrack Motive 3.0.3: marker trajectory cleaning and reconstruction of 3D joint positions and orientations from the six-camera optical stream.", "Rokoko Retarget plugin (v1.4.3) in Blender 4.5.2: retargeting of the reconstructed joint positions/orientations onto the canonical SMPL-X body model, producing a unified parametric motion representation across all signers and recordings.", "Per-joint anatomical range-of-motion (ROM) clamping on the fitted SMPL-X parameters. For upper-limb joints (shoulders, elbows, wrists) the native SMPL-X frames are sufficiently aligned with anatomical axes that Euler-angle clamping is applied directly. For finger joints, per-joint change-of-basis transforms map each rotation into an anatomical (flexion/abduction/rotation) frame, clamping is performed there, and the transform is inverted to return to SMPL-X parameter space. Bounds are specified independently for the left and right hands. This removes fitting-induced over-flexed or hyper-abducted finger configurations.", "metadata.csv is generated by generate_metadata_remote.py: it lists every Signer*/*.fbx via the Hugging Face Hub API, parses signer_id and take from the path, and normalizes the gloss label (replace _ and - with spaces; treat , and ; as synonym separators joined with ' / '; uppercase; expand time-unit abbreviations such as HR -> HOUR; normalize parenthesis spacing; strip unbalanced trailing parentheses; merge plurals with their singulars when both forms appear in the corpus, with a small block-list to keep semantically distinct plurals such as NEWS, SHORTS, MATHEMATICS)." ], "rai:dataManipulationProtocol": "No data augmentation, mirroring, retiming, or motion editing has been applied. The only transformations are the deterministic processing steps in dataPreprocessingProtocol (Motive reconstruction -> SMPL-X retargeting -> ROM clamping). The split is signer-independent: four signers form the training set, one signer forms the test set, with no signer overlap.", "rai:dataAnnotationProtocol": "Each instance is anchored at capture time to a specific entry in the authorized BIM Sign Bank: the prompt slide displayed both the target gloss label and the Sign Bank reference video, and the signer produced exactly one take of that sign. The gloss label of an instance is therefore fixed by construction at recording time, not by post-hoc rating. A separate post-hoc expert perceptual review (three Deaf native signers with BIM domain expertise) was conducted to characterize recognizability and naturalness as quality metadata; reviewers viewed only the rendered 3D motion (no reference gloss), reported the perceived gloss, and rated naturalness on a 1-5 scale. The review was used as quality metadata and did not filter or modify the released dataset (mean gloss-recognition accuracy 86.3%, Fleiss' kappa 0.73, mean naturalness 3.87 +/- 0.71).", "rai:dataAnnotationPlatform": "Capture-time labeling: pre-designed instruction slides displaying the BIM Sign Bank gloss and reference video, presented to the signer during recording. Post-hoc expert perceptual review: a custom interactive web interface that played the rendered 3D sign and collected the rater's perceived gloss and naturalness rating. metadata.csv is mechanically generated from filenames by generate_metadata_remote.py and is not produced by an annotation platform.", "rai:dataAnnotationAnalysis": "Inter-rater agreement on gloss recognition in the post-hoc expert review is substantial: Fleiss' kappa = 0.73, mean accuracy 86.3%, mean naturalness 3.87 +/- 0.71. The release-level gloss labels themselves are not subject to inter-rater disagreement, since they are fixed at capture time against a community-sanctioned BIM Sign Bank entry. The gloss-normalization pipeline in generate_metadata_remote.py prints every plural -> singular merge it applies, so the dataset author can audit and block-list semantically wrong merges before publishing.", "rai:annotationsPerItem": "1 community-sanctioned gloss label per instance (fixed at capture time by the BIM Sign Bank prompt). 3 independent expert ratings per instance for the post-hoc perceptual quality review (perceived gloss + naturalness), released as quality metadata only.", "rai:annotatorDemographics": "Capture-time gloss labels: assigned by construction via the BIM Sign Bank prompt; no separate annotator pool. Post-hoc perceptual review: three Deaf native signers with BIM domain expertise. The five capture participants themselves are Deaf native BIM signers (3 male, 2 female), aged 25-60, height 150-178 cm, representing four ethnic backgrounds in the Malaysian Deaf community (Malay, Chinese, Indian, Kadazan). Native fluency in BIM was prioritized over cohort size, following the recruitment scale typical of native-signer sign-language corpora such as How2Sign and Phoenix-2014T, where the pool of Deaf native signers available for studio capture is the binding constraint.", "rai:machineAnnotationTools": [ "OptiTrack Motive 3.0.3 (marker reconstruction)", "MANUS Core (per-signer glove calibration)", "Rokoko Retarget plugin v1.4.3 in Blender 4.5.2 (SMPL-X retargeting)", "Custom Python pipeline for per-joint anatomical ROM clamping on SMPL-X parameters", "huggingface_hub (file listing for metadata.csv generation)", "Python regular expressions (gloss normalization in generate_metadata_remote.py)" ], "rai:dataBiases": "Known biases of MySign: (i) Signer cohort: only five signers are recorded, so signer-specific motion idiosyncrasies will dominate any model trained on it; the cohort is balanced across four Malaysian ethnic backgrounds and both genders, but is too small to support strong claims about variation across the BIM-using population. (ii) Studio capture: all recordings are from a single optical motion-capture laboratory under controlled conditions; the dataset does not reflect lighting, occlusion, or camera variability of in-the-wild capture. This is intrinsic to mocap-based releases. (iii) Vocabulary: the 1,000 glosses are drawn from the authorized BIM Sign Bank under guidance of a Deaf advisor and are weighted toward common everyday signs across nine main categories; rarer technical, regional, slang, and emerging signs are under-represented. (iv) Register: only isolated, prompted signs are recorded, so connected-signing phenomena (coarticulation, prosody, role shift, classifier constructions, non-manual markers beyond what the SMPL-X skeleton captures) are absent. (v) Retargeting: SMPL-X retargeting and ROM clamping are deterministic but lossy; fine-grained details of the original marker stream (and therefore some signer idiolect) are smoothed by the parametric body model.", "rai:dataUseCases": [ "Training and evaluating fine-grained sign language generation models for BIM in a SMPL-X-compatible parametric motion representation.", "Training and evaluating isolated-sign recognition for BIM under a signer-independent train/test split.", "Driving 3D avatars for BIM sign synthesis, education, and accessibility tooling.", "Linguistic and biomechanical analysis of BIM kinematics, including bimanual coordination and finger articulation.", "Cross-sign-language transfer studies that benefit from a parametric (SMPL-X) representation comparable across corpora." ], "rai:dataLimitations": "MySign captures isolated signs only: it does not contain continuous signing, sentences, conversations, classifier constructions, or non-manual markers (facial expression, mouthing, eye gaze) beyond what the SMPL-X skeleton captures. The vocabulary is fixed at 1,000 BIM Sign Bank glosses and is not corpus-driven. Five signers, while balanced across ethnicity and gender, are too few to support strong claims about signer-independent generalization across the full BIM-using population. The release is fully retargeted SMPL-X data; raw OptiTrack marker streams and raw MANUS glove streams are not released, so users who want to study marker-level kinematics cannot do so from the public release. Per-joint ROM clamping removes anatomically inadmissible configurations but, by construction, may slightly attenuate idiosyncratic motion at the boundary of the admissible region.", "rai:dataSocialImpact": "Intended impact: Bahasa Isyarat Malaysia is under-resourced relative to languages such as ASL, and a freely available 3D motion-capture dataset of native-signer BIM lowers the cost of building accessibility tools (educational apps, avatar-based interpreters, recognition and generation systems) for the Malaysian Deaf community. The cohort consists of Deaf native signers, the vocabulary is sourced from the BIM Sign Bank (developed by the Deaf-led Malaysian Federation of the Deaf together with Guidewire Gives Back), and gloss selection was supervised by a Deaf advisor, so the released signs reflect community-sanctioned native usage rather than learner approximations. Risks: low-quality recognition or generation systems trained on this small dataset could be deployed in high-stakes settings (legal, medical, educational interpretation) where errors would harm Deaf users; biometric re-identification of a participant from skeletal kinematics is in principle possible (gait/movement biometrics), so signers should be considered pseudonymous rather than fully anonymous. We expect downstream system builders to validate with the BIM-using community before deployment.", "rai:personalSensitiveInformation": "Released files contain only skeletal motion: SMPL-X joint rotations and translations derived from OptiTrack marker trajectories and MANUS glove streams. No images, video, audio, or facial textures of the signers are released. No name, address, contact information, or other direct personal identifier is included. Signers are referenced only by opaque IDs (Signer001 ... Signer005). Re-identification of a signer from skeletal kinematics alone is in principle possible (movement biometrics); the dataset should therefore be treated as pseudonymous. All capture procedures were reviewed and approved by the institutional Human Research Ethics Committee (HREC). Recruitment and consent materials were delivered primarily in BIM (including short explanatory videos), each participant signed a written consent form before their first session, participation was voluntary, and participants received 50 MYR per session, exceeding the local minimum wage.", "rai:dataReleaseMaintenancePlan": "The dataset is hosted on the Hugging Face Hub at https://huggingface.co/datasets/mysigner/MySign. The git history of that repository is the canonical version log; metadata.csv, croissant.json, and README.md are versioned alongside the data. Errata, corrections, and clarifications will be applied in-place with descriptive commit messages. The released signer-independent train/test split is fixed and will not be silently reshuffled. Issues, errata, and takedown requests can be filed via the dataset's Hugging Face Community tab. There is no scheduled deprecation; if the dataset is superseded by a future release, the current version will remain accessible via git history.", "rai:sourceDatasets": [ { "@type": "sc:Dataset", "name": "BIM Sign Bank", "url": "https://www.bimsignbank.org/home", "license": "Non-commercial research use only; no Sign Bank video, image, or audio is redistributed in MySign", "publisher": "Malaysian Federation of the Deaf (MFD) and Guidewire Gives Back", "publisherUrl": "https://www.mymfdeaf.org/", "description": "BIM Sign Bank is the official sign language resource bank for Bahasa Isyarat Malaysia (Malaysian Sign Language), developed by the Malaysian Federation of the Deaf (MFD) together with Guidewire Gives Back. It served as the source of the 1,000-gloss vocabulary in MySign and as the per-trial reference video shown to signers during recording. Each MySign instance is anchored at capture time to a specific BIM Sign Bank entry. No Sign Bank video, image, or audio is redistributed in MySign." } ], "rai:provenance": [ { "@type": "rai:Activity", "name": "Vocabulary selection", "description": "1,000 BIM glosses drawn from the authorized BIM Sign Bank, selected by the research team under the guidance of a Deaf supervisor to cover natural and commonly used BIM across nine main categories (conversation, culture, daily-life, general, health, nature, people, things, time) and 46 subcategories.", "agentType": "Human" }, { "@type": "rai:Activity", "name": "Participant recruitment", "description": "Five Deaf native BIM signers (3 male, 2 female; ages 25-60; height 150-178 cm; representing four ethnic backgrounds in the Malaysian Deaf community: Malay, Chinese, Indian, Kadazan). Recruitment via Deaf-community mailing lists and snowball sampling. Recruitment and consent materials were delivered primarily in BIM, including short explanatory videos. All procedures reviewed and approved by the institutional Human Research Ethics Committee (HREC). Each participant signed a written consent form before their first session; participation was voluntary; participants received 50 MYR per session, exceeding the local minimum wage.", "agentType": "Human" }, { "@type": "rai:Activity", "name": "Motion capture", "description": "In-studio recording on a six-camera OptiTrack system at 200 Hz, with 35 reflective markers per signer placed per the ISB clinical protocol, plus MANUS Prime 3 Data Gloves for finger kinematics, hardware-synchronized with the optical system. Per-signer glove calibration in MANUS Core; per-session optical calibration verified by wand calibration residual below 1 mm and full marker visibility throughout the capture volume. Each prompt slide displayed the BIM Sign Bank gloss and reference video; the signer produced exactly one take per prompt; takes that did not meet calibration conditions were re-recorded.", "agentType": "Human + Sensor" }, { "@type": "rai:Activity", "name": "Marker reconstruction", "description": "Marker trajectories processed in OptiTrack Motive 3.0.3 to reconstruct 3D joint positions and orientations.", "agentType": "Software", "tool": "OptiTrack Motive 3.0.3" }, { "@type": "rai:Activity", "name": "SMPL-X retargeting", "description": "Reconstructed joints retargeted onto the canonical SMPL-X body model using the Rokoko Retarget plugin v1.4.3 in Blender 4.5.2, producing a unified parametric motion representation.", "agentType": "Software", "tool": "Rokoko Retarget v1.4.3 in Blender 4.5.2" }, { "@type": "rai:Activity", "name": "Anatomical ROM clamping", "description": "Per-joint range-of-motion (ROM) clamping on fitted SMPL-X parameters. Upper-limb joints are clamped directly in the SMPL-X frame; finger joints are first mapped into a flexion/abduction/rotation anatomical frame via per-joint change-of-basis transforms, clamped there, and inverted back. Bounds specified independently for left and right hands. Removes anatomically inadmissible (over-flexed, hyper-abducted) finger configurations.", "agentType": "Software", "tool": "Custom Python pipeline" }, { "@type": "rai:Activity", "name": "Index-table generation", "description": "metadata.csv built by generate_metadata_remote.py: lists every Signer*/*.fbx via the Hugging Face Hub API, parses signer_id and take from the path, and normalizes the gloss label (separator unification, uppercasing, time-unit abbreviation expansion, parenthesis cleanup, plural-singular merging with an audit print and human-curated block-list).", "agentType": "Software", "tool": "huggingface_hub + Python regex (generate_metadata_remote.py)" }, { "@type": "rai:Activity", "name": "Post-hoc expert perceptual review", "description": "Three Deaf native signers with BIM domain expertise viewed each rendered 3D sign in an interactive web interface (no reference gloss shown), reported the perceived gloss, and rated motion naturalness on a 1-5 scale. Mean gloss-recognition accuracy 86.3% with substantial inter-rater agreement (Fleiss' kappa = 0.73); mean naturalness 3.87 +/- 0.71. Released as quality metadata only; not used to filter or modify the dataset.", "agentType": "Human" } ], "distribution": [ { "@type": "cr:FileObject", "@id": "repo", "name": "repo", "description": "Hugging Face dataset repository serving as the root container for all distribution files.", "contentUrl": "https://huggingface.co/datasets/mysigner/MySign", "encodingFormat": "git+https", "sha256": "main" }, { "@type": "cr:FileObject", "@id": "metadata.csv", "name": "metadata.csv", "description": "Index file. One row per .fbx file with columns: file_name, gloss, signer_id, take.", "containedIn": { "@id": "repo" }, "contentUrl": "metadata.csv", "encodingFormat": "text/csv" } ], "recordSet": [ { "@type": "cr:RecordSet", "@id": "signs", "name": "signs", "description": "One record per .fbx file. The file_name column is the relative path inside the repository, e.g. 'Signer001/Above.fbx'. The .fbx files themselves are not formally declared as a Croissant FileSet (FBX has no native Croissant semantics); see the README for the file layout.", "key": { "@id": "signs/file_name" }, "field": [ { "@type": "cr:Field", "@id": "signs/file_name", "name": "file_name", "description": "Repository-relative path to the FBX file, e.g. 'Signer001/Above.fbx'. Resolves to https://huggingface.co/datasets/mysigner/MySign/resolve/main/. Kept as plain Text so consumers can choose when (and whether) to load the binary FBX content.", "dataType": "sc:Text", "source": { "fileObject": { "@id": "metadata.csv" }, "extract": { "column": "file_name" } } }, { "@type": "cr:Field", "@id": "signs/gloss", "name": "gloss", "description": "Normalized gloss label for the sign (UPPERCASE; '/' separates synonyms; '(I)', '(II)' disambiguate homonyms; numerals and time units like '1 HOUR' are spelled out).", "dataType": "sc:Text", "source": { "fileObject": { "@id": "metadata.csv" }, "extract": { "column": "gloss" } } }, { "@type": "cr:Field", "@id": "signs/signer_id", "name": "signer_id", "description": "Identifier of the signer who performed the sign. One of 'Signer001' .. 'Signer005'.", "dataType": "sc:Text", "source": { "fileObject": { "@id": "metadata.csv" }, "extract": { "column": "signer_id" } } }, { "@type": "cr:Field", "@id": "signs/take", "name": "take", "description": "Take number for the (signer, gloss) pair. The original recording is take 1; Blender-style duplicate suffixes .001/.002 map to takes 2/3.", "dataType": "sc:Integer", "source": { "fileObject": { "@id": "metadata.csv" }, "extract": { "column": "take" } } } ] } ], "rai:hasSyntheticData": false, "prov:wasDerivedFrom": [ { "@id": "https://www.bimsignbank.org/home", "prov:label": "BIM Sign Bank", "sc:license": "Non-commercial research use only; no Sign Bank video, image, or audio is redistributed in MySign", "prov:wasAttributedTo": { "@id": "https://www.mymfdeaf.org/", "prov:label": "Malaysian Federation of the Deaf (MFD) and Guidewire Gives Back" } } ], "prov:wasGeneratedBy": [ { "@type": "prov:Activity", "prov:type": { "@id": "https://www.wikidata.org/wiki/Q4929239" }, "prov:label": "Vocabulary selection", "sc:description": "Selected 1,000 BIM glosses from the BIM Sign Bank (developed by the Malaysian Federation of the Deaf and Guidewire Gives Back) under non-commercial research authorization. Selection was made by the research team under the supervision of a Deaf advisor to cover natural and commonly used BIM across nine main categories (conversation, culture, daily-life, general, health, nature, people, things, time) and 46 subcategories.", "prov:atTime": "2025-03-11T06:26:00Z" } ] }