systemofapwne commited on
Commit
072e103
·
1 Parent(s): 943d034

Initial commit

Browse files
0_transcode.sh ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
# Transcode all WAVs in ./raw to the format piper training expects:
# mono, 16-bit little-endian PCM, at the sample rate matching the model quality.
set -euo pipefail

# Choose one of the following sample rates, depending on the model you want to train
# x-low, low models
#RATE=16000
# medium, high models
RATE=22050

CODEC=pcm_s16le
CHANNELS=1

TMP=/tmp/wav
mkdir -p "${TMP}"

# Quoting every expansion handles file names containing spaces.
# Because of `set -e`, any failed ffmpeg run aborts the script BEFORE the
# destructive rm below can delete the originals.
for file in ./raw/*.wav; do
    ffmpeg -i "${file}" -ac "${CHANNELS}" -acodec "${CODEC}" -ar "${RATE}" -y "${TMP}/$(basename "${file}")"
done

# Replace the originals with the transcoded versions
rm ./raw/*.wav
mv "${TMP}"/*.wav ./raw/
1_from_good.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env python3

"""Move the voice lines listed in good.txt from ./raw to ./raw_good.

good.txt contains one base file name (without the .wav extension) per line;
it encodes which samples were judged "good" by 1_sort_good_bad.py.
"""

import os
import shutil
from pathlib import Path

IN = "./raw"
OUT = "./raw_good"

# Read the curated names into a set for O(1) membership tests
# (the original used a list, giving O(n) per file).
with open('./good.txt', 'r') as f:
    GOOD = {line.strip() for line in f}

# Make sure the destination exists before moving anything.
Path(OUT).mkdir(parents=True, exist_ok=True)

for root, _, files in os.walk(IN):
    for file in files:
        # Only move WAV files whose base name was tagged as good.
        if not (file.endswith('.wav') and file[:-4] in GOOD):
            continue
        src = Path(root) / file
        shutil.move(src, Path(OUT) / src.name)
1_sort_good_bad.py ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+
3
+ # This script helps you sorting good-quality voice-lines from low-quality for training.
4
+ # It plays the voice lines in *TWICE THE SPEED* to speed up this process (still takes hours) - This will make the voices sound like chipmunk voices. That is fine.
5
+ # Have the folowing folders prepared in respect to this repository
6
+ # raw
7
+ # - p1: Portal1 voice lines
8
+ # - p2: Portal2 voice lines
9
+ # - p2dlc: Portal 2 voice lines (if available)
10
+ # raw_good: Destination folder for good voice lines
11
+ # raw_bad: Destination folder for bad voice lines
12
+
13
+ # Start this program (linux: start a terminal an run it via ./0_sort_good_bad.py)
14
+ # Press left/right *ONCE* to play the first voice line (in twice the speed - aka chipmunk voice)
15
+ # - If it is good: press RIGHT ARROW
16
+ # - If it was bad: press LEFT ARROW
17
+ # - If you want to replay it (or check it without chipmunk effect): press UP arrow
18
+
19
+ # Get some coffee. This might take a few hours, even though we play the voice lines at twice the speed
20
+
21
+ import os
22
+ import wave
23
+ import pyaudio
24
+ import tkinter as tk
25
+ import shutil
26
+ import os
27
+ from pathlib import Path
28
+
29
def play_wav_file(file_path, speed = 1):
    """Play a WAV file through PyAudio.

    A speed > 1 raises the output frame rate, so the clip plays faster and
    higher-pitched (the "chipmunk" effect) without modifying the file.
    """
    with wave.open(file_path, 'rb') as wf:
        audio = pyaudio.PyAudio()

        # Open an output stream matching the file's format, but with the
        # frame rate scaled by `speed` to change the playback tempo.
        stream = audio.open(
            format=audio.get_format_from_width(wf.getsampwidth()),
            channels=wf.getnchannels(),
            rate=int(wf.getframerate() * speed),
            output=True,
        )

        # Stream the audio in 1024-frame chunks until the file is exhausted.
        while True:
            chunk = wf.readframes(1024)
            if not chunk:
                break
            stream.write(chunk)

        stream.stop_stream()
        stream.close()
        audio.terminate()
48
+
49
def find_wav_files(directory):
    """Recursively collect the paths of all .wav files under *directory*."""
    found = []
    for dirpath, _, filenames in os.walk(directory):
        found.extend(
            os.path.join(dirpath, name)
            for name in filenames
            if name.endswith('.wav')
        )
    return found
57
+
58
# Playback cursor state shared between play() and the Tk key handler.
pos = 0      # index of the next file to play
files = []   # all WAV paths found under ./raw (filled in __main__)
wav = ""     # path of the file currently being judged


def play(speed = 1.5):
    """Play the next queued WAV file at *speed* and advance the cursor."""
    global pos, wav
    wav = files[pos]
    print(f"File: {wav}")
    play_wav_file(wav, speed)
    pos += 1
    print("---> ?")
68
+
69
def move(src, dst):
    """Move *src* into directory *dst*, keeping its base file name."""
    # Prepending the parent folder (p1, p2, p2dlc) was dropped: the file
    # names turned out to be unique across the games.
    source = Path(src)
    shutil.move(source, Path(dst) / source.name)
74
+
75
def on_key_press(event):
    """Tk key handler: judge the currently played sample, then play the next.

    Left  -> move the current file to raw_bad
    Right -> move the current file to raw_good
    any other key (e.g. Up) -> replay the current file at normal speed
    """
    global pos, wav
    if pos >= len(files):
        print("FINISHED")
        # Fixed: the original fell through here and play() raised an
        # IndexError once every file had been judged.
        return
    if pos == 0:
        # The very first key press only starts playback; nothing to sort yet.
        return play()
    speed = 1.5
    if event.keysym == 'Left':
        print("No")
        print("--------------------------------------")
        move(wav, "raw_bad")
    elif event.keysym == 'Right':
        print("Yes")
        print("--------------------------------------")
        move(wav, "raw_good")
    else:
        # Replay the last sample at normal speed (no chipmunk effect).
        speed = 1
        pos -= 1
        if pos < 0:
            pos = 0  # fixed: original assigned to a typo'd name 'pose'
    play(speed)
94
+
95
if __name__ == "__main__":
    # Collect every WAV under ./raw, then hand control to a tiny Tk window
    # that exists purely to capture the arrow-key presses.
    files = find_wav_files("./raw/")

    window = tk.Tk()
    window.title("Left: Discard, Right: Match, Up: Replay normal (no Chipmunk)")
    window.geometry("300x150")
    window.bind("<Key>", on_key_press)
    window.mainloop()
2_transcribe.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env python3

# Transcribe all WAVs in ./raw_good with faster-whisper and write an
# ljspeech-style "<file_id>|<transcription>" metadata.csv.
# Inspired by https://blog.networkchuck.com/posts/how-to-clone-a-voice/
# Enhanced by https://github.com/SYSTRAN/faster-whisper

import os
from faster_whisper import WhisperModel

# SELECT language and MODEL of the files you want to transcribe
# I highly suggest to use a large model and to do this on GPU
LANG = "de"
WHISPER_MODEL = "large-v3"

# Run on GPU
model = WhisperModel(WHISPER_MODEL, device="cuda", compute_type="float16")
# or run on CPU with INT8 (will take ages)
#model = WhisperModel(WHISPER_MODEL, device="cpu", compute_type="int8")

# Path to the directory containing the audio files
audio_dir = "./raw_good"
output_csv = "./metadata.csv"

# List all .wav files in the directory, sorted for a stable output order
audio_files = sorted(f for f in os.listdir(audio_dir) if f.endswith(".wav"))

# Open the CSV file for writing.
# Explicit UTF-8: transcriptions contain non-ASCII characters (e.g. German
# umlauts) and the platform-default encoding is not guaranteed to handle them.
with open(output_csv, "w", encoding="utf-8") as f:
    for audio_file in audio_files:
        # Full path to the audio file
        audio_path = os.path.join(audio_dir, audio_file)

        segments, info = model.transcribe(audio_path, language=LANG, beam_size=5)

        # Join all segment texts with single spaces
        transcription = " ".join(seg.text.strip() for seg in segments).strip()

        # Write the filename (without .wav extension) and transcription to the CSV
        file_id = os.path.splitext(audio_file)[0]
        f.write(f"{file_id}|{transcription}\n")
        print(f"{file_id}|{transcription}")

print(f"Transcriptions complete! Metadata saved to {output_csv}")
3_gen_traindata.sh ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash

# Preprocess the sorted/transcribed dataset into piper's training format.
# LANG: espeak language code (en, de, ru, fr, ...).
# SAMPLERATE: must match what 0_transcode.sh produced
#             (16000 for x-low/low models, 22050 for medium/high).
LANG=de
SAMPLERATE=22050

python3 -m piper_train.preprocess \
    --language "${LANG}" \
    --input-dir ./raw_good/ \
    --output-dir ./traindata \
    --dataset-format ljspeech \
    --single-speaker \
    --sample-rate "${SAMPLERATE}"
4_train.sh ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash

### Training parameters
# Model to train (x-low, low, medium, high)
QUALITY=high
# These two parameters will dictate, how much VRAM will be required
# NOTE: The following config will require around 20 GB VRAM (high model)
# Do not lower BATCHSIZE too much, as training will have a lot of "jitter".
# But reducing PHONEME_MAX too much will discard too long training data
# However: I was able to train on ~6-7 GB VRAM with BATCHSIZE=12 and PHONEME_MAX=400 (the jitter was clearly visible in tensorboard)
BATCHSIZE=32
PHONEME_MAX=400

# Base model to start training from. E.g. here from Thorsten Voice
BASE_CHKPOINT="./checkpoints/epoch=2665-step=1182078.ckpt"

# Start training from BASE_CHKPOINT
CHKPOINT=$BASE_CHKPOINT

# CONTINUE from an existing checkpoint.
# NOTE: It is good practice, to write down the last checkpoint, that was trained after you aborted training.
# The current checkpoint is automatically printed out by the training process.
# To resume, uncomment the line below and point it at that checkpoint.
#CHKPOINT="/training/traindata/lightning_logs/version_21/checkpoints/epoch=5661-step=1778926.ckpt"

python3 -m piper_train \
    --dataset-dir ./traindata \
    --accelerator 'gpu' \
    --gpus 1 \
    --batch-size ${BATCHSIZE} \
    --validation-split 0.0 \
    --num-test-examples 0 \
    --max_epochs 6000 \
    --resume_from_checkpoint "${CHKPOINT}" \
    --checkpoint-epochs 1 \
    --precision 16 \
    --max-phoneme-ids ${PHONEME_MAX} \
    --quality ${QUALITY}

# Values used for the published model:
# Batchsize: 32
# max-phoneme-ids: 400
5_export.sh ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/bash

# Export the trained checkpoint to ONNX and pair it with its piper config.
# NAME becomes the base name of the exported model files.
NAME=de_DE-glados-high

# Generally the last checkpoint written by 4_train.sh
CHKPOINT="./checkpoints/epoch=5999-step=1838414.ckpt"

python3 -m piper_train.export_onnx \
    "${CHKPOINT}" \
    "./${NAME}.onnx"

# piper expects the config right next to the model, named <model>.onnx.json
cp ./traindata/config.json "./${NAME}.onnx.json"
Dockerfile ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Use the official PyTorch image as the base image
ARG BASE_IMAGE=nvcr.io/nvidia/pytorch:22.03-py3
ARG PIPER_VERSION=c0670df63daf07070c9be36b5c4bed270ad72383
ARG PYTHON_VERSION=3.10.13
ARG PYTHON_BIN=python3.10

########## Build python
# Stage 1: compile a standalone CPython, since the base image's Python does
# not match what piper-train needs.
FROM ${BASE_IMAGE} AS pythonbuilder
ARG PYTHON_VERSION

# Install dependencies needed for building Python
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && apt install -y \
    git build-essential zlib1g-dev libbz2-dev \
    liblzma-dev libncurses5-dev libreadline6-dev libsqlite3-dev libssl-dev \
    libgdbm-dev liblzma-dev tk-dev lzma lzma-dev libgdbm-dev libffi-dev

RUN mkdir -pv /src && mkdir -pv /build
WORKDIR /src

RUN wget https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz
RUN tar zxvf Python-${PYTHON_VERSION}.tgz

WORKDIR /src/Python-${PYTHON_VERSION}
# Prefix is not only setting the destination where "make altinstall" puts the files, but also compiles in certain path such, that any program
# that will build against this python version, expect header files etc to be there -> We install into a clean /usr/local and then move the install files to /build
RUN ./configure --enable-optimizations --prefix=/usr/local
RUN make -j8

# Make clean destination which we then copy over to the actual container
# (wiping /usr/local first so /build contains ONLY this Python install)
RUN rm -rf /usr/local && mkdir -pv /usr/local
RUN make altinstall

RUN mv /usr/local/* /build




########## Build piper-train
# Stage 2: the actual training image, with piper-train installed in a venv.
FROM ${BASE_IMAGE}
ARG PIPER_VERSION
ARG PYTHON_BIN

# Copy python from pythonbuilder stage
RUN mkdir -pv /usr/local/
COPY --from=pythonbuilder /build/ /usr/local

# Set environment variables for Numba cache directory
ENV NUMBA_CACHE_DIR=.numba_cache

# Install dependencies and tools for training
ENV DEBIAN_FRONTEND noninteractive
RUN apt update && apt install -y \
    git build-essential espeak-ng ffmpeg && \
    rm -rf /var/lib/apt/lists/*

# Prepare venv for piper
RUN /usr/local/bin/${PYTHON_BIN} -m venv /.venv
# Automatically activate the virtual environment when entering the container via 'docker exec -it <container name> bash'
RUN echo "source /.venv/bin/activate" >> /etc/bash.bashrc

# Prepare piper (pinned to PIPER_VERSION for reproducible builds)
RUN mkdir -pv /src
WORKDIR /src
#
RUN git clone https://github.com/rhasspy/piper.git && cd piper && git checkout ${PIPER_VERSION}
WORKDIR /src/piper/src/python

# Upgrade pip
RUN source /.venv/bin/activate && pip install "pip<24"
# Install latest numpy 1.x and torchmetrics 0.x to avoid RTX 4000 issues (https://github.com/rhasspy/piper/issues/295)
RUN source /.venv/bin/activate && pip install "numpy<2" "torchmetrics<1"
# Install piper dependencies
RUN source /.venv/bin/activate && pip install pip wheel setuptools && \
    pip install -r requirements.txt
# Build piper-train
RUN source /.venv/bin/activate && pip install -e . && ./build_monotonic_align.sh
# Also install piper, in case we want to test current generations
RUN pip install piper

# Actual training directory: Mount your data folder in here
RUN mkdir -pv /training
WORKDIR /training


# Makes the container stay up and expose training metrics
STOPSIGNAL SIGKILL
CMD tensorboard --logdir .

README.org ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ * GLaDOS voice model, trained on German Portal 1 and Portal 2 game files
2
+ ** Model description
3
+ This model uses a checkpoint from the [[https://huggingface.co/datasets/rhasspy/piper-checkpoints/tree/main/de/de_DE/thorsten/high][Thorsten High]] model as a base and fine tuned it
4
+ via the voice lines, directly coming from the game files of Portal 1 and Portal 2 to
5
+ replicate the German GLaDOS voice for piper.
6
+
7
+ Training has been performed on an RTX 4000 with 8 GB of VRAM for over more than 3000 epochs.
8
+ ** Dataset & Training
9
+ I also added /hints on how to build the training dataset/ and the used toolchain for preparing and training the model in this repo.
10
+ Reasons being:
11
+ - The training data is intellectual property and copyrighted by Valve (I cannot include it here for obvious reasons)
12
+ - Training a model for piper (as of early 2025) relies on old/outdated tools from 2021 and getting everything up
13
+ and running can be super frustrating
14
+
15
+ *Requirements*
16
+ - A PC with an nVidia GPU and the proprietary nVidia drivers, CUDA, Docker and Docker Compose as well as the nvidia-container-toolkit installed
17
+ - Ideally use a linux system (WSL untested but potentially might work)
18
+ - Basic linux and python knowledge
19
+ *** Build the training dataset
20
+ **** Extract the files from the game
21
+ The training dataset has been extracted from the Portal 1 and Portal 2 game files.
22
+ For legal reason, they are not included in this repo. But you can easily extract them from the
23
+ gamefiles via [[https://developer.valvesoftware.com/wiki/VPKEdit][VPKEdit]]
24
+ *Portal 1*:
25
+ - Switch the game to the desired language (Here: German) via Steam
26
+ - Navigate to =<steam>/steamapps/common/Portal/portal= and open =portal_pak_dir.vpk= with VPKEdit
27
+ - Inside =portal_pak_dir.vpk=, navigate to =sound/vo/aperture_ai= and extract all =*.wav= files into the folder =raw= inside this git repo
28
+ *Portal 2:*
29
+ - Switch the game to the desired language (Here: German) via Steam
30
+ - Navigate to =<steam>/steamapps/common/portal 2=. Select the subfolder matching the language (here =portal2_german=) and open =pak01_dir.vpk= via VPKEdit
31
+ - Inside =pak01_dir.vpk=, navigate to =sound/vo/glados= and extract all =*.wav= files (but no subfolders) to the folder =raw= inside this git repo
32
+ *Portal 2 DLC 1*:
33
+ - Repeat the steps 1 for *Portal 2* above but now select the =portal2_dlc1_<your language>= folder (if it exists). Here, =portal2_dlc1_german= does exist. Open =pak01_dir.vpk= with VPKEdit
34
+ - Repeat step 3 of *Portal 2* above but copy the files to =raw= in this git repo
35
+ **** Transcode the files
36
+ We need to transcode the files. The portal 1 files have a samplerate of 44.1 kHz WAV while the portal2 files are MP3.
37
+ For training, we need WAV, 16bit (LE), mono PCM with the samplerates shown below, depending on the model quality we want to train.
38
+ - x-low, low: 16000 Hz
39
+ - medium, high: 22050 Hz
40
+ NOTE: In principle, we can also train on 44100 Hz, however piper-train then needs to be modified for *training* and *inference* as it only supports the sample rates listed above.
41
+ Run the following command (needs =ffmpeg= to be installed)
42
+ #+begin_src sh
43
+ # Before running the script, first edit the bitrate, that you want
44
+ ./0_transcode.sh
45
+ #+end_src
46
+ **** Sort by good/bad samples
47
+ _Now the annoying part_: Listen to all voice samples, one by one and sort them by good (same voice style, no degradation in quality, no additional none-voice parts or mumble etc) and bad (the opposite)
48
+ I have written a helper script for this purpose: *1_sort_good_bad.py* (Read the comments in it).
49
+ _But hold your horses_: Before you perform this annoying job, that can take several hours: I expect the quality of the voice lines to be similar across languages. So you can use my
50
+ script =1_from_good.py= which uses the =good.txt= file to tag voice samples as *good* or *bad*, based on my decisions made during listening to GLaDOS myself.
51
+ Run the following command
52
+ #+begin_src sh
53
+ ./1_from_good.py
54
+ #+end_src
55
+ **** Transcribe
56
+ Now we need to transcribe the files. For this, we need =faster-whisper=. The easiest way to install and use it, is to do this via Docker.
57
+ But before you do that, you should edit the file =2_transcribe.py= and select the language and model you want to use.
58
+
59
+ Run this to build the docker container(s)
60
+ #+begin_src sh
61
+ docker compose up --build -d
62
+ docker exec -it transcribe bash
63
+ #+end_src
64
+
65
+ You should now be in the =transcribe= docker container. Go to
66
+ #+begin_src sh
67
+ cd /training
68
+ ./2_transcribe.py
69
+ #+end_src
70
+ This will yield a new file =metadata.csv=. _Copy this file to =raw_good=, once transcription has finished_
71
+ *** Training
72
+ For this, you should use the Docker container, which is provided by this repo.
73
+ But before you do that, you need to configure the new files:
74
+ - 3_gen_traindata.sh: Edit the samplerate (16000 for x-low and low, 22050 for medium, high models) and the language code (en, de, ru, fr, ...)
75
+ - 4_train.sh: Edit the QUALITY, BATCHSIZE, PHONEME_MAX parameters, that suit your training hardware.
76
+ Also select the CHKPOINT to start from: You ideally do not want to train from scratch but rather from an already existing checkpoint.
77
+ Grab [[https://huggingface.co/datasets/rhasspy/piper-checkpoints/tree/main][one from the piper people]], that fits the model (x-low, low, medium, high) and language, that you want to train.
78
+ Copy it to =checkpoints= within this repo.
79
+
80
+ Now run the following within this repo (if you haven't it already done for transcription)
81
+ #+begin_src sh
82
+ docker compose up --build -d
83
+ docker exec -it training bash
84
+ #+end_src
85
+ This will build and enter the training container and also export training metrics via tensorboard at http://127.0.0.1:6006
86
+
87
+ From inside the container, you now need to generate your traindata for the training process
88
+ #+begin_src sh
89
+ ./3_gen_traindata.sh
90
+ #+end_src
91
+
92
+ And now, you are ready for training. Simply run
93
+ #+begin_src sh
94
+ ./4_train.sh
95
+ #+end_src
96
+ inside the container.
97
+
98
+ In case you need to stop and later resume training, you just have to change the path to the checkpoint by setting the =CHKPOINT= variable in =./4_train.sh=.
99
+ *** Infer the final model
100
+ After training has finished (either the loss flattened off or you hit the max epoch limit), you need to export the model to the onnx format.
101
+ First, edit =5_export.sh= and set the name and also the checkpoint (generally the last checkpoint written by =4_train.sh=) that you want to export the model from.
102
+ From still inside the training docker container, run this command
103
+ #+begin_src sh
104
+ ./5_export.sh
105
+ #+end_src
106
+ This will generate a =<model_name>.onnx= and =<model_name>.onnx.json= file. The latter one needs to be adjusted: Open it in a file editor and navigate to the line where it reads
107
+ #+begin_src json
108
+ "dataset": "",
109
+ #+end_src
110
+ and replace "" with this model's name (here: "<model_name>")
111
+ #+begin_src json
112
+ "dataset": "de_DE-glados-high"
113
+ #+end_src
114
+
115
+ These two files can now be used by piper
checkpoints/epoch=5999-step=1838414.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6c6f8da10b32bf33a12cf526ba2bea4351608ef55292e8abf1a1856da5a5a6d5
3
+ size 998167172
compose.yml ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Two services, both mounting this repo at /training:
#  - transcribe: ready-made faster-whisper image, used by 2_transcribe.py
#  - train:      image built from the Dockerfile in this repo
#                (piper training; serves tensorboard on port 6006)
services:
  transcribe:
    image: linuxserver/faster-whisper:latest
    container_name: transcribe
    environment:
      - NVIDIA_VISIBLE_DEVICES=all # GPU Support
      - NVIDIA_DRIVER_CAPABILITIES=compute,video,utility # GPU Support
    runtime: nvidia # GPU Support
    volumes:
      - "./:/training:rw"

  train:
    build: .
    container_name: training
    ports:
      - 6006:6006
    environment:
      - NVIDIA_VISIBLE_DEVICES=all # GPU Support
      - NVIDIA_DRIVER_CAPABILITIES=compute,video,utility # GPU Support
    runtime: nvidia # GPU Support
    volumes:
      - "./:/training:rw"
    shm_size: '4gb'
de_DE-glados-high.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3215c8c2f95b13eb159aa336d5583a128ec49a480b90f9a86b08153b8ffad259
3
+ size 114199011
de_DE-glados-high.onnx.json ADDED
@@ -0,0 +1,497 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": "de_DE-glados-high",
3
+ "audio": {
4
+ "sample_rate": 22050,
5
+ "quality": "traindata"
6
+ },
7
+ "espeak": {
8
+ "voice": "de"
9
+ },
10
+ "language": {
11
+ "code": "de"
12
+ },
13
+ "inference": {
14
+ "noise_scale": 0.667,
15
+ "length_scale": 1,
16
+ "noise_w": 0.8
17
+ },
18
+ "phoneme_type": "espeak",
19
+ "phoneme_map": {},
20
+ "phoneme_id_map": {
21
+ " ": [
22
+ 3
23
+ ],
24
+ "!": [
25
+ 4
26
+ ],
27
+ "\"": [
28
+ 150
29
+ ],
30
+ "#": [
31
+ 149
32
+ ],
33
+ "$": [
34
+ 2
35
+ ],
36
+ "'": [
37
+ 5
38
+ ],
39
+ "(": [
40
+ 6
41
+ ],
42
+ ")": [
43
+ 7
44
+ ],
45
+ ",": [
46
+ 8
47
+ ],
48
+ "-": [
49
+ 9
50
+ ],
51
+ ".": [
52
+ 10
53
+ ],
54
+ "0": [
55
+ 130
56
+ ],
57
+ "1": [
58
+ 131
59
+ ],
60
+ "2": [
61
+ 132
62
+ ],
63
+ "3": [
64
+ 133
65
+ ],
66
+ "4": [
67
+ 134
68
+ ],
69
+ "5": [
70
+ 135
71
+ ],
72
+ "6": [
73
+ 136
74
+ ],
75
+ "7": [
76
+ 137
77
+ ],
78
+ "8": [
79
+ 138
80
+ ],
81
+ "9": [
82
+ 139
83
+ ],
84
+ ":": [
85
+ 11
86
+ ],
87
+ ";": [
88
+ 12
89
+ ],
90
+ "?": [
91
+ 13
92
+ ],
93
+ "X": [
94
+ 156
95
+ ],
96
+ "^": [
97
+ 1
98
+ ],
99
+ "_": [
100
+ 0
101
+ ],
102
+ "a": [
103
+ 14
104
+ ],
105
+ "b": [
106
+ 15
107
+ ],
108
+ "c": [
109
+ 16
110
+ ],
111
+ "d": [
112
+ 17
113
+ ],
114
+ "e": [
115
+ 18
116
+ ],
117
+ "f": [
118
+ 19
119
+ ],
120
+ "g": [
121
+ 154
122
+ ],
123
+ "h": [
124
+ 20
125
+ ],
126
+ "i": [
127
+ 21
128
+ ],
129
+ "j": [
130
+ 22
131
+ ],
132
+ "k": [
133
+ 23
134
+ ],
135
+ "l": [
136
+ 24
137
+ ],
138
+ "m": [
139
+ 25
140
+ ],
141
+ "n": [
142
+ 26
143
+ ],
144
+ "o": [
145
+ 27
146
+ ],
147
+ "p": [
148
+ 28
149
+ ],
150
+ "q": [
151
+ 29
152
+ ],
153
+ "r": [
154
+ 30
155
+ ],
156
+ "s": [
157
+ 31
158
+ ],
159
+ "t": [
160
+ 32
161
+ ],
162
+ "u": [
163
+ 33
164
+ ],
165
+ "v": [
166
+ 34
167
+ ],
168
+ "w": [
169
+ 35
170
+ ],
171
+ "x": [
172
+ 36
173
+ ],
174
+ "y": [
175
+ 37
176
+ ],
177
+ "z": [
178
+ 38
179
+ ],
180
+ "æ": [
181
+ 39
182
+ ],
183
+ "ç": [
184
+ 40
185
+ ],
186
+ "ð": [
187
+ 41
188
+ ],
189
+ "ø": [
190
+ 42
191
+ ],
192
+ "ħ": [
193
+ 43
194
+ ],
195
+ "ŋ": [
196
+ 44
197
+ ],
198
+ "œ": [
199
+ 45
200
+ ],
201
+ "ǀ": [
202
+ 46
203
+ ],
204
+ "ǁ": [
205
+ 47
206
+ ],
207
+ "ǂ": [
208
+ 48
209
+ ],
210
+ "ǃ": [
211
+ 49
212
+ ],
213
+ "ɐ": [
214
+ 50
215
+ ],
216
+ "ɑ": [
217
+ 51
218
+ ],
219
+ "ɒ": [
220
+ 52
221
+ ],
222
+ "ɓ": [
223
+ 53
224
+ ],
225
+ "ɔ": [
226
+ 54
227
+ ],
228
+ "ɕ": [
229
+ 55
230
+ ],
231
+ "ɖ": [
232
+ 56
233
+ ],
234
+ "ɗ": [
235
+ 57
236
+ ],
237
+ "ɘ": [
238
+ 58
239
+ ],
240
+ "ə": [
241
+ 59
242
+ ],
243
+ "ɚ": [
244
+ 60
245
+ ],
246
+ "ɛ": [
247
+ 61
248
+ ],
249
+ "ɜ": [
250
+ 62
251
+ ],
252
+ "ɞ": [
253
+ 63
254
+ ],
255
+ "ɟ": [
256
+ 64
257
+ ],
258
+ "ɠ": [
259
+ 65
260
+ ],
261
+ "ɡ": [
262
+ 66
263
+ ],
264
+ "ɢ": [
265
+ 67
266
+ ],
267
+ "ɣ": [
268
+ 68
269
+ ],
270
+ "ɤ": [
271
+ 69
272
+ ],
273
+ "ɥ": [
274
+ 70
275
+ ],
276
+ "ɦ": [
277
+ 71
278
+ ],
279
+ "ɧ": [
280
+ 72
281
+ ],
282
+ "ɨ": [
283
+ 73
284
+ ],
285
+ "ɪ": [
286
+ 74
287
+ ],
288
+ "ɫ": [
289
+ 75
290
+ ],
291
+ "ɬ": [
292
+ 76
293
+ ],
294
+ "ɭ": [
295
+ 77
296
+ ],
297
+ "ɮ": [
298
+ 78
299
+ ],
300
+ "ɯ": [
301
+ 79
302
+ ],
303
+ "ɰ": [
304
+ 80
305
+ ],
306
+ "ɱ": [
307
+ 81
308
+ ],
309
+ "ɲ": [
310
+ 82
311
+ ],
312
+ "ɳ": [
313
+ 83
314
+ ],
315
+ "ɴ": [
316
+ 84
317
+ ],
318
+ "ɵ": [
319
+ 85
320
+ ],
321
+ "ɶ": [
322
+ 86
323
+ ],
324
+ "ɸ": [
325
+ 87
326
+ ],
327
+ "ɹ": [
328
+ 88
329
+ ],
330
+ "ɺ": [
331
+ 89
332
+ ],
333
+ "ɻ": [
334
+ 90
335
+ ],
336
+ "ɽ": [
337
+ 91
338
+ ],
339
+ "ɾ": [
340
+ 92
341
+ ],
342
+ "ʀ": [
343
+ 93
344
+ ],
345
+ "ʁ": [
346
+ 94
347
+ ],
348
+ "ʂ": [
349
+ 95
350
+ ],
351
+ "ʃ": [
352
+ 96
353
+ ],
354
+ "ʄ": [
355
+ 97
356
+ ],
357
+ "ʈ": [
358
+ 98
359
+ ],
360
+ "ʉ": [
361
+ 99
362
+ ],
363
+ "ʊ": [
364
+ 100
365
+ ],
366
+ "ʋ": [
367
+ 101
368
+ ],
369
+ "ʌ": [
370
+ 102
371
+ ],
372
+ "ʍ": [
373
+ 103
374
+ ],
375
+ "ʎ": [
376
+ 104
377
+ ],
378
+ "ʏ": [
379
+ 105
380
+ ],
381
+ "ʐ": [
382
+ 106
383
+ ],
384
+ "ʑ": [
385
+ 107
386
+ ],
387
+ "ʒ": [
388
+ 108
389
+ ],
390
+ "ʔ": [
391
+ 109
392
+ ],
393
+ "ʕ": [
394
+ 110
395
+ ],
396
+ "ʘ": [
397
+ 111
398
+ ],
399
+ "ʙ": [
400
+ 112
401
+ ],
402
+ "ʛ": [
403
+ 113
404
+ ],
405
+ "ʜ": [
406
+ 114
407
+ ],
408
+ "ʝ": [
409
+ 115
410
+ ],
411
+ "ʟ": [
412
+ 116
413
+ ],
414
+ "ʡ": [
415
+ 117
416
+ ],
417
+ "ʢ": [
418
+ 118
419
+ ],
420
+ "ʦ": [
421
+ 155
422
+ ],
423
+ "ʰ": [
424
+ 145
425
+ ],
426
+ "ʲ": [
427
+ 119
428
+ ],
429
+ "ˈ": [
430
+ 120
431
+ ],
432
+ "ˌ": [
433
+ 121
434
+ ],
435
+ "ː": [
436
+ 122
437
+ ],
438
+ "ˑ": [
439
+ 123
440
+ ],
441
+ "˞": [
442
+ 124
443
+ ],
444
+ "ˤ": [
445
+ 146
446
+ ],
447
+ "̃": [
448
+ 141
449
+ ],
450
+ "̧": [
451
+ 140
452
+ ],
453
+ "̩": [
454
+ 144
455
+ ],
456
+ "̪": [
457
+ 142
458
+ ],
459
+ "̯": [
460
+ 143
461
+ ],
462
+ "̺": [
463
+ 152
464
+ ],
465
+ "̻": [
466
+ 153
467
+ ],
468
+ "β": [
469
+ 125
470
+ ],
471
+ "ε": [
472
+ 147
473
+ ],
474
+ "θ": [
475
+ 126
476
+ ],
477
+ "χ": [
478
+ 127
479
+ ],
480
+ "ᵻ": [
481
+ 128
482
+ ],
483
+ "↑": [
484
+ 151
485
+ ],
486
+ "↓": [
487
+ 148
488
+ ],
489
+ "ⱱ": [
490
+ 129
491
+ ]
492
+ },
493
+ "num_symbols": 256,
494
+ "num_speakers": 1,
495
+ "speaker_id_map": {},
496
+ "piper_version": "1.0.0"
497
+ }
good.txt ADDED
@@ -0,0 +1,1110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 00_part1_entry-2
2
+ 00_part1_entry-3
3
+ 00_part1_entry-4
4
+ 00_part1_success-1
5
+ 00_part1_success-2
6
+ 00_part1_success-3
7
+ 00_part2_entry-1
8
+ 00_part2_success-1
9
+ 01_part1_entry-1
10
+ 01_part1_entry-2
11
+ 01_part1_get_portal_gun-1
12
+ 01_part1_get_portal_gun-2
13
+ 01_part1_get_portal_gun-3
14
+ 01_part1_get_portal_gun-4
15
+ 01_part1_get_portal_gun-5
16
+ 01_part1_get_portal_gun-6
17
+ 01_part1_get_portal_gun-7
18
+ 01_part2_entry-1
19
+ 01_part2_success-1
20
+ 02_part1_entry-1
21
+ 02_part1_entry-2
22
+ 02_part1_success-1
23
+ 02_part1_success-2
24
+ 02_part2_success-1
25
+ 03_part1_entry-1
26
+ 03_part1_entry-2
27
+ 03_part2_entry-1
28
+ 03_part2_platform_activated-1
29
+ 04_part1_entry-1
30
+ 04_part1_success-1
31
+ 05_part1_entry-1
32
+ 05_part1_entry-2
33
+ 05_part1_nag1-1
34
+ 05_part1_nag2-1
35
+ 05_part1_nag3-1
36
+ 05_part1_nag4-1
37
+ 05_part1_nag5-1
38
+ 05_part1_success-1
39
+ 06_part1_success_1-1
40
+ 06_part1_success_2-1
41
+ 07_part1_entry-1
42
+ 07_part1_entry-2
43
+ 07_part1_entry-3
44
+ 07_part1_get_device_component-1
45
+ 07_part1_get_device_component-2
46
+ 07_part1_trapped-1
47
+ 07_part1_trapped-2
48
+ 08_part1_entry-1
49
+ 08_part1_entry-2
50
+ 08_part1_entry-3
51
+ 08_part1_success-1
52
+ 08_part1_success-2
53
+ 08_part1_trapped-1
54
+ 08_part1_trapped-2
55
+ 09_part1_entry-1
56
+ 09_part1_entry-2
57
+ 09_part1_success-1
58
+ 10_part1_entry-1
59
+ 10_part1_entry-2
60
+ 10_part1_entry-3
61
+ 10_part1_success-1
62
+ 11_part1_entry-1
63
+ 11_part1_entry-2
64
+ 11_part1_entry-3
65
+ 11_part1_success-1
66
+ 13_part1_end-1
67
+ 13_part1_end-2
68
+ 13_part1_end-3
69
+ 13_part1_endnag-1
70
+ 13_part1_endnag-2
71
+ 13_part1_endnag-3
72
+ 13_part1_endnag-4
73
+ 13_part1_endnag-5
74
+ 13_part1_endnag-6
75
+ 13_part1_endnag-7
76
+ 13_part1_endnag-8
77
+ 13_part1_entry-1
78
+ 13_part1_euthanized-1
79
+ 13_part1_middle-1
80
+ 13_part1_middle-2
81
+ 13_part1_middle-2b-1
82
+ 13_part1_middle-2b-2
83
+ 13_part1_pickup-1
84
+ 14_part1_end-1
85
+ 14_part1_end-2
86
+ 14_part1_entry-1
87
+ 14_part1_entry-2
88
+ 15_part1_entry-1
89
+ 15_part1_entry-2
90
+ 15_part1_into_the_fire-1
91
+ 15_part1_into_the_fire-2
92
+ 15_part1_into_the_fire-3
93
+ 15_part1_into_the_fire-4
94
+ 15_part1_into_the_fire-5
95
+ 15_part1_partyfinalstop-1
96
+ 15_part1_partyreminder-1
97
+ 15_part1_partyspeech-2
98
+ 15_part1_partyspeech-3
99
+ 15_part1_partyspeech-4
100
+ 15_part1_partyspeech-5
101
+ 15_part1_partyspeech-6
102
+ 15_part1_partyspeech-7
103
+ escape_00_part1_nag06-1
104
+ escape_00_part1_nag13-1
105
+ escape_00_part1_nag14-1
106
+ escape_00_part1_nag15-1
107
+ escape_00_part1_nag16-1
108
+ escape_01_part1_nag03-1
109
+ escape_01_part1_nag04-1
110
+ escape_01_part1_nag11-1
111
+ escape_01_part1_nag12-1
112
+ escape_01_second_hit_nag02-1
113
+ escape_02_entry-00
114
+ escape_02_entry-01
115
+ escape_02_entry-02
116
+ escape_02_entry-03
117
+ escape_02_miscbabble-01
118
+ escape_02_miscbabble-02
119
+ escape_02_miscbabble-03
120
+ escape_02_miscbabble-04
121
+ escape_02_miscbabble-05
122
+ escape_02_miscbabble-06
123
+ escape_02_miscbabble-07
124
+ escape_02_miscbabble-08
125
+ escape_02_miscbabble-09
126
+ escape_02_miscbabble-11
127
+ escape_02_miscbabble-13
128
+ escape_02_miscbabble-15
129
+ escape_02_miscbabble-16
130
+ escape_02_miscbabble-17
131
+ escape_02_miscbabble-18
132
+ escape_02_miscbabble-19
133
+ escape_02_miscbabble-20
134
+ escape_02_miscbabble-21
135
+ escape_02_miscbabble-22
136
+ escape_02_miscbabble-23
137
+ escape_02_miscbabble-24
138
+ escape_02_miscbabble-25
139
+ escape_02_miscbabble-26
140
+ escape_02_miscbabble-27
141
+ escape_02_spheredestroy1-01
142
+ escape_02_spheredestroy1-02
143
+ escape_02_spheredestroy1-04
144
+ escape_02_spheredestroy1-05
145
+ escape_02_spheredestroy1-06
146
+ escape_02_spheredestroy1-07
147
+ escape_02_spheredestroy1-08
148
+ escape_02_spheredestroy1-09
149
+ escape_02_spheredestroy1-10
150
+ escape_02_spheredestroy1-ancillary1
151
+ escape_02_spheredestroy2-01
152
+ escape_02_spheredestroy2-02
153
+ escape_02_spheredestroy2-03
154
+ escape_02_spheredestroy2-04
155
+ escape_02_spheredestroy2-05
156
+ escape_02_spheredestroy3-02
157
+ escape_02_spheredestroy3-03
158
+ escape_02_spheredestroy3-04
159
+ escape_02_spheredestroy3-06
160
+ escape_02_spheredestroy3-07
161
+ escape_02_spheredestroy4-01
162
+ escape_02_spheredestroy4-03
163
+ escape_02_spheredestroy4-04
164
+ escape_02_spheredestroy4-05
165
+ escape_02_spheredestroy4-07
166
+ escape_02_spheredestroy4-08
167
+ escape_02_spheredrop1-01
168
+ escape_02_spheredrop1-02
169
+ escape_02_spherenag1-01
170
+ escape_02_spherenag1-02
171
+ escape_02_spherenag1-03
172
+ escape_02_spherenag1-04
173
+ escape_02_spherenag1-05
174
+ escape_02_spherenag1-06
175
+ escape_02_spherenag1-07
176
+ escape_02_spherenag1-08
177
+ escape_02_spherenag1-09
178
+ escape_02_spherenag1-10
179
+ escape_02_spherenag1-11
180
+ escape_02_spherenag1-12
181
+ escape_02_spherenag1-13
182
+ escape_02_spherenag1-14
183
+ escape_02_spherenag1-15
184
+ escape_02_spherenag1-16
185
+ generic_crate_lost-1
186
+ generic_crate_lost-2
187
+ generic_crate_vaporized_in_emancipation_grid-1
188
+ generic_crate_vaporized_in_emancipation_grid-2
189
+ generic_security_camera_destroyed-2
190
+ post_escape_bridge_02
191
+ post_escape_bridge_07
192
+ post_escape_bridge_08
193
+ post_escape_bridge_09
194
+ a2_triple_laser01
195
+ a2_triple_laser02
196
+ a2_triple_laser03
197
+ anti_taunt01
198
+ anti_taunt02
199
+ anti_taunt05
200
+ anti_taunt09
201
+ anti_taunt10
202
+ anti_taunt11
203
+ anti_taunt14
204
+ anti_taunt15
205
+ anti_taunt16
206
+ anti_taunt17
207
+ anti_taunt18
208
+ anti_taunt19
209
+ anti_taunt20
210
+ anti_taunt21
211
+ anti_taunt22
212
+ anti_taunt23
213
+ botcoop_artifactone01
214
+ botcoop_artifactone02
215
+ botcoop_artifactone03
216
+ botcoop_artifactone04
217
+ botcoop_artifactone05
218
+ botcoop_artifactone06
219
+ botcoop_artifactone07
220
+ botcoop_artifactone08
221
+ botcoop_artifactone09
222
+ botcoop_artifactone_hub01
223
+ botcoop_artifactone_hub02
224
+ botcoop_artifactone_hub03
225
+ botcoop_artifactone_hub04
226
+ botcoop_artifactone_hub05
227
+ botcoop_artifactone_hub06
228
+ botcoop_artifactone_hub07
229
+ botcoop_artifactone_hub08
230
+ botcoop_competition01
231
+ botcoop_competition02
232
+ botcoop_competition03
233
+ botcoop_competition04
234
+ botcoop_competition05
235
+ botcoop_competition10
236
+ botcoop_competition13
237
+ botcoop_hub_first_run01
238
+ botcoop_hub_first_run03
239
+ botcoop_hub_first_run04
240
+ botcoop_hub_first_run05
241
+ botcoop_hub_first_run06
242
+ botcoop_hub_first_run07
243
+ botcoop_humandetector01
244
+ botcoop_humandetector02
245
+ botcoop_humandetector04
246
+ botcoop_humandetector05
247
+ botcoop_humansplanb01
248
+ botcoop_humansplanb02
249
+ botcoop_intro01
250
+ botcoop_intro02
251
+ botcoop_intro03
252
+ botcoop_intro04
253
+ botcoop_intro05
254
+ botcoop_intro06
255
+ botcoop_intro07
256
+ botcoop_intro08
257
+ botcoop_intro09
258
+ botcoop_intro10
259
+ botcoop_intro12
260
+ botcoop_intro13
261
+ botcoop_intro15
262
+ botcoop_intro16
263
+ botcoop_intro17
264
+ botcoop_paxdemo01
265
+ botcoop_tubethree01
266
+ botcoop_tubethree02
267
+ botcoop_tubetwo01
268
+ chellgladoswakeup01
269
+ chellgladoswakeup04
270
+ chellgladoswakeup05
271
+ chellgladoswakeup06
272
+ coop_get_gun01
273
+ coop_get_gun02
274
+ coop_get_gun03
275
+ coop_get_gun04
276
+ coop_get_gun10
277
+ coop_get_gun11
278
+ coop_get_gun13
279
+ coop_hub_track02
280
+ coop_hub_track03
281
+ coop_hub_track04
282
+ coop_hub_track05
283
+ coop_hub_track06
284
+ coop_hub_track09
285
+ coop_hub_track10
286
+ coop_hub_track11
287
+ coop_hub_track14
288
+ coop_hub_track16
289
+ coop_misc10
290
+ coop_misc12
291
+ coop_nameblue
292
+ coop_nameorange
293
+ coop_numbers11
294
+ coop_numbers12
295
+ coop_numbers13
296
+ coop_numbers14
297
+ coop_numbers15
298
+ coop_numbers18
299
+ coop_privatetalk_both01
300
+ coop_privatetalk_both04
301
+ coop_privatetalk_mix01
302
+ coop_privatetalk_mix04
303
+ coop_scoring01
304
+ coop_scoring02
305
+ coop_scoring03
306
+ coop_scoring06
307
+ coop_scoring07
308
+ coop_talk_at_once11
309
+ coop_talk_at_once12
310
+ coop_teamwork_exercise_alt01
311
+ coop_teamwork_exercise_alt02
312
+ coop_teamwork_exercise_alt03
313
+ coop_teamwork_exercise_alt04
314
+ coop_teamwork_exercise_alt05
315
+ coop_teamwork_exercise_alt11
316
+ coop_teamwork_exercise_alt12
317
+ coop_teamwork_exercise_alt13
318
+ coop_teamwork_exercise_alt17
319
+ coop_test_chamber_blue03
320
+ coop_test_chamber_blue06
321
+ coop_test_chamber_both01
322
+ coop_test_chamber_both02
323
+ coop_test_chamber_both07
324
+ coop_test_chamber_both08
325
+ coop_test_chamber_both12
326
+ coop_test_chamber_both14
327
+ coop_test_chamber_both15
328
+ coop_test_chamber_both33
329
+ coop_test_chamber_oneplayer37
330
+ coop_test_chamber_orange03
331
+ coop_test_chamber_orange06
332
+ coop_vault_interruption02
333
+ coop_vault_interruption03
334
+ coop_vault_interruption04
335
+ coop_vault_interruption05
336
+ coop_vault_interruption06
337
+ coop_vault_interruption08
338
+ coop_vault_interruption09
339
+ coop_vault_intro01
340
+ coop_vault_intro02
341
+ epilogue03
342
+ epilogue04
343
+ epilogue07
344
+ epilogue10
345
+ epilogue12
346
+ epilogue14
347
+ epilogue19
348
+ epilogue20
349
+ epilogue23
350
+ epilogue25
351
+ epilogue28
352
+ epiloguekillyou02
353
+ evilagainsamples01
354
+ evilagainsamples03
355
+ evilagainsamples04
356
+ evilagainsamples05
357
+ faith_plate_intro01
358
+ faithplategarbage06
359
+ fgb_trap01
360
+ fgb_trap02
361
+ fgb_trap03
362
+ fgb_trap05
363
+ fgb_trap06
364
+ fgb_trap08
365
+ fgbrvtrap02
366
+ fgbrvtrap03
367
+ fgbrvtrap05
368
+ fgbturrets01
369
+ fgbwheatleyentrance10
370
+ fgbwheatleytransfer03
371
+ fizzlecube01
372
+ fizzlecube03
373
+ fizzlecube05
374
+ fizzlecube06
375
+ gladosbattle_pre09
376
+ gladosbattle_pre13
377
+ gladosbattle_pre14
378
+ gladosbattle_pre16
379
+ gladosbattle_pre18
380
+ gladosbattle_xfer03
381
+ gladosbattle_xfer04
382
+ gladosbattle_xfer05
383
+ gladosbattle_xfer06
384
+ gladosbattle_xfer11
385
+ gladosbattle_xfer12
386
+ gladosbattle_xfer13
387
+ gladosbattle_xfer14
388
+ gladosbattle_xfer15
389
+ gladosbattle_xfer16
390
+ gladosbattle_xfer17
391
+ hub04_01
392
+ hub04_02
393
+ hub04_03
394
+ hub04_04
395
+ hub04_05
396
+ jailbreak04
397
+ jailbreak05
398
+ jailbreak06
399
+ jailbreak09
400
+ jailbreak10
401
+ jailbreak11
402
+ jailbreak12
403
+ jailbreak13
404
+ jailbreakfaketest01
405
+ jailbreakfaketest03
406
+ jailbreakfaketest05
407
+ lift_interlude01
408
+ mp_coop_calibration01
409
+ mp_coop_calibration02
410
+ mp_coop_calibration03
411
+ mp_coop_calibration04
412
+ mp_coop_calibration05
413
+ mp_coop_calibration06
414
+ mp_coop_callibrationcomplete01
415
+ mp_coop_callibrationcomplete02
416
+ mp_coop_catapult_1end01
417
+ mp_coop_catapult_1end02
418
+ mp_coop_catapult_1end03
419
+ mp_coop_catapult_2
420
+ mp_coop_catapult_201
421
+ mp_coop_catapult_wall_intro01
422
+ mp_coop_catapult_wall_intro04
423
+ mp_coop_catapult_wall_intro06
424
+ mp_coop_catapult_wall_introdeath01
425
+ mp_coop_come_along04
426
+ mp_coop_come_along06
427
+ mp_coop_come_alongstart01
428
+ mp_coop_come_alongstart02
429
+ mp_coop_come_alongstart03
430
+ mp_coop_come_alongstart04
431
+ mp_coop_confidencenotperfect01
432
+ mp_coop_confidencenotperfect02
433
+ mp_coop_confidencenotperfect03
434
+ mp_coop_confidencenotslow01
435
+ mp_coop_confidencenotslow02
436
+ mp_coop_confidencenotslow03
437
+ mp_coop_confidencenotslow04_01
438
+ mp_coop_confidencenotstay01
439
+ mp_coop_confidencenotstay02
440
+ mp_coop_confidencenotstay03
441
+ mp_coop_confidencenotstay04
442
+ mp_coop_confidenceoff01
443
+ mp_coop_confidenceoff02
444
+ mp_coop_confidenceoff03
445
+ mp_coop_confidenceoff04
446
+ mp_coop_doors01
447
+ mp_coop_doors01a
448
+ mp_coop_doors02
449
+ mp_coop_doors02a
450
+ mp_coop_doors03
451
+ mp_coop_doors04
452
+ mp_coop_doors04a
453
+ mp_coop_doors05_alt
454
+ mp_coop_doors05a
455
+ mp_coop_fan02
456
+ mp_coop_fan03
457
+ mp_coop_fan04
458
+ mp_coop_fanend01
459
+ mp_coop_fanend02
460
+ mp_coop_fanend03
461
+ mp_coop_fling_1end01_01
462
+ mp_coop_fling_1end03
463
+ mp_coop_fling_3end01
464
+ mp_coop_fling_3end02
465
+ mp_coop_fling_3end03
466
+ mp_coop_fling_3end04
467
+ mp_coop_fling_3start01
468
+ mp_coop_fling_3start02
469
+ mp_coop_fling_3start03
470
+ mp_coop_fling_crushers01
471
+ mp_coop_fling_crushers02
472
+ mp_coop_fling_crushersend01
473
+ mp_coop_fling_crushersend03
474
+ mp_coop_humanresourcedeath01
475
+ mp_coop_humanresourcedeath02
476
+ mp_coop_humanresourcedeath03
477
+ mp_coop_humanresourcedeath04
478
+ mp_coop_humanresourcedeath07
479
+ mp_coop_humanresourcedeath08
480
+ mp_coop_infinifling_trainend01
481
+ mp_coop_infinifling_trainend02
482
+ mp_coop_infinifling_trainend03
483
+ mp_coop_infinifling_trainend04
484
+ mp_coop_infinifling_trainstart01
485
+ mp_coop_infinifling_trainstart02
486
+ mp_coop_infinifling_trainstart03
487
+ mp_coop_infinifling_trainstart04
488
+ mp_coop_laser_202
489
+ mp_coop_laser_203
490
+ mp_coop_laser_207
491
+ mp_coop_laser_209
492
+ mp_coop_laser_210
493
+ mp_coop_laser_crusher01
494
+ mp_coop_laser_crushercrush01
495
+ mp_coop_laser_crushercrush02
496
+ mp_coop_laser_crushercrush03
497
+ mp_coop_laser_crushercrush04
498
+ mp_coop_laser_crushercrush05
499
+ mp_coop_laser_crushercrush06
500
+ mp_coop_obstacle01
501
+ mp_coop_obstacle03_alt
502
+ mp_coop_obstacletrait01
503
+ mp_coop_obstacletrait02
504
+ mp_coop_paint_bridge01
505
+ mp_coop_paint_bridge02
506
+ mp_coop_paint_bridgedeath01
507
+ mp_coop_paint_bridgedeath02
508
+ mp_coop_paint_bridgenodeath01
509
+ mp_coop_paint_bridgenodeath02
510
+ mp_coop_paint_come_along01
511
+ mp_coop_paint_come_along02
512
+ mp_coop_paint_come_along03
513
+ mp_coop_paint_come_along04
514
+ mp_coop_paint_come_along05
515
+ mp_coop_paint_come_along06
516
+ mp_coop_paint_come_along10
517
+ mp_coop_paint_come_along11
518
+ mp_coop_paint_longjump_intro03
519
+ mp_coop_paint_longjump_intro04
520
+ mp_coop_paint_longjump_intro05
521
+ mp_coop_paint_longjump_intro06
522
+ mp_coop_paint_longjump_intro08
523
+ mp_coop_paint_longjump_intro44
524
+ mp_coop_paint_longjump_intro45
525
+ mp_coop_paint_longjump_intro46
526
+ mp_coop_paint_longjump_intro49
527
+ mp_coop_paint_longjump_intro50
528
+ mp_coop_paint_longjump_intro51
529
+ mp_coop_paint_longjump_intro52
530
+ mp_coop_paint_longjump_intro53
531
+ mp_coop_paint_longjump_intro54
532
+ mp_coop_paint_longjump_intro58
533
+ mp_coop_paint_longjump_intro59
534
+ mp_coop_paint_longjump_intro60
535
+ mp_coop_paint_longjump_intro64
536
+ mp_coop_paint_longjump_intro65
537
+ mp_coop_paint_longjump_intro67
538
+ mp_coop_paint_longjump_intro69
539
+ mp_coop_paint_longjump_intro70
540
+ mp_coop_paint_longjump_intro71
541
+ mp_coop_paint_longjump_intro72
542
+ mp_coop_paint_longjump_intro73
543
+ mp_coop_paint_longjump_intro74
544
+ mp_coop_paint_longjump_intro75
545
+ mp_coop_paint_red_racer01
546
+ mp_coop_paint_red_racer02
547
+ mp_coop_paint_redirect01
548
+ mp_coop_paint_redirect04
549
+ mp_coop_paint_redirect06
550
+ mp_coop_paint_redirect07
551
+ mp_coop_paint_redirect08
552
+ mp_coop_paint_speed_catch01
553
+ mp_coop_paint_speed_catch03
554
+ mp_coop_paint_speed_catch04
555
+ mp_coop_paint_speed_catch05
556
+ mp_coop_paint_speed_catch06
557
+ mp_coop_paint_speed_catch07
558
+ mp_coop_paint_speed_catch08
559
+ mp_coop_paint_speed_catchalt01
560
+ mp_coop_paint_speed_catchalt02
561
+ mp_coop_paint_speed_fling01
562
+ mp_coop_paint_speed_fling02
563
+ mp_coop_paint_speed_fling03
564
+ mp_coop_paint_walljumps01
565
+ mp_coop_paint_walljumps02
566
+ mp_coop_paint_walljumps04
567
+ mp_coop_pingtool01
568
+ mp_coop_pingtool02
569
+ mp_coop_pingtool03
570
+ mp_coop_pingtool06
571
+ mp_coop_pingtool07
572
+ mp_coop_race_2start01
573
+ mp_coop_race_2start02
574
+ mp_coop_radarroomend
575
+ mp_coop_rat_mazedeath01
576
+ mp_coop_rat_mazedeath02
577
+ mp_coop_rat_mazedeath03
578
+ mp_coop_rat_mazedeath04
579
+ mp_coop_startdeath01
580
+ mp_coop_startdeath02
581
+ mp_coop_startdeath03
582
+ mp_coop_startdeath04
583
+ mp_coop_startdeath05
584
+ mp_coop_startend01
585
+ mp_coop_startend02
586
+ mp_coop_tbeam_catch_grind_1taunt01
587
+ mp_coop_tbeam_catch_grind_1taunt02
588
+ mp_coop_tbeam_catch_grind_1taunt04
589
+ mp_coop_tbeam_end01
590
+ mp_coop_tbeam_end02
591
+ mp_coop_tbeam_end09
592
+ mp_coop_tbeam_end10
593
+ mp_coop_tbeam_end11
594
+ mp_coop_tbeam_end12
595
+ mp_coop_tbeam_end13
596
+ mp_coop_tbeam_end14
597
+ mp_coop_tbeam_end15
598
+ mp_coop_tbeam_maze01
599
+ mp_coop_tbeam_maze02
600
+ mp_coop_tbeam_maze03
601
+ mp_coop_tbeam_maze05
602
+ mp_coop_tbeam_polarity3_03
603
+ mp_coop_tbeam_polarity3_05
604
+ mp_coop_tbeam_polarity3_06
605
+ mp_coop_tbeam_polarity3_16
606
+ mp_coop_tbeam_polarity3_17
607
+ mp_coop_tbeam_redirect01a
608
+ mp_coop_teambtsstart01
609
+ mp_coop_teambtsstart02
610
+ mp_coop_teambtsstart03
611
+ mp_coop_test_chamber_both09_01
612
+ mp_coop_turret_ball01
613
+ mp_coop_turret_ball02
614
+ mp_coop_turret_wall01
615
+ mp_coop_turret_walls04
616
+ mp_coop_wall_501
617
+ mp_coop_wall_502
618
+ mp_coop_wall_503
619
+ mp_coop_wall_504
620
+ mp_coop_wall_5end01
621
+ mp_coop_wall_5end02
622
+ mp_coop_wall_block01
623
+ mp_coop_wall_block02
624
+ mp_coop_wall_block03
625
+ mp_coop_wall_block04
626
+ mp_coop_wall_blockdeath01
627
+ mp_coop_wall_intro01
628
+ mp_coop_wall_intro02
629
+ mp_coop_wall_intro03
630
+ mp_coop_wall_intro04
631
+ mp_coop_wall_intro05
632
+ mp_coop_wall_intro06
633
+ mp_coop_wall_intro07
634
+ mp_coop_wall_intro08
635
+ mp_credits01
636
+ mp_credits03
637
+ mp_credits04
638
+ mp_credits05
639
+ mp_credits06
640
+ mp_credits07
641
+ mp_credits08
642
+ mp_credits09
643
+ mp_credits10
644
+ mp_credits11
645
+ mp_credits12
646
+ mp_credits13
647
+ mp_credits14
648
+ mp_credits15
649
+ mp_credits16
650
+ mp_credits17
651
+ mp_credits18
652
+ mp_credits19
653
+ mp_credits20
654
+ mp_credits21
655
+ mp_credits22
656
+ mp_credits23
657
+ mp_credits24
658
+ mp_credits25
659
+ mp_credits26
660
+ mp_credits27
661
+ mp_credits28
662
+ mp_credits29
663
+ mp_credits30
664
+ mp_credits31
665
+ mp_credits32
666
+ mp_credits33
667
+ mp_credits34
668
+ mp_credits35
669
+ mp_credits36
670
+ mp_credits37
671
+ mp_credits38
672
+ mp_credits39
673
+ mp_credits40
674
+ mp_credits41
675
+ mp_credits42
676
+ mp_credits43
677
+ mp_credits44
678
+ mp_credits45
679
+ mp_credits46
680
+ mp_credits47
681
+ mp_credits48
682
+ mp_credits49
683
+ mp_credits50
684
+ mp_credits51
685
+ mp_credits52
686
+ mp_credits53
687
+ mp_credits54
688
+ mp_credits55
689
+ mp_credits56
690
+ mp_credits57
691
+ mp_credits58
692
+ mp_credits59
693
+ mp_credits60
694
+ mp_credits61
695
+ mp_credits62
696
+ mp_credits63
697
+ mp_credits64
698
+ mp_credits65
699
+ mp_credits66
700
+ mp_credits67
701
+ mp_credits68
702
+ mp_credits69
703
+ mp_credits70
704
+ mp_credits72
705
+ mp_credits73
706
+ mp_credits74
707
+ mp_credits75
708
+ mp_credits76
709
+ mp_credits77
710
+ mp_credits79
711
+ mp_credits80
712
+ mp_credits81
713
+ mp_credits82
714
+ mp_credits83
715
+ mp_credits84
716
+ mp_credits85
717
+ mp_credits86
718
+ mp_credits87
719
+ mp_credits88
720
+ mp_credits90
721
+ mp_credits91
722
+ mp_credits92
723
+ mp_credits93
724
+ mp_credits94
725
+ mp_death01
726
+ mp_death04
727
+ mp_death05
728
+ mp_death06
729
+ mp_death08
730
+ mp_death10
731
+ mp_death11
732
+ mp_death12
733
+ mp_death16
734
+ mp_death17
735
+ mp_death18
736
+ mp_death19
737
+ mp_death20
738
+ mp_death21
739
+ mp_death22
740
+ mp_death23
741
+ mp_death24
742
+ mp_death26
743
+ mp_hub_return02a
744
+ mp_hub_return03a
745
+ mp_hub_return04a
746
+ mp_hub_return05a
747
+ mp_hub_return06a
748
+ mp_hubreturn01
749
+ mp_hubreturn02
750
+ mp_hubreturn03
751
+ mp_hubreturn04
752
+ mp_hubreturn05
753
+ mp_hubreturn06
754
+ mp_hubreturn07
755
+ mp_hubreturn08
756
+ mp_hubreturn09
757
+ mp_hubreturn10
758
+ mp_hubreturn11
759
+ mp_humanresources01
760
+ mp_humanresources03
761
+ mp_humanresources04
762
+ mp_subterfuge01
763
+ mp_subterfuge02
764
+ mp_subterfuge03
765
+ mp_subterfuge04
766
+ mp_subterfuge05
767
+ mp_subterfuge06
768
+ mp_subterfuge07
769
+ mp_subterfuge08
770
+ mp_subterfuge09
771
+ mp_subterfuge10
772
+ mp_subterfuge11
773
+ mp_subterfuge13
774
+ mp_subterfuge14
775
+ mp_subterfuge15
776
+ mp_subterfuge16_alt
777
+ mp_subterfuge17
778
+ mp_subterfuge21
779
+ mp_subterfuge22
780
+ mp_subterfuge23
781
+ mp_subterfuge24
782
+ mp_subterfuge25
783
+ mp_subterfuge26
784
+ mp_subterfuge30
785
+ mp_subterfuge32
786
+ mp_subterfuge33
787
+ mp_subterfuge34
788
+ mp_subterfuge35
789
+ mp_subterfuge36
790
+ mp_subterfuge39
791
+ mp_subterfuge44
792
+ mp_subterfuge45
793
+ mp_subterfuge46
794
+ mp_subterfuge47
795
+ mp_subterfuge_misc05
796
+ mp_subterfuge_misc06
797
+ mp_subterfuge_misc07
798
+ mp_subterfuge_misc11
799
+ mp_subterfuge_misc14
800
+ mp_subterfuge_misc16
801
+ mp_subterfugedeath_blue01
802
+ mp_subterfugedeath_blue02
803
+ mp_subterfugedeath_blue03
804
+ mp_subterfugedeath_blue04
805
+ mp_subterfugedeath_orange02
806
+ mp_subterfugedeath_orange03
807
+ mp_subterfugedeath_orange06
808
+ mp_subterfugedeath_orange08
809
+ mp_taunts01
810
+ mp_taunts02
811
+ mp_taunts03
812
+ mp_taunts04
813
+ mp_taunts05
814
+ mp_taunts06
815
+ multiple_spheres01
816
+ potatos_emotion_no01
817
+ potatos_emotion_no02
818
+ potatos_emotion_no03
819
+ potatos_fgb_confrontation07
820
+ potatos_fgb_confrontation08
821
+ prehub04
822
+ prehub14
823
+ prehub15
824
+ prehub16
825
+ prehub24
826
+ prehub26
827
+ prehub27
828
+ prehub28
829
+ prehub29
830
+ prehub30
831
+ prehub32
832
+ prehub34
833
+ prehub35
834
+ prehub36
835
+ prehub47
836
+ prehub48
837
+ prehub55
838
+ prehub56
839
+ sp_a1_wakeup_incinerator01
840
+ sp_a1_wakeup_incinerator02
841
+ sp_a2_bridge_intro01
842
+ sp_a2_bridge_intro03
843
+ sp_a2_bridge_intro04
844
+ sp_a2_bridge_the_gap01
845
+ sp_a2_bridge_the_gap02
846
+ sp_a2_bts1_intro01
847
+ sp_a2_catapult01
848
+ sp_a2_column_blocker01
849
+ sp_a2_column_blocker04
850
+ sp_a2_column_blocker05
851
+ sp_a2_core01
852
+ sp_a2_core02
853
+ sp_a2_core03
854
+ sp_a2_core04
855
+ sp_a2_dilemma01
856
+ sp_a2_dual_lasers_intro01
857
+ sp_a2_fizzler_intro01
858
+ sp_a2_fizzler_intro04
859
+ sp_a2_fizzler_intro06
860
+ sp_a2_future_starter01
861
+ sp_a2_intro1_found01
862
+ sp_a2_intro1_found05
863
+ sp_a2_intro1_found06
864
+ sp_a2_intro1_found07
865
+ sp_a2_intro1_found08
866
+ sp_a2_laser_intro_ending02
867
+ sp_a2_laser_over_goo_intro01
868
+ sp_a2_laser_stairs_intro03
869
+ sp_a2_pit_flings02
870
+ sp_a2_pit_flings03
871
+ sp_a2_pit_flings06
872
+ sp_a2_pit_flings_future_starter01
873
+ sp_a2_pit_flings_future_starter02
874
+ sp_a2_ricochet01
875
+ sp_a2_trust_fling01
876
+ sp_a2_trust_fling02
877
+ sp_a2_trust_fling03
878
+ sp_a2_trust_fling04
879
+ sp_a2_trust_fling06
880
+ sp_a2_turret_blocker_future_starter01
881
+ sp_a2_turret_blocker_future_starter02
882
+ sp_a2_turret_intro01
883
+ sp_a2_turret_intro03
884
+ sp_box_over_goo01
885
+ sp_box_over_goo04
886
+ sp_catapult_fling_sphere_peek_completion01
887
+ sp_catapult_fling_sphere_peek_failureone01
888
+ sp_catapult_fling_sphere_peek_failureone02
889
+ sp_catapult_fling_sphere_peek_failureone03
890
+ sp_catapult_fling_sphere_peek_failurethree01
891
+ sp_catapult_fling_sphere_peek_failuretwo01
892
+ sp_catapult_fling_sphere_peek_failuretwo03
893
+ sp_catapult_intro_completion01
894
+ sp_column_blocker_entry01
895
+ sp_hole_in_the_sky_completion01
896
+ sp_hole_in_the_sky_completion02
897
+ sp_hole_in_the_sky_entry01
898
+ sp_hole_in_the_sky_entry02
899
+ sp_incinerator_01_01
900
+ sp_incinerator_01_03
901
+ sp_incinerator_01_04
902
+ sp_incinerator_01_08
903
+ sp_incinerator_01_09
904
+ sp_incinerator_01_10
905
+ sp_incinerator_01_11
906
+ sp_incinerator_01_12
907
+ sp_incinerator_01_13
908
+ sp_incinerator_01_15
909
+ sp_incinerator_01_18
910
+ sp_laser_over_goo_completion01
911
+ sp_laser_over_goo_entry01
912
+ sp_laser_powered_lift_completion01
913
+ sp_laser_powered_lift_completion02
914
+ sp_laser_powered_lift_entry01
915
+ sp_laser_redirect_intro_completion01
916
+ sp_laser_redirect_intro_completion03
917
+ sp_laser_redirect_intro_entry01
918
+ sp_laser_redirect_intro_entry02
919
+ sp_laser_redirect_intro_entry03
920
+ sp_laserfield_intro01
921
+ sp_paint_jump_redirect_bomb_completion01
922
+ sp_paint_jump_redirect_bomb_entry01
923
+ sp_paint_jump_trampoline_completion01
924
+ sp_paint_jump_trampoline_entry01
925
+ sp_paint_jump_wall_jumps02
926
+ sp_shoot_through_wall_completion01
927
+ sp_shoot_through_wall_entry01
928
+ sp_sphere_2nd_encounter_completion01
929
+ sp_sphere_2nd_encounter_entry01
930
+ sp_sphere_2nd_encounter_entrytwo01
931
+ sp_sphere_2nd_encounter_malfunction01
932
+ sp_sphere_2nd_encounter_malfunction02
933
+ sp_trust_fling_entry01
934
+ sp_trust_fling_entry02
935
+ sp_trust_fling_sphereinterrupt01
936
+ sp_trust_fling_sphereinterrupt03
937
+ sp_turret_intro_entry01
938
+ sp_turret_islands01
939
+ sp_turret_islands02
940
+ taunt_big_wave01
941
+ taunt_big_wave05
942
+ taunt_big_wave07
943
+ taunt_cannonball01
944
+ taunt_cannonball02
945
+ taunt_highfive01
946
+ taunt_highfive03
947
+ taunt_highfive04
948
+ taunt_laugh01
949
+ taunt_laugh02
950
+ taunt_robot_arm_ripple01
951
+ taunt_robot_arm_ripple02
952
+ taunt_robot_arm_ripple03
953
+ taunt_robot_arm_ripple04
954
+ taunt_robot_arm_ripple05
955
+ taunt_robot_arm_ripple06
956
+ taunt_rockpaperscissors01
957
+ taunt_small_wave01
958
+ taunt_small_wave02
959
+ taunt_small_wave03
960
+ taunt_small_wave04
961
+ taunt_small_wave_portalgun01
962
+ taunt_small_wave_portalgun02
963
+ taunt_small_wave_portalgun03
964
+ taunt_somersault01
965
+ taunt_somersault02
966
+ taunt_somersault03
967
+ taunt_splits01
968
+ taunt_splits02
969
+ taunt_splits03
970
+ taunt_teamhug01
971
+ taunt_teamhug02
972
+ taunt_teamhug03
973
+ taunt_teamtease01
974
+ taunt_teamtease02
975
+ taunt_teamtease03
976
+ taunt_teamtease04
977
+ taunt_trickfire_handstand01
978
+ taunt_trickfire_handstand02
979
+ taunt_trickfire_handstand03
980
+ taunt_trickfire_handstand04
981
+ taunt_trickfire_handstand05
982
+ testchambermisc02
983
+ testchambermisc12
984
+ testchambermisc15
985
+ testchambermisc16
986
+ testchambermisc19
987
+ testchambermisc21
988
+ testchambermisc23
989
+ testchambermisc27
990
+ testchambermisc30
991
+ testchambermisc31
992
+ testchambermisc33
993
+ testchambermisc34
994
+ testchambermisc35
995
+ testchambermisc39
996
+ testchambermisc41
997
+ wakeup_outro01
998
+ wakeup_outro02
999
+ dlc1_leaderboard01
1000
+ dlc1_leaderboard02
1001
+ dlc1_leaderboard03
1002
+ dlc1_leaderboard04
1003
+ dlc1_leaderboard05
1004
+ dlc1_leaderboard06
1005
+ dlc1_leaderboard07
1006
+ dlc1_leaderboard08
1007
+ dlc1_leaderboard09
1008
+ dlc1_leaderboard10
1009
+ dlc1_leaderboard11
1010
+ dlc1_leaderboard12
1011
+ dlc1_leaderboard13
1012
+ dlc1_leaderboard14
1013
+ dlc1_leaderboard15
1014
+ dlc1_leaderboard16
1015
+ dlc1_leaderboard17
1016
+ dlc1_leaderboard18
1017
+ dlc1_leaderboard19
1018
+ dlc1_leaderboard20
1019
+ dlc1_leaderboard22
1020
+ dlc1_leaderboard23
1021
+ dlc1_mp_coop_2paints_1bridge_introb01
1022
+ dlc1_mp_coop_2paints_1bridge_outrob01
1023
+ dlc1_mp_coop_art_death_acid01
1024
+ dlc1_mp_coop_art_death_acid02
1025
+ dlc1_mp_coop_art_death_acid04
1026
+ dlc1_mp_coop_art_death_acid05
1027
+ dlc1_mp_coop_art_death_acid06
1028
+ dlc1_mp_coop_art_death_generic01
1029
+ dlc1_mp_coop_art_death_generic04
1030
+ dlc1_mp_coop_art_death_generic07
1031
+ dlc1_mp_coop_art_death_generic08
1032
+ dlc1_mp_coop_art_death_generic09
1033
+ dlc1_mp_coop_art_death_generic11
1034
+ dlc1_mp_coop_art_death_generic12
1035
+ dlc1_mp_coop_art_death_turret01
1036
+ dlc1_mp_coop_bridge_catch_intro04
1037
+ dlc1_mp_coop_bridge_catch_introb01
1038
+ dlc1_mp_coop_bridge_catch_introb02
1039
+ dlc1_mp_coop_bridge_catch_outrob01
1040
+ dlc1_mp_coop_bridge_catch_outrob02
1041
+ dlc1_mp_coop_bridge_catch_outrob03
1042
+ dlc1_mp_coop_bridge_catch_outrob04
1043
+ dlc1_mp_coop_catapult_catch_intro01
1044
+ dlc1_mp_coop_catapult_catch_outro02
1045
+ dlc1_mp_coop_ending_stinger16
1046
+ dlc1_mp_coop_ending_stinger17
1047
+ dlc1_mp_coop_ending_stinger18
1048
+ dlc1_mp_coop_ending_stinger19
1049
+ dlc1_mp_coop_ending_stinger20
1050
+ dlc1_mp_coop_ending_success07
1051
+ dlc1_mp_coop_ending_success13
1052
+ dlc1_mp_coop_finalgantry_morale01
1053
+ dlc1_mp_coop_finalgantry_morale03
1054
+ dlc1_mp_coop_finalgantry_morale04
1055
+ dlc1_mp_coop_finalgantry_morale07
1056
+ dlc1_mp_coop_laser_tbeam_intro03
1057
+ dlc1_mp_coop_laser_tbeam_outro01
1058
+ dlc1_mp_coop_paint_crazy_box_intro02
1059
+ dlc1_mp_coop_paint_crazy_box_intro03
1060
+ dlc1_mp_coop_paint_rat_maze_intro01
1061
+ dlc1_mp_coop_paint_rat_maze_intro03
1062
+ dlc1_mp_coop_paint_rat_maze_outro01
1063
+ dlc1_mp_coop_portal_bts_enterbreaker01
1064
+ dlc1_mp_coop_portal_bts_enterbreaker02
1065
+ dlc1_mp_coop_portal_bts_enteroffice01
1066
+ dlc1_mp_coop_portal_bts_entertest01
1067
+ dlc1_mp_coop_portal_bts_entertest02
1068
+ dlc1_mp_coop_portal_bts_inoffice01
1069
+ dlc1_mp_coop_portal_bts_inoffice02
1070
+ dlc1_mp_coop_portal_bts_inoffice04
1071
+ dlc1_mp_coop_portal_bts_inoffice05
1072
+ dlc1_mp_coop_portal_bts_inoffice06
1073
+ dlc1_mp_coop_portal_bts_inoffice07
1074
+ dlc1_mp_coop_portal_bts_inoffice08
1075
+ dlc1_mp_coop_portal_bts_inoffice09
1076
+ dlc1_mp_coop_portal_bts_inoffice11
1077
+ dlc1_mp_coop_portal_bts_inoffice13
1078
+ dlc1_mp_coop_portal_bts_inoffice14
1079
+ dlc1_mp_coop_portal_bts_outrob01
1080
+ dlc1_mp_coop_portal_bts_outrob03
1081
+ dlc1_mp_coop_portal_bts_outrob04
1082
+ dlc1_mp_coop_portal_bts_outrob05
1083
+ dlc1_mp_coop_portal_bts_outrob06
1084
+ dlc1_mp_coop_portal_bts_outrob07
1085
+ dlc1_mp_coop_separation_1_intro03
1086
+ dlc1_mp_coop_separation_1_intro04
1087
+ dlc1_mp_coop_separation_1_intro07
1088
+ dlc1_mp_coop_separation_1_intro08
1089
+ dlc1_mp_coop_separation_1_introb05
1090
+ dlc1_mp_coop_separation_1_introb06
1091
+ dlc1_mp_coop_separation_1_introb09
1092
+ dlc1_mp_coop_separation_1_outrob01
1093
+ dlc1_mp_coop_stingerb01
1094
+ dlc1_mp_coop_stingerb02
1095
+ dlc1_mp_coop_stingerb03
1096
+ dlc1_mp_coop_stingerb05
1097
+ dlc1_mp_coop_stingerb07
1098
+ dlc1_mp_coop_threat_death_acid01
1099
+ dlc1_mp_coop_threat_death_acid02
1100
+ dlc1_mp_coop_threat_death_generic02
1101
+ dlc1_mp_coop_threat_death_generic03
1102
+ dlc1_mp_coop_threat_death_generic04
1103
+ dlc1_mp_coop_threat_death_generic08
1104
+ dlc1_mp_coop_threat_death_generic10
1105
+ dlc1_mp_coop_threat_death_partnerkill01
1106
+ dlc1_mp_coop_threat_death_partnerkill04
1107
+ dlc1_mp_coop_threat_death_partnerkill05
1108
+ dlc1_mp_coop_threat_death_partnerkill07
1109
+ dlc1_mp_coop_tripleaxis_intro01
1110
+ dlc1_mp_coop_tripleaxis_outro01
metadata_DE.csv ADDED
The diff for this file is too large to render. See raw diff
 
raw/.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ *.wav
raw_bad/.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ *.wav
raw_good/.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ *.wav