id
int64
0
755k
file_name
stringlengths
3
109
file_path
stringlengths
13
185
content
stringlengths
31
9.38M
size
int64
31
9.38M
language
stringclasses
1 value
extension
stringclasses
11 values
total_lines
int64
1
340k
avg_line_length
float64
2.18
149k
max_line_length
int64
7
2.22M
alphanum_fraction
float64
0
1
repo_name
stringlengths
6
65
repo_stars
int64
100
47.3k
repo_forks
int64
0
12k
repo_open_issues
int64
0
3.4k
repo_license
stringclasses
9 values
repo_extraction_date
stringclasses
92 values
exact_duplicates_redpajama
bool
2 classes
near_duplicates_redpajama
bool
2 classes
exact_duplicates_githubcode
bool
2 classes
exact_duplicates_stackv2
bool
1 class
exact_duplicates_stackv1
bool
2 classes
near_duplicates_githubcode
bool
2 classes
near_duplicates_stackv1
bool
2 classes
near_duplicates_stackv2
bool
1 class
18,722
MfStructs.h
Const-me_Whisper/Whisper/API/MfStructs.h
#pragma once namespace Whisper { struct sCaptureDevice { // The display name is suitable for showing to the user, but might not be unique. const wchar_t* displayName; // Endpoint ID for an audio capture device // It uniquely identifies the device on the system, but is not a readable string. const wchar_t* endpoint; }; using pfnFoundCaptureDevices = HRESULT( __stdcall* )( int len, const sCaptureDevice* buffer, void* pv ); // Flags for the audio capture enum struct eCaptureFlags : uint32_t { // When the capture device supports stereo, keep stereo PCM samples in addition to mono Stereo = 1, }; // Parameters for audio capture struct sCaptureParams { float minDuration = 2.0f; float maxDuration = 3.0f; float dropStartSilence = 0.25f; float pauseDuration = 0.333f; // Flags for the audio capture uint32_t flags = 0; }; enum struct eCaptureStatus : uint8_t { Listening = 1, Voice = 2, Transcribing = 4, Stalled = 0x80, }; // Return S_OK to continue, or S_FALSE to stop the capture session using pfnShouldCancel = HRESULT( __stdcall* )( void* pv ) noexcept; using pfnCaptureStatus = HRESULT( __stdcall* )( void* pv, eCaptureStatus status ) noexcept; struct sCaptureCallbacks { pfnShouldCancel shouldCancel; pfnCaptureStatus captureStatus; void* pv; }; }
1,324
C++
.h
45
26.888889
105
0.741149
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,723
loggerApi.h
Const-me_Whisper/Whisper/API/loggerApi.h
#pragma once #include <stdint.h> namespace Whisper { // Log level for messages enum struct eLogLevel : uint8_t { Error = 0, Warning = 1, Info = 2, Debug = 3 }; enum struct eLoggerFlags : uint8_t { UseStandardError = 1, SkipFormatMessage = 2, }; // C function pointer to receive log messages from the library. The messages are encoded in UTF-8. using pfnLoggerSink = void( __stdcall* )( void* context, eLogLevel lvl, const char* message ); // A sink to receive log messages produced by MeshRepair.dll struct sLoggerSetup { // C function pointer to receive log messages from the library pfnLoggerSink sink = nullptr; // Optional context parameter for the sink function; when consuming from C# you don't need that, pass IntPtr.Zero, delegates can capture things. void* context = nullptr; // Maximum log level to produce eLogLevel level; // Flags about the logger eLoggerFlags flags = (eLoggerFlags)0; }; }
946
C++
.h
32
27.21875
146
0.741228
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,724
sLanguageList.h
Const-me_Whisper/Whisper/API/sLanguageList.h
#pragma once
#include <stdint.h>

namespace Whisper
{
	// A single entry of the language table
	struct sLanguageEntry
	{
		// Numeric key of the language; presumably packs the short language code — confirm against the table builder
		uint32_t key;
		// Integer ID of the language
		int id;
		// Name of the language, a C string
		const char* name;
	};

	// An immutable view over the complete language table
	struct sLanguageList
	{
		// Count of entries in the array
		uint32_t length;
		// Pointer to the first entry
		const sLanguageEntry* pointer;
	};
}
213
C++
.h
16
11.25
32
0.744898
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,725
sModelSetup.h
Const-me_Whisper/Whisper/API/sModelSetup.h
#pragma once #include <stdint.h> namespace Whisper { enum struct eModelImplementation : uint32_t { // GPGPU implementation based on Direct3D 11.0 compute shaders GPU = 1, // A hybrid implementation which uses DirectCompute for encode, and decodes on CPU // Not implemented in the published builds of the DLL. To enable, change BUILD_HYBRID_VERSION macro to 1 Hybrid = 2, // A reference implementation which uses the original GGML CPU-running code // Not implemented in the published builds of the DLL. To enable, change BUILD_BOTH_VERSIONS macro to 1 Reference = 3, }; enum struct eGpuModelFlags : uint32_t { Wave32 = 1, Wave64 = 2, NoReshapedMatMul = 4, UseReshapedMatMul = 8, Cloneable = 0x10, }; struct sModelSetup { eModelImplementation impl = eModelImplementation::GPU; uint32_t flags = 0; const wchar_t* adapter = nullptr; }; // Function pointer to enumerate GPUs using pfnListAdapters = void( __stdcall* )( const wchar_t* name, void* pv ); // Function pointer to receive array of tokens from iModel.tokenize() API method using pfnDecodedTokens = void( __stdcall* )( const int* tokens, int tokensLength, void* pv ); }
1,175
C++
.h
34
32.058824
106
0.748018
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,726
iMediaFoundation.cl.h
Const-me_Whisper/Whisper/API/iMediaFoundation.cl.h
#pragma once #include "../../ComLightLib/comLightCommon.h" #include "MfStructs.h" struct IMFSourceReader; namespace Whisper { struct DECLSPEC_NOVTABLE iAudioBuffer : public ComLight::IUnknown { DEFINE_INTERFACE_ID( "{013583aa-c9eb-42bc-83db-633c2c317051}" ); virtual uint32_t COMLIGHTCALL countSamples() const = 0; virtual const float* COMLIGHTCALL getPcmMono() const = 0; virtual const float* COMLIGHTCALL getPcmStereo() const = 0; virtual HRESULT COMLIGHTCALL getTime( int64_t& rdi ) const = 0; }; struct DECLSPEC_NOVTABLE iAudioReader : public ComLight::IUnknown { DEFINE_INTERFACE_ID( "{35b988da-04a6-476a-a193-d8891d5dc390}" ); virtual HRESULT COMLIGHTCALL getDuration( int64_t& rdi ) const = 0; virtual HRESULT COMLIGHTCALL getReader( IMFSourceReader** pp ) const = 0; virtual HRESULT COMLIGHTCALL requestedStereo() const = 0; }; struct DECLSPEC_NOVTABLE iAudioCapture : public ComLight::IUnknown { DEFINE_INTERFACE_ID( "{747752c2-d9fd-40df-8847-583c781bf013}" ); virtual HRESULT COMLIGHTCALL getReader( IMFSourceReader** pp ) const = 0; virtual const sCaptureParams& COMLIGHTCALL getParams() const = 0; }; struct DECLSPEC_NOVTABLE iMediaFoundation : public ComLight::IUnknown { DEFINE_INTERFACE_ID( "{fb9763a5-d77d-4b6e-aff8-f494813cebd8}" ); virtual HRESULT COMLIGHTCALL loadAudioFile( LPCTSTR path, bool stereo, iAudioBuffer** pp ) const = 0; virtual HRESULT COMLIGHTCALL openAudioFile( LPCTSTR path, bool stereo, iAudioReader** pp ) = 0; virtual HRESULT COMLIGHTCALL loadAudioFileData( const void* data, uint64_t size, bool stereo, iAudioReader** pp ) = 0; virtual HRESULT COMLIGHTCALL listCaptureDevices( pfnFoundCaptureDevices pfn, void* pv ) = 0; virtual HRESULT COMLIGHTCALL openCaptureDevice( LPCTSTR endpoint, const sCaptureParams& captureParams, iAudioCapture** pp ) = 0; }; HRESULT COMLIGHTCALL initMediaFoundation( iMediaFoundation** pp ); }
1,920
C++
.h
38
47.973684
130
0.77938
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,727
ggml.h
Const-me_Whisper/Whisper/source/ggml.h
#pragma once // // GGML Tensor Library // // This documentation is still a work in progress. // If you wish some specific topics to be covered, feel free to drop a comment: // // https://github.com/ggerganov/whisper.cpp/issues/40 // // ## Overview // // This library implements: // // - a set of tensor operations // - automatic differentiation // - basic optimization algorithms // // The aim of this library is to provide a minimalistic approach for various machine learning tasks. This includes, // but is not limited to, the following: // // - linear regression // - support vector machines // - neural networks // // The library allows the user to define a certain function using the available tensor operations. This function // definition is represented internally via a computation graph. Each tensor operation in the function definition // corresponds to a node in the graph. Having the computation graph defined, the user can choose to compute the // function's value and/or its gradient with respect to the input variables. Optionally, the function can be optimized // using one of the available optimization algorithms. // // For example, here we define the function: f(x) = a*x^2 + b // // { // struct ggml_init_params params = { // .mem_size = 16*1024*1024, // .mem_buffer = NULL, // }; // // // memory allocation happens here // struct ggml_context * ctx = ggml_init(params); // // struct ggml_tensor * x = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); // // ggml_set_param(ctx, x); // x is an input variable // // struct ggml_tensor * a = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); // struct ggml_tensor * b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); // struct ggml_tensor * x2 = ggml_mul(ctx, x, x); // struct ggml_tensor * f = ggml_add(ctx, ggml_mul(ctx, a, x2), b); // // ... // } // // Notice that the function definition above does not involve any actual computation. The computation is performed only // when the user explicitly requests it. 
For example, to compute the function's value at x = 2.0: // // { // ... // // struct ggml_cgraph gf = ggml_build_forward(f); // // // set the input variable and parameter values // ggml_set_f32(x, 2.0f); // ggml_set_f32(a, 3.0f); // ggml_set_f32(b, 4.0f); // // ggml_graph_compute(ctx0, &gf); // // printf("f = %f\n", ggml_get_f32_1d(f, 0)); // // ... // } // // The actual computation is performed in the ggml_graph_compute() function. // // The ggml_new_tensor_...() functions create new tensors. They are allocated in the memory buffer provided to the // ggml_init() function. You have to be careful not to exceed the memory buffer size. Therefore, you have to know // in advance how much memory you need for your computation. Alternatively, you can allocate a large enough memory // and after defining the computation graph, call the ggml_used_mem() function to find out how much memory was // actually needed. // // The ggml_set_param() function marks a tensor as an input variable. This is used by the automatic // differentiation and optimization algorithms. // // The described approach allows to define the function graph once and then compute its forward or backward graphs // multiple times. All computations will use the same memory buffer allocated in the ggml_init() function. This way // the user can avoid the memory allocation overhead at runtime. // // The library supports multi-dimensional tensors - up to 4 dimensions. The FP16 and FP32 data types are first class // citizens, but in theory the library can be extended to support FP8 and integer data types. // // Each tensor operation produces a new tensor. Initially the library was envisioned to support only the use of unary // and binary operations. Most of the available operations fall into one of these two categories. With time, it became // clear that the library needs to support more complex operations. 
The way to support these operations is not clear // yet, but a few examples are demonstrated in the following operations: // // - ggml_permute() // - ggml_conv_1d_1s() // - ggml_conv_1d_2s() // // For each tensor operator, the library implements a forward and backward computation function. The forward function // computes the output tensor value given the input tensor values. The backward function computes the adjoint of the // input tensors given the adjoint of the output tensor. For a detailed explanation of what this means, take a // calculus class, or watch the following video: // // What is Automatic Differentiation? // https://www.youtube.com/watch?v=wG_nF1awSSY // // // ## Tensor data (struct ggml_tensor) // // The tensors are stored in memory via the ggml_tensor struct. The structure provides information about the size of // the tensor, the data type, and the memory buffer where the tensor data is stored. Additionally, it contains // pointers to the "source" tensors - i.e. the tensors that were used to compute the current tensor. For example: // // { // struct ggml_tensor * c = ggml_add(ctx, a, b); // // assert(c->src[0] == a); // assert(c->src[1] == b); // } // // The multi-dimensional tensors are stored in row-major order. The ggml_tensor struct contains fields for the // number of elements in each dimension ("ne") as well as the number of bytes ("nb", a.k.a. stride). This allows // to store tensors that are not contiguous in memory, which is useful for operations such as transposition and // permutation. All tensor operations have to take the stride into account and not assume that the tensor is // contiguous in memory. // // The data of the tensor is accessed via the "data" pointer. 
For example: // // { // struct ggml_tensor * a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, 2, 3); // // // a[1, 2] = 1.0f; // *(float *) ((char *) a->data + 2*a->nb[1] + 1*a->nb[0]) = 1.0f; // // // a[2, 0] = 2.0f; // *(float *) ((char *) a->data + 0*a->nb[1] + 2*a->nb[0]) = 2.0f; // // ... // } // // Alternatively, there are helper functions, such as ggml_get_f32_1d() and ggml_set_f32_1d() that can be used. // // ## The matrix multiplication operator (ggml_mul_mat) // // TODO // // // ## Multi-threading // // TODO // // // ## Overview of ggml.c // // TODO // // // ## SIMD optimizations // // TODO // // // ## Debugging ggml // // TODO // // #ifdef __cplusplus extern "C" { #endif #include <stdint.h> #include <stddef.h> #include <stdbool.h> #define GGML_MAX_DIMS 4 #define GGML_MAX_NODES 4096 #define GGML_MAX_PARAMS 16 #define GGML_MAX_CONTEXTS 64 #define GGML_MAX_OPT 4 #ifdef __ARM_NEON // we use the built-in 16-bit float type typedef __fp16 ggml_fp16_t; #else typedef uint16_t ggml_fp16_t; #endif // convert FP16 <-> FP32 float ggml_fp16_to_fp32(ggml_fp16_t x); ggml_fp16_t ggml_fp32_to_fp16(float x); struct ggml_object; struct ggml_context; enum ggml_type { GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, GGML_TYPE_F16, GGML_TYPE_F32, GGML_TYPE_COUNT, }; // available tensor operations: enum ggml_op { GGML_OP_NONE = 0, GGML_OP_DUP, GGML_OP_ADD, GGML_OP_SUB, GGML_OP_MUL, GGML_OP_DIV, GGML_OP_SQR, GGML_OP_SQRT, GGML_OP_SUM, GGML_OP_MEAN, GGML_OP_REPEAT, GGML_OP_ABS, GGML_OP_SGN, GGML_OP_NEG, GGML_OP_STEP, GGML_OP_RELU, GGML_OP_GELU, GGML_OP_NORM, // normalize GGML_OP_MUL_MAT, GGML_OP_SCALE, GGML_OP_CPY, GGML_OP_RESHAPE, GGML_OP_VIEW, GGML_OP_PERMUTE, GGML_OP_TRANSPOSE, GGML_OP_GET_ROWS, GGML_OP_DIAG_MASK_INF, GGML_OP_SOFT_MAX, GGML_OP_ROPE, GGML_OP_CONV_1D_1S, GGML_OP_CONV_1D_2S, GGML_OP_FLASH_ATTN, GGML_OP_FLASH_FF, GGML_OP_COUNT, }; // n-dimensional tensor struct ggml_tensor { enum ggml_type type; int n_dims; int ne[GGML_MAX_DIMS]; // number of elements size_t 
nb[GGML_MAX_DIMS]; // stride in bytes: // nb[0] = sizeof(type) // nb[1] = nb[0] * ne[0] + padding // nb[i] = nb[i-1] * ne[i-1] // compute data enum ggml_op op; bool is_param; struct ggml_tensor * grad; struct ggml_tensor * src0; struct ggml_tensor * src1; struct ggml_tensor * opt[GGML_MAX_OPT]; // thread scheduling int n_tasks; // performance int perf_runs; int64_t perf_cycles; int64_t perf_time_us; void * data; char padding[8]; }; // computation graph struct ggml_cgraph { int n_nodes; int n_leafs; int n_threads; size_t work_size; struct ggml_tensor * work; struct ggml_tensor * nodes[GGML_MAX_NODES]; struct ggml_tensor * grads[GGML_MAX_NODES]; struct ggml_tensor * leafs[GGML_MAX_NODES]; // performance int perf_runs; int64_t perf_cycles; int64_t perf_time_us; }; struct ggml_init_params { // memory pool size_t mem_size; // bytes void * mem_buffer; // if NULL, memory will be allocated internally }; void ggml_time_init(void); // call this once at the beginning of the program int64_t ggml_time_ms(void); int64_t ggml_time_us(void); int64_t ggml_cycles(void); int64_t ggml_cycles_per_ms(void); void ggml_print_object (const struct ggml_object * obj); void ggml_print_objects(const struct ggml_context * ctx); int ggml_nelements(const struct ggml_tensor * tensor); size_t ggml_nbytes (const struct ggml_tensor * tensor); size_t ggml_type_size (enum ggml_type type); size_t ggml_element_size(const struct ggml_tensor * tensor); struct ggml_context * ggml_init(struct ggml_init_params params); void ggml_free(struct ggml_context * ctx); size_t ggml_used_mem(const struct ggml_context * ctx); struct ggml_tensor * ggml_new_tensor( struct ggml_context * ctx, enum ggml_type type, int n_dims, const int *ne); struct ggml_tensor * ggml_new_tensor_1d( struct ggml_context * ctx, enum ggml_type type, int ne0); struct ggml_tensor * ggml_new_tensor_2d( struct ggml_context * ctx, enum ggml_type type, int ne0, int ne1); struct ggml_tensor * ggml_new_tensor_3d( struct ggml_context * ctx, enum 
ggml_type type, int ne0, int ne1, int ne2); struct ggml_tensor * ggml_new_tensor_4d( struct ggml_context * ctx, enum ggml_type type, int ne0, int ne1, int ne2, int ne3); struct ggml_tensor * ggml_new_i32(struct ggml_context * ctx, int32_t value); struct ggml_tensor * ggml_new_f32(struct ggml_context * ctx, float value); struct ggml_tensor * ggml_dup_tensor (struct ggml_context * ctx, const struct ggml_tensor * src); struct ggml_tensor * ggml_view_tensor(struct ggml_context * ctx, const struct ggml_tensor * src); struct ggml_tensor * ggml_set_zero(struct ggml_tensor * tensor); struct ggml_tensor * ggml_set_i32 (struct ggml_tensor * tensor, int32_t value); struct ggml_tensor * ggml_set_f32 (struct ggml_tensor * tensor, float value); int32_t ggml_get_i32_1d(const struct ggml_tensor * tensor, int i); void ggml_set_i32_1d(const struct ggml_tensor * tensor, int i, int32_t value); float ggml_get_f32_1d(const struct ggml_tensor * tensor, int i); void ggml_set_f32_1d(const struct ggml_tensor * tensor, int i, float value); void * ggml_get_data (const struct ggml_tensor * tensor); float * ggml_get_data_f32(const struct ggml_tensor * tensor); // // operations on tensors with backpropagation // struct ggml_tensor * ggml_dup( struct ggml_context * ctx, struct ggml_tensor * a); struct ggml_tensor * ggml_add( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); struct ggml_tensor * ggml_sub( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); struct ggml_tensor * ggml_mul( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); struct ggml_tensor * ggml_div( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); struct ggml_tensor * ggml_sqr( struct ggml_context * ctx, struct ggml_tensor * a); struct ggml_tensor * ggml_sqrt( struct ggml_context * ctx, struct ggml_tensor * a); // return scalar // TODO: compute sum along rows struct ggml_tensor * ggml_sum( struct ggml_context * ctx, struct 
ggml_tensor * a); // mean along rows struct ggml_tensor * ggml_mean( struct ggml_context * ctx, struct ggml_tensor * a); // if a is the same shape as b, and a is not parameter, return a // otherwise, return a new tensor: repeat(a) to fit in b struct ggml_tensor * ggml_repeat( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); struct ggml_tensor * ggml_abs( struct ggml_context * ctx, struct ggml_tensor * a); struct ggml_tensor * ggml_sgn( struct ggml_context * ctx, struct ggml_tensor * a); struct ggml_tensor * ggml_neg( struct ggml_context * ctx, struct ggml_tensor * a); struct ggml_tensor * ggml_step( struct ggml_context * ctx, struct ggml_tensor * a); struct ggml_tensor * ggml_relu( struct ggml_context * ctx, struct ggml_tensor * a); // TODO: double-check this computation is correct struct ggml_tensor * ggml_gelu( struct ggml_context * ctx, struct ggml_tensor * a); // normalize along rows // TODO: eps is hardcoded to 1e-5 for now struct ggml_tensor * ggml_norm( struct ggml_context * ctx, struct ggml_tensor * a); // A: m rows, n columns // B: p rows, n columns (i.e. 
we transpose it internally) // result is m columns, p rows struct ggml_tensor * ggml_mul_mat( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); // // operations on tensors without backpropagation // // in-place, returns view(a) struct ggml_tensor * ggml_scale( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); // a -> b, return view(b) struct ggml_tensor * ggml_cpy( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); // return view(a), b specifies the new shape // TODO: when we start computing gradient, make a copy instead of view struct ggml_tensor * ggml_reshape( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); // return view(a) // TODO: when we start computing gradient, make a copy instead of view struct ggml_tensor * ggml_reshape_2d( struct ggml_context * ctx, struct ggml_tensor * a, int ne0, int ne1); // return view(a) // TODO: when we start computing gradient, make a copy instead of view struct ggml_tensor * ggml_reshape_3d( struct ggml_context * ctx, struct ggml_tensor * a, int ne0, int ne1, int ne2); // offset in bytes struct ggml_tensor * ggml_view_1d( struct ggml_context * ctx, struct ggml_tensor * a, int ne0, size_t offset); struct ggml_tensor * ggml_view_2d( struct ggml_context * ctx, struct ggml_tensor * a, int ne0, int ne1, size_t nb1, // row stride in bytes size_t offset); struct ggml_tensor * ggml_permute( struct ggml_context * ctx, struct ggml_tensor * a, int axis0, int axis1, int axis2, int axis3); // alias for ggml_permute(ctx, a, 1, 0, 2, 3) struct ggml_tensor * ggml_transpose( struct ggml_context * ctx, struct ggml_tensor * a); struct ggml_tensor * ggml_get_rows( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); // set elements above the diagonal to -INF // in-place, returns view(a) struct ggml_tensor * ggml_diag_mask_inf( struct ggml_context * ctx, struct ggml_tensor * a, int n_past); // in-place, returns view(a) 
struct ggml_tensor * ggml_soft_max( struct ggml_context * ctx, struct ggml_tensor * a); // rotary position embedding // in-place, returns view(a) // if mode == 1, skip n_past elements // TODO: avoid creating a new tensor every time struct ggml_tensor * ggml_rope( struct ggml_context * ctx, struct ggml_tensor * a, int n_past, int n_dims, int mode); // padding = 1 // TODO: we don't support extra parameters for now // that's why we are hard-coding the stride, padding, and dilation // not great .. struct ggml_tensor * ggml_conv_1d_1s( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); struct ggml_tensor * ggml_conv_1d_2s( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b); struct ggml_tensor * ggml_flash_attn( struct ggml_context * ctx, struct ggml_tensor * q, struct ggml_tensor * k, struct ggml_tensor * v, bool masked); struct ggml_tensor * ggml_flash_ff( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b0, struct ggml_tensor * b1, struct ggml_tensor * c0, struct ggml_tensor * c1); // // automatic differentiation // void ggml_set_param( struct ggml_context * ctx, struct ggml_tensor * tensor); void ggml_build_forward_expand(struct ggml_cgraph * cgraph, struct ggml_tensor * tensor); struct ggml_cgraph ggml_build_forward (struct ggml_tensor * tensor); struct ggml_cgraph ggml_build_backward(struct ggml_context * ctx, struct ggml_cgraph * gf, bool keep); void ggml_graph_compute(struct ggml_context * ctx, struct ggml_cgraph * cgraph); void ggml_graph_reset (struct ggml_cgraph * cgraph); // print info and performance information for the graph void ggml_graph_print(const struct ggml_cgraph * cgraph); // dump the graph into a file using the dot format void ggml_graph_dump_dot(const struct ggml_cgraph * gb, const struct ggml_cgraph * gf, const char * filename); // // optimization // // optimization methods enum ggml_opt_type { GGML_OPT_ADAM, GGML_OPT_LBFGS, }; // linesearch methods enum ggml_linesearch { 
GGML_LINESEARCH_DEFAULT = 1, GGML_LINESEARCH_BACKTRACKING_ARMIJO = 0, GGML_LINESEARCH_BACKTRACKING_WOLFE = 1, GGML_LINESEARCH_BACKTRACKING_STRONG_WOLFE = 2, }; // optimization return values enum ggml_opt_result { GGML_OPT_OK = 0, GGML_OPT_DID_NOT_CONVERGE, GGML_OPT_NO_CONTEXT, GGML_OPT_INVALID_WOLFE, GGML_OPT_FAIL, GGML_LINESEARCH_FAIL = -128, GGML_LINESEARCH_MINIMUM_STEP, GGML_LINESEARCH_MAXIMUM_STEP, GGML_LINESEARCH_MAXIMUM_ITERATIONS, GGML_LINESEARCH_INVALID_PARAMETERS, }; // optimization parameters // // see ggml.c (ggml_opt_default_params) for default values // struct ggml_opt_params { enum ggml_opt_type type; int n_threads; // delta-based convergence test // // if past == 0 - disabled // if past > 0: // stop if |f(x) - f(x_past)| < delta * max(1, |f(x)|) // int past; float delta; // maximum number of iterations without improvement // // if 0 - disabled // if > 0: // assume convergence if no cost improvement in this number of iterations // int max_no_improvement; bool print_forward_graph; bool print_backward_graph; // ADAM parameters struct { int n_iter; float alpha; // learning rate float beta1; float beta2; float eps; // epsilon for numerical stability float eps_f; // epsilon for convergence test float eps_g; // epsilon for convergence test } adam; // LBFGS parameters struct { int m; // number of corrections to approximate the inv. 
Hessian int n_iter; int max_linesearch; float eps; // convergence tolerance float ftol; // line search tolerance float wolfe; float min_step; float max_step; enum ggml_linesearch linesearch; } lbfgs; }; struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type); // optimize the function defined by the tensor f enum ggml_opt_result ggml_opt( struct ggml_context * ctx, struct ggml_opt_params params, struct ggml_tensor * f); // // system info // int ggml_cpu_has_avx(void); int ggml_cpu_has_avx2(void); int ggml_cpu_has_avx512(void); int ggml_cpu_has_fma(void); int ggml_cpu_has_neon(void); int ggml_cpu_has_arm_fma(void); int ggml_cpu_has_f16c(void); int ggml_cpu_has_fp16_va(void); int ggml_cpu_has_wasm_simd(void); int ggml_cpu_has_blas(void); #ifdef __cplusplus } #endif
21,451
C++
.h
628
30.183121
119
0.651492
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,728
whisper.h
Const-me_Whisper/Whisper/source/whisper.h
#ifndef WHISPER_H #define WHISPER_H #include <stdint.h> #include <stdbool.h> #ifdef WHISPER_SHARED # ifdef _WIN32 # ifdef WHISPER_BUILD # define WHISPER_API __declspec(dllexport) # else # define WHISPER_API __declspec(dllimport) # endif # else # define WHISPER_API __attribute__ ((visibility ("default"))) # endif #else # define WHISPER_API #endif #define WHISPER_SAMPLE_RATE 16000 #define WHISPER_N_FFT 400 #define WHISPER_N_MEL 80 #define WHISPER_HOP_LENGTH 160 #define WHISPER_CHUNK_SIZE 30 #ifdef __cplusplus extern "C" { #endif // // C interface // // The following interface is thread-safe as long as the sample whisper_context is not used by multiple threads // concurrently. // // Basic usage: // // #include "whisper.h" // // ... // // struct whisper_context * ctx = whisper_init("/path/to/ggml-base.en.bin"); // // if (whisper_full(ctx, wparams, pcmf32.data(), pcmf32.size()) != 0) { // fprintf(stderr, "failed to process audio\n"); // return 7; // } // // const int n_segments = whisper_full_n_segments(ctx); // for (int i = 0; i < n_segments; ++i) { // const char * text = whisper_full_get_segment_text(ctx, i); // printf("%s", text); // } // // whisper_free(ctx); // // ... // // This is a demonstration of the most straightforward usage of the library. // "pcmf32" contains the RAW audio data in 32-bit floating point format. // // The interface also allows for more fine-grained control over the computation, but it requires a deeper // understanding of how the model works. 
// struct whisper_context; typedef int whisper_token; typedef struct whisper_token_data { whisper_token id; // token id whisper_token tid; // forced timestamp token id float p; // probability of the token float pt; // probability of the timestamp token float ptsum; // sum of probabilities of all timestamp tokens // token-level timestamp data // do not use if you haven't computed token-level timestamps int64_t t0; // start time of the token int64_t t1; // end time of the token float vlen; // voice length of the token } whisper_token_data; // Allocates all memory needed for the model and loads the model from the given file. // Returns NULL on failure. WHISPER_API struct whisper_context * whisper_init(const char * path_model); // Frees all memory allocated by the model. WHISPER_API void whisper_free(struct whisper_context * ctx); // Convert RAW PCM audio to log mel spectrogram. // The resulting spectrogram is stored inside the provided whisper context. // Returns 0 on success WHISPER_API int whisper_pcm_to_mel( struct whisper_context * ctx, const float * samples, int n_samples, int n_threads); // This can be used to set a custom log mel spectrogram inside the provided whisper context. // Use this instead of whisper_pcm_to_mel() if you want to provide your own log mel spectrogram. // n_mel must be 80 // Returns 0 on success WHISPER_API int whisper_set_mel( struct whisper_context * ctx, const float * data, int n_len, int n_mel); // Run the Whisper encoder on the log mel spectrogram stored inside the provided whisper context. // Make sure to call whisper_pcm_to_mel() or whisper_set_mel() first. // offset can be used to specify the offset of the first frame in the spectrogram. // Returns 0 on success WHISPER_API int whisper_encode( struct whisper_context * ctx, int offset, int n_threads); // Run the Whisper decoder to obtain the logits and probabilities for the next token. // Make sure to call whisper_encode() first. 
// tokens + n_tokens is the provided context for the decoder. // n_past is the number of tokens to use from previous decoder calls. // Returns 0 on success WHISPER_API int whisper_decode( struct whisper_context * ctx, const whisper_token * tokens, int n_tokens, int n_past, int n_threads); // Token sampling methods. // These are provided for convenience and can be used after each call to whisper_decode(). // You can also implement your own sampling method using the whisper_get_probs() function. // whisper_sample_best() returns the token with the highest probability // whisper_sample_timestamp() returns the most probable timestamp token WHISPER_API whisper_token_data whisper_sample_best(struct whisper_context * ctx); WHISPER_API whisper_token_data whisper_sample_timestamp(struct whisper_context * ctx, bool is_initial); // Convert the provided text into tokens. // The tokens pointer must be large enough to hold the resulting tokens. // Returns the number of tokens on success, no more than n_max_tokens // Returns -1 on failure // TODO: not sure if correct WHISPER_API int whisper_tokenize( struct whisper_context * ctx, const char * text, whisper_token * tokens, int n_max_tokens); // Largest language id (i.e. number of available languages - 1) WHISPER_API int whisper_lang_max_id(); // Return the id of the specified language, returns -1 if not found // Examples: // "de" -> 2 // "german" -> 2 WHISPER_API int whisper_lang_id(const char * lang); // Return the short string of the specified language id (e.g. 
2 -> "de"), returns nullptr if not found WHISPER_API const char * whisper_lang_str(int id); // Use mel data at offset_ms to try and auto-detect the spoken language // Make sure to call whisper_pcm_to_mel() or whisper_set_mel() first // Returns the top language id or negative on failure // If not null, fills the lang_probs array with the probabilities of all languages // The array must be whispe_lang_max_id() + 1 in size // ref: https://github.com/openai/whisper/blob/main/whisper/decoding.py#L18-L69 WHISPER_API int whisper_lang_auto_detect( struct whisper_context * ctx, int offset_ms, int n_threads, float * lang_probs); WHISPER_API int whisper_n_len (struct whisper_context * ctx); // mel length WHISPER_API int whisper_n_vocab (struct whisper_context * ctx); WHISPER_API int whisper_n_text_ctx (struct whisper_context * ctx); WHISPER_API int whisper_is_multilingual(struct whisper_context * ctx); // The probabilities for the next token WHISPER_API float * whisper_get_probs(struct whisper_context * ctx); // Token Id -> String. 
Uses the vocabulary in the provided context WHISPER_API const char * whisper_token_to_str(struct whisper_context * ctx, whisper_token token); // Special tokens WHISPER_API whisper_token whisper_token_eot (struct whisper_context * ctx); WHISPER_API whisper_token whisper_token_sot (struct whisper_context * ctx); WHISPER_API whisper_token whisper_token_prev(struct whisper_context * ctx); WHISPER_API whisper_token whisper_token_solm(struct whisper_context * ctx); WHISPER_API whisper_token whisper_token_not (struct whisper_context * ctx); WHISPER_API whisper_token whisper_token_beg (struct whisper_context * ctx); WHISPER_API whisper_token whisper_token_lang(struct whisper_context * ctx, int lang_id); // Task tokens WHISPER_API whisper_token whisper_token_translate (void); WHISPER_API whisper_token whisper_token_transcribe(void); // Performance information WHISPER_API void whisper_print_timings(struct whisper_context * ctx); WHISPER_API void whisper_reset_timings(struct whisper_context * ctx); // Print system information WHISPER_API const char * whisper_print_system_info(void); //////////////////////////////////////////////////////////////////////////// // Available sampling strategies enum whisper_sampling_strategy { WHISPER_SAMPLING_GREEDY, // Always select the most probable token WHISPER_SAMPLING_BEAM_SEARCH, // TODO: not implemented yet! 
}; // Text segment callback // Called on every newly generated text segment // Use the whisper_full_...() functions to obtain the text segments typedef void (*whisper_new_segment_callback)(struct whisper_context * ctx, int n_new, void * user_data); // Encoder begin callback // If not NULL, called before the encoder starts // If it returns false, the computation is aborted typedef bool (*whisper_encoder_begin_callback)(struct whisper_context * ctx, void * user_data); // Parameters for the whisper_full() function // If you chnage the order or add new parameters, make sure to update the default values in whisper.cpp: // whisper_full_default_params() struct whisper_full_params { enum whisper_sampling_strategy strategy; int n_threads; int n_max_text_ctx; int offset_ms; // start offset in ms int duration_ms; // audio duration to process in ms bool translate; bool no_context; bool single_segment; // force single segment output (useful for streaming) bool print_special; bool print_progress; bool print_realtime; bool print_timestamps; // [EXPERIMENTAL] token-level timestamps bool token_timestamps; // enable token-level timestamps float thold_pt; // timestamp token probability threshold (~0.01) float thold_ptsum; // timestamp token sum probability threshold (~0.01) int max_len; // max segment length in characters int max_tokens; // max tokens per segment (0 = no limit) // [EXPERIMENTAL] speed-up techniques bool speed_up; // speed-up the audio by 2x using Phase Vocoder int audio_ctx; // overwrite the audio context size (0 = use default) // tokens to provide the whisper model as initial prompt // these are prepended to any existing text context from a previous call const whisper_token * prompt_tokens; int prompt_n_tokens; // for auto-detection, set to nullptr, "" or "auto" const char * language; struct { int n_past; } greedy; struct { int n_past; int beam_width; int n_best; } beam_search; whisper_new_segment_callback new_segment_callback; void * new_segment_callback_user_data; 
whisper_encoder_begin_callback encoder_begin_callback; void * encoder_begin_callback_user_data; }; WHISPER_API struct whisper_full_params whisper_full_default_params(enum whisper_sampling_strategy strategy); // Run the entire model: PCM -> log mel spectrogram -> encoder -> decoder -> text // Uses the specified decoding strategy to obtain the text. WHISPER_API int whisper_full( struct whisper_context * ctx, struct whisper_full_params params, const float * samples, int n_samples); // Split the input audio in chunks and process each chunk separately using whisper_full() // It seems this approach can offer some speedup in some cases. // However, the transcription accuracy can be worse at the beginning and end of each chunk. WHISPER_API int whisper_full_parallel( struct whisper_context * ctx, struct whisper_full_params params, const float * samples, int n_samples, int n_processors); // Number of generated text segments. // A segment can be a few words, a sentence, or even a paragraph. WHISPER_API int whisper_full_n_segments(struct whisper_context * ctx); // Get the start and end time of the specified segment. WHISPER_API int64_t whisper_full_get_segment_t0(struct whisper_context * ctx, int i_segment); WHISPER_API int64_t whisper_full_get_segment_t1(struct whisper_context * ctx, int i_segment); // Get the text of the specified segment. WHISPER_API const char * whisper_full_get_segment_text(struct whisper_context * ctx, int i_segment); // Get number of tokens in the specified segment. WHISPER_API int whisper_full_n_tokens(struct whisper_context * ctx, int i_segment); // Get the token text of the specified token in the specified segment. WHISPER_API const char * whisper_full_get_token_text(struct whisper_context * ctx, int i_segment, int i_token); WHISPER_API whisper_token whisper_full_get_token_id (struct whisper_context * ctx, int i_segment, int i_token); // Get token data for the specified token in the specified segment. // This contains probabilities, timestamps, etc. 
WHISPER_API whisper_token_data whisper_full_get_token_data(struct whisper_context * ctx, int i_segment, int i_token); // Get the probability of the specified token in the specified segment. WHISPER_API float whisper_full_get_token_p(struct whisper_context * ctx, int i_segment, int i_token); #ifdef __cplusplus } #endif #endif
13,919
C++
.h
273
42.981685
121
0.643756
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,729
HybridContext.h
Const-me_Whisper/Whisper/Hybrid/HybridContext.h
#pragma once #include "../Whisper/WhisperModel.h" #include "../CPU/MlContext.h" #include "../CPU/BufferAllocator.h" #include "KeyValueDownloader.h" #include "../CPU/KvTensors.h" // This version of the hybrid context uses the new, custom-built kernels class HybridContext { CpuCompute::MlContext ml; CpuCompute::VirtualAllocator allocCompute, allocComputeLayer; class AllocSingle : public CpuCompute::iArenaAllocator { CpuCompute::LargeBuffer buffer; size_t capacity = 0; bool allocated = false; // Inherited via iArenaAllocator virtual void* allocate( size_t cb, size_t align ) override final; public: virtual void resetArena() override final; }; AllocSingle allocLayerOutput; const CpuCompute::DecoderTensors& model; const Whisper::WhisperModel& whisperModel; KeyValueDownloader kvCross; CpuCompute::KvTensors kv; class SetAllocatorRaii; public: HybridContext( const Whisper::WhisperModel& wm ); HRESULT create(); HRESULT downloadKeyValues( const DirectCompute::KeyValueBuffers& source ) { return kvCross.download( source ); } struct sDecParams { int n_threads; int M; }; HRESULT decode( const int* tokens, const int n_tokens, const int n_past, const sDecParams& dp, std::vector<float>& probs_out ); };
1,254
C++
.h
41
28.365854
128
0.781198
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,730
KeyValueDownloader.h
Const-me_Whisper/Whisper/Hybrid/KeyValueDownloader.h
#pragma once #include "../Whisper/sModelParams.h" #include "../Whisper/KeyValueBuffers.h" #include "../D3D/MappedResource.h" #include "../CPU/Tensor.h" class KeyValueDownloader { CComPtr<ID3D11Buffer> keys, values; uint32_t length = 0; using E = uint16_t; static constexpr DirectCompute::eDataType dataType = DirectCompute::eDataType::FP16; public: // Create the staging resources to download kvCross tensors produced by the GPGPU encoder HRESULT create( const Whisper::sModelParams& mp ); // Download these two tensors from VRAM to the staging buffers in system RAM HRESULT download( const DirectCompute::KeyValueBuffers& source ); class ReadMap { const uint32_t length; DirectCompute::MappedResource mappedKeys, mappedValues; public: ReadMap( KeyValueDownloader& owner ); ~ReadMap() = default; ReadMap( const ReadMap& ) = delete; // A slice of model.memory_k tensor CpuCompute::Tensor keysView( uint32_t len, uint32_t off ) const { if( len + off <= length ) { E* rsi = (E*)mappedKeys.data(); rsi += off; return CpuCompute::Tensor::fromData( rsi, dataType, len ); } throw E_BOUNDS; } // A slice of model.memory_v tensor CpuCompute::Tensor valuesView( uint32_t len, uint32_t off ) const { if( len + off <= length ) { E* rsi = (E*)mappedValues.data(); rsi += off; return CpuCompute::Tensor::fromData( rsi, dataType, len ); } throw E_BOUNDS; } }; // Map both staging buffers, return RAII object which unmaps when destroyed, // which can supply the data in the shape of CpuCompute::Tensor vector decltype( auto ) map() { return ReadMap( *this ); } };
1,649
C++
.h
54
27.666667
90
0.717706
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,731
ProfileCollection.h
Const-me_Whisper/Whisper/Utils/ProfileCollection.h
#pragma once #include <atlcoll.h> #include "CpuProfiler.h" namespace DirectCompute { enum struct eComputeShader : uint16_t; enum struct eProfilerBlock : uint16_t; } namespace Whisper { struct WhisperModel; enum struct eCpuBlock : uint8_t { LoadModel, RunComplete, Run, Callbacks, Spectrogram, Sample, VAD, Encode, Decode, DecodeStep, DecodeLayer, }; class ProfileCollection { public: ProfileCollection( const WhisperModel& model ); struct Measure { size_t count = 0; // 100-nanosecond ticks uint64_t totalTicks = 0; void reset() { count = 0; totalTicks = 0; } void print( const char* name ) const; void add( uint64_t val ) { count++; totalTicks += val; } }; Measure& measure( DirectCompute::eProfilerBlock which ); Measure& measure( DirectCompute::eComputeShader which ); Measure& measure( eCpuBlock which ); #if PROFILER_COLLECT_TAGS Measure& measure( DirectCompute::eComputeShader which, uint16_t tag ); #endif void print(); void reset(); class CpuRaii { Measure* dest; const int64_t tsc; public: CpuRaii( Measure& m ) : dest( &m ), tsc( tscNow() ) { } CpuRaii( const CpuRaii& ) = delete; CpuRaii( CpuRaii&& that ) noexcept : tsc( that.tsc ) { dest = that.dest; that.dest = nullptr; } ~CpuRaii() { if( nullptr != dest ) { const int64_t elapsed = tscNow() - tsc; dest->add( ticksFromTsc( elapsed ) ); } } }; decltype( auto ) cpuBlock( eCpuBlock which ) { return CpuRaii{ measure( which ) }; } uint16_t makeTagId( const char* tag ); private: CAtlMap<uint32_t, Measure> measures; CComAutoCriticalSection critSec; #if PROFILER_COLLECT_TAGS CAtlMap<const char*, uint16_t> tagIDs; std::vector<const char*> tagNames; CAtlMap<uint32_t, Measure> taggedShaders; std::vector<uint32_t> taggedKeysTemp; struct TaggedTemp { uint64_t ticks; size_t count; const char* name; bool operator<( const TaggedTemp& that ) const { // Flipping the comparison to sort in descending order return ticks > that.ticks; } void print() const; }; std::vector<TaggedTemp> taggedTimes; #endif std::vector<uint32_t> keysTemp; }; }
2,250
C++
.h
107
17.663551
72
0.685325
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,732
parallelFor.h
Const-me_Whisper/Whisper/Utils/parallelFor.h
#pragma once namespace Whisper { // A callback to offload to the thread pool using pfnParallelForCallback = HRESULT( * )( int ith, void* ctx ) noexcept; // A simple parallel for implementation; Windows includes a decent thread pool since Vista (2006) HRESULT parallelFor( pfnParallelForCallback pfn, int threadsCount, void* ctx ); // Use this version when you wanna use the thread pool repeatedly, for the same work. // This class caches native work handle, saving a couple of WinAPI calls. class alignas( 64 ) ThreadPoolWork { PTP_WORK work = nullptr; // We want these volatile fields in another cache line from the rest of the data of this class. // threadIndex field is concurrently modified by different CPU cores, and these cache coherency protocols are slow. // OTOH, work and callback fields of this class only change when created / destroyed, that cache line is shared by CPU cores without any performance penalty. alignas( 64 ) volatile long threadIndex = 0; volatile HRESULT status = E_UNEXPECTED; static void __stdcall callbackStatic( PTP_CALLBACK_INSTANCE Instance, PVOID pv, PTP_WORK Work ); protected: virtual HRESULT threadPoolCallback( int ith ) noexcept = 0; public: ThreadPoolWork() = default; ThreadPoolWork( const ThreadPoolWork& ) = delete; ~ThreadPoolWork(); HRESULT create(); HRESULT parallelFor( int threadsCount ) noexcept; }; }
1,400
C++
.h
28
47.357143
159
0.771093
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,733
MurmurHash3.h
Const-me_Whisper/Whisper/Utils/MurmurHash3.h
#pragma once #include <stdint.h> void MurmurHash3_x86_32( const void* key, int len, uint32_t seed, void* out ); void MurmurHash3_x86_128( const void* key, int len, uint32_t seed, void* out ); void MurmurHash3_x64_128( const void* key, int len, uint32_t seed, void* out ); #include <atlcoll.h> // Traits class for `CAtlMap<const char*>` which does not copy nor owns these strings struct StringPtrTraits : public ATL::CDefaultElementTraits<const char*> { using INARGTYPE = const char*; static inline bool CompareElements( const char* a, const char* b ) { return 0 == strcmp( a, b ); } static inline int CompareElementsOrdered( const char* a, const char* b ) { return strcmp( a, b ); } static inline ULONG Hash( const char* ptr ) { uint32_t hash = UINT_MAX; if( nullptr != ptr ) { const int len = (int)strlen( ptr ); constexpr uint32_t seed = 0; MurmurHash3_x86_32( ptr, len, seed, &hash ); } return hash; } };
946
C++
.h
30
29.266667
85
0.703622
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,734
Logger.h
Const-me_Whisper/Whisper/Utils/Logger.h
#pragma once #include "../API/loggerApi.h" #ifdef __cplusplus extern "C" { #endif void logError( const char8_t* pszFormat, ... ); void logError16( const wchar_t* pszFormat, ... ); void logErrorHr( long hr, const char8_t* pszFormat, ... ); void logWarning( const char8_t* pszFormat, ... ); void logWarning16( const wchar_t* pszFormat, ... ); void logWarningHr( long hr, const char8_t* pszFormat, ... ); void logInfo( const char8_t* pszFormat, ... ); void logInfo16( const wchar_t* pszFormat, ... ); void logDebug( const char8_t* pszFormat, ... ); void logDebug16( const wchar_t* pszFormat, ... ); bool willLogMessage( Whisper::eLogLevel lvl ); #ifdef __cplusplus } #endif
677
C++
.h
19
34.421053
60
0.703364
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,735
DelayExecution.h
Const-me_Whisper/Whisper/Utils/DelayExecution.h
#pragma once #include <atlbase.h> // Utility class implementing a high-resolution Sleep() function class DelayExecution { using pfnDelay = void( * )( const DelayExecution& de ); pfnDelay pfn = nullptr; CHandle timer; static void sleepOnTheTimer( const DelayExecution& delay ); static void spinWait( const DelayExecution& ); static void sleep( const DelayExecution& ); public: DelayExecution(); DelayExecution( const DelayExecution& ) = delete; ~DelayExecution() = default; void delay() const { pfn( *this ); } };
530
C++
.h
20
24.65
64
0.759369
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,736
ReadStream.h
Const-me_Whisper/Whisper/Utils/ReadStream.h
#pragma once #include "../ComLightLib/streams.h" #include "../ComLightLib/comLightServer.h" #define WIN32_LEAN_AND_MEAN #include <atlfile.h> class ReadStream : public ComLight::ObjectRoot<ComLight::iReadStream> { CAtlFile file; // TODO: implement a buffer in this class, at least 256kb HRESULT COMLIGHTCALL read( void* lpBuffer, int nNumberOfBytesToRead, int& lpNumberOfBytesRead ) override final { return file.Read( lpBuffer, (DWORD)nNumberOfBytesToRead, *(DWORD*)&lpNumberOfBytesRead ); } HRESULT COMLIGHTCALL seek( int64_t offset, ComLight::eSeekOrigin origin ) override final { return file.Seek( offset, (uint8_t)origin ); } HRESULT COMLIGHTCALL getPosition( int64_t& position ) override final { return file.GetPosition( *(ULONGLONG*)&position ); } HRESULT COMLIGHTCALL getLength( int64_t& length ) override final { return file.GetSize( *(ULONGLONG*)&length ); } public: HRESULT open( const wchar_t* path ) { if( file ) return HRESULT_CODE( ERROR_ALREADY_INITIALIZED ); return file.Create( path, GENERIC_READ, FILE_SHARE_READ, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL | FILE_FLAG_SEQUENTIAL_SCAN ); } };
1,139
C++
.h
33
32.454545
126
0.764279
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,737
CpuProfiler.h
Const-me_Whisper/Whisper/Utils/CpuProfiler.h
#pragma once namespace Whisper { // Get current time in CPU clock // More specifically, each CPU core has a timestamp counter which runs at CPU's base frequency, regardless on the frequency scaling of that core. inline int64_t tscNow() { return __rdtsc(); } // Scale the time interval from CPU time stamp counter clock into 100-nanosecond ticks, rounding to nearest uint64_t ticksFromTsc( uint64_t tscDiff ); class CpuProfiler { const int64_t started = tscNow(); public: uint64_t elapsed() const { return ticksFromTsc( (uint64_t)( tscNow() - started ) ); } }; }
590
C++
.h
21
25.761905
146
0.739823
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,738
GpuProfiler.h
Const-me_Whisper/Whisper/Utils/GpuProfiler.h
#pragma once #include "../D3D/device.h" #include "ProfileCollection.h" #include "DelayExecution.h" namespace DirectCompute { enum struct eProfilerBlock : uint16_t { LoadModel = 0x1000, Run = 0x2000, Encode = 0x3000, EncodeLayer = 0x4000, Decode = 0x5000, DecodeStep = 0x6000, DecodeLayer = 0x7000, }; enum struct eComputeShader : uint16_t; class GpuProfiler { DelayExecution delay; CComPtr<ID3D11Query> disjoint; enum struct eEvent { None = 0, BlockStart, BlockEnd, Shader }; struct BlockState; static constexpr uint16_t EmptyShader = ~(uint16_t)0; // A circular buffer with in-flight queries which feeds timestamps into the iTimestampSink interface class Queue { static constexpr size_t queueLength = 32; // Ring buffer for individual measures struct Entry { CComPtr<ID3D11Query> query; BlockState* block; eEvent event; uint16_t shader; #if PROFILER_COLLECT_TAGS uint16_t tag = 0; #endif void join( GpuProfiler& owner ); }; GpuProfiler& owner; std::array<Entry, queueLength> queue; size_t nextEntry = 0; public: Queue( GpuProfiler& gp ) : owner( gp ) {} HRESULT create(); // Begin a next query. 
Eventually, this will result in the BlockState.haveTimestamp callback void submit( BlockState* block, eEvent evt, uint16_t shader = EmptyShader, uint16_t tag = 0 ); // Wait for all the pending queries, and call their callbacks void join(); }; Queue queries; struct sProfilerData; struct BlockState { int64_t timeStart = -1; sProfilerData* destBlock = nullptr; int64_t shaderStart = -1; uint16_t prevShader = EmptyShader; uint16_t prevShaderTag = 0; BlockState* parentBlock = nullptr; void haveTimestamp( eEvent evt, uint16_t cs, uint16_t tag, uint64_t time, GpuProfiler& profiler ); private: void completePrevShader( uint64_t time, GpuProfiler& profiler ); }; CAtlMap<eProfilerBlock, BlockState> blockStates; std::vector<BlockState*> stack; struct sProfilerData { // Count of accumulated measures size_t callsPending; // Total time spent running all instances of that measure, expressed in GPU ticks uint64_t timePending; Whisper::ProfileCollection::Measure* dest; inline void makeTime( uint64_t freq ); inline void addPending( int64_t time ); inline void reset(); inline void dropPending(); sProfilerData() { reset(); } }; CAtlMap<uint16_t, sProfilerData> results; #if PROFILER_COLLECT_TAGS CAtlMap<uint32_t, sProfilerData> resultsTagged; #endif void resultsMakeTime( uint64_t freq ); void resultsDropPending(); void resultsReset(); void blockStart( eProfilerBlock which ); void blockEnd(); Whisper::ProfileCollection& dest; #if PROFILER_COLLECT_TAGS uint16_t m_nextTag = 0; #endif public: GpuProfiler( Whisper::ProfileCollection& pc ) : dest( pc ), queries( *this ) { } HRESULT create( size_t maxDepth = 3 ); class BlockRaii { GpuProfiler* profiler; public: BlockRaii( GpuProfiler& owner, eProfilerBlock which ) { owner.blockStart( which ); profiler = &owner; } ~BlockRaii() { if( nullptr != profiler ) { profiler->blockEnd(); profiler = nullptr; } } BlockRaii( BlockRaii&& that ) noexcept : profiler( that.profiler ) { that.profiler = nullptr; } BlockRaii( const BlockRaii& ) = delete; void 
operator=( const BlockRaii& ) = delete; void operator=( BlockRaii&& ) = delete; }; BlockRaii block( eProfilerBlock which ) { return BlockRaii{ *this, which }; } void computeShader( eComputeShader cs ); bool profileShaders = false; // bool profileShaders = true; decltype( auto ) cpuBlock( Whisper::eCpuBlock block ) { return dest.cpuBlock( block ); } Whisper::ProfileCollection& profiler() { return dest; } // Set tag string for the next compute shader // The string should be readonly: for performance reason the implementation doesn’t copy nor compare any strings, it only keeps the pointer #if PROFILER_COLLECT_TAGS uint16_t setNextTag( const char* name ); #else inline uint16_t setNextTag( const char* name ) { return 0; } #endif void setNextTag( uint16_t tag ) { #if PROFILER_COLLECT_TAGS m_nextTag = tag; #endif } }; }
4,307
C++
.h
159
23.622642
141
0.715708
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,739
miscUtils.h
Const-me_Whisper/Whisper/Utils/miscUtils.h
#pragma once #define CHECK( hr ) { const HRESULT __hr = ( hr ); if( FAILED( __hr ) ) return __hr; } #define CHECK_LOG( hr ) { const HRESULT __hr = ( hr ); if( FAILED( __hr ) ) { logErrorHr(__hr, u8"%s failed", #hr ); return __hr; } } inline void check( HRESULT hr ) { if( SUCCEEDED( hr ) ) return; throw hr; } inline __m128i __vectorcall load16( const int* rsi ) { return _mm_loadu_si128( ( const __m128i* )rsi ); } inline __m128i __vectorcall load16( const uint32_t* rsi ) { return _mm_loadu_si128( ( const __m128i* )rsi ); } inline __m128i __vectorcall load( const std::array<uint32_t, 4>& arr ) { return load16( arr.data() ); } inline void __vectorcall store16( void* rdi, __m128i v ) { _mm_storeu_si128( ( __m128i* )rdi, v ); } inline void __vectorcall store12( void* rdi, __m128i v ) { _mm_storel_epi64( ( __m128i* )rdi, v ); ( (int*)rdi )[ 2 ] = _mm_extract_epi32( v, 2 ); } inline void __vectorcall store( std::array<uint32_t, 4>& arr, __m128i v ) { store16( arr.data(), v ); } inline bool __vectorcall vectorEqual( __m128i a, __m128i b ) { __m128i xx = _mm_xor_si128( a, b ); return (bool)_mm_testz_si128( xx, xx ); } inline __m128i __vectorcall setLow_size( size_t low ) { return _mm_cvtsi64_si128( (int64_t)low ); } inline __m128i __vectorcall setr_size( size_t low, size_t high ) { __m128i v = setLow_size( low ); v = _mm_insert_epi64( v, (int64_t)high, 1 ); return v; } inline __m128i __vectorcall setHigh_size( size_t high ) { __m128i v = _mm_setzero_si128(); v = _mm_insert_epi64( v, (int64_t)high, 1 ); return v; } void setCurrentThreadName( const char* name ); inline HRESULT getLastHr() { return HRESULT_FROM_WIN32( GetLastError() ); } // Scale time in seconds from unsigned 64 bit rational number ( mul / div ) into 100-nanosecond ticks // These 100-nanosecond ticks are used in NTFS, FILETIME, .NET standard library, media foundation, and quite a few other places inline uint64_t makeTime( uint64_t mul, uint64_t div ) { mul *= 10'000'000; mul += ( ( div / 2 ) - 1 ); return mul / 
div; } template<class E> inline size_t vectorMemoryUse( const std::vector<E>& vec ) { return sizeof( E ) * vec.capacity(); } // The formula is pow( mul / div, -0.25 ) float computeScaling( int mul, int div );
2,242
C++
.h
75
28.453333
133
0.661881
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,740
GpuProfilerSimple.h
Const-me_Whisper/Whisper/Utils/GpuProfilerSimple.h
#pragma once #include "../D3D/device.h" #include "DelayExecution.h" namespace DirectCompute { // A simple profiler which doesn't collect anything, used to measure time it took to load the model class GpuProfilerSimple { DelayExecution delay; CComPtr<ID3D11Query> disjoint, begin, end; public: HRESULT create(); HRESULT time( uint64_t& rdi ) const; }; }
366
C++
.h
15
22.533333
100
0.766382
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,742
tracing.h
Const-me_Whisper/Whisper/Utils/Trace/tracing.h
#pragma once #include "TraceWriter.h" #include "../../ML/mlUtils.h" namespace Tracing { #if SAVE_DEBUG_TRACE void traceCreate( LPCTSTR path ); void traceClose(); iTraceWriter* getWriter(); inline HRESULT tensor( const ItemName& name, const DirectCompute::Tensor& tensor ) { iTraceWriter* w = getWriter(); if( w ) return w->tensor( name, tensor ); return S_FALSE; } inline HRESULT tensor( const ItemName& name, const CpuCompute::Tensor& tensor ) { iTraceWriter* w = getWriter(); if( w ) return w->tensor( name, tensor ); return S_FALSE; } inline HRESULT tensor( const ItemName& name, const ggml_tensor* tensor ) { iTraceWriter* w = getWriter(); if( w ) return w->tensor( name, *tensor ); return S_FALSE; } void delayTensor( const ItemName& name, const ggml_tensor* tensor ); HRESULT writeDelayedTensors(); inline HRESULT buffer( const ItemName& name, const void* rsi, size_t length, eDataType dt ) { iTraceWriter* w = getWriter(); if( w ) return w->buffer( name, rsi, length, dt ); return S_FALSE; } inline HRESULT vector( const ItemName& name, const std::vector<float>& vec ) { const float* rsi = vec.empty() ? 
nullptr : vec.data(); return buffer( name, rsi, vec.size(), eDataType::FP32 ); } inline HRESULT vector( const ItemName& name, const float* rsi, size_t length ) { return buffer( name, rsi, length, eDataType::FP32 ); } #else inline void traceCreate( LPCTSTR path ) { } inline void traceClose() { } #if DBG_TEST_NAN HRESULT tensor( const ItemName& name, const DirectCompute::Tensor& tensor ); #else inline HRESULT tensor( const ItemName& name, const DirectCompute::Tensor& tensor ) { return S_FALSE; } #endif inline HRESULT tensor( const ItemName& name, const CpuCompute::Tensor& tensor ) { return S_FALSE; } inline HRESULT tensor( const ItemName& name, const ggml_tensor* tensor ) { return S_FALSE; } inline HRESULT buffer( const ItemName& name, const void* rsi, size_t length, eDataType dt ) { return S_FALSE; } inline HRESULT vector( const ItemName& name, const std::vector<float>& vec ) { return S_FALSE; } inline void delayTensor( const ItemName& name, const ggml_tensor* tensor ) { } inline HRESULT writeDelayedTensors() { return S_FALSE; } inline HRESULT vector( const ItemName& name, const float* rsi, size_t length ) { } #endif }
2,329
C++
.h
65
33.569231
112
0.720992
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,743
TraceWriter.h
Const-me_Whisper/Whisper/Utils/Trace/TraceWriter.h
#pragma once #include <memory> #include "../../D3D/enums.h" namespace DirectCompute { class Tensor; } namespace CpuCompute { class Tensor; } struct ggml_tensor; namespace Tracing { using DirectCompute::eDataType; struct ItemName { const char* pointer; std::array<uint32_t, 4> args; uint8_t countArgs; ItemName( const char* str ) { pointer = str; _mm_storeu_si128( ( __m128i* )args.data(), _mm_setzero_si128() ); countArgs = 0; } ItemName( const char* str, int a0 ) { pointer = str; __m128i v = _mm_cvtsi32_si128( a0 ); _mm_storeu_si128( ( __m128i* )args.data(), v ); countArgs = 1; } ItemName( const char* str, uint32_t a0 ) { pointer = str; __m128i v = _mm_cvtsi32_si128( (int)a0 ); _mm_storeu_si128( ( __m128i* )args.data(), v ); countArgs = 1; } ItemName( const char* str, size_t a0 ) { pointer = str; __m128i v = _mm_cvtsi32_si128( (int)a0 ); _mm_storeu_si128( ( __m128i* )args.data(), v ); countArgs = 1; } }; class iTraceWriter { public: virtual ~iTraceWriter() {} static std::unique_ptr<iTraceWriter> create( LPCTSTR path ); virtual HRESULT buffer( const ItemName& name, const void* rsi, size_t length, eDataType dt ) = 0; virtual HRESULT tensor( const ItemName& name, const void* rsi, __m128i size, __m128i strides, eDataType dt ) = 0; HRESULT tensor( const ItemName& name, const DirectCompute::Tensor& tensor ); HRESULT tensor( const ItemName& name, const CpuCompute::Tensor& tensor ); HRESULT tensor( const ItemName& name, const ggml_tensor& tensor ); }; }
1,575
C++
.h
60
23.45
115
0.671315
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,744
TraceStructures.h
Const-me_Whisper/Whisper/Utils/Trace/TraceStructures.h
#pragma once #include <array> #include <emmintrin.h> #include "../../D3D/enums.h" namespace Tracing { using DirectCompute::eDataType; // File header of the trace file struct sFileHeader { static constexpr uint32_t correctMagic = 0xE6B4A12Du; // random.org uint32_t magic; uint8_t formatVersion; uint8_t zzPadding; uint16_t cbItem; uint32_t countItems; uint32_t zzPadding2; uint64_t bytesPayload; uint32_t countStrings, bytesStrings; }; // Payload data starts immediately after the header, bytesPayload bytes in total. // Then `bytesStrings` with string names, first countStrings * 4 of them are offsets, then ( bytesStrings - countStrings * 4 ) bytes with the string data. // The strings in the file are null-terminated. // Immediately after the strings, the next `cbItem` * `countItems` bytes are actual items (tensors and vectors) saved in the trace. // The format is weird because optimized for streaming. // These traces can grow large, we can’t afford memory keeping the payload data in memory. // Metadata is tiny compared to payload, we accumulate that in memory, and write to the end of the file when closed. enum struct eItemType : uint8_t { Buffer = 1, Tensor = 2, }; struct sTraceItem { uint64_t payloadOffset; uint64_t payloadSize; std::array<uint32_t, 4> size; std::array<uint32_t, 4> stride; std::array<uint32_t, 4> formatArgs; eItemType itemType; eDataType dataType; uint8_t countFormatArgs = 0; uint8_t zzPadding = 0; uint32_t stringIndex; uint64_t buffer( uint64_t off, size_t length, eDataType type ); uint64_t tensor( uint64_t off, __m128i ne, __m128i nb, eDataType type ); }; }
1,673
C++
.h
48
32.291667
155
0.747212
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,745
DbgNanTest.h
Const-me_Whisper/Whisper/ML/DbgNanTest.h
#pragma once namespace DirectCompute { class DbgNanTest { CComPtr<ID3D11Buffer> bufferDefault, bufferStaging; CComPtr<ID3D11UnorderedAccessView> uav; public: HRESULT create(); void destroy(); operator ID3D11UnorderedAccessView* ( ) const { return uav; } bool test() const; }; #if DBG_TEST_NAN const DbgNanTest& getNanTestBuffers(); #endif }
366
C++
.h
20
16.05
53
0.771014
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,746
TensorGpuViews.h
Const-me_Whisper/Whisper/ML/TensorGpuViews.h
#pragma once #include <stdint.h> #include "../D3D/device.h" namespace DirectCompute { class TensorGpuViews { protected: CComPtr<ID3D11ShaderResourceView> srv; CComPtr<ID3D11UnorderedAccessView> uav; public: operator ID3D11ShaderResourceView* ( ) const { return srv; } operator ID3D11UnorderedAccessView* ( ) const { return uav; } HRESULT create( ID3D11Buffer* buffer, DXGI_FORMAT format, size_t countElements, bool makeUav ); void clear() { srv = nullptr; uav = nullptr; } void setGpuViews( ID3D11ShaderResourceView* read, ID3D11UnorderedAccessView* write = nullptr ) { srv = read; uav = write; } }; }
646
C++
.h
26
22.153846
97
0.746341
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,747
ConstantBuffer.h
Const-me_Whisper/Whisper/ML/ConstantBuffer.h
#pragma once #include "../D3D/device.h" #include "TensorShape.h" namespace DirectCompute { // 96 bytes dynamic constant buffers, with dimensions and VRAM layout of 2-3 tensors class ConstantBuffer { CComPtr<ID3D11Buffer> buffer; public: HRESULT create(); HRESULT update( const TensorShape& t0 ); HRESULT update( const TensorShape& t0, const TensorShape& t1 ); HRESULT update( const TensorShape& t0, const TensorShape& t1, const TensorShape& t2 ); void bind() const; __m128i getMemoryUse() const { return bufferMemoryUsage( buffer ); } }; }
569
C++
.h
21
24.714286
88
0.750459
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,748
TensorEx.h
Const-me_Whisper/Whisper/ML/TensorEx.h
#pragma once #include "Tensor.h" namespace DirectCompute { // A tensor which supports dynamic updates from CPU, or downloads from VRAM to system RAM class TensorEx : public Tensor { protected: CComPtr<ID3D11Buffer> buffer; CComPtr<ID3D11Buffer> stagingBuffer; HRESULT getViewSize( uint32_t& cbElement, uint32_t& countElements ) const; public: HRESULT create( const ggml_tensor& ggml, eBufferUse usage, bool uploadData ); HRESULT create( eDataType type, eBufferUse usage, const std::array<uint32_t, 4>& sizeElements ); HRESULT download( void* rdi, size_t cb ) const; HRESULT download( void* rdi ) const; template<class E> HRESULT download( std::vector<E>& vec ) const { uint32_t cbElement, numElements; CHECK( getViewSize( cbElement, numElements ) ); try { vec.resize( numElements ); } catch( const std::bad_alloc& ) { return E_OUTOFMEMORY; } return download( vec.data(), (size_t)cbElement * numElements ); } }; }
984
C++
.h
33
26.666667
98
0.728526
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,749
Reshaper.h
Const-me_Whisper/Whisper/ML/Reshaper.h
#pragma once #include "Tensor.h" namespace DirectCompute { // This class reshapes some of the model’s tensor, immediately after they’re loaded. // That feature is used on all AMD GPUs. class Reshaper { CComPtr<ID3D11Buffer> constantBuffer; HRESULT createConstants(); public: ~Reshaper(); HRESULT makePanels( Tensor& tensor, eDataType dataType ); }; }
373
C++
.h
15
22.4
85
0.768571
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,750
MlContext.h
Const-me_Whisper/Whisper/ML/MlContext.h
#pragma once #include <vector> #include "TempBuffers.h" #include "ConstantBuffer.h" #include "Tensor.h" #include "../Utils/GpuProfiler.h" #include "../Utils/ProfileCollection.h" namespace DirectCompute { enum struct eComputeShader : uint16_t; class MlContext { // When false, the implementation is 100% compatible with the CPU-running code written by Georgi Gerganov // When true, the implementation is much faster, and doesn't require FP64 support in the compute shaders. // FP64 is an optional feature, not all GPUs support that. static constexpr bool enableInexactOptimizations = true; ConstantBuffer cb; TempBuffers temp; CComPtr<ID3D11Buffer> flashAttentionConstants; void convolutionImpl( const Tensor& a, const Tensor& b, Tensor& res, bool is2 ); void cwiseBinary( const Tensor& a, const Tensor& b, Tensor& res, eComputeShader cs ); Tensor cwiseBinary( const Tensor& a, const Tensor& b, eComputeShader cs ); void mulMatDot( const Tensor& a, const Tensor& b, Tensor& res ); void mulMatMad( const Tensor& a, const Tensor& b, Tensor& res ); void mulMatTiled( const Tensor& a, const Tensor& b, Tensor& res ); void bindShader( eComputeShader cs ); protected: void copyImpl( const Tensor& a, Tensor& res, bool downcastFp32 ); // Create a dense output tensor for the results of a computation // Override this method to implement a pool of these tensors virtual Tensor createTensor( eDataType type, const std::array<uint32_t, 4>& ne ); Tensor createTensor( eDataType type, std::initializer_list<uint32_t> ne ); GpuProfiler profiler; public: MlContext( Whisper::ProfileCollection& profileColl ); MlContext( const MlContext& ) = delete; // res = a * b void mulMat( const Tensor& a, const Tensor& b, Tensor& res ); void flashAttention( const Tensor& q, const Tensor& k, const Tensor& v, Tensor& res, bool masked ); inline void convolution( const Tensor& a, const Tensor& b, Tensor& res ) { convolutionImpl( a, b, res, false ); } void convolution2( const Tensor& a, const Tensor& b, Tensor& res ) { 
convolutionImpl( a, b, res, true ); } void norm( const Tensor& a, Tensor& res ); Tensor conv_1d_1s( const Tensor& a, const Tensor& b ); Tensor conv_1d_2s( const Tensor& a, const Tensor& b ); Tensor add( const Tensor& a, const Tensor& b ); void addInPlace( Tensor& a, const Tensor& b ); Tensor view2d( const Tensor& a, uint32_t ne0, uint32_t ne1, uint32_t nb1, uint32_t offset ); Tensor transpose( const Tensor& a ); Tensor norm( const Tensor& a ); Tensor mulMat( const Tensor& a, const Tensor& b ); Tensor mulMatEx( const Tensor& a, const Tensor& b, const char* tagName ); Tensor permute( const Tensor& a, uint8_t axis0, uint8_t axis1, uint8_t axis2, uint8_t axis3 ); Tensor flashAttention( const Tensor& q, const Tensor& k, const Tensor& v, bool masked ); Tensor copy( const Tensor& a, eDataType type, std::initializer_list<uint32_t> size ); void copyInPlace( Tensor& dest, const Tensor& a, eDataType type, std::initializer_list<uint32_t> size ); void dbgPrintDifference( const ggml_tensor* reference, const Tensor& gpu, const char * what, bool trapToDebugger = true ); void scale( Tensor& a, float mul ); void addRepeat( Tensor& a, const Tensor& b ); void addRepeatScale( Tensor& a, const Tensor& b, float scale ); void fmaRepeat( Tensor& a, const Tensor& mul, const Tensor& add ); // ggml_diag_mask_inf void diagMaskInf( Tensor& a, uint32_t n_past ); // ggml_soft_max void softMax( Tensor& a, float inputScale = 1.0f ); void addRepeatGelu( Tensor& a, const Tensor& b ); // Extract rows from tokenEmbedding matrix, row indices are taken from the `embd` R32_UINT row vector // Extract same count of rows from positionalEmbedding matrix, starting at the `pastTokensCount` row // Return a new FP32 matrix with the sum of these rows Tensor addRows( const Tensor& tokenEmbedding, const Tensor& positionalEmbedding, const Tensor& embd, uint32_t pastTokensCount ); Tensor reshapePanels( const Tensor& a ); Tensor mulMatTiledEx( const Tensor& a, const Tensor& b ); Tensor mulMatByRowTiledEx( const Tensor& 
a, const Tensor& b ); // An equivalent of addRepeat( dest, pattern ) followed by addInPlace( dest, finalAdd ) void addRepeatEx( Tensor& dest, const Tensor& pattern, const Tensor& finalAdd ); __m128i getMemoryUse() const; }; }
4,392
C++
.h
83
49.843373
130
0.734517
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,751
Device.h
Const-me_Whisper/Whisper/ML/Device.h
#pragma once #include <string> #include "../D3D/sGpuInfo.h" #include "LookupTables.h" #include "DbgNanTest.h" namespace DirectCompute { struct Device { CComPtr<ID3D11Device> device; CComPtr<ID3D11DeviceContext> context; std::vector<CComPtr<ID3D11ComputeShader>> shaders; CComPtr<ID3D11Buffer> smallCb; sGpuInfo gpuInfo; LookupTables lookupTables; #if DBG_TEST_NAN DbgNanTest nanTestBuffers; #endif HRESULT create( uint32_t flags, const std::wstring& adapter ); HRESULT createClone( const Device& source ); void destroy(); class ThreadSetupRaii { bool setup; public: ThreadSetupRaii( const Device* dev ); ~ThreadSetupRaii(); ThreadSetupRaii( ThreadSetupRaii&& that ) noexcept { setup = that.setup; that.setup = false; } ThreadSetupRaii( const ThreadSetupRaii& ) = delete; void operator=( const ThreadSetupRaii& ) = delete; }; ThreadSetupRaii setForCurrentThread() const { return ThreadSetupRaii{ this }; } }; }
986
C++
.h
41
21.195122
64
0.749203
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,752
Tensor.h
Const-me_Whisper/Whisper/ML/Tensor.h
#pragma once #include "TensorShape.h" #include "TensorGpuViews.h" #include "../D3D/enums.h" namespace DirectCompute { // A minimal tensor object sufficient to compute things on GPU, with compute shaders // This class only takes 48 bytes in system memory, and is very cheap to make copies 'coz GPU objects are reference counted. class Tensor : public TensorShape, public TensorGpuViews { CComPtr<ID3D11Buffer> getBuffer() const; struct TensorType { eDataType type; eBufferUse usage; bool hasInitialData; }; #ifdef _DEBUG // In debug builds, we include a few pieces of data to this class. TensorType dbgType; #endif protected: HRESULT create( eDataType type, std::initializer_list<uint32_t> sizeElements, eBufferUse usage, CComPtr<ID3D11Buffer>& buffer, const void* rsi, ID3D11Buffer** ppStagingBuffer, bool shared = false ); static uint32_t dxgiSizeof( DXGI_FORMAT format ); void downloadImpl( const D3D11_SHADER_RESOURCE_VIEW_DESC& viewDesc, uint32_t countElements, size_t cbElement, void* rdi ) const; public: Tensor() = default; // These copy operators don't copy any data, they merely increment ref.counter of the GPU resources Tensor( const Tensor& ); Tensor( Tensor&& that ) noexcept; Tensor& operator=( const Tensor& that ); Tensor& operator=( Tensor&& that ) noexcept; // Move the provided buffer views into this newly created tensor, and assign the shape // This destroys old values in the smart pointers Tensor( const TensorShape& shape, CComPtr<ID3D11ShaderResourceView>& srv, CComPtr<ID3D11UnorderedAccessView>& uav ) noexcept; Tensor( const TensorShape& shape, const TensorGpuViews& views ); // Create a tensor from the GGML's one HRESULT create( const ggml_tensor& ggml, eBufferUse usage, bool uploadData ); // Create a new dense tensor of the specified size in elements, without initial data HRESULT create( eDataType type, std::initializer_list<uint32_t> sizeElements, bool shared = false ); HRESULT create( eDataType type, const std::array<uint32_t, 4>& sizeElements, bool shared = 
false ); HRESULT createImmutable( eDataType type, const std::array<int, 4>& size, const void* rsi ); eDataType getType() const; // This method should probably only be used to test things // TensorEx is better for production usage, because it creates staging buffer in advance. void download( std::vector<float>& vec ) const; void download( std::vector<uint16_t>& vec ) const; // ggml_reshape_3d Tensor reshape3d( uint32_t ne0, uint32_t ne1, uint32_t ne2 ) const; inline void dbgSetType( eDataType dt, bool hasData = false, eBufferUse use = eBufferUse::ReadWrite ) { #ifdef _DEBUG dbgType.type = dt; dbgType.hasInitialData = hasData; dbgType.usage = use; #endif } __m128i getMemoryUse() const { return resourceMemoryUsage( srv ); } }; }
2,852
C++
.h
63
42.396825
200
0.755315
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,753
testUtilsC.h
Const-me_Whisper/Whisper/ML/testUtilsC.h
#pragma once #ifdef __cplusplus extern "C" { #endif void printUniqueTensorSize( const char* name, const int* lhs, const int* rhs ); #ifdef __cplusplus } #endif
163
C++
.h
9
17
80
0.746753
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,754
mlUtils.h
Const-me_Whisper/Whisper/ML/mlUtils.h
#pragma once namespace DirectCompute { // Update the small dynamic constant buffer ID3D11Buffer* __vectorcall updateSmallCb( __m128i cbData ); // Fill the tensor with either 0.0 or NaN values void zeroMemory( ID3D11UnorderedAccessView* uav, uint32_t length, bool fillWithNaN = false ); // Fill the complete UAV with NaN values void fillTensorWithNaN( ID3D11UnorderedAccessView* uav ); // true when the tensor contains at least 1 NaN value bool scanTensorForNaN( ID3D11ShaderResourceView* tensor, uint32_t length ); // Create SRV on another device, reusing the resource HRESULT cloneResourceView( ID3D11ShaderResourceView* rsi, ID3D11ShaderResourceView** rdi ); }
677
C++
.h
14
46.357143
94
0.801214
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,755
tensorOpsTests.h
Const-me_Whisper/Whisper/ML/tensorOpsTests.h
#pragma once #include "../source/ggml.h" namespace DirectCompute { // void testMulMatReshape( const ggml_tensor* src1, const void* tempBuffer ); void testMulMat( const ggml_tensor* src0, const ggml_tensor* src1, const ggml_tensor* dst, const void* tempBuffer ); void computeMulMat( const ggml_tensor* src0, const ggml_tensor* src1, ggml_tensor* dst ); void testFlashAttention( const ggml_tensor* q, const ggml_tensor* k, const ggml_tensor* v, bool masked, const ggml_tensor* dst ); void computeFlashAttention( const ggml_tensor* q, const ggml_tensor* k, const ggml_tensor* v, bool masked, ggml_tensor* dst ); void testConvolution( const ggml_tensor* src0, const ggml_tensor* src1, const ggml_tensor* dst ); void computeConvolution( const ggml_tensor* src0, const ggml_tensor* src1, ggml_tensor* dst ); }
813
C++
.h
12
66
130
0.760951
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,756
TensorShape.h
Const-me_Whisper/Whisper/ML/TensorShape.h
#pragma once #include <stdint.h> #include <array> #include <smmintrin.h> struct ggml_tensor; using HRESULT = long; namespace DirectCompute { // This POD structure describes the shape of a tensor. // It’s used for both GPU tensors in VRAM, and tensors in system memory used by the Hybrid model. struct TensorShape { // Count of elements, up to 4 coordinates // The unused coordinates are set to 1 std::array<uint32_t, 4> ne; // Strides of the tensor // For a dense row-major tensor, these numbers are [ 1, ne[0], ne[0]*ne[1], ne[0]*ne[1]*ne[2] ] // Note that unlike GGML code, these numbers are expressed in elements not bytes, but the meaning is the same // GPU matrices reshaped into panels are keeping different values here: [ 0, panelSize, panelSize * panelsCount, panelSize * panelsCount * ne[ 2 ] ] std::array<uint32_t, 4> nb; TensorShape(); TensorShape( const TensorShape& that ); void operator=( const TensorShape& that ); HRESULT create( const ggml_tensor& ggml ); TensorShape( const ggml_tensor& ggml ); __m128i __vectorcall sizeVec() const { return load( ne ); } __m128i __vectorcall stridesVec() const { return load( nb ); } uint32_t countRows() const { return ne[ 1 ] * ne[ 2 ] * ne[ 3 ]; } uint32_t countElements() const { // return ne[ 0 ] * countRows(); const __m128i a = sizeVec(); const __m128i b = _mm_srli_si128( a, 4 ); const __m128i p2 = _mm_mul_epu32( a, b ); uint64_t res = (uint64_t)_mm_extract_epi64( p2, 1 ); res *= (uint64_t)_mm_cvtsi128_si64( p2 ); assert( 0 == ( res >> 32 ) ); return (uint32_t)res; } // Compute strides from sizes, assuming dense row-major memory layout of the tensor void setDenseStrides(); bool isMatrix() const { // return ne[ 2 ] == 1 && ne[ 3 ] == 1; const uint64_t num = *(const uint64_t*)&ne[ 2 ]; return num == 0x100000001ull; } bool isVector() const { return 1 == ne[ 1 ] && isMatrix(); } // True of this tensor is dense and row-major bool isContinuous() const { /* return 1 == nb[ 0 ] && nb[ 1 ] == nb[ 0 ] * ne[ 0 ] && nb[ 2 ] == nb[ 1 ] * 
ne[ 1 ] && nb[ 3 ] == nb[ 2 ] * ne[ 2 ]; */ const __m128i nbv = stridesVec(); const __m128i nev = sizeVec(); __m128i tmp = _mm_mullo_epi32( nbv, nev ); // Vertical product of int32 lanes tmp = _mm_shuffle_epi32( tmp, _MM_SHUFFLE( 2, 1, 0, 0 ) ); // Shift left by 1 int32 lane tmp = _mm_insert_epi32( tmp, 1, 0 ); // Reset X lane to 1 return vectorEqual( tmp, nbv ); } // Reset all fields to zero void setZero() { const __m128i z = _mm_setzero_si128(); _mm_storeu_si128( ( __m128i* )ne.data(), z ); _mm_storeu_si128( ( __m128i* )nb.data(), z ); } }; // True when two tensors have equal count of elements inline bool isSameShape( const TensorShape& t0, const TensorShape& t1 ) { __m128i a = t0.sizeVec(); __m128i b = t1.sizeVec(); return vectorEqual( a, b ); } // True when two tensors have equal count of elements, and equal VRAM layout too inline bool isSameShapeAndLayout( const TensorShape& t0, const TensorShape& t1 ) { __m128i a, b, x; a = t0.sizeVec(); b = t1.sizeVec(); x = _mm_xor_si128( a, b ); a = t0.stridesVec(); b = t1.stridesVec(); x = _mm_or_si128( x, _mm_xor_si128( a, b ) ); return (bool)_mm_testz_si128( x, x ); } // True when we can multiply two tensors of the provided shapes bool canMulMat( const TensorShape& t0, const TensorShape& t1 ); }
3,485
C++
.h
104
30.336538
150
0.642071
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,757
TempBuffers.h
Const-me_Whisper/Whisper/ML/TempBuffers.h
#pragma once #include "TensorGpuViews.h" namespace DirectCompute { class TempBuffers { class Buffer : public TensorGpuViews { size_t capacity = 0; public: void clear() { TensorGpuViews::clear(); capacity = 0; } HRESULT resize( DXGI_FORMAT format, size_t elements, size_t cbElement, bool zeroMemory ); size_t getCapacity() const { return capacity; } }; Buffer m_fp16; Buffer m_fp16_2; Buffer m_fp32; public: const TensorGpuViews& fp16( size_t countElements, bool zeroMemory = false ); const TensorGpuViews& fp16_2( size_t countElements, bool zeroMemory = false ); const TensorGpuViews& fp32( size_t countElements, bool zeroMemory = false ); void clear() { m_fp16.clear(); m_fp16_2.clear(); m_fp32.clear(); } __m128i getMemoryUse() const; }; }
818
C++
.h
34
20.823529
92
0.704516
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,758
TensorsArena.h
Const-me_Whisper/Whisper/ML/TensorsArena.h
#pragma once #include "Tensor.h" namespace DirectCompute { using pfnNewCapacity = uint32_t( * )( uint32_t current, uint32_t requested ); uint32_t defaultNewCapacity( uint32_t current, uint32_t requested ); class PooledTensor { TensorGpuViews views; uint32_t capacity = 0; public: Tensor tensor( eDataType type, const std::array<uint32_t, 4>& ne, pfnNewCapacity pfnNewCap ); size_t getCapacity() const { return capacity; } void clear() { views.clear(); capacity = 0; } HRESULT zeroMemory(); }; __interface iTensorArena { Tensor tensor( eDataType type, const std::array<uint32_t, 4>& ne ); void reset(); }; class TensorsArena: public iTensorArena { public: struct sArenaConfig { pfnNewCapacity pfnCapInner; size_t initialCapOuter; }; struct sArenaConfigs { sArenaConfig fp16, fp32; }; TensorsArena( const sArenaConfigs& configs ); Tensor tensor( eDataType type, const std::array<uint32_t, 4>& ne ) override final; void reset() override final; void clear(); __m128i getMemoryUse() const; HRESULT zeroMemory(); private: struct ArenaImpl { ArenaImpl( eDataType dataType, const sArenaConfig& config ); void reset() { index = 0; } void clear() { index = 0; pool.clear(); } Tensor tensor( const std::array<uint32_t, 4>& ne ); __m128i getMemoryUse() const; HRESULT zeroMemory(); private: const eDataType type; const pfnNewCapacity pfnNewCap; std::vector<PooledTensor> pool; size_t index = 0; }; static constexpr size_t countTypes = 2; std::array<ArenaImpl, countTypes> arenas; }; }
1,629
C++
.h
69
20.347826
95
0.707712
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,759
LookupTables.h
Const-me_Whisper/Whisper/ML/LookupTables.h
#pragma once #include "../D3D/device.h" namespace DirectCompute { class LookupTables { CComPtr<ID3D11ShaderResourceView> m_gelu, m_exponent; public: HRESULT create(); HRESULT createClone( const LookupTables& source ); void clear(); ID3D11ShaderResourceView* gelu() const { return m_gelu; } ID3D11ShaderResourceView* exponent() const { return m_exponent; } __m128i getMemoryUsage() const; }; const LookupTables& lookupTables(); }
452
C++
.h
17
24.235294
67
0.763341
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,760
reshapedMultiply.h
Const-me_Whisper/Whisper/ML/reshapedMultiply.h
#pragma once #include <stdint.h> namespace DirectCompute { namespace ReshapedMultiply { constexpr uint32_t TILE_SIZE = 32; } }
132
C++
.h
9
13.111111
36
0.788618
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,761
testUtils.h
Const-me_Whisper/Whisper/ML/testUtils.h
#pragma once #include "../D3D/downloadBuffer.h" #include "../D3D/RenderDoc/renderDoc.h" #include <unordered_set> #include <functional> // Funfact: this code written by ChatGPT namespace std { template<> struct hash<array<uint32_t, 8>> { size_t operator()( const array<uint32_t, 8>& arr ) const { size_t result = 0; for( uint32_t element : arr ) result = ( result * 31 ) ^ element; return result; } }; } namespace DirectCompute { struct sTensorDiff { // maximum( absolute( a - b ) ) float maxAbsDiff; // average( ( a - b )^2 ) float avgDiffSquared; size_t length; void print() const; void print( const char* what ) const; }; // Compute difference between 2 FP32 vectors sTensorDiff computeDiff( const float* a, const float* b, size_t length ); // Compute difference between 2 FP16 vectors sTensorDiff computeDiff( const uint16_t* a, const uint16_t* b, size_t length ); class Tensor; sTensorDiff computeDiff( const Tensor& a, const Tensor& b ); HRESULT dbgWriteBinaryFile( LPCTSTR fileName, const void* rsi, size_t cb ); // Print unique sizes of the two tensors class PrintUniqueTensorSizes { std::unordered_set<std::array<uint32_t, 8>> set; const char* const what; void printImpl( const std::array<uint32_t, 8>& a ); public: PrintUniqueTensorSizes( const char* w ) : what( w ) { } void print( const Tensor& lhs, const Tensor& rhs ); void print( const Tensor& lhs ); void print( const int* lhs, const int* rhs ); }; }
1,491
C++
.h
52
26.230769
80
0.706993
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,762
LookupTablesData.h
Const-me_Whisper/Whisper/ML/LookupTablesData.h
#pragma once #include <stdint.h> #include <array> namespace DirectCompute { struct LookupTablesData { std::array<uint16_t, 0x10000> gelu; std::array<uint16_t, 0x10000> exponent; LookupTablesData(); }; }
213
C++
.h
12
15.916667
41
0.76
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,763
convertThings.h
Const-me_Whisper/Whisper/source.compat/convertThings.h
#pragma once #include "../source/whisper.h" #include "../API/sFullParams.h" #include "../API/iTranscribeResult.cl.h" Whisper::sFullParams makeNewParams( const whisper_full_params& rsi ); whisper_full_params makeOldParams( const Whisper::sFullParams& rsi, Whisper::iContext* context ); HRESULT makeNewResults( whisper_context* ctx, Whisper::eResultFlags flags, Whisper::iTranscribeResult** pp );
397
C++
.h
7
55.428571
109
0.786082
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,764
LoadModelDlg.h
Const-me_Whisper/Examples/WhisperDesktop/LoadModelDlg.h
#pragma once #include "AppState.h" #include "Utils/WTL/atlddx.h" #include "Utils/miscUtils.h" class LoadModelDlg: public CDialogImpl<LoadModelDlg>, public CWinDataExchange<LoadModelDlg>, public iThreadPoolCallback { AppState& appState; public: static constexpr UINT IDD = IDD_OPEN_MODEL; static constexpr UINT WM_CALLBACK_STATUS = WM_APP + 1; LoadModelDlg( AppState& app ) : appState( app ) { } HRESULT show(); BEGIN_MSG_MAP( LoadModelDlg ) MESSAGE_HANDLER( WM_INITDIALOG, OnInitDialog ) ON_BUTTON_CLICK( IDC_CONSOLE, cbConsole.click ) COMMAND_ID_HANDLER( IDCANCEL, OnCommand ) COMMAND_ID_HANDLER( IDOK, OnOk ) COMMAND_ID_HANDLER( IDC_BROWSE, OnBrowse ) MESSAGE_HANDLER( WM_CALLBACK_STATUS, OnCallbackStatus ) NOTIFY_ID_HANDLER( IDC_HYPERLINKS, OnHyperlink ) ON_BUTTON_CLICK( IDC_MODEL_ADV, onModelAdvanced ) END_MSG_MAP() BEGIN_DDX_MAP( LoadModelDlg ) DDX_CONTROL_HANDLE( IDC_PATH, modelPath ) DDX_CONTROL_HANDLE( IDC_MODEL_TYPE, cbModelType ) DDX_CONTROL_HANDLE( IDC_PROGRESS, progressBar ); END_DDX_MAP() private: std::vector<HWND> editorsWindows; std::vector<HWND> pendingWindows; void setPending( bool nowPending ); LRESULT OnInitDialog( UINT nMessage, WPARAM wParam, LPARAM lParam, BOOL& bHandled ); LRESULT OnCallbackStatus( UINT, WPARAM wParam, LPARAM, BOOL& bHandled ); LRESULT OnCommand( UINT, INT nIdentifier, HWND, BOOL& bHandled ) { ATLVERIFY( EndDialog( nIdentifier ) ); return 0; } LRESULT OnBrowse( UINT, INT, HWND, BOOL& bHandled ); LRESULT OnOk( UINT, INT, HWND, BOOL& bHandled ); ConsoleCheckbox cbConsole; CComboBox cbModelType; CEdit modelPath; CProgressBarCtrl progressBar; LRESULT validationError( LPCTSTR message ); LRESULT validationError( LPCTSTR message, HRESULT hr ); ThreadPoolWork work; CString path; Whisper::eModelImplementation impl; CString loadError; void __stdcall poolCallback() noexcept override final; LRESULT OnHyperlink( int idCtrl, LPNMHDR pnmh, BOOL& bHandled ); static HRESULT __stdcall progressCallback( double val, void* pv ) noexcept; void 
onModelAdvanced(); };
2,085
C++
.h
58
33.637931
85
0.77695
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,766
targetver.h
Const-me_Whisper/Examples/WhisperDesktop/targetver.h
#pragma once // Setup Windows SDK to only enable features available since Windows 8.0 #include <WinSDKVer.h> #define _WIN32_WINNT _WIN32_WINNT_WIN8 #define NTDDI_VERSION NTDDI_WIN8 #include <SDKDDKVer.h>
204
C++
.h
6
33
72
0.80303
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,767
stdafx.h
Const-me_Whisper/Examples/WhisperDesktop/stdafx.h
#pragma once #include "framework.h" #include <whisperWindows.h> #include "resource.h" #include "Utils/WTL/atlapp.h" #include "Utils/WTL/atlctrls.h"
149
C++
.h
6
23.666667
31
0.78169
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,768
CaptureDlg.h
Const-me_Whisper/Examples/WhisperDesktop/CaptureDlg.h
#pragma once #include "AppState.h" #include "Utils/WTL/atlddx.h" #include "Utils/miscUtils.h" #include "Utils/LanguageDropdown.h" #include "Utils/TranslateCheckbox.h" #include "Utils/PendingState.h" #include "CircleIndicator.h" class CaptureDlg : public CDialogImpl<CaptureDlg>, public CWinDataExchange<CaptureDlg>, public iThreadPoolCallback { AppState& appState; public: static constexpr UINT IDD = IDD_CAPTURE_DIALOG; static constexpr UINT WM_CALLBACK_COMPLETION = WM_APP + 1; static constexpr UINT WM_CALLBACK_STATUS = WM_APP + 2; CaptureDlg( AppState& app ) : appState( app ) { } HRESULT show(); BEGIN_MSG_MAP( CaptureDlg ) MESSAGE_HANDLER( WM_INITDIALOG, OnInitDialog ) ON_BUTTON_CLICK( IDC_CONSOLE, cbConsole.click ) ON_BUTTON_CLICK( IDC_DEV_REFRESH, onDeviceRefresh ); ON_BUTTON_CLICK( IDC_BROWSE_RESULT, onBrowseResult ); ON_BUTTON_CLICK( IDC_SAVE_TEXT, onSaveTextCheckbox ); ON_BUTTON_CLICK( IDC_RUN_CAPTURE, onRunCapture ); ON_BUTTON_CLICK( IDCANCEL, onClose ) ON_BUTTON_CLICK( IDC_BACK, onBack ) ON_BUTTON_CLICK( IDC_TRANSCRIBE, onTranscribe ); MESSAGE_HANDLER( WM_CALLBACK_COMPLETION, onThreadQuit ); MESSAGE_HANDLER( WM_CALLBACK_STATUS, onThreadStatus ); END_MSG_MAP() BEGIN_DDX_MAP( CaptureDlg ) DDX_CONTROL_HANDLE( IDC_DEVICE, cbCaptureDevice ) DDX_CONTROL_HANDLE( IDC_RUN_CAPTURE, btnRunCapture ); DDX_CONTROL_HANDLE( IDC_TRANSCRIBE_PROGRESS, progressBar ); DDX_CONTROL_HANDLE( IDC_SAVE_TEXT, checkSave ) DDX_CONTROL_HANDLE( IDC_SAVE_APPEND, checkAppend ) DDX_CONTROL_HANDLE( IDC_SAVE_TIMESTAMPS, checkTimestamps ) DDX_CONTROL_HANDLE( IDC_PATH_RESULT, transcribeOutputPath ) DDX_CONTROL_HANDLE( IDC_BROWSE_RESULT, transcribeOutputBrowse ); DDX_CONTROL( IDC_VOICE_ACTIVITY, voiceActivity ); DDX_CONTROL( IDC_TRANS_STATUS, transcribeActivity ); DDX_CONTROL( IDC_STALL_STATUS, stalled ); END_DDX_MAP() private: PendingState pendingState; void setPending( bool nowPending ); LRESULT OnInitDialog( UINT nMessage, WPARAM wParam, LPARAM lParam, BOOL& bHandled ); void onClose() { 
ATLVERIFY( EndDialog( IDCANCEL ) ); } void onBack() { ATLVERIFY( EndDialog( IDC_BACK ) ); } void onTranscribe() { ATLVERIFY( EndDialog( IDC_TRANSCRIBE ) ); } // List capture devices, and populate the combobox bool listDevices(); void onDeviceRefresh(); bool selectDevice( LPCTSTR endpoint ); static HRESULT __stdcall listDevicesCallback( int len, const Whisper::sCaptureDevice* buffer, void* pv ) noexcept; ConsoleCheckbox cbConsole; LanguageDropdown languageSelector; TranslateCheckbox cbTranslate; CComboBox cbCaptureDevice; void onBrowseResult(); enum struct eTextFlags : uint32_t; CButton checkSave, checkAppend, checkTimestamps; CEdit transcribeOutputPath; CButton transcribeOutputBrowse; void onSaveTextCheckbox(); eTextFlags getOutputFlags(); CButton btnRunCapture; CProgressBarCtrl progressBar; ThreadPoolWork work; struct sCaptureDevice { CString displayName; CString endpoint; }; std::vector<sCaptureDevice> devices; void showError( LPCTSTR text, HRESULT hr ); CircleIndicator voiceActivity; CircleIndicator transcribeActivity; CircleIndicator stalled; struct sThreadState { volatile bool stopRequested; bool translate; eTextFlags flags; CAtlFile* file; uint32_t language; Whisper::sCaptureParams captureParams; CString endpoint; CString textOutputPath; CString errorMessage; }; sThreadState threadState; bool captureRunning = false; void getThreadError(); void onRunCapture(); HRESULT runCapture(); void __stdcall poolCallback() noexcept override final; LRESULT onThreadQuit( UINT nMessage, WPARAM wParam, LPARAM lParam, BOOL& bHandled ); LRESULT onThreadStatus( UINT nMessage, WPARAM wParam, LPARAM lParam, BOOL& bHandled ); static HRESULT __stdcall cbCancel( void* pv ) noexcept; static HRESULT __stdcall cbStatus( void* pv, Whisper::eCaptureStatus status ) noexcept; static HRESULT __cdecl newSegmentCallback( Whisper::iContext* ctx, uint32_t n_new, void* user_data ) noexcept; HRESULT appendTextFile( Whisper::iTranscribeResult* results, uint32_t newSegments ); };
4,137
C++
.h
116
33.224138
115
0.787127
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,769
Resource.h
Const-me_Whisper/Examples/WhisperDesktop/Resource.h
//{{NO_DEPENDENCIES}} // Microsoft Visual C++ generated include file. // Used by WhisperDesktop.rc // #define IDC_MYICON 2 #define IDD_WHISPERDESKTOP_DIALOG 102 #define IDM_ABOUT 104 #define IDI_WHISPERDESKTOP 107 #define IDI_SMALL 108 #define IDR_MAINFRAME 128 #define IDD_OPEN_MODEL 129 #define IDD_MAIN_DIALOG 130 #define IDD_TRANSCRIBE_DIALOG 130 #define IDD_CAPTURE_DIALOG 131 #define IDD_MODEL_ADV 132 #define IDC_PATH 1000 #define IDC_BROWSE 1001 #define IDC_MODEL_TYPE 1002 #define IDC_PATH_RESULT 1002 #define IDC_PROGRESS 1003 #define IDC_BROWSE_RESULT 1003 #define IDC_SYSLINK1 1004 #define IDC_HYPERLINKS 1004 #define IDC_TRANSCRIBE_PROGRESS 1004 #define IDC_PENDING_TEXT 1005 #define IDC_MODEL_DESC 1006 #define IDC_LANGUAGE 1007 #define IDC_OUTPUT_FORMAT 1008 #define IDC_PATH_MEDIA 1009 #define IDC_DEVICE 1009 #define IDC_BROWSE_MEDIA 1010 #define IDC_TRANSCRIBE 1011 #define IDC_BACK 1012 #define IDC_CHECK1 1013 #define IDC_CONSOLE 1013 #define IDC_CAPTURE 1014 #define IDC_DEV_REFRESH 1015 #define IDC_SAVE_TEXT 1016 #define IDC_SAVE_APPEND 1017 #define IDC_SAVE_TIMESTAMPS 1018 #define IDC_RUN_CAPTURE 1019 #define IDC_VOICE_ACTIVITY 1020 #define IDC_VOICE_ACTIVITY_LBL 1021 #define IDC_TRANS_STATUS 1022 #define IDC_TRANS_LBL 1023 #define IDC_STALL_STATUS 1024 #define IDC_STALL_LBL 1025 #define IDC_TRANSLATE 1026 #define IDC_MODEL_ADV 1027 #define IDC_WAVE 1028 #define IDC_RESHAPED_MAT_MUL 1029 #define IDC_CHECK2 1029 #define IDC_USE_INPUT_FOLDER 1029 #define IDC_RESHAPED_MAT_MUL2 1030 #define IDC_GPU 1030 #define IDC_STATIC -1 // Next default values for new objects // #ifdef APSTUDIO_INVOKED #ifndef APSTUDIO_READONLY_SYMBOLS #define _APS_NO_MFC 1 #define _APS_NEXT_RESOURCE_VALUE 131 #define _APS_NEXT_COMMAND_VALUE 32771 #define _APS_NEXT_CONTROL_VALUE 1030 #define _APS_NEXT_SYMED_VALUE 110 #endif #endif
2,764
C++
.h
67
40.223881
47
0.537092
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,770
CircleIndicator.h
Const-me_Whisper/Examples/WhisperDesktop/CircleIndicator.h
#pragma once #include "Utils/miscUtils.h" #include "Utils/WTL/atlcrack.h" // This control renders a black circle, and in the active state, the circle is filled with a bright color. class CircleIndicator: public CWindowImpl<CircleIndicator> { public: static ATL::CWndClassInfo& GetWndClassInfo(); BEGIN_MSG_MAP( CircleIndicator ) MSG_WM_PAINT( onPaint ) MSG_WM_DESTROY( onDestroy ) END_MSG_MAP() // Class registration static HRESULT registerClass(); void setActive( bool nowActive ); void setActiveColor( uint32_t col ) { activeColor = col; } CircleIndicator(); private: bool isActive = false; uint32_t activeColor; int fontHeight = 0; CFont font; HRESULT createFont( int height ); void onDestroy(); void onPaint( CDCHandle dc ); };
761
C++
.h
29
24.241379
106
0.768595
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,771
ModelAdvancedDlg.h
Const-me_Whisper/Examples/WhisperDesktop/ModelAdvancedDlg.h
#pragma once #include "AppState.h" #include "Utils/WTL/atlddx.h" #include "Utils/miscUtils.h" class ModelAdvancedDlg : public CDialogImpl<ModelAdvancedDlg> { CComboBox cbWave, cbReshapedMatMul, cbAdapter; AppState& appState; public: static constexpr UINT IDD = IDD_MODEL_ADV; ModelAdvancedDlg( AppState& app ) : appState( app ) { } BEGIN_MSG_MAP( ModelAdvancedDlg ) MESSAGE_HANDLER( WM_INITDIALOG, onInitDialog ) ON_BUTTON_CLICK( IDOK, onOk ) ON_BUTTON_CLICK( IDCANCEL, onCancel ) END_MSG_MAP() bool show( HWND owner ); private: LRESULT onInitDialog( UINT nMessage, WPARAM wParam, LPARAM lParam, BOOL& bHandled ); void onOk(); void onCancel() { EndDialog( IDCANCEL ); } };
702
C++
.h
26
24.884615
85
0.761976
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,772
TranscribeDlg.h
Const-me_Whisper/Examples/WhisperDesktop/TranscribeDlg.h
#pragma once #include "AppState.h" #include "Utils/WTL/atlddx.h" #include "Utils/WTL/atlcrack.h" #include "Utils/miscUtils.h" #include "Utils/LanguageDropdown.h" #include "Utils/TranslateCheckbox.h" #include "Utils/PendingState.h" class TranscribeDlg : public CDialogImpl<TranscribeDlg>, public CWinDataExchange<TranscribeDlg>, public iThreadPoolCallback { AppState& appState; public: static constexpr UINT IDD = IDD_TRANSCRIBE_DIALOG; static constexpr UINT WM_CALLBACK_STATUS = WM_APP + 1; TranscribeDlg( AppState& app ) : appState( app ) { } // Show this dialog modally, without parent. HRESULT show(); BEGIN_MSG_MAP( LoadModelDlg ) MESSAGE_HANDLER( WM_INITDIALOG, OnInitDialog ) ON_BUTTON_CLICK( IDC_CONSOLE, cbConsole.click ) ON_BUTTON_CLICK( IDCANCEL, onClose ) ON_BUTTON_CLICK( IDC_BACK, onBack ) ON_BUTTON_CLICK( IDC_USE_INPUT_FOLDER, onInputFolderCheck ) ON_BUTTON_CLICK( IDC_BROWSE_MEDIA, onBrowseMedia ) ON_BUTTON_CLICK( IDC_BROWSE_RESULT, onBrowseOutput ) ON_BUTTON_CLICK( IDC_TRANSCRIBE, onTranscribe ) ON_BUTTON_CLICK( IDC_CAPTURE, onCapture ) COMMAND_HANDLER( IDC_OUTPUT_FORMAT, CBN_SELCHANGE, onOutFormatChange ) COMMAND_HANDLER( IDC_PATH_MEDIA, EN_CHANGE, onInputChange ) MESSAGE_HANDLER( WM_CALLBACK_STATUS, onCallbackStatus ) MSG_WM_CLOSE( onWmClose ) END_MSG_MAP() BEGIN_DDX_MAP( LoadModelDlg ) DDX_CONTROL_HANDLE( IDC_MODEL_DESC, modelDesc ) DDX_CONTROL_HANDLE( IDC_PATH_MEDIA, sourceMediaPath ) DDX_CONTROL_HANDLE( IDC_OUTPUT_FORMAT, transcribeOutFormat ) DDX_CONTROL_HANDLE( IDC_USE_INPUT_FOLDER, useInputFolder ) DDX_CONTROL_HANDLE( IDC_PATH_RESULT, transcribeOutputPath ) DDX_CONTROL_HANDLE( IDC_BROWSE_RESULT, transcribeOutputBrowse ); DDX_CONTROL_HANDLE( IDC_TRANSCRIBE, transcribeButton ); DDX_CONTROL_HANDLE( IDC_TRANSCRIBE_PROGRESS, progressBar ); END_DDX_MAP() private: PendingState pendingState; void setPending( bool nowPending ); void transcribeError( LPCTSTR text, HRESULT hr = S_FALSE ); LRESULT OnInitDialog( UINT nMessage, WPARAM wParam, LPARAM lParam, BOOL& 
bHandled ); void onClose() { ATLVERIFY( EndDialog( IDCANCEL ) ); } void onBack() { ATLVERIFY( EndDialog( IDC_BACK ) ); } void printModelDescription(); CStatic modelDesc; ConsoleCheckbox cbConsole; LanguageDropdown languageSelector; TranslateCheckbox cbTranslate; CEdit sourceMediaPath; CButton useInputFolder; CEdit transcribeOutputPath; CButton transcribeOutputBrowse; CComboBox transcribeOutFormat; CButton transcribeButton; CProgressBarCtrl progressBar; void populateOutputFormats(); LRESULT onOutFormatChange( UINT, INT, HWND, BOOL& bHandled ); LRESULT onInputChange( UINT, INT, HWND, BOOL& ); void onInputFolderCheck(); void onBrowseMedia(); void onBrowseOutput(); // Despite the name, the method also handles the "Stop" button void onTranscribe(); void onCapture() { EndDialog( IDC_CAPTURE ); } ThreadPoolWork work; enum struct eOutputFormat : uint8_t; enum struct eVisualState : uint8_t; struct TranscribeArgs { CString pathMedia; CString pathOutput; uint32_t language; bool translate; eOutputFormat format; Whisper::eResultFlags resultFlags; volatile eVisualState visualState = (eVisualState)0; uint64_t startTime; int64_t mediaDuration; CString errorMessage; }; TranscribeArgs transcribeArgs; void __stdcall poolCallback() noexcept override final; LRESULT onCallbackStatus( UINT, WPARAM wParam, LPARAM, BOOL& bHandled ); HRESULT transcribe(); void getThreadError(); static HRESULT writeTextFile( const Whisper::sSegment* const segments, const size_t length, CAtlFile& file, bool timestamps ); static HRESULT writeSubRip( const Whisper::sSegment* const segments, const size_t length, CAtlFile& file ); static HRESULT writeWebVTT( const Whisper::sSegment* const segments, const size_t length, CAtlFile& file ); static HRESULT __cdecl newSegmentCallbackStatic( Whisper::iContext* ctx, uint32_t n_new, void* user_data ) noexcept; static HRESULT __cdecl encoderBeginCallback( Whisper::iContext* ctx, void* user_data ) noexcept; HRESULT newSegmentCallback( Whisper::iContext* ctx, uint32_t n_new 
); static HRESULT __cdecl progressCallbackStatic( double p, Whisper::iContext* ctx, void* pv ) noexcept; HRESULT progressCallback( double p ) noexcept; void onWmClose(); // Populate output path based on the provided input media path void setOutputPath( const CString& input ); // Populate output path based on the input media path in the edit box void setOutputPath(); };
4,528
C++
.h
117
36.324786
127
0.786007
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,773
AppState.h
Const-me_Whisper/Examples/WhisperDesktop/AppState.h
#pragma once #include "Utils/DebugConsole.h" class AppState { bool coInit = false; CRegKey registryKey; CIcon appIcon; public: struct ModelSource { CString path; bool found = false; Whisper::eModelImplementation impl = (Whisper::eModelImplementation)0; uint64_t sizeInBytes = 0; }; ModelSource source; DebugConsole console; CComPtr<Whisper::iMediaFoundation> mediaFoundation; CComPtr<Whisper::iModel> model; ~AppState(); // Setup the initial things HRESULT startup(); HRESULT findModelSource(); HRESULT saveModelSource(); uint32_t languageRead(); void languageWrite( uint32_t key ); CString stringLoad( LPCTSTR name ); void stringStore( LPCTSTR name, LPCTSTR value ); uint32_t dwordLoad( LPCTSTR name, uint32_t fallback ); void dwordStore( LPCTSTR name, uint32_t value ); bool boolLoad( LPCTSTR name ); void boolStore( LPCTSTR name, bool val ); bool automaticallyLoadModel = true; void lastScreenSave( HRESULT code ); HRESULT lastScreenLoad(); void setupIcon( CWindow* wnd ); uint32_t gpuFlagsLoad(); void gpuFlagsStore( uint32_t flags ); }; constexpr HRESULT SCREEN_MODEL = 1; constexpr HRESULT SCREEN_TRANSCRIBE = 2; constexpr HRESULT SCREEN_CAPTURE = 3;
1,207
C++
.h
42
26.547619
72
0.780382
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,774
PendingState.h
Const-me_Whisper/Examples/WhisperDesktop/Utils/PendingState.h
#pragma once // Utility class to switch visual state of dialog controls between idle and pending class PendingState { std::vector<HWND> editorsWindows; std::vector<bool> wasEnabled; std::vector<HWND> pendingWindows; public: void initialize( std::initializer_list<HWND> editors, std::initializer_list<HWND> pending ); void setPending( bool nowPending ); };
361
C++
.h
11
31.363636
93
0.794286
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,775
LanguageDropdown.h
Const-me_Whisper/Examples/WhisperDesktop/Utils/LanguageDropdown.h
#pragma once #include "../AppState.h" // Dropdown list which implements language selector control class LanguageDropdown { HWND m_hWnd = nullptr; std::vector<uint32_t> keys; int getInitialSelection( AppState& state ) const; public: operator HWND() const { return m_hWnd; } // Query language list form the native library, populate the combo box // Then load the last saved language selection from registry, and preselect an item. void initialize( HWND owner, int idc, AppState& state ); // Get the ID of the currently selected language, or UINT_MAX if nothing's selected uint32_t selectedLanguage(); // Get the ID of the currently selected language, and store in registry void saveSelection( AppState& state ); };
732
C++
.h
21
32.952381
85
0.772277
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,776
DebugConsole.h
Const-me_Whisper/Examples/WhisperDesktop/Utils/DebugConsole.h
#pragma once #include <whisperWindows.h> #include <deque> #include <unordered_set> class AppState; class DebugConsole { using eLogLevel = Whisper::eLogLevel; struct Entry { eLogLevel level; CStringA message; HRESULT print( HANDLE hConsole, CString& tempString ) const; }; CComAutoCriticalSection critSec; std::deque<Entry> buffer; CString tempString; CHandle output; inline void logSink( eLogLevel lvl, const char* message ); static void __stdcall logSinkStatic( void* context, eLogLevel lvl, const char* message ); static BOOL __stdcall consoleHandlerRoutine( DWORD dwCtrlType ); static DebugConsole* pGlobalInstance; void windowClosed(); std::unordered_set<CButton*> checkboxes; CStringA tempStringA; void log( eLogLevel lvl, const char* pszFormat, va_list args ); public: HRESULT initialize( eLogLevel level = eLogLevel::Debug ); ~DebugConsole(); HRESULT show(); HRESULT hide(); bool isVisible() const { return output; } void addCheckbox( CButton& cb ); void removeCheckbox( CButton& cb ); static void logMessage( eLogLevel lvl, const char* pszFormat, va_list args ); }; class ConsoleCheckbox { CButton control; DebugConsole* console = nullptr; public: HRESULT initialize( HWND dialog, int idc, AppState& state ); void click(); ~ConsoleCheckbox() { if( nullptr != console ) console->removeCheckbox( control ); } void ensureChecked(); };
1,396
C++
.h
50
25.8
90
0.770443
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,777
logger.h
Const-me_Whisper/Examples/WhisperDesktop/Utils/logger.h
#pragma once #include <whisperWindows.h> #include <cstdarg> void logMessage( Whisper::eLogLevel lvl, const char8_t* pszFormat, va_list args ); #define LOG_MESSAGE_IMPL( lvl ) \ std::va_list args; \ va_start( args, pszFormat ); \ logMessage( lvl, pszFormat, args ); \ va_end( args ) inline void logError( const char8_t* pszFormat, ... ) { LOG_MESSAGE_IMPL( Whisper::eLogLevel::Error ); } inline void logWarning( const char8_t* pszFormat, ... ) { LOG_MESSAGE_IMPL( Whisper::eLogLevel::Warning ); } inline void logInfo( const char8_t* pszFormat, ... ) { LOG_MESSAGE_IMPL( Whisper::eLogLevel::Info ); } inline void logDebug( const char8_t* pszFormat, ... ) { LOG_MESSAGE_IMPL( Whisper::eLogLevel::Debug ); } #undef LOG_MESSAGE_IMPL HRESULT logNewSegments( const Whisper::iTranscribeResult* results, size_t newSegments, bool printSpecial = false ); void clearLastError(); bool getLastError( CString& rdi ); void printTime( CStringA& rdi, Whisper::sTimeSpan time, bool comma = false );
1,054
C++
.h
30
33.666667
115
0.696464
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,778
TranslateCheckbox.h
Const-me_Whisper/Examples/WhisperDesktop/Utils/TranslateCheckbox.h
#pragma once #include "../AppState.h" class TranslateCheckbox { HWND m_hWnd = nullptr; public: operator HWND() const { return m_hWnd; } void initialize( HWND owner, int idc, AppState& state ); bool checked(); void saveSelection( AppState& state ); };
263
C++
.h
14
16.928571
57
0.735772
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,779
miscUtils.h
Const-me_Whisper/Examples/WhisperDesktop/Utils/miscUtils.h
#pragma once #include <iContext.h> #include "logger.h" CString formatErrorMessage( HRESULT hr ); void reportFatalError( const char* what, HRESULT hr ); #define CHECK( hr ) { const HRESULT __hr = ( hr ); if( FAILED( __hr ) ) return __hr; } HRESULT implParse( const CString& s, Whisper::eModelImplementation& rdi ); LPCTSTR implString( Whisper::eModelImplementation i ); void implPopulateCombobox( CComboBox& cb, Whisper::eModelImplementation impl ); Whisper::eModelImplementation implGetValue( CComboBox& cb ); __interface iThreadPoolCallback { void __stdcall poolCallback() noexcept; }; class ThreadPoolWork { PTP_WORK work = nullptr; static void __stdcall callback( PTP_CALLBACK_INSTANCE Instance, PVOID Context, PTP_WORK Work ); public: ~ThreadPoolWork(); HRESULT create( iThreadPoolCallback* cb ); HRESULT post(); }; void makeUtf16( CString& rdi, const char* utf8 ); void makeUtf8( CStringA& rdi, const CString& utf16 ); bool getOpenFileName( HWND owner, LPCTSTR title, LPCTSTR filter, CString& path ); bool getSaveFileName( HWND owner, LPCTSTR title, LPCTSTR filter, CString& path, DWORD* filterIndex = nullptr ); #define ON_BUTTON_CLICK( id, func ) \ if( uMsg == WM_COMMAND && \ id == LOWORD( wParam ) ) \ { \ bHandled = TRUE; \ func(); \ lResult = 0; \ return TRUE; \ } void reportError( HWND owner, LPCTSTR text, LPCTSTR title, HRESULT hr = S_FALSE ); inline const wchar_t* cstr( const CString& s ) { return s; } inline const char* cstr( const CStringA& s ) { return s; } inline HRESULT getLastHr() { return HRESULT_FROM_WIN32( GetLastError() ); } HRESULT writeUtf8Bom( CAtlFile& file ); // Flip order of bytes from RGB to BGR, or vice versa inline uint32_t flipRgb( uint32_t val ) { val = _byteswap_ulong( val ); return val >> 8; } bool isInvalidTranslate( HWND owner, uint32_t lang, bool translate ); inline bool isChecked( CButton& btn ) { return btn.GetCheck() == BST_CHECKED; }
2,048
C++
.h
55
35.309091
111
0.696247
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,785
atlcrack.h
Const-me_Whisper/Examples/WhisperDesktop/Utils/WTL/atlcrack.h
// Windows Template Library - WTL version 10.0 // Copyright (C) Microsoft Corporation, WTL Team. All rights reserved. // // This file is a part of the Windows Template Library. // The use and distribution terms for this software are covered by the // Microsoft Public License (http://opensource.org/licenses/MS-PL) // which can be found in the file MS-PL.txt at the root folder. #ifndef __ATLCRACK_H__ #define __ATLCRACK_H__ #pragma once #ifndef __ATLAPP_H__ #error atlcrack.h requires atlapp.h to be included first #endif /////////////////////////////////////////////////////////////////////////////// // Message map macro for cracked handlers // Note about message maps with cracked handlers: // You can use BEGIN_MSG_MAP for classes that derive from CWindowImpl/CDialogImpl, // but must use BEGIN_MSG_MAP_EX for classes that don't. #define BEGIN_MSG_MAP_EX(theClass) \ public: \ BOOL m_bMsgHandled; \ /* "handled" management for cracked handlers */ \ BOOL IsMsgHandled() const \ { \ return m_bMsgHandled; \ } \ void SetMsgHandled(BOOL bHandled) \ { \ m_bMsgHandled = bHandled; \ } \ BOOL ProcessWindowMessage(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam, LRESULT& lResult, DWORD dwMsgMapID = 0) \ { \ BOOL bOldMsgHandled = m_bMsgHandled; \ BOOL bRet = _ProcessWindowMessage(hWnd, uMsg, wParam, lParam, lResult, dwMsgMapID); \ m_bMsgHandled = bOldMsgHandled; \ return bRet; \ } \ BOOL _ProcessWindowMessage(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam, LRESULT& lResult, DWORD dwMsgMapID) \ { \ BOOL bHandled = TRUE; \ (hWnd); \ (uMsg); \ (wParam); \ (lParam); \ (lResult); \ (bHandled); \ switch(dwMsgMapID) \ { \ case 0: /////////////////////////////////////////////////////////////////////////////// // Standard Windows message macros // int OnCreate(LPCREATESTRUCT lpCreateStruct) #define MSG_WM_CREATE(func) \ if (uMsg == WM_CREATE) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((LPCREATESTRUCT)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL 
OnInitDialog(CWindow wndFocus, LPARAM lInitParam) #define MSG_WM_INITDIALOG(func) \ if (uMsg == WM_INITDIALOG) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HWND)wParam, lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnCopyData(CWindow wnd, PCOPYDATASTRUCT pCopyDataStruct) #define MSG_WM_COPYDATA(func) \ if (uMsg == WM_COPYDATA) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HWND)wParam, (PCOPYDATASTRUCT)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDestroy() #define MSG_WM_DESTROY(func) \ if (uMsg == WM_DESTROY) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMove(CPoint ptPos) #define MSG_WM_MOVE(func) \ if (uMsg == WM_MOVE) \ { \ this->SetMsgHandled(TRUE); \ func(::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSize(UINT nType, CSize size) #define MSG_WM_SIZE(func) \ if (uMsg == WM_SIZE) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CSize(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnActivate(UINT nState, BOOL bMinimized, CWindow wndOther) #define MSG_WM_ACTIVATE(func) \ if (uMsg == WM_ACTIVATE) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)LOWORD(wParam), (BOOL)HIWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSetFocus(CWindow wndOld) #define MSG_WM_SETFOCUS(func) \ if (uMsg == WM_SETFOCUS) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnKillFocus(CWindow wndFocus) #define MSG_WM_KILLFOCUS(func) \ if (uMsg == WM_KILLFOCUS) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnEnable(BOOL bEnable) #define MSG_WM_ENABLE(func) \ if (uMsg == WM_ENABLE) \ { \ this->SetMsgHandled(TRUE); \ 
func((BOOL)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnPaint(CDCHandle dc) #define MSG_WM_PAINT(func) \ if (uMsg == WM_PAINT) \ { \ this->SetMsgHandled(TRUE); \ func((HDC)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnClose() #define MSG_WM_CLOSE(func) \ if (uMsg == WM_CLOSE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnQueryEndSession(UINT nSource, UINT uLogOff) #define MSG_WM_QUERYENDSESSION(func) \ if (uMsg == WM_QUERYENDSESSION) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)wParam, (UINT)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnQueryOpen() #define MSG_WM_QUERYOPEN(func) \ if (uMsg == WM_QUERYOPEN) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func(); \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnEraseBkgnd(CDCHandle dc) #define MSG_WM_ERASEBKGND(func) \ if (uMsg == WM_ERASEBKGND) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSysColorChange() #define MSG_WM_SYSCOLORCHANGE(func) \ if (uMsg == WM_SYSCOLORCHANGE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnEndSession(BOOL bEnding, UINT uLogOff) #define MSG_WM_ENDSESSION(func) \ if (uMsg == WM_ENDSESSION) \ { \ this->SetMsgHandled(TRUE); \ func((BOOL)wParam, (UINT)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnShowWindow(BOOL bShow, UINT nStatus) #define MSG_WM_SHOWWINDOW(func) \ if (uMsg == WM_SHOWWINDOW) \ { \ this->SetMsgHandled(TRUE); \ func((BOOL)wParam, (int)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnCtlColorEdit(CDCHandle dc, CEdit edit) #define MSG_WM_CTLCOLOREDIT(func) \ if (uMsg == WM_CTLCOLOREDIT) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ 
if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnCtlColorListBox(CDCHandle dc, CListBox listBox) #define MSG_WM_CTLCOLORLISTBOX(func) \ if (uMsg == WM_CTLCOLORLISTBOX) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnCtlColorBtn(CDCHandle dc, CButton button) #define MSG_WM_CTLCOLORBTN(func) \ if (uMsg == WM_CTLCOLORBTN) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnCtlColorDlg(CDCHandle dc, CWindow wnd) #define MSG_WM_CTLCOLORDLG(func) \ if (uMsg == WM_CTLCOLORDLG) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnCtlColorScrollBar(CDCHandle dc, CScrollBar scrollBar) #define MSG_WM_CTLCOLORSCROLLBAR(func) \ if (uMsg == WM_CTLCOLORSCROLLBAR) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnCtlColorStatic(CDCHandle dc, CStatic wndStatic) #define MSG_WM_CTLCOLORSTATIC(func) \ if (uMsg == WM_CTLCOLORSTATIC) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSettingChange(UINT uFlags, LPCTSTR lpszSection) #define MSG_WM_SETTINGCHANGE(func) \ if (uMsg == WM_SETTINGCHANGE) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPCTSTR)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDevModeChange(LPCTSTR lpDeviceName) #define MSG_WM_DEVMODECHANGE(func) \ if (uMsg == WM_DEVMODECHANGE) \ { \ this->SetMsgHandled(TRUE); \ func((LPCTSTR)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnActivateApp(BOOL bActive, DWORD dwThreadID) #define MSG_WM_ACTIVATEAPP(func) \ if (uMsg == WM_ACTIVATEAPP) \ { \ this->SetMsgHandled(TRUE); \ 
func((BOOL)wParam, (DWORD)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnFontChange() #define MSG_WM_FONTCHANGE(func) \ if (uMsg == WM_FONTCHANGE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnTimeChange() #define MSG_WM_TIMECHANGE(func) \ if (uMsg == WM_TIMECHANGE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnCancelMode() #define MSG_WM_CANCELMODE(func) \ if (uMsg == WM_CANCELMODE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnSetCursor(CWindow wnd, UINT nHitTest, UINT message) #define MSG_WM_SETCURSOR(func) \ if (uMsg == WM_SETCURSOR) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HWND)wParam, (UINT)LOWORD(lParam), (UINT)HIWORD(lParam)); \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnMouseActivate(CWindow wndTopLevel, UINT nHitTest, UINT message) #define MSG_WM_MOUSEACTIVATE(func) \ if (uMsg == WM_MOUSEACTIVATE) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HWND)wParam, (UINT)LOWORD(lParam), (UINT)HIWORD(lParam)); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnChildActivate() #define MSG_WM_CHILDACTIVATE(func) \ if (uMsg == WM_CHILDACTIVATE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnGetMinMaxInfo(LPMINMAXINFO lpMMI) #define MSG_WM_GETMINMAXINFO(func) \ if (uMsg == WM_GETMINMAXINFO) \ { \ this->SetMsgHandled(TRUE); \ func((LPMINMAXINFO)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnIconEraseBkgnd(CDCHandle dc) #define MSG_WM_ICONERASEBKGND(func) \ if (uMsg == WM_ICONERASEBKGND) \ { \ this->SetMsgHandled(TRUE); \ func((HDC)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSpoolerStatus(UINT nStatus, UINT nJobs) #define MSG_WM_SPOOLERSTATUS(func) \ if (uMsg 
== WM_SPOOLERSTATUS) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (UINT)LOWORD(lParam)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDrawItem(int nIDCtl, LPDRAWITEMSTRUCT lpDrawItemStruct) #define MSG_WM_DRAWITEM(func) \ if (uMsg == WM_DRAWITEM) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPDRAWITEMSTRUCT)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMeasureItem(int nIDCtl, LPMEASUREITEMSTRUCT lpMeasureItemStruct) #define MSG_WM_MEASUREITEM(func) \ if (uMsg == WM_MEASUREITEM) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPMEASUREITEMSTRUCT)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDeleteItem(int nIDCtl, LPDELETEITEMSTRUCT lpDeleteItemStruct) #define MSG_WM_DELETEITEM(func) \ if (uMsg == WM_DELETEITEM) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPDELETEITEMSTRUCT)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } //int OnCharToItem(UINT nChar, UINT nIndex, CListBox listBox) #define MSG_WM_CHARTOITEM(func) \ if (uMsg == WM_CHARTOITEM) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)LOWORD(wParam), (UINT)HIWORD(wParam), (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnVKeyToItem(UINT nKey, UINT nIndex, CListBox listBox) #define MSG_WM_VKEYTOITEM(func) \ if (uMsg == WM_VKEYTOITEM) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)LOWORD(wParam), (UINT)HIWORD(wParam), (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HCURSOR OnQueryDragIcon() #define MSG_WM_QUERYDRAGICON(func) \ if (uMsg == WM_QUERYDRAGICON) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func(); \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnCompareItem(int nIDCtl, LPCOMPAREITEMSTRUCT lpCompareItemStruct) #define MSG_WM_COMPAREITEM(func) \ if (uMsg == WM_COMPAREITEM) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)wParam, 
(LPCOMPAREITEMSTRUCT)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnCompacting(UINT nCpuTime) #define MSG_WM_COMPACTING(func) \ if (uMsg == WM_COMPACTING) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnNcCreate(LPCREATESTRUCT lpCreateStruct) #define MSG_WM_NCCREATE(func) \ if (uMsg == WM_NCCREATE) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((LPCREATESTRUCT)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcDestroy() #define MSG_WM_NCDESTROY(func) \ if (uMsg == WM_NCDESTROY) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnNcCalcSize(BOOL bCalcValidRects, LPARAM lParam) #define MSG_WM_NCCALCSIZE(func) \ if (uMsg == WM_NCCALCSIZE) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((BOOL)wParam, lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // UINT OnNcHitTest(CPoint point) #define MSG_WM_NCHITTEST(func) \ if (uMsg == WM_NCHITTEST) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func(::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcPaint(CRgnHandle rgn) #define MSG_WM_NCPAINT(func) \ if (uMsg == WM_NCPAINT) \ { \ this->SetMsgHandled(TRUE); \ func((HRGN)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnNcActivate(BOOL bActive) #define MSG_WM_NCACTIVATE(func) \ if (uMsg == WM_NCACTIVATE) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((BOOL)wParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // UINT OnGetDlgCode(LPMSG lpMsg) #define MSG_WM_GETDLGCODE(func) \ if (uMsg == WM_GETDLGCODE) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((LPMSG)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcMouseMove(UINT nHitTest, CPoint point) #define MSG_WM_NCMOUSEMOVE(func) \ if (uMsg == WM_NCMOUSEMOVE) \ { \ this->SetMsgHandled(TRUE); \ 
func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcLButtonDown(UINT nHitTest, CPoint point) #define MSG_WM_NCLBUTTONDOWN(func) \ if (uMsg == WM_NCLBUTTONDOWN) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcLButtonUp(UINT nHitTest, CPoint point) #define MSG_WM_NCLBUTTONUP(func) \ if (uMsg == WM_NCLBUTTONUP) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcLButtonDblClk(UINT nHitTest, CPoint point) #define MSG_WM_NCLBUTTONDBLCLK(func) \ if (uMsg == WM_NCLBUTTONDBLCLK) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcRButtonDown(UINT nHitTest, CPoint point) #define MSG_WM_NCRBUTTONDOWN(func) \ if (uMsg == WM_NCRBUTTONDOWN) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcRButtonUp(UINT nHitTest, CPoint point) #define MSG_WM_NCRBUTTONUP(func) \ if (uMsg == WM_NCRBUTTONUP) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcRButtonDblClk(UINT nHitTest, CPoint point) #define MSG_WM_NCRBUTTONDBLCLK(func) \ if (uMsg == WM_NCRBUTTONDBLCLK) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcMButtonDown(UINT nHitTest, CPoint point) #define MSG_WM_NCMBUTTONDOWN(func) \ if (uMsg == WM_NCMBUTTONDOWN) \ { 
\ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcMButtonUp(UINT nHitTest, CPoint point) #define MSG_WM_NCMBUTTONUP(func) \ if (uMsg == WM_NCMBUTTONUP) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcMButtonDblClk(UINT nHitTest, CPoint point) #define MSG_WM_NCMBUTTONDBLCLK(func) \ if (uMsg == WM_NCMBUTTONDBLCLK) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnKeyDown(UINT nChar, UINT nRepCnt, UINT nFlags) #define MSG_WM_KEYDOWN(func) \ if (uMsg == WM_KEYDOWN) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (UINT)lParam & 0xFFFF, (UINT)((lParam & 0xFFFF0000) >> 16)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnKeyUp(UINT nChar, UINT nRepCnt, UINT nFlags) #define MSG_WM_KEYUP(func) \ if (uMsg == WM_KEYUP) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (UINT)lParam & 0xFFFF, (UINT)((lParam & 0xFFFF0000) >> 16)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnChar(TCHAR chChar, UINT nRepCnt, UINT nFlags) #define MSG_WM_CHAR(func) \ if (uMsg == WM_CHAR) \ { \ this->SetMsgHandled(TRUE); \ func((TCHAR)wParam, (UINT)lParam & 0xFFFF, (UINT)((lParam & 0xFFFF0000) >> 16)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDeadChar(TCHAR chChar, UINT nRepCnt, UINT nFlags) #define MSG_WM_DEADCHAR(func) \ if (uMsg == WM_DEADCHAR) \ { \ this->SetMsgHandled(TRUE); \ func((TCHAR)wParam, (UINT)lParam & 0xFFFF, (UINT)((lParam & 0xFFFF0000) >> 16)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSysKeyDown(UINT nChar, UINT nRepCnt, UINT nFlags) #define MSG_WM_SYSKEYDOWN(func) \ if (uMsg == 
WM_SYSKEYDOWN) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (UINT)lParam & 0xFFFF, (UINT)((lParam & 0xFFFF0000) >> 16)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSysKeyUp(UINT nChar, UINT nRepCnt, UINT nFlags) #define MSG_WM_SYSKEYUP(func) \ if (uMsg == WM_SYSKEYUP) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (UINT)lParam & 0xFFFF, (UINT)((lParam & 0xFFFF0000) >> 16)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSysChar(TCHAR chChar, UINT nRepCnt, UINT nFlags) #define MSG_WM_SYSCHAR(func) \ if (uMsg == WM_SYSCHAR) \ { \ this->SetMsgHandled(TRUE); \ func((TCHAR)wParam, (UINT)lParam & 0xFFFF, (UINT)((lParam & 0xFFFF0000) >> 16)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSysDeadChar(TCHAR chChar, UINT nRepCnt, UINT nFlags) #define MSG_WM_SYSDEADCHAR(func) \ if (uMsg == WM_SYSDEADCHAR) \ { \ this->SetMsgHandled(TRUE); \ func((TCHAR)wParam, (UINT)lParam & 0xFFFF, (UINT)((lParam & 0xFFFF0000) >> 16)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSysCommand(UINT nID, CPoint point) #define MSG_WM_SYSCOMMAND(func) \ if (uMsg == WM_SYSCOMMAND) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnTCard(UINT idAction, DWORD dwActionData) #define MSG_WM_TCARD(func) \ if (uMsg == WM_TCARD) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (DWORD)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnTimer(UINT_PTR nIDEvent) #define MSG_WM_TIMER(func) \ if (uMsg == WM_TIMER) \ { \ this->SetMsgHandled(TRUE); \ func((UINT_PTR)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnHScroll(UINT nSBCode, UINT nPos, CScrollBar pScrollBar) #define MSG_WM_HSCROLL(func) \ if (uMsg == WM_HSCROLL) \ { \ this->SetMsgHandled(TRUE); \ func((int)LOWORD(wParam), (short)HIWORD(wParam), 
(HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnVScroll(UINT nSBCode, UINT nPos, CScrollBar pScrollBar) #define MSG_WM_VSCROLL(func) \ if (uMsg == WM_VSCROLL) \ { \ this->SetMsgHandled(TRUE); \ func((int)LOWORD(wParam), (short)HIWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnInitMenu(CMenuHandle menu) #define MSG_WM_INITMENU(func) \ if (uMsg == WM_INITMENU) \ { \ this->SetMsgHandled(TRUE); \ func((HMENU)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnInitMenuPopup(CMenuHandle menuPopup, UINT nIndex, BOOL bSysMenu) #define MSG_WM_INITMENUPOPUP(func) \ if (uMsg == WM_INITMENUPOPUP) \ { \ this->SetMsgHandled(TRUE); \ func((HMENU)wParam, (UINT)LOWORD(lParam), (BOOL)HIWORD(lParam)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMenuSelect(UINT nItemID, UINT nFlags, CMenuHandle menu) #define MSG_WM_MENUSELECT(func) \ if (uMsg == WM_MENUSELECT) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)LOWORD(wParam), (UINT)HIWORD(wParam), (HMENU)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnMenuChar(UINT nChar, UINT nFlags, CMenuHandle menu) #define MSG_WM_MENUCHAR(func) \ if (uMsg == WM_MENUCHAR) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((TCHAR)LOWORD(wParam), (UINT)HIWORD(wParam), (HMENU)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnNotify(int idCtrl, LPNMHDR pnmh) #define MSG_WM_NOTIFY(func) \ if (uMsg == WM_NOTIFY) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((int)wParam, (LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnEnterIdle(UINT nWhy, CWindow wndWho) #define MSG_WM_ENTERIDLE(func) \ if (uMsg == WM_ENTERIDLE) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMouseMove(UINT nFlags, CPoint point) #define MSG_WM_MOUSEMOVE(func) \ if (uMsg == 
WM_MOUSEMOVE) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnMouseWheel(UINT nFlags, short zDelta, CPoint pt) #define MSG_WM_MOUSEWHEEL(func) \ if (uMsg == WM_MOUSEWHEEL) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)LOWORD(wParam), (short)HIWORD(wParam), ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnLButtonDown(UINT nFlags, CPoint point) #define MSG_WM_LBUTTONDOWN(func) \ if (uMsg == WM_LBUTTONDOWN) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnLButtonUp(UINT nFlags, CPoint point) #define MSG_WM_LBUTTONUP(func) \ if (uMsg == WM_LBUTTONUP) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnLButtonDblClk(UINT nFlags, CPoint point) #define MSG_WM_LBUTTONDBLCLK(func) \ if (uMsg == WM_LBUTTONDBLCLK) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnRButtonDown(UINT nFlags, CPoint point) #define MSG_WM_RBUTTONDOWN(func) \ if (uMsg == WM_RBUTTONDOWN) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnRButtonUp(UINT nFlags, CPoint point) #define MSG_WM_RBUTTONUP(func) \ if (uMsg == WM_RBUTTONUP) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnRButtonDblClk(UINT nFlags, CPoint point) #define MSG_WM_RBUTTONDBLCLK(func) 
\ if (uMsg == WM_RBUTTONDBLCLK) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMButtonDown(UINT nFlags, CPoint point) #define MSG_WM_MBUTTONDOWN(func) \ if (uMsg == WM_MBUTTONDOWN) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMButtonUp(UINT nFlags, CPoint point) #define MSG_WM_MBUTTONUP(func) \ if (uMsg == WM_MBUTTONUP) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMButtonDblClk(UINT nFlags, CPoint point) #define MSG_WM_MBUTTONDBLCLK(func) \ if (uMsg == WM_MBUTTONDBLCLK) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnParentNotify(UINT message, UINT nChildID, LPARAM lParam) #define MSG_WM_PARENTNOTIFY(func) \ if (uMsg == WM_PARENTNOTIFY) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)LOWORD(wParam), (UINT)HIWORD(wParam), lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMDIActivate(CWindow wndDeactivate, CWindow wndActivate) #define MSG_WM_MDIACTIVATE(func) \ if (uMsg == WM_MDIACTIVATE) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam, (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnRenderFormat(UINT nFormat) #define MSG_WM_RENDERFORMAT(func) \ if (uMsg == WM_RENDERFORMAT) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnRenderAllFormats() #define MSG_WM_RENDERALLFORMATS(func) \ if (uMsg == WM_RENDERALLFORMATS) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ 
return TRUE; \ } // void OnDestroyClipboard() #define MSG_WM_DESTROYCLIPBOARD(func) \ if (uMsg == WM_DESTROYCLIPBOARD) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDrawClipboard() #define MSG_WM_DRAWCLIPBOARD(func) \ if (uMsg == WM_DRAWCLIPBOARD) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnPaintClipboard(CWindow wndViewer, const LPPAINTSTRUCT lpPaintStruct) #define MSG_WM_PAINTCLIPBOARD(func) \ if (uMsg == WM_PAINTCLIPBOARD) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam, (const LPPAINTSTRUCT)::GlobalLock((HGLOBAL)lParam)); \ ::GlobalUnlock((HGLOBAL)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnVScrollClipboard(CWindow wndViewer, UINT nSBCode, UINT nPos) #define MSG_WM_VSCROLLCLIPBOARD(func) \ if (uMsg == WM_VSCROLLCLIPBOARD) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam, (UINT)LOWORD(lParam), (UINT)HIWORD(lParam)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnContextMenu(CWindow wnd, CPoint point) #define MSG_WM_CONTEXTMENU(func) \ if (uMsg == WM_CONTEXTMENU) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSizeClipboard(CWindow wndViewer, const LPRECT lpRect) #define MSG_WM_SIZECLIPBOARD(func) \ if (uMsg == WM_SIZECLIPBOARD) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam, (const LPRECT)::GlobalLock((HGLOBAL)lParam)); \ ::GlobalUnlock((HGLOBAL)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnAskCbFormatName(UINT nMaxCount, LPTSTR lpszString) #define MSG_WM_ASKCBFORMATNAME(func) \ if (uMsg == WM_ASKCBFORMATNAME) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPTSTR)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnChangeCbChain(CWindow wndRemove, CWindow 
wndAfter) #define MSG_WM_CHANGECBCHAIN(func) \ if (uMsg == WM_CHANGECBCHAIN) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam, (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnHScrollClipboard(CWindow wndViewer, UINT nSBCode, UINT nPos) #define MSG_WM_HSCROLLCLIPBOARD(func) \ if (uMsg == WM_HSCROLLCLIPBOARD) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam, (UINT)LOWORD(lParam), (UINT)HIWORD(lParam)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnQueryNewPalette() #define MSG_WM_QUERYNEWPALETTE(func) \ if (uMsg == WM_QUERYNEWPALETTE) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func(); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnPaletteChanged(CWindow wndFocus) #define MSG_WM_PALETTECHANGED(func) \ if (uMsg == WM_PALETTECHANGED) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnPaletteIsChanging(CWindow wndPalChg) #define MSG_WM_PALETTEISCHANGING(func) \ if (uMsg == WM_PALETTEISCHANGING) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDropFiles(HDROP hDropInfo) #define MSG_WM_DROPFILES(func) \ if (uMsg == WM_DROPFILES) \ { \ this->SetMsgHandled(TRUE); \ func((HDROP)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnWindowPosChanging(LPWINDOWPOS lpWndPos) #define MSG_WM_WINDOWPOSCHANGING(func) \ if (uMsg == WM_WINDOWPOSCHANGING) \ { \ this->SetMsgHandled(TRUE); \ func((LPWINDOWPOS)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnWindowPosChanged(LPWINDOWPOS lpWndPos) #define MSG_WM_WINDOWPOSCHANGED(func) \ if (uMsg == WM_WINDOWPOSCHANGED) \ { \ this->SetMsgHandled(TRUE); \ func((LPWINDOWPOS)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnExitMenuLoop(BOOL fIsTrackPopupMenu) #define MSG_WM_EXITMENULOOP(func) \ if (uMsg == 
WM_EXITMENULOOP) \ { \ this->SetMsgHandled(TRUE); \ func((BOOL)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnEnterMenuLoop(BOOL fIsTrackPopupMenu) #define MSG_WM_ENTERMENULOOP(func) \ if (uMsg == WM_ENTERMENULOOP) \ { \ this->SetMsgHandled(TRUE); \ func((BOOL)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnStyleChanged(int nStyleType, LPSTYLESTRUCT lpStyleStruct) #define MSG_WM_STYLECHANGED(func) \ if (uMsg == WM_STYLECHANGED) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPSTYLESTRUCT)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnStyleChanging(int nStyleType, LPSTYLESTRUCT lpStyleStruct) #define MSG_WM_STYLECHANGING(func) \ if (uMsg == WM_STYLECHANGING) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPSTYLESTRUCT)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSizing(UINT fwSide, LPRECT pRect) #define MSG_WM_SIZING(func) \ if (uMsg == WM_SIZING) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPRECT)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMoving(UINT fwSide, LPRECT pRect) #define MSG_WM_MOVING(func) \ if (uMsg == WM_MOVING) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPRECT)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnCaptureChanged(CWindow wnd) #define MSG_WM_CAPTURECHANGED(func) \ if (uMsg == WM_CAPTURECHANGED) \ { \ this->SetMsgHandled(TRUE); \ func((HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnDeviceChange(UINT nEventType, DWORD_PTR dwData) #define MSG_WM_DEVICECHANGE(func) \ if (uMsg == WM_DEVICECHANGE) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)wParam, (DWORD_PTR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnCommand(UINT uNotifyCode, int nID, CWindow wndCtl) #define MSG_WM_COMMAND(func) \ if (uMsg == WM_COMMAND) \ { \ 
this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDisplayChange(UINT uBitsPerPixel, CSize sizeScreen) #define MSG_WM_DISPLAYCHANGE(func) \ if (uMsg == WM_DISPLAYCHANGE) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CSize(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnEnterSizeMove() #define MSG_WM_ENTERSIZEMOVE(func) \ if (uMsg == WM_ENTERSIZEMOVE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnExitSizeMove() #define MSG_WM_EXITSIZEMOVE(func) \ if (uMsg == WM_EXITSIZEMOVE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // HFONT OnGetFont() #define MSG_WM_GETFONT(func) \ if (uMsg == WM_GETFONT) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func(); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnGetHotKey() #define MSG_WM_GETHOTKEY(func) \ if (uMsg == WM_GETHOTKEY) \ { \ this->SetMsgHandled(TRUE); \ lResult = func(); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HICON OnGetIcon() #define MSG_WM_GETICON(func) \ if (uMsg == WM_GETICON) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)wParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnGetText(int cchTextMax, LPTSTR lpszText) #define MSG_WM_GETTEXT(func) \ if (uMsg == WM_GETTEXT) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((int)wParam, (LPTSTR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnGetTextLength() #define MSG_WM_GETTEXTLENGTH(func) \ if (uMsg == WM_GETTEXTLENGTH) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func(); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnHelp(LPHELPINFO lpHelpInfo) #define MSG_WM_HELP(func) \ if (uMsg == WM_HELP) \ { \ this->SetMsgHandled(TRUE); \ func((LPHELPINFO)lParam); \ 
lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnHotKey(int nHotKeyID, UINT uModifiers, UINT uVirtKey) #define MSG_WM_HOTKEY(func) \ if (uMsg == WM_HOTKEY) \ { \ this->SetMsgHandled(TRUE); \ func((int)wParam, (UINT)LOWORD(lParam), (UINT)HIWORD(lParam)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnInputLangChange(DWORD dwCharSet, HKL hKbdLayout) #define MSG_WM_INPUTLANGCHANGE(func) \ if (uMsg == WM_INPUTLANGCHANGE) \ { \ this->SetMsgHandled(TRUE); \ func((DWORD)wParam, (HKL)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnInputLangChangeRequest(BOOL bSysCharSet, HKL hKbdLayout) #define MSG_WM_INPUTLANGCHANGEREQUEST(func) \ if (uMsg == WM_INPUTLANGCHANGEREQUEST) \ { \ this->SetMsgHandled(TRUE); \ func((BOOL)wParam, (HKL)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNextDlgCtl(BOOL bHandle, WPARAM wCtlFocus) #define MSG_WM_NEXTDLGCTL(func) \ if (uMsg == WM_NEXTDLGCTL) \ { \ this->SetMsgHandled(TRUE); \ func((BOOL)LOWORD(lParam), wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNextMenu(int nVirtKey, LPMDINEXTMENU lpMdiNextMenu) #define MSG_WM_NEXTMENU(func) \ if (uMsg == WM_NEXTMENU) \ { \ this->SetMsgHandled(TRUE); \ func((int)wParam, (LPMDINEXTMENU)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnNotifyFormat(CWindow wndFrom, int nCommand) #define MSG_WM_NOTIFYFORMAT(func) \ if (uMsg == WM_NOTIFYFORMAT) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HWND)wParam, (int)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnPowerBroadcast(DWORD dwPowerEvent, DWORD_PTR dwData) #define MSG_WM_POWERBROADCAST(func) \ if (uMsg == WM_POWERBROADCAST) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((DWORD)wParam, (DWORD_PTR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnPrint(CDCHandle dc, UINT uFlags) #define MSG_WM_PRINT(func) \ if (uMsg == 
WM_PRINT) \ { \ this->SetMsgHandled(TRUE); \ func((HDC)wParam, (UINT)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnPrintClient(CDCHandle dc, UINT uFlags) #define MSG_WM_PRINTCLIENT(func) \ if (uMsg == WM_PRINTCLIENT) \ { \ this->SetMsgHandled(TRUE); \ func((HDC)wParam, (UINT)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnRasDialEvent(RASCONNSTATE rasconnstate, DWORD dwError) #define MSG_WM_RASDIALEVENT(func) \ if (uMsg == WM_RASDIALEVENT) \ { \ this->SetMsgHandled(TRUE); \ func((RASCONNSTATE)wParam, (DWORD)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSetFont(CFontHandle font, BOOL bRedraw) #define MSG_WM_SETFONT(func) \ if (uMsg == WM_SETFONT) \ { \ this->SetMsgHandled(TRUE); \ func((HFONT)wParam, (BOOL)LOWORD(lParam)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnSetHotKey(int nVirtKey, UINT uFlags) #define MSG_WM_SETHOTKEY(func) \ if (uMsg == WM_SETHOTKEY) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((int)LOBYTE(LOWORD(wParam)), (UINT)HIBYTE(LOWORD(wParam))); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HICON OnSetIcon(UINT uType, HICON hIcon) #define MSG_WM_SETICON(func) \ if (uMsg == WM_SETICON) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)wParam, (HICON)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnSetRedraw(BOOL bRedraw) #define MSG_WM_SETREDRAW(func) \ if (uMsg == WM_SETREDRAW) \ { \ this->SetMsgHandled(TRUE); \ func((BOOL)wParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnSetText(LPCTSTR lpstrText) #define MSG_WM_SETTEXT(func) \ if (uMsg == WM_SETTEXT) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((LPCTSTR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnUserChanged() #define MSG_WM_USERCHANGED(func) \ if (uMsg == WM_USERCHANGED) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ 
return TRUE; \ } /////////////////////////////////////////////////////////////////////////////// // Newer Windows messages // void OnMouseHover(WPARAM wParam, CPoint ptPos) #define MSG_WM_MOUSEHOVER(func) \ if (uMsg == WM_MOUSEHOVER) \ { \ this->SetMsgHandled(TRUE); \ func(wParam, ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMouseLeave() #define MSG_WM_MOUSELEAVE(func) \ if (uMsg == WM_MOUSELEAVE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcMouseHover(UINT nHitTest, CPoint ptPos) #define MSG_WM_NCMOUSEHOVER(func) \ if (uMsg == WM_NCMOUSEHOVER) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, ::CPoint(MAKEPOINTS(lParam).x, MAKEPOINTS(lParam).y)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNcMouseLeave() #define MSG_WM_NCMOUSELEAVE(func) \ if (uMsg == WM_NCMOUSELEAVE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMenuRButtonUp(WPARAM wParam, CMenuHandle menu) #define MSG_WM_MENURBUTTONUP(func) \ if (uMsg == WM_MENURBUTTONUP) \ { \ this->SetMsgHandled(TRUE); \ func(wParam, (HMENU)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnMenuDrag(WPARAM wParam, CMenuHandle menu) #define MSG_WM_MENUDRAG(func) \ if (uMsg == WM_MENUDRAG) \ { \ this->SetMsgHandled(TRUE); \ lResult = func(wParam, (HMENU)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnMenuGetObject(PMENUGETOBJECTINFO info) #define MSG_WM_MENUGETOBJECT(func) \ if (uMsg == WM_MENUGETOBJECT) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((PMENUGETOBJECTINFO)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnUnInitMenuPopup(UINT nID, CMenuHandle menu) #define MSG_WM_UNINITMENUPOPUP(func) \ if (uMsg == WM_UNINITMENUPOPUP) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(lParam), (HMENU)wParam); \ 
lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnMenuCommand(WPARAM nIndex, CMenuHandle menu) #define MSG_WM_MENUCOMMAND(func) \ if (uMsg == WM_MENUCOMMAND) \ { \ this->SetMsgHandled(TRUE); \ func(wParam, (HMENU)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnAppCommand(CWindow wndFocus, short cmd, WORD uDevice, int dwKeys) #define MSG_WM_APPCOMMAND(func) \ if (uMsg == WM_APPCOMMAND) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HWND)wParam, GET_APPCOMMAND_LPARAM(lParam), GET_DEVICE_LPARAM(lParam), GET_KEYSTATE_LPARAM(lParam)); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNCXButtonDown(int fwButton, short nHittest, CPoint ptPos) #define MSG_WM_NCXBUTTONDOWN(func) \ if (uMsg == WM_NCXBUTTONDOWN) \ { \ this->SetMsgHandled(TRUE); \ func(GET_XBUTTON_WPARAM(wParam), GET_NCHITTEST_WPARAM(wParam), ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = (LRESULT)TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNCXButtonUp(int fwButton, short nHittest, CPoint ptPos) #define MSG_WM_NCXBUTTONUP(func) \ if (uMsg == WM_NCXBUTTONUP) \ { \ this->SetMsgHandled(TRUE); \ func(GET_XBUTTON_WPARAM(wParam), GET_NCHITTEST_WPARAM(wParam), ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = (LRESULT)TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnNCXButtonDblClk(int fwButton, short nHittest, CPoint ptPos) #define MSG_WM_NCXBUTTONDBLCLK(func) \ if (uMsg == WM_NCXBUTTONDBLCLK) \ { \ this->SetMsgHandled(TRUE); \ func(GET_XBUTTON_WPARAM(wParam), GET_NCHITTEST_WPARAM(wParam), ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = (LRESULT)TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnXButtonDown(int fwButton, int dwKeys, CPoint ptPos) #define MSG_WM_XBUTTONDOWN(func) \ if (uMsg == WM_XBUTTONDOWN) \ { \ this->SetMsgHandled(TRUE); \ func(GET_XBUTTON_WPARAM(wParam), GET_KEYSTATE_WPARAM(wParam), ::CPoint(GET_X_LPARAM(lParam), 
GET_Y_LPARAM(lParam))); \ lResult = (LRESULT)TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnXButtonUp(int fwButton, int dwKeys, CPoint ptPos) #define MSG_WM_XBUTTONUP(func) \ if (uMsg == WM_XBUTTONUP) \ { \ this->SetMsgHandled(TRUE); \ func(GET_XBUTTON_WPARAM(wParam), GET_KEYSTATE_WPARAM(wParam), ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = (LRESULT)TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnXButtonDblClk(int fwButton, int dwKeys, CPoint ptPos) #define MSG_WM_XBUTTONDBLCLK(func) \ if (uMsg == WM_XBUTTONDBLCLK) \ { \ this->SetMsgHandled(TRUE); \ func(GET_XBUTTON_WPARAM(wParam), GET_KEYSTATE_WPARAM(wParam), ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ lResult = (LRESULT)TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnChangeUIState(WORD nAction, WORD nState) #define MSG_WM_CHANGEUISTATE(func) \ if (uMsg == WM_CHANGEUISTATE) \ { \ this->SetMsgHandled(TRUE); \ func(LOWORD(wParam), HIWORD(wParam)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnUpdateUIState(WORD nAction, WORD nState) #define MSG_WM_UPDATEUISTATE(func) \ if (uMsg == WM_UPDATEUISTATE) \ { \ this->SetMsgHandled(TRUE); \ func(LOWORD(wParam), HIWORD(wParam)); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnQueryUIState() #define MSG_WM_QUERYUISTATE(func) \ if (uMsg == WM_QUERYUISTATE) \ { \ this->SetMsgHandled(TRUE); \ lResult = func(); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnInput(WPARAM RawInputCode, HRAWINPUT hRawInput) #define MSG_WM_INPUT(func) \ if (uMsg == WM_INPUT) \ { \ this->SetMsgHandled(TRUE); \ func(GET_RAWINPUT_CODE_WPARAM(wParam), (HRAWINPUT)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnUniChar(TCHAR nChar, UINT nRepCnt, UINT nFlags) #define MSG_WM_UNICHAR(func) \ if (uMsg == WM_UNICHAR) \ { \ this->SetMsgHandled(TRUE); \ func((TCHAR)wParam, (UINT)lParam & 0xFFFF, (UINT)((lParam & 0xFFFF0000) >> 16)); \ 
if(this->IsMsgHandled()) \ { \ lResult = (wParam == UNICODE_NOCHAR) ? TRUE : FALSE; \ return TRUE; \ } \ } // void OnWTSSessionChange(WPARAM nStatusCode, DWORD dwSessionID) #define MSG_WM_WTSSESSION_CHANGE(func) \ if (uMsg == WM_WTSSESSION_CHANGE) \ { \ this->SetMsgHandled(TRUE); \ func(wParam, (DWORD)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnThemeChanged() #define MSG_WM_THEMECHANGED(func) \ if (uMsg == WM_THEMECHANGED) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } #if (_WIN32_WINNT >= 0x0600) // BOOL OnMouseHWheel(UINT nFlags, short zDelta, CPoint pt) #define MSG_WM_MOUSEHWHEEL(func) \ if (uMsg == WM_MOUSEHWHEEL) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)LOWORD(wParam), (short)HIWORD(wParam), ::CPoint(GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam))); \ if(this->IsMsgHandled()) \ return TRUE; \ } #endif // (_WIN32_WINNT >= 0x0600) #if (WINVER >= 0x0601) // void OnGesture(ULONGLONG ullArguments, HGESTUREINFO hGestureInfo) #define MSG_WM_GESTURE(func) \ if (uMsg == WM_GESTURE) \ { \ this->SetMsgHandled(TRUE); \ func((ULONGLONG)wParam, (HGESTUREINFO)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnGestureNotify(PGESTURENOTIFYSTRUCT pGestureNotifyStruct) #define MSG_WM_GESTURENOTIFY(func) \ if (uMsg == WM_GESTURENOTIFY) \ { \ func((PGESTURENOTIFYSTRUCT)lParam); \ } // void OnDpiChanged(UINT nDpiX, UINT nDpiY, PRECT pRect) #define MSG_WM_DPICHANGED(func) \ if (uMsg == WM_DPICHANGED) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)LOWORD(wParam), (UINT)HIWORD(wParam), (PRECT)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } #endif // (WINVER >= 0x0601) #if (WINVER >= 0x0605) // void OnDpiChangedBeforeParent() #define MSG_WM_DPICHANGED_BEFOREPARENT(func) \ if (uMsg == WM_DPICHANGED_BEFOREPARENT) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void 
OnDpiChangedAfterParent() #define MSG_WM_DPICHANGED_AFTERPARENT(func) \ if (uMsg == WM_DPICHANGED_AFTERPARENT) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // BOOL OnGetDpiScaledSize(UINT uDpi, PSIZE pSize) #define MSG_WM_GETDPISCALEDSIZE(func) \ if (uMsg == WM_GETDPISCALEDSIZE) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)wParam, (PSIZE)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } #endif // (WINVER >= 0x0605) /////////////////////////////////////////////////////////////////////////////// // ATL defined messages // BOOL OnForwardMsg(LPMSG Msg, DWORD nUserData) #define MSG_WM_FORWARDMSG(func) \ if (uMsg == WM_FORWARDMSG) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((LPMSG)lParam, (DWORD)wParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } /////////////////////////////////////////////////////////////////////////////// // Dialog specific messages // LRESULT OnDMGetDefID() #define MSG_DM_GETDEFID(func) \ if (uMsg == DM_GETDEFID) \ { \ this->SetMsgHandled(TRUE); \ lResult = func(); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDMSetDefID(UINT DefID) #define MSG_DM_SETDEFID(func) \ if (uMsg == DM_SETDEFID) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnDMReposition() #define MSG_DM_REPOSITION(func) \ if (uMsg == DM_REPOSITION) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } /////////////////////////////////////////////////////////////////////////////// // Reflected messages // void OnReflectedCommand(UINT uNotifyCode, int nID, CWindow wndCtl) #define MSG_OCM_COMMAND(func) \ if (uMsg == OCM_COMMAND) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnReflectedNotify(int idCtrl, LPNMHDR pnmh) #define 
MSG_OCM_NOTIFY(func) \ if (uMsg == OCM_NOTIFY) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((int)wParam, (LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnReflectedParentNotify(UINT message, UINT nChildID, LPARAM lParam) #define MSG_OCM_PARENTNOTIFY(func) \ if (uMsg == OCM_PARENTNOTIFY) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)LOWORD(wParam), (UINT)HIWORD(wParam), lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnReflectedDrawItem(int nIDCtl, LPDRAWITEMSTRUCT lpDrawItemStruct) #define MSG_OCM_DRAWITEM(func) \ if (uMsg == OCM_DRAWITEM) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPDRAWITEMSTRUCT)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnReflectedMeasureItem(int nIDCtl, LPMEASUREITEMSTRUCT lpMeasureItemStruct) #define MSG_OCM_MEASUREITEM(func) \ if (uMsg == OCM_MEASUREITEM) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPMEASUREITEMSTRUCT)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnReflectedCompareItem(int nIDCtl, LPCOMPAREITEMSTRUCT lpCompareItemStruct) #define MSG_OCM_COMPAREITEM(func) \ if (uMsg == OCM_COMPAREITEM) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)wParam, (LPCOMPAREITEMSTRUCT)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnReflectedDeleteItem(int nIDCtl, LPDELETEITEMSTRUCT lpDeleteItemStruct) #define MSG_OCM_DELETEITEM(func) \ if (uMsg == OCM_DELETEITEM) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)wParam, (LPDELETEITEMSTRUCT)lParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } // int OnReflectedVKeyToItem(UINT nKey, UINT nIndex, CListBox listBox) #define MSG_OCM_VKEYTOITEM(func) \ if (uMsg == OCM_VKEYTOITEM) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)LOWORD(wParam), (UINT)HIWORD(wParam), (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } //int OnReflectedCharToItem(UINT nChar, UINT nIndex, CListBox 
listBox) #define MSG_OCM_CHARTOITEM(func) \ if (uMsg == OCM_CHARTOITEM) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((UINT)LOWORD(wParam), (UINT)HIWORD(wParam), (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnReflectedHScroll(UINT nSBCode, UINT nPos, CScrollBar pScrollBar) #define MSG_OCM_HSCROLL(func) \ if (uMsg == OCM_HSCROLL) \ { \ this->SetMsgHandled(TRUE); \ func((int)LOWORD(wParam), (short)HIWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnReflectedVScroll(UINT nSBCode, UINT nPos, CScrollBar pScrollBar) #define MSG_OCM_VSCROLL(func) \ if (uMsg == OCM_VSCROLL) \ { \ this->SetMsgHandled(TRUE); \ func((int)LOWORD(wParam), (short)HIWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnReflectedCtlColorEdit(CDCHandle dc, CEdit edit) #define MSG_OCM_CTLCOLOREDIT(func) \ if (uMsg == OCM_CTLCOLOREDIT) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnReflectedCtlColorListBox(CDCHandle dc, CListBox listBox) #define MSG_OCM_CTLCOLORLISTBOX(func) \ if (uMsg == OCM_CTLCOLORLISTBOX) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnReflectedCtlColorBtn(CDCHandle dc, CButton button) #define MSG_OCM_CTLCOLORBTN(func) \ if (uMsg == OCM_CTLCOLORBTN) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnReflectedCtlColorDlg(CDCHandle dc, CWindow wnd) #define MSG_OCM_CTLCOLORDLG(func) \ if (uMsg == OCM_CTLCOLORDLG) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnReflectedCtlColorScrollBar(CDCHandle dc, CScrollBar scrollBar) #define MSG_OCM_CTLCOLORSCROLLBAR(func) \ if (uMsg 
== OCM_CTLCOLORSCROLLBAR) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // HBRUSH OnReflectedCtlColorStatic(CDCHandle dc, CStatic wndStatic) #define MSG_OCM_CTLCOLORSTATIC(func) \ if (uMsg == OCM_CTLCOLORSTATIC) \ { \ this->SetMsgHandled(TRUE); \ lResult = (LRESULT)func((HDC)wParam, (HWND)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } /////////////////////////////////////////////////////////////////////////////// // Edit specific messages // void OnClear() #define MSG_WM_CLEAR(func) \ if (uMsg == WM_CLEAR) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnCopy() #define MSG_WM_COPY(func) \ if (uMsg == WM_COPY) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnCut() #define MSG_WM_CUT(func) \ if (uMsg == WM_CUT) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnPaste() #define MSG_WM_PASTE(func) \ if (uMsg == WM_PASTE) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnUndo() #define MSG_WM_UNDO(func) \ if (uMsg == WM_UNDO) \ { \ this->SetMsgHandled(TRUE); \ func(); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } /////////////////////////////////////////////////////////////////////////////// // Generic message handlers // LRESULT OnMessageHandlerEX(UINT uMsg, WPARAM wParam, LPARAM lParam) #define MESSAGE_HANDLER_EX(msg, func) \ if(uMsg == msg) \ { \ this->SetMsgHandled(TRUE); \ lResult = func(uMsg, wParam, lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnMessageRangeHandlerEX(UINT uMsg, WPARAM wParam, LPARAM lParam) #define MESSAGE_RANGE_HANDLER_EX(msgFirst, msgLast, func) \ if((uMsg >= msgFirst) && (uMsg <= msgLast)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func(uMsg, wParam, lParam); \ 
if(this->IsMsgHandled()) \ return TRUE; \ } /////////////////////////////////////////////////////////////////////////////// // Commands and notifications // void OnCommandHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define COMMAND_HANDLER_EX(id, code, func) \ if ((uMsg == WM_COMMAND) && (code == HIWORD(wParam)) && (id == LOWORD(wParam))) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnCommandIDHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define COMMAND_ID_HANDLER_EX(id, func) \ if ((uMsg == WM_COMMAND) && (id == LOWORD(wParam))) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnCommandCodeHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define COMMAND_CODE_HANDLER_EX(code, func) \ if ((uMsg == WM_COMMAND) && (code == HIWORD(wParam))) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnNotifyHandlerEX(LPNMHDR pnmh) #define NOTIFY_HANDLER_EX(id, cd, func) \ if ((uMsg == WM_NOTIFY) && (cd == ((LPNMHDR)lParam)->code) && (id == ((LPNMHDR)lParam)->idFrom)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnNotifyIDHandlerEX(LPNMHDR pnmh) #define NOTIFY_ID_HANDLER_EX(id, func) \ if ((uMsg == WM_NOTIFY) && (id == ((LPNMHDR)lParam)->idFrom)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnNotifyCodeHandlerEX(LPNMHDR pnmh) #define NOTIFY_CODE_HANDLER_EX(cd, func) \ if ((uMsg == WM_NOTIFY) && (cd == ((LPNMHDR)lParam)->code)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void 
OnCommandRangeHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define COMMAND_RANGE_HANDLER_EX(idFirst, idLast, func) \ if((uMsg == WM_COMMAND) && (LOWORD(wParam) >= idFirst) && (LOWORD(wParam) <= idLast)) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnCommandRangeCodeHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define COMMAND_RANGE_CODE_HANDLER_EX(idFirst, idLast, code, func) \ if((uMsg == WM_COMMAND) && (code == HIWORD(wParam)) && (LOWORD(wParam) >= idFirst) && (LOWORD(wParam) <= idLast)) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnNotifyRangeHandlerEX(LPNMHDR pnmh) #define NOTIFY_RANGE_HANDLER_EX(idFirst, idLast, func) \ if((uMsg == WM_NOTIFY) && (((LPNMHDR)lParam)->idFrom >= idFirst) && (((LPNMHDR)lParam)->idFrom <= idLast)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnNotifyRangeCodeHandlerEX(LPNMHDR pnmh) #define NOTIFY_RANGE_CODE_HANDLER_EX(idFirst, idLast, cd, func) \ if((uMsg == WM_NOTIFY) && (cd == ((LPNMHDR)lParam)->code) && (((LPNMHDR)lParam)->idFrom >= idFirst) && (((LPNMHDR)lParam)->idFrom <= idLast)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnReflectedCommandHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define REFLECTED_COMMAND_HANDLER_EX(id, code, func) \ if ((uMsg == OCM_COMMAND) && (code == HIWORD(wParam)) && (id == LOWORD(wParam))) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnReflectedCommandIDHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define REFLECTED_COMMAND_ID_HANDLER_EX(id, 
func) \ if ((uMsg == OCM_COMMAND) && (id == LOWORD(wParam))) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnReflectedCommandCodeHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define REFLECTED_COMMAND_CODE_HANDLER_EX(code, func) \ if ((uMsg == OCM_COMMAND) && (code == HIWORD(wParam))) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnReflectedNotifyHandlerEX(LPNMHDR pnmh) #define REFLECTED_NOTIFY_HANDLER_EX(id, cd, func) \ if ((uMsg == OCM_NOTIFY) && (cd == ((LPNMHDR)lParam)->code) && (id == ((LPNMHDR)lParam)->idFrom)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnReflectedNotifyIDHandlerEX(LPNMHDR pnmh) #define REFLECTED_NOTIFY_ID_HANDLER_EX(id, func) \ if ((uMsg == OCM_NOTIFY) && (id == ((LPNMHDR)lParam)->idFrom)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnReflectedNotifyCodeHandlerEX(LPNMHDR pnmh) #define REFLECTED_NOTIFY_CODE_HANDLER_EX(cd, func) \ if ((uMsg == OCM_NOTIFY) && (cd == ((LPNMHDR)lParam)->code)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnReflectedCommandRangeHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define REFLECTED_COMMAND_RANGE_HANDLER_EX(idFirst, idLast, func) \ if((uMsg == OCM_COMMAND) && (LOWORD(wParam) >= idFirst) && (LOWORD(wParam) <= idLast)) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnReflectedCommandRangeCodeHandlerEX(UINT uNotifyCode, int nID, CWindow wndCtl) #define 
REFLECTED_COMMAND_RANGE_CODE_HANDLER_EX(idFirst, idLast, code, func) \ if((uMsg == OCM_COMMAND) && (code == HIWORD(wParam)) && (LOWORD(wParam) >= idFirst) && (LOWORD(wParam) <= idLast)) \ { \ this->SetMsgHandled(TRUE); \ func((UINT)HIWORD(wParam), (int)LOWORD(wParam), (HWND)lParam); \ lResult = 0; \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnReflectedNotifyRangeHandlerEX(LPNMHDR pnmh) #define REFLECTED_NOTIFY_RANGE_HANDLER_EX(idFirst, idLast, func) \ if((uMsg == OCM_NOTIFY) && (((LPNMHDR)lParam)->idFrom >= idFirst) && (((LPNMHDR)lParam)->idFrom <= idLast)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // LRESULT OnReflectedNotifyRangeCodeHandlerEX(LPNMHDR pnmh) #define REFLECTED_NOTIFY_RANGE_CODE_HANDLER_EX(idFirst, idLast, cd, func) \ if((uMsg == OCM_NOTIFY) && (cd == ((LPNMHDR)lParam)->code) && (((LPNMHDR)lParam)->idFrom >= idFirst) && (((LPNMHDR)lParam)->idFrom <= idLast)) \ { \ this->SetMsgHandled(TRUE); \ lResult = func((LPNMHDR)lParam); \ if(this->IsMsgHandled()) \ return TRUE; \ } // void OnAppCommandHandler(UINT uDevice, DWORD dwKeys, CWindow wndFocus) #define APPCOMMAND_HANDLER_EX(cmd, func) \ if((uMsg == WM_APPCOMMAND) && (cmd == GET_APPCOMMAND_LPARAM(lParam))) \ { \ this->SetMsgHandled(TRUE); \ func(GET_DEVICE_LPARAM(lParam), GET_KEYSTATE_LPARAM(lParam), (HWND)wParam); \ lResult = TRUE; \ if(this->IsMsgHandled()) \ return TRUE; \ } #endif // __ATLCRACK_H__
64,515
C++
.h
2,234
26.414503
145
0.670235
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
true
false
false
false
false
false
false
18,786
atlres.h
Const-me_Whisper/Examples/WhisperDesktop/Utils/WTL/atlres.h
// Windows Template Library - WTL version 10.0 // Copyright (C) Microsoft Corporation, WTL Team. All rights reserved. // // This file is a part of the Windows Template Library. // The use and distribution terms for this software are covered by the // Microsoft Public License (http://opensource.org/licenses/MS-PL) // which can be found in the file MS-PL.txt at the root folder. #ifndef __ATLRES_H__ #define __ATLRES_H__ #pragma once #ifdef RC_INVOKED #ifndef _INC_WINDOWS #define _INC_WINDOWS #define VS_VERSION_INFO 1 #ifdef APSTUDIO_INVOKED #define APSTUDIO_HIDDEN_SYMBOLS // Ignore following symbols #endif // APSTUDIO_INVOKED #ifndef WINVER #define WINVER 0x0500 #endif // !WINVER #include <winresrc.h> // operation messages sent to DLGINIT #define LB_ADDSTRING (WM_USER+1) #define CB_ADDSTRING (WM_USER+3) #ifdef APSTUDIO_INVOKED #undef APSTUDIO_HIDDEN_SYMBOLS #endif // APSTUDIO_INVOKED #ifdef IDC_STATIC #undef IDC_STATIC #endif // IDC_STATIC #define IDC_STATIC (-1) #endif // !_INC_WINDOWS #endif // RC_INVOKED #ifdef APSTUDIO_INVOKED #define APSTUDIO_HIDDEN_SYMBOLS #endif // APSTUDIO_INVOKED /////////////////////////////////////////////////////////////////////////////// // ATL resource types #ifndef RC_INVOKED #define RT_DLGINIT MAKEINTRESOURCE(240) #define RT_TOOLBAR MAKEINTRESOURCE(241) #endif // RC_INVOKED /////////////////////////////////////////////////////////////////////////////// #ifdef APSTUDIO_INVOKED #undef APSTUDIO_HIDDEN_SYMBOLS #endif // APSTUDIO_INVOKED /////////////////////////////////////////////////////////////////////////////// // Standard window components #define ID_SEPARATOR 0 // special separator value #define ID_DEFAULT_PANE 0 // default status bar pane #ifndef RC_INVOKED // code only // standard control bars (IDW = window ID) #define ATL_IDW_TOOLBAR 0xE800 // main Toolbar for window #define ATL_IDW_STATUS_BAR 0xE801 // Status bar window #define ATL_IDW_COMMAND_BAR 0xE802 // Command bar window // parts of a frame window #define ATL_IDW_CLIENT 
0xE900 #define ATL_IDW_PANE_FIRST 0xE900 // first pane (256 max) #define ATL_IDW_PANE_LAST 0xE9FF #define ATL_IDW_HSCROLL_FIRST 0xEA00 // first Horz scrollbar (16 max) #define ATL_IDW_VSCROLL_FIRST 0xEA10 // first Vert scrollbar (16 max) #define ATL_IDW_SIZE_BOX 0xEA20 // size box for splitters #define ATL_IDW_PANE_SAVE 0xEA21 // to shift ATL_IDW_PANE_FIRST // bands for a rebar #define ATL_IDW_BAND_FIRST 0xEB00 #define ATL_IDW_BAND_LAST 0xEBFF #endif // !RC_INVOKED /////////////////////////////////////////////////////////////////////////////// // Standard Commands // File commands #define ID_FILE_NEW 0xE100 #define ID_FILE_OPEN 0xE101 #define ID_FILE_CLOSE 0xE102 #define ID_FILE_SAVE 0xE103 #define ID_FILE_SAVE_AS 0xE104 #define ID_FILE_PAGE_SETUP 0xE105 #define ID_FILE_PRINT_SETUP 0xE106 #define ID_FILE_PRINT 0xE107 #define ID_FILE_PRINT_DIRECT 0xE108 #define ID_FILE_PRINT_PREVIEW 0xE109 #define ID_FILE_UPDATE 0xE10A #define ID_FILE_SAVE_COPY_AS 0xE10B #define ID_FILE_SEND_MAIL 0xE10C #define ID_FILE_MRU_FIRST 0xE110 #define ID_FILE_MRU_FILE1 0xE110 // range - 16 max #define ID_FILE_MRU_FILE2 0xE111 #define ID_FILE_MRU_FILE3 0xE112 #define ID_FILE_MRU_FILE4 0xE113 #define ID_FILE_MRU_FILE5 0xE114 #define ID_FILE_MRU_FILE6 0xE115 #define ID_FILE_MRU_FILE7 0xE116 #define ID_FILE_MRU_FILE8 0xE117 #define ID_FILE_MRU_FILE9 0xE118 #define ID_FILE_MRU_FILE10 0xE119 #define ID_FILE_MRU_FILE11 0xE11A #define ID_FILE_MRU_FILE12 0xE11B #define ID_FILE_MRU_FILE13 0xE11C #define ID_FILE_MRU_FILE14 0xE11D #define ID_FILE_MRU_FILE15 0xE11E #define ID_FILE_MRU_FILE16 0xE11F #define ID_FILE_MRU_LAST 0xE11F // Edit commands #define ID_EDIT_CLEAR 0xE120 #define ID_EDIT_CLEAR_ALL 0xE121 #define ID_EDIT_COPY 0xE122 #define ID_EDIT_CUT 0xE123 #define ID_EDIT_FIND 0xE124 #define ID_EDIT_PASTE 0xE125 #define ID_EDIT_PASTE_LINK 0xE126 #define ID_EDIT_PASTE_SPECIAL 0xE127 #define ID_EDIT_REPEAT 0xE128 #define ID_EDIT_REPLACE 0xE129 #define ID_EDIT_SELECT_ALL 0xE12A #define ID_EDIT_UNDO 
0xE12B #define ID_EDIT_REDO 0xE12C #define ID_EDIT_DELETE ID_EDIT_CLEAR #define ID_EDIT_FIND_NEXT ID_EDIT_REPEAT #define ID_EDIT_FIND_PREVIOUS 0xE12D // Window commands #define ID_WINDOW_NEW 0xE130 #define ID_WINDOW_ARRANGE 0xE131 #define ID_WINDOW_CASCADE 0xE132 #define ID_WINDOW_TILE_HORZ 0xE133 #define ID_WINDOW_TILE_VERT 0xE134 #define ID_WINDOW_SPLIT 0xE135 #ifndef RC_INVOKED // code only #define ATL_IDM_WINDOW_FIRST 0xE130 #define ATL_IDM_WINDOW_LAST 0xE13F #define ATL_IDM_FIRST_MDICHILD 0xFF00 // window list starts here #define ATL_IDM_LAST_MDICHILD 0xFFFD #endif // !RC_INVOKED // TabView #define ID_WINDOW_TABFIRST 0xFF00 // = ATL_IDM_FIRST_MDICHILD #define ID_WINDOW_TABLAST 0xFFFD #define ID_WINDOW_SHOWTABLIST 0xFFFE // Help and App commands #define ID_APP_ABOUT 0xE140 #define ID_APP_EXIT 0xE141 #define ID_HELP_INDEX 0xE142 #define ID_HELP_FINDER 0xE143 #define ID_HELP_USING 0xE144 #define ID_CONTEXT_HELP 0xE145 // shift-F1 // special commands for processing help #define ID_HELP 0xE146 // first attempt for F1 #define ID_DEFAULT_HELP 0xE147 // last attempt // Misc #define ID_NEXT_PANE 0xE150 #define ID_PREV_PANE 0xE151 #define ID_PANE_CLOSE 0xE152 #define ID_PANE_NEXT ID_NEXT_PANE #define ID_PANE_PREVIOUS ID_PREV_PANE // Format #define ID_FORMAT_FONT 0xE160 // Scroll #define ID_SCROLL_UP 0xE170 #define ID_SCROLL_DOWN 0xE171 #define ID_SCROLL_PAGE_UP 0xE172 #define ID_SCROLL_PAGE_DOWN 0xE173 #define ID_SCROLL_TOP 0xE174 #define ID_SCROLL_BOTTOM 0xE175 #define ID_SCROLL_LEFT 0xE176 #define ID_SCROLL_RIGHT 0xE177 #define ID_SCROLL_PAGE_LEFT 0xE178 #define ID_SCROLL_PAGE_RIGHT 0xE179 #define ID_SCROLL_ALL_LEFT 0xE17A #define ID_SCROLL_ALL_RIGHT 0xE17B // OLE commands #define ID_OLE_INSERT_NEW 0xE200 #define ID_OLE_EDIT_LINKS 0xE201 #define ID_OLE_EDIT_CONVERT 0xE202 #define ID_OLE_EDIT_CHANGE_ICON 0xE203 #define ID_OLE_EDIT_PROPERTIES 0xE204 #define ID_OLE_VERB_FIRST 0xE210 // range - 16 max #ifndef RC_INVOKED // code only #define ID_OLE_VERB_LAST 0xE21F #endif 
// !RC_INVOKED // View commands (same number used as IDW used for toolbar and status bar) #define ID_VIEW_TOOLBAR 0xE800 #define ID_VIEW_STATUS_BAR 0xE801 #define ID_VIEW_REFRESH 0xE803 #define ID_VIEW_RIBBON 0xE804 /////////////////////////////////////////////////////////////////////////////// // Standard control IDs #ifdef IDC_STATIC #undef IDC_STATIC #endif // IDC_STATIC #define IDC_STATIC (-1) // all static controls /////////////////////////////////////////////////////////////////////////////// // Standard string error/warnings // idle status bar message #define ATL_IDS_IDLEMESSAGE 0xE001 #ifndef RC_INVOKED // code only #define ATL_IDS_SCFIRST 0xEF00 #endif // !RC_INVOKED #define ATL_IDS_SCSIZE 0xEF00 #define ATL_IDS_SCMOVE 0xEF01 #define ATL_IDS_SCMINIMIZE 0xEF02 #define ATL_IDS_SCMAXIMIZE 0xEF03 #define ATL_IDS_SCNEXTWINDOW 0xEF04 #define ATL_IDS_SCPREVWINDOW 0xEF05 #define ATL_IDS_SCCLOSE 0xEF06 #define ATL_IDS_SCRESTORE 0xEF12 #define ATL_IDS_SCTASKLIST 0xEF13 #define ATL_IDS_MDICHILD 0xEF1F #define ATL_IDS_MRU_FILE 0xEFDA /////////////////////////////////////////////////////////////////////////////// // Misc. control IDs // Property Sheet control id's (determined with Spy++) #define ID_APPLY_NOW 0x3021 #define ID_WIZBACK 0x3023 #define ID_WIZNEXT 0x3024 #define ID_WIZFINISH 0x3025 #define ATL_IDC_TAB_CONTROL 0x3020 #endif // __ATLRES_H__
9,477
C++
.h
214
42.635514
80
0.569972
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
true
false
false
false
false
false
false
18,788
dr_wav.h
Const-me_Whisper/Examples/OldMain/dr_wav.h
/* WAV audio loader and writer. Choice of public domain or MIT-0. See license statements at the end of this file. dr_wav - v0.12.16 - 2020-12-02 David Reid - mackron@gmail.com GitHub: https://github.com/mackron/dr_libs */ /* RELEASE NOTES - VERSION 0.12 ============================ Version 0.12 includes breaking changes to custom chunk handling. Changes to Chunk Callback ------------------------- dr_wav supports the ability to fire a callback when a chunk is encounted (except for WAVE and FMT chunks). The callback has been updated to include both the container (RIFF or Wave64) and the FMT chunk which contains information about the format of the data in the wave file. Previously, there was no direct way to determine the container, and therefore no way to discriminate against the different IDs in the chunk header (RIFF and Wave64 containers encode chunk ID's differently). The `container` parameter can be used to know which ID to use. Sometimes it can be useful to know the data format at the time the chunk callback is fired. A pointer to a `drwav_fmt` object is now passed into the chunk callback which will give you information about the data format. To determine the sample format, use `drwav_fmt_get_format()`. This will return one of the `DR_WAVE_FORMAT_*` tokens. */ /* Introduction ============ This is a single file library. To use it, do something like the following in one .c file. ```c #define DR_WAV_IMPLEMENTATION #include "dr_wav.h" ``` You can then #include this file in other parts of the program as you would with any other header file. Do something like the following to read audio data: ```c drwav wav; if (!drwav_init_file(&wav, "my_song.wav", NULL)) { // Error opening WAV file. } drwav_int32* pDecodedInterleavedPCMFrames = malloc(wav.totalPCMFrameCount * wav.channels * sizeof(drwav_int32)); size_t numberOfSamplesActuallyDecoded = drwav_read_pcm_frames_s32(&wav, wav.totalPCMFrameCount, pDecodedInterleavedPCMFrames); ... 
drwav_uninit(&wav); ``` If you just want to quickly open and read the audio data in a single operation you can do something like this: ```c unsigned int channels; unsigned int sampleRate; drwav_uint64 totalPCMFrameCount; float* pSampleData = drwav_open_file_and_read_pcm_frames_f32("my_song.wav", &channels, &sampleRate, &totalPCMFrameCount, NULL); if (pSampleData == NULL) { // Error opening and reading WAV file. } ... drwav_free(pSampleData); ``` The examples above use versions of the API that convert the audio data to a consistent format (32-bit signed PCM, in this case), but you can still output the audio data in its internal format (see notes below for supported formats): ```c size_t framesRead = drwav_read_pcm_frames(&wav, wav.totalPCMFrameCount, pDecodedInterleavedPCMFrames); ``` You can also read the raw bytes of audio data, which could be useful if dr_wav does not have native support for a particular data format: ```c size_t bytesRead = drwav_read_raw(&wav, bytesToRead, pRawDataBuffer); ``` dr_wav can also be used to output WAV files. This does not currently support compressed formats. To use this, look at `drwav_init_write()`, `drwav_init_file_write()`, etc. Use `drwav_write_pcm_frames()` to write samples, or `drwav_write_raw()` to write raw data in the "data" chunk. ```c drwav_data_format format; format.container = drwav_container_riff; // <-- drwav_container_riff = normal WAV files, drwav_container_w64 = Sony Wave64. format.format = DR_WAVE_FORMAT_PCM; // <-- Any of the DR_WAVE_FORMAT_* codes. format.channels = 2; format.sampleRate = 44100; format.bitsPerSample = 16; drwav_init_file_write(&wav, "data/recording.wav", &format, NULL); ... drwav_uint64 framesWritten = drwav_write_pcm_frames(pWav, frameCount, pSamples); ``` dr_wav has seamless support the Sony Wave64 format. The decoder will automatically detect it and it should Just Work without any manual intervention. Build Options ============= #define these options before including this file. 
#define DR_WAV_NO_CONVERSION_API Disables conversion APIs such as `drwav_read_pcm_frames_f32()` and `drwav_s16_to_f32()`. #define DR_WAV_NO_STDIO Disables APIs that initialize a decoder from a file such as `drwav_init_file()`, `drwav_init_file_write()`, etc. Notes ===== - Samples are always interleaved. - The default read function does not do any data conversion. Use `drwav_read_pcm_frames_f32()`, `drwav_read_pcm_frames_s32()` and `drwav_read_pcm_frames_s16()` to read and convert audio data to 32-bit floating point, signed 32-bit integer and signed 16-bit integer samples respectively. Tested and supported internal formats include the following: - Unsigned 8-bit PCM - Signed 12-bit PCM - Signed 16-bit PCM - Signed 24-bit PCM - Signed 32-bit PCM - IEEE 32-bit floating point - IEEE 64-bit floating point - A-law and u-law - Microsoft ADPCM - IMA ADPCM (DVI, format code 0x11) - dr_wav will try to read the WAV file as best it can, even if it's not strictly conformant to the WAV format. */ #ifndef dr_wav_h #define dr_wav_h #ifdef __cplusplus extern "C" { #endif #define DRWAV_STRINGIFY(x) #x #define DRWAV_XSTRINGIFY(x) DRWAV_STRINGIFY(x) #define DRWAV_VERSION_MAJOR 0 #define DRWAV_VERSION_MINOR 12 #define DRWAV_VERSION_REVISION 16 #define DRWAV_VERSION_STRING DRWAV_XSTRINGIFY(DRWAV_VERSION_MAJOR) "." DRWAV_XSTRINGIFY(DRWAV_VERSION_MINOR) "." DRWAV_XSTRINGIFY(DRWAV_VERSION_REVISION) #include <stddef.h> /* For size_t. */ /* Sized types. 
*/ typedef signed char drwav_int8; typedef unsigned char drwav_uint8; typedef signed short drwav_int16; typedef unsigned short drwav_uint16; typedef signed int drwav_int32; typedef unsigned int drwav_uint32; #if defined(_MSC_VER) typedef signed __int64 drwav_int64; typedef unsigned __int64 drwav_uint64; #else #if defined(__clang__) || (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wlong-long" #if defined(__clang__) #pragma GCC diagnostic ignored "-Wc++11-long-long" #endif #endif typedef signed long long drwav_int64; typedef unsigned long long drwav_uint64; #if defined(__clang__) || (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))) #pragma GCC diagnostic pop #endif #endif #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) typedef drwav_uint64 drwav_uintptr; #else typedef drwav_uint32 drwav_uintptr; #endif typedef drwav_uint8 drwav_bool8; typedef drwav_uint32 drwav_bool32; #define DRWAV_TRUE 1 #define DRWAV_FALSE 0 #if !defined(DRWAV_API) #if defined(DRWAV_DLL) #if defined(_WIN32) #define DRWAV_DLL_IMPORT __declspec(dllimport) #define DRWAV_DLL_EXPORT __declspec(dllexport) #define DRWAV_DLL_PRIVATE static #else #if defined(__GNUC__) && __GNUC__ >= 4 #define DRWAV_DLL_IMPORT __attribute__((visibility("default"))) #define DRWAV_DLL_EXPORT __attribute__((visibility("default"))) #define DRWAV_DLL_PRIVATE __attribute__((visibility("hidden"))) #else #define DRWAV_DLL_IMPORT #define DRWAV_DLL_EXPORT #define DRWAV_DLL_PRIVATE static #endif #endif #if defined(DR_WAV_IMPLEMENTATION) || defined(DRWAV_IMPLEMENTATION) #define DRWAV_API DRWAV_DLL_EXPORT #else #define DRWAV_API DRWAV_DLL_IMPORT #endif #define DRWAV_PRIVATE DRWAV_DLL_PRIVATE #else #define DRWAV_API extern #define DRWAV_PRIVATE static #endif #endif 
typedef drwav_int32 drwav_result; #define DRWAV_SUCCESS 0 #define DRWAV_ERROR -1 /* A generic error. */ #define DRWAV_INVALID_ARGS -2 #define DRWAV_INVALID_OPERATION -3 #define DRWAV_OUT_OF_MEMORY -4 #define DRWAV_OUT_OF_RANGE -5 #define DRWAV_ACCESS_DENIED -6 #define DRWAV_DOES_NOT_EXIST -7 #define DRWAV_ALREADY_EXISTS -8 #define DRWAV_TOO_MANY_OPEN_FILES -9 #define DRWAV_INVALID_FILE -10 #define DRWAV_TOO_BIG -11 #define DRWAV_PATH_TOO_LONG -12 #define DRWAV_NAME_TOO_LONG -13 #define DRWAV_NOT_DIRECTORY -14 #define DRWAV_IS_DIRECTORY -15 #define DRWAV_DIRECTORY_NOT_EMPTY -16 #define DRWAV_END_OF_FILE -17 #define DRWAV_NO_SPACE -18 #define DRWAV_BUSY -19 #define DRWAV_IO_ERROR -20 #define DRWAV_INTERRUPT -21 #define DRWAV_UNAVAILABLE -22 #define DRWAV_ALREADY_IN_USE -23 #define DRWAV_BAD_ADDRESS -24 #define DRWAV_BAD_SEEK -25 #define DRWAV_BAD_PIPE -26 #define DRWAV_DEADLOCK -27 #define DRWAV_TOO_MANY_LINKS -28 #define DRWAV_NOT_IMPLEMENTED -29 #define DRWAV_NO_MESSAGE -30 #define DRWAV_BAD_MESSAGE -31 #define DRWAV_NO_DATA_AVAILABLE -32 #define DRWAV_INVALID_DATA -33 #define DRWAV_TIMEOUT -34 #define DRWAV_NO_NETWORK -35 #define DRWAV_NOT_UNIQUE -36 #define DRWAV_NOT_SOCKET -37 #define DRWAV_NO_ADDRESS -38 #define DRWAV_BAD_PROTOCOL -39 #define DRWAV_PROTOCOL_UNAVAILABLE -40 #define DRWAV_PROTOCOL_NOT_SUPPORTED -41 #define DRWAV_PROTOCOL_FAMILY_NOT_SUPPORTED -42 #define DRWAV_ADDRESS_FAMILY_NOT_SUPPORTED -43 #define DRWAV_SOCKET_NOT_SUPPORTED -44 #define DRWAV_CONNECTION_RESET -45 #define DRWAV_ALREADY_CONNECTED -46 #define DRWAV_NOT_CONNECTED -47 #define DRWAV_CONNECTION_REFUSED -48 #define DRWAV_NO_HOST -49 #define DRWAV_IN_PROGRESS -50 #define DRWAV_CANCELLED -51 #define DRWAV_MEMORY_ALREADY_MAPPED -52 #define DRWAV_AT_END -53 /* Common data formats. 
*/ #define DR_WAVE_FORMAT_PCM 0x1 #define DR_WAVE_FORMAT_ADPCM 0x2 #define DR_WAVE_FORMAT_IEEE_FLOAT 0x3 #define DR_WAVE_FORMAT_ALAW 0x6 #define DR_WAVE_FORMAT_MULAW 0x7 #define DR_WAVE_FORMAT_DVI_ADPCM 0x11 #define DR_WAVE_FORMAT_EXTENSIBLE 0xFFFE /* Constants. */ #ifndef DRWAV_MAX_SMPL_LOOPS #define DRWAV_MAX_SMPL_LOOPS 1 #endif /* Flags to pass into drwav_init_ex(), etc. */ #define DRWAV_SEQUENTIAL 0x00000001 DRWAV_API void drwav_version(drwav_uint32* pMajor, drwav_uint32* pMinor, drwav_uint32* pRevision); DRWAV_API const char* drwav_version_string(void); typedef enum { drwav_seek_origin_start, drwav_seek_origin_current } drwav_seek_origin; typedef enum { drwav_container_riff, drwav_container_w64, drwav_container_rf64 } drwav_container; typedef struct { union { drwav_uint8 fourcc[4]; drwav_uint8 guid[16]; } id; /* The size in bytes of the chunk. */ drwav_uint64 sizeInBytes; /* RIFF = 2 byte alignment. W64 = 8 byte alignment. */ unsigned int paddingSize; } drwav_chunk_header; typedef struct { /* The format tag exactly as specified in the wave file's "fmt" chunk. This can be used by applications that require support for data formats not natively supported by dr_wav. */ drwav_uint16 formatTag; /* The number of channels making up the audio data. When this is set to 1 it is mono, 2 is stereo, etc. */ drwav_uint16 channels; /* The sample rate. Usually set to something like 44100. */ drwav_uint32 sampleRate; /* Average bytes per second. You probably don't need this, but it's left here for informational purposes. */ drwav_uint32 avgBytesPerSec; /* Block align. This is equal to the number of channels * bytes per sample. */ drwav_uint16 blockAlign; /* Bits per sample. */ drwav_uint16 bitsPerSample; /* The size of the extended data. Only used internally for validation, but left here for informational purposes. */ drwav_uint16 extendedSize; /* The number of valid bits per sample. 
When <formatTag> is equal to WAVE_FORMAT_EXTENSIBLE, <bitsPerSample> is always rounded up to the nearest multiple of 8. This variable contains information about exactly how many bits are valid per sample. Mainly used for informational purposes. */ drwav_uint16 validBitsPerSample; /* The channel mask. Not used at the moment. */ drwav_uint32 channelMask; /* The sub-format, exactly as specified by the wave file. */ drwav_uint8 subFormat[16]; } drwav_fmt; DRWAV_API drwav_uint16 drwav_fmt_get_format(const drwav_fmt* pFMT); /* Callback for when data is read. Return value is the number of bytes actually read. pUserData [in] The user data that was passed to drwav_init() and family. pBufferOut [out] The output buffer. bytesToRead [in] The number of bytes to read. Returns the number of bytes actually read. A return value of less than bytesToRead indicates the end of the stream. Do _not_ return from this callback until either the entire bytesToRead is filled or you have reached the end of the stream. */ typedef size_t (* drwav_read_proc)(void* pUserData, void* pBufferOut, size_t bytesToRead); /* Callback for when data is written. Returns value is the number of bytes actually written. pUserData [in] The user data that was passed to drwav_init_write() and family. pData [out] A pointer to the data to write. bytesToWrite [in] The number of bytes to write. Returns the number of bytes actually written. If the return value differs from bytesToWrite, it indicates an error. */ typedef size_t (* drwav_write_proc)(void* pUserData, const void* pData, size_t bytesToWrite); /* Callback for when data needs to be seeked. pUserData [in] The user data that was passed to drwav_init() and family. offset [in] The number of bytes to move, relative to the origin. Will never be negative. origin [in] The origin of the seek - the current position or the start of the stream. Returns whether or not the seek was successful. 
Whether or not it is relative to the beginning or current position is determined by the "origin" parameter which will be either drwav_seek_origin_start or drwav_seek_origin_current. */ typedef drwav_bool32 (* drwav_seek_proc)(void* pUserData, int offset, drwav_seek_origin origin); /* Callback for when drwav_init_ex() finds a chunk. pChunkUserData [in] The user data that was passed to the pChunkUserData parameter of drwav_init_ex() and family. onRead [in] A pointer to the function to call when reading. onSeek [in] A pointer to the function to call when seeking. pReadSeekUserData [in] The user data that was passed to the pReadSeekUserData parameter of drwav_init_ex() and family. pChunkHeader [in] A pointer to an object containing basic header information about the chunk. Use this to identify the chunk. container [in] Whether or not the WAV file is a RIFF or Wave64 container. If you're unsure of the difference, assume RIFF. pFMT [in] A pointer to the object containing the contents of the "fmt" chunk. Returns the number of bytes read + seeked. To read data from the chunk, call onRead(), passing in pReadSeekUserData as the first parameter. Do the same for seeking with onSeek(). The return value must be the total number of bytes you have read _plus_ seeked. Use the `container` argument to discriminate the fields in `pChunkHeader->id`. If the container is `drwav_container_riff` or `drwav_container_rf64` you should use `id.fourcc`, otherwise you should use `id.guid`. The `pFMT` parameter can be used to determine the data format of the wave file. Use `drwav_fmt_get_format()` to get the sample format, which will be one of the `DR_WAVE_FORMAT_*` identifiers. The read pointer will be sitting on the first byte after the chunk's header. You must not attempt to read beyond the boundary of the chunk. 
*/ typedef drwav_uint64 (* drwav_chunk_proc)(void* pChunkUserData, drwav_read_proc onRead, drwav_seek_proc onSeek, void* pReadSeekUserData, const drwav_chunk_header* pChunkHeader, drwav_container container, const drwav_fmt* pFMT); typedef struct { void* pUserData; void* (* onMalloc)(size_t sz, void* pUserData); void* (* onRealloc)(void* p, size_t sz, void* pUserData); void (* onFree)(void* p, void* pUserData); } drwav_allocation_callbacks; /* Structure for internal use. Only used for loaders opened with drwav_init_memory(). */ typedef struct { const drwav_uint8* data; size_t dataSize; size_t currentReadPos; } drwav__memory_stream; /* Structure for internal use. Only used for writers opened with drwav_init_memory_write(). */ typedef struct { void** ppData; size_t* pDataSize; size_t dataSize; size_t dataCapacity; size_t currentWritePos; } drwav__memory_stream_write; typedef struct { drwav_container container; /* RIFF, W64. */ drwav_uint32 format; /* DR_WAVE_FORMAT_* */ drwav_uint32 channels; drwav_uint32 sampleRate; drwav_uint32 bitsPerSample; } drwav_data_format; /* See the following for details on the 'smpl' chunk: https://sites.google.com/site/musicgapi/technical-documents/wav-file-format#smpl */ typedef struct { drwav_uint32 cuePointId; drwav_uint32 type; drwav_uint32 start; drwav_uint32 end; drwav_uint32 fraction; drwav_uint32 playCount; } drwav_smpl_loop; typedef struct { drwav_uint32 manufacturer; drwav_uint32 product; drwav_uint32 samplePeriod; drwav_uint32 midiUnityNotes; drwav_uint32 midiPitchFraction; drwav_uint32 smpteFormat; drwav_uint32 smpteOffset; drwav_uint32 numSampleLoops; drwav_uint32 samplerData; drwav_smpl_loop loops[DRWAV_MAX_SMPL_LOOPS]; } drwav_smpl; typedef struct { /* A pointer to the function to call when more data is needed. */ drwav_read_proc onRead; /* A pointer to the function to call when data needs to be written. Only used when the drwav object is opened in write mode. 
*/ drwav_write_proc onWrite; /* A pointer to the function to call when the wav file needs to be seeked. */ drwav_seek_proc onSeek; /* The user data to pass to callbacks. */ void* pUserData; /* Allocation callbacks. */ drwav_allocation_callbacks allocationCallbacks; /* Whether or not the WAV file is formatted as a standard RIFF file or W64. */ drwav_container container; /* Structure containing format information exactly as specified by the wav file. */ drwav_fmt fmt; /* The sample rate. Will be set to something like 44100. */ drwav_uint32 sampleRate; /* The number of channels. This will be set to 1 for monaural streams, 2 for stereo, etc. */ drwav_uint16 channels; /* The bits per sample. Will be set to something like 16, 24, etc. */ drwav_uint16 bitsPerSample; /* Equal to fmt.formatTag, or the value specified by fmt.subFormat if fmt.formatTag is equal to 65534 (WAVE_FORMAT_EXTENSIBLE). */ drwav_uint16 translatedFormatTag; /* The total number of PCM frames making up the audio data. */ drwav_uint64 totalPCMFrameCount; /* The size in bytes of the data chunk. */ drwav_uint64 dataChunkDataSize; /* The position in the stream of the first byte of the data chunk. This is used for seeking. */ drwav_uint64 dataChunkDataPos; /* The number of bytes remaining in the data chunk. */ drwav_uint64 bytesRemaining; /* Only used in sequential write mode. Keeps track of the desired size of the "data" chunk at the point of initialization time. Always set to 0 for non-sequential writes and when the drwav object is opened in read mode. Used for validation. */ drwav_uint64 dataChunkDataSizeTargetWrite; /* Keeps track of whether or not the wav writer was initialized in sequential mode. */ drwav_bool32 isSequentialWrite; /* smpl chunk. */ drwav_smpl smpl; /* A hack to avoid a DRWAV_MALLOC() when opening a decoder with drwav_init_memory(). */ drwav__memory_stream memoryStream; drwav__memory_stream_write memoryStreamWrite; /* Generic data for compressed formats. 
This data is shared across all block-compressed formats. */ struct { drwav_uint64 iCurrentPCMFrame; /* The index of the next PCM frame that will be read by drwav_read_*(). This is used with "totalPCMFrameCount" to ensure we don't read excess samples at the end of the last block. */ } compressed; /* Microsoft ADPCM specific data. */ struct { drwav_uint32 bytesRemainingInBlock; drwav_uint16 predictor[2]; drwav_int32 delta[2]; drwav_int32 cachedFrames[4]; /* Samples are stored in this cache during decoding. */ drwav_uint32 cachedFrameCount; drwav_int32 prevFrames[2][2]; /* The previous 2 samples for each channel (2 channels at most). */ } msadpcm; /* IMA ADPCM specific data. */ struct { drwav_uint32 bytesRemainingInBlock; drwav_int32 predictor[2]; drwav_int32 stepIndex[2]; drwav_int32 cachedFrames[16]; /* Samples are stored in this cache during decoding. */ drwav_uint32 cachedFrameCount; } ima; } drwav; /* Initializes a pre-allocated drwav object for reading. pWav [out] A pointer to the drwav object being initialized. onRead [in] The function to call when data needs to be read from the client. onSeek [in] The function to call when the read position of the client data needs to move. onChunk [in, optional] The function to call when a chunk is enumerated at initialized time. pUserData, pReadSeekUserData [in, optional] A pointer to application defined data that will be passed to onRead and onSeek. pChunkUserData [in, optional] A pointer to application defined data that will be passed to onChunk. flags [in, optional] A set of flags for controlling how things are loaded. Returns true if successful; false otherwise. Close the loader with drwav_uninit(). This is the lowest level function for initializing a WAV file. You can also use drwav_init_file() and drwav_init_memory() to open the stream from a file or from a block of memory respectively. Possible values for flags: DRWAV_SEQUENTIAL: Never perform a backwards seek while loading. 
This disables the chunk callback and will cause this function to return as soon as the data chunk is found. Any chunks after the data chunk will be ignored. drwav_init() is equivalent to "drwav_init_ex(pWav, onRead, onSeek, NULL, pUserData, NULL, 0);". The onChunk callback is not called for the WAVE or FMT chunks. The contents of the FMT chunk can be read from pWav->fmt after the function returns. See also: drwav_init_file(), drwav_init_memory(), drwav_uninit() */ DRWAV_API drwav_bool32 drwav_init(drwav* pWav, drwav_read_proc onRead, drwav_seek_proc onSeek, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_ex(drwav* pWav, drwav_read_proc onRead, drwav_seek_proc onSeek, drwav_chunk_proc onChunk, void* pReadSeekUserData, void* pChunkUserData, drwav_uint32 flags, const drwav_allocation_callbacks* pAllocationCallbacks); /* Initializes a pre-allocated drwav object for writing. onWrite [in] The function to call when data needs to be written. onSeek [in] The function to call when the write position needs to move. pUserData [in, optional] A pointer to application defined data that will be passed to onWrite and onSeek. Returns true if successful; false otherwise. Close the writer with drwav_uninit(). This is the lowest level function for initializing a WAV file. You can also use drwav_init_file_write() and drwav_init_memory_write() to open the stream from a file or from a block of memory respectively. If the total sample count is known, you can use drwav_init_write_sequential(). This avoids the need for dr_wav to perform a post-processing step for storing the total sample count and the size of the data chunk which requires a backwards seek. 
See also: drwav_init_file_write(), drwav_init_memory_write(), drwav_uninit() */ DRWAV_API drwav_bool32 drwav_init_write(drwav* pWav, const drwav_data_format* pFormat, drwav_write_proc onWrite, drwav_seek_proc onSeek, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_write_sequential(drwav* pWav, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, drwav_write_proc onWrite, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_write_sequential_pcm_frames(drwav* pWav, const drwav_data_format* pFormat, drwav_uint64 totalPCMFrameCount, drwav_write_proc onWrite, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks); /* Utility function to determine the target size of the entire data to be written (including all headers and chunks). Returns the target size in bytes. Useful if the application needs to know the size to allocate. Only writing to the RIFF chunk and one data chunk is currently supported. See also: drwav_init_write(), drwav_init_file_write(), drwav_init_memory_write() */ DRWAV_API drwav_uint64 drwav_target_write_size_bytes(const drwav_data_format* pFormat, drwav_uint64 totalSampleCount); /* Uninitializes the given drwav object. Use this only for objects initialized with drwav_init*() functions (drwav_init(), drwav_init_ex(), drwav_init_write(), drwav_init_write_sequential()). */ DRWAV_API drwav_result drwav_uninit(drwav* pWav); /* Reads raw audio data. This is the lowest level function for reading audio data. It simply reads the given number of bytes of the raw internal sample data. Consider using drwav_read_pcm_frames_s16(), drwav_read_pcm_frames_s32() or drwav_read_pcm_frames_f32() for reading sample data in a consistent format. pBufferOut can be NULL in which case a seek will be performed. Returns the number of bytes actually read. 
*/ DRWAV_API size_t drwav_read_raw(drwav* pWav, size_t bytesToRead, void* pBufferOut); /* Reads up to the specified number of PCM frames from the WAV file. The output data will be in the file's internal format, converted to native-endian byte order. Use drwav_read_pcm_frames_s16/f32/s32() to read data in a specific format. If the return value is less than <framesToRead> it means the end of the file has been reached or you have requested more PCM frames than can possibly fit in the output buffer. This function will only work when sample data is of a fixed size and uncompressed. If you are using a compressed format consider using drwav_read_raw() or drwav_read_pcm_frames_s16/s32/f32(). pBufferOut can be NULL in which case a seek will be performed. */ DRWAV_API drwav_uint64 drwav_read_pcm_frames(drwav* pWav, drwav_uint64 framesToRead, void* pBufferOut); DRWAV_API drwav_uint64 drwav_read_pcm_frames_le(drwav* pWav, drwav_uint64 framesToRead, void* pBufferOut); DRWAV_API drwav_uint64 drwav_read_pcm_frames_be(drwav* pWav, drwav_uint64 framesToRead, void* pBufferOut); /* Seeks to the given PCM frame. Returns true if successful; false otherwise. */ DRWAV_API drwav_bool32 drwav_seek_to_pcm_frame(drwav* pWav, drwav_uint64 targetFrameIndex); /* Writes raw audio data. Returns the number of bytes actually written. If this differs from bytesToWrite, it indicates an error. */ DRWAV_API size_t drwav_write_raw(drwav* pWav, size_t bytesToWrite, const void* pData); /* Writes PCM frames. Returns the number of PCM frames written. Input samples need to be in native-endian byte order. On big-endian architectures the input data will be converted to little-endian. Use drwav_write_raw() to write raw audio data without performing any conversion. 
*/ DRWAV_API drwav_uint64 drwav_write_pcm_frames(drwav* pWav, drwav_uint64 framesToWrite, const void* pData); DRWAV_API drwav_uint64 drwav_write_pcm_frames_le(drwav* pWav, drwav_uint64 framesToWrite, const void* pData); DRWAV_API drwav_uint64 drwav_write_pcm_frames_be(drwav* pWav, drwav_uint64 framesToWrite, const void* pData); /* Conversion Utilities */ #ifndef DR_WAV_NO_CONVERSION_API /* Reads a chunk of audio data and converts it to signed 16-bit PCM samples. pBufferOut can be NULL in which case a seek will be performed. Returns the number of PCM frames actually read. If the return value is less than <framesToRead> it means the end of the file has been reached. */ DRWAV_API drwav_uint64 drwav_read_pcm_frames_s16(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut); DRWAV_API drwav_uint64 drwav_read_pcm_frames_s16le(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut); DRWAV_API drwav_uint64 drwav_read_pcm_frames_s16be(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut); /* Low-level function for converting unsigned 8-bit PCM samples to signed 16-bit PCM samples. */ DRWAV_API void drwav_u8_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Low-level function for converting signed 24-bit PCM samples to signed 16-bit PCM samples. */ DRWAV_API void drwav_s24_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Low-level function for converting signed 32-bit PCM samples to signed 16-bit PCM samples. */ DRWAV_API void drwav_s32_to_s16(drwav_int16* pOut, const drwav_int32* pIn, size_t sampleCount); /* Low-level function for converting IEEE 32-bit floating point samples to signed 16-bit PCM samples. */ DRWAV_API void drwav_f32_to_s16(drwav_int16* pOut, const float* pIn, size_t sampleCount); /* Low-level function for converting IEEE 64-bit floating point samples to signed 16-bit PCM samples. 
*/ DRWAV_API void drwav_f64_to_s16(drwav_int16* pOut, const double* pIn, size_t sampleCount); /* Low-level function for converting A-law samples to signed 16-bit PCM samples. */ DRWAV_API void drwav_alaw_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Low-level function for converting u-law samples to signed 16-bit PCM samples. */ DRWAV_API void drwav_mulaw_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Reads a chunk of audio data and converts it to IEEE 32-bit floating point samples. pBufferOut can be NULL in which case a seek will be performed. Returns the number of PCM frames actually read. If the return value is less than <framesToRead> it means the end of the file has been reached. */ DRWAV_API drwav_uint64 drwav_read_pcm_frames_f32(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut); DRWAV_API drwav_uint64 drwav_read_pcm_frames_f32le(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut); DRWAV_API drwav_uint64 drwav_read_pcm_frames_f32be(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut); /* Low-level function for converting unsigned 8-bit PCM samples to IEEE 32-bit floating point samples. */ DRWAV_API void drwav_u8_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Low-level function for converting signed 16-bit PCM samples to IEEE 32-bit floating point samples. */ DRWAV_API void drwav_s16_to_f32(float* pOut, const drwav_int16* pIn, size_t sampleCount); /* Low-level function for converting signed 24-bit PCM samples to IEEE 32-bit floating point samples. */ DRWAV_API void drwav_s24_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Low-level function for converting signed 32-bit PCM samples to IEEE 32-bit floating point samples. */ DRWAV_API void drwav_s32_to_f32(float* pOut, const drwav_int32* pIn, size_t sampleCount); /* Low-level function for converting IEEE 64-bit floating point samples to IEEE 32-bit floating point samples. 
*/ DRWAV_API void drwav_f64_to_f32(float* pOut, const double* pIn, size_t sampleCount); /* Low-level function for converting A-law samples to IEEE 32-bit floating point samples. */ DRWAV_API void drwav_alaw_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Low-level function for converting u-law samples to IEEE 32-bit floating point samples. */ DRWAV_API void drwav_mulaw_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Reads a chunk of audio data and converts it to signed 32-bit PCM samples. pBufferOut can be NULL in which case a seek will be performed. Returns the number of PCM frames actually read. If the return value is less than <framesToRead> it means the end of the file has been reached. */ DRWAV_API drwav_uint64 drwav_read_pcm_frames_s32(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut); DRWAV_API drwav_uint64 drwav_read_pcm_frames_s32le(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut); DRWAV_API drwav_uint64 drwav_read_pcm_frames_s32be(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut); /* Low-level function for converting unsigned 8-bit PCM samples to signed 32-bit PCM samples. */ DRWAV_API void drwav_u8_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Low-level function for converting signed 16-bit PCM samples to signed 32-bit PCM samples. */ DRWAV_API void drwav_s16_to_s32(drwav_int32* pOut, const drwav_int16* pIn, size_t sampleCount); /* Low-level function for converting signed 24-bit PCM samples to signed 32-bit PCM samples. */ DRWAV_API void drwav_s24_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Low-level function for converting IEEE 32-bit floating point samples to signed 32-bit PCM samples. */ DRWAV_API void drwav_f32_to_s32(drwav_int32* pOut, const float* pIn, size_t sampleCount); /* Low-level function for converting IEEE 64-bit floating point samples to signed 32-bit PCM samples. 
*/ DRWAV_API void drwav_f64_to_s32(drwav_int32* pOut, const double* pIn, size_t sampleCount); /* Low-level function for converting A-law samples to signed 32-bit PCM samples. */ DRWAV_API void drwav_alaw_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t sampleCount); /* Low-level function for converting u-law samples to signed 32-bit PCM samples. */ DRWAV_API void drwav_mulaw_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t sampleCount); #endif /* DR_WAV_NO_CONVERSION_API */ /* High-Level Convenience Helpers */ #ifndef DR_WAV_NO_STDIO /* Helper for initializing a wave file for reading using stdio. This holds the internal FILE object until drwav_uninit() is called. Keep this in mind if you're caching drwav objects because the operating system may restrict the number of file handles an application can have open at any given time. */ DRWAV_API drwav_bool32 drwav_init_file(drwav* pWav, const char* filename, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_file_ex(drwav* pWav, const char* filename, drwav_chunk_proc onChunk, void* pChunkUserData, drwav_uint32 flags, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_file_w(drwav* pWav, const wchar_t* filename, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_file_ex_w(drwav* pWav, const wchar_t* filename, drwav_chunk_proc onChunk, void* pChunkUserData, drwav_uint32 flags, const drwav_allocation_callbacks* pAllocationCallbacks); /* Helper for initializing a wave file for writing using stdio. This holds the internal FILE object until drwav_uninit() is called. Keep this in mind if you're caching drwav objects because the operating system may restrict the number of file handles an application can have open at any given time. 
*/ DRWAV_API drwav_bool32 drwav_init_file_write(drwav* pWav, const char* filename, const drwav_data_format* pFormat, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_file_write_sequential(drwav* pWav, const char* filename, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_file_write_sequential_pcm_frames(drwav* pWav, const char* filename, const drwav_data_format* pFormat, drwav_uint64 totalPCMFrameCount, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_file_write_w(drwav* pWav, const wchar_t* filename, const drwav_data_format* pFormat, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_file_write_sequential_w(drwav* pWav, const wchar_t* filename, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_file_write_sequential_pcm_frames_w(drwav* pWav, const wchar_t* filename, const drwav_data_format* pFormat, drwav_uint64 totalPCMFrameCount, const drwav_allocation_callbacks* pAllocationCallbacks); #endif /* DR_WAV_NO_STDIO */ /* Helper for initializing a loader from a pre-allocated memory buffer. This does not create a copy of the data. It is up to the application to ensure the buffer remains valid for the lifetime of the drwav object. The buffer should contain the contents of the entire wave file, not just the sample data. 
*/ DRWAV_API drwav_bool32 drwav_init_memory(drwav* pWav, const void* data, size_t dataSize, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_memory_ex(drwav* pWav, const void* data, size_t dataSize, drwav_chunk_proc onChunk, void* pChunkUserData, drwav_uint32 flags, const drwav_allocation_callbacks* pAllocationCallbacks); /* Helper for initializing a writer which outputs data to a memory buffer. dr_wav will manage the memory allocations, however it is up to the caller to free the data with drwav_free(). The buffer will remain allocated even after drwav_uninit() is called. The buffer should not be considered valid until after drwav_uninit() has been called. */ DRWAV_API drwav_bool32 drwav_init_memory_write(drwav* pWav, void** ppData, size_t* pDataSize, const drwav_data_format* pFormat, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_memory_write_sequential(drwav* pWav, void** ppData, size_t* pDataSize, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_bool32 drwav_init_memory_write_sequential_pcm_frames(drwav* pWav, void** ppData, size_t* pDataSize, const drwav_data_format* pFormat, drwav_uint64 totalPCMFrameCount, const drwav_allocation_callbacks* pAllocationCallbacks); #ifndef DR_WAV_NO_CONVERSION_API /* Opens and reads an entire wav file in a single operation. The return value is a heap-allocated buffer containing the audio data. Use drwav_free() to free the buffer. 
*/ DRWAV_API drwav_int16* drwav_open_and_read_pcm_frames_s16(drwav_read_proc onRead, drwav_seek_proc onSeek, void* pUserData, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API float* drwav_open_and_read_pcm_frames_f32(drwav_read_proc onRead, drwav_seek_proc onSeek, void* pUserData, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_int32* drwav_open_and_read_pcm_frames_s32(drwav_read_proc onRead, drwav_seek_proc onSeek, void* pUserData, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); #ifndef DR_WAV_NO_STDIO /* Opens and decodes an entire wav file in a single operation. The return value is a heap-allocated buffer containing the audio data. Use drwav_free() to free the buffer. 
*/ DRWAV_API drwav_int16* drwav_open_file_and_read_pcm_frames_s16(const char* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API float* drwav_open_file_and_read_pcm_frames_f32(const char* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_int32* drwav_open_file_and_read_pcm_frames_s32(const char* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_int16* drwav_open_file_and_read_pcm_frames_s16_w(const wchar_t* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API float* drwav_open_file_and_read_pcm_frames_f32_w(const wchar_t* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_int32* drwav_open_file_and_read_pcm_frames_s32_w(const wchar_t* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); #endif /* Opens and decodes an entire wav file from a block of memory in a single operation. The return value is a heap-allocated buffer containing the audio data. Use drwav_free() to free the buffer. 
*/ DRWAV_API drwav_int16* drwav_open_memory_and_read_pcm_frames_s16(const void* data, size_t dataSize, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API float* drwav_open_memory_and_read_pcm_frames_f32(const void* data, size_t dataSize, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); DRWAV_API drwav_int32* drwav_open_memory_and_read_pcm_frames_s32(const void* data, size_t dataSize, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks); #endif /* Frees data that was allocated internally by dr_wav. */ DRWAV_API void drwav_free(void* p, const drwav_allocation_callbacks* pAllocationCallbacks); /* Converts bytes from a wav stream to a sized type of native endian. */ DRWAV_API drwav_uint16 drwav_bytes_to_u16(const drwav_uint8* data); DRWAV_API drwav_int16 drwav_bytes_to_s16(const drwav_uint8* data); DRWAV_API drwav_uint32 drwav_bytes_to_u32(const drwav_uint8* data); DRWAV_API drwav_int32 drwav_bytes_to_s32(const drwav_uint8* data); DRWAV_API drwav_uint64 drwav_bytes_to_u64(const drwav_uint8* data); DRWAV_API drwav_int64 drwav_bytes_to_s64(const drwav_uint8* data); /* Compares a GUID for the purpose of checking the type of a Wave64 chunk. */ DRWAV_API drwav_bool32 drwav_guid_equal(const drwav_uint8 a[16], const drwav_uint8 b[16]); /* Compares a four-character-code for the purpose of checking the type of a RIFF chunk. 
*/
DRWAV_API drwav_bool32 drwav_fourcc_equal(const drwav_uint8* a, const char* b);

#ifdef __cplusplus
}
#endif
#endif  /* dr_wav_h */


/************************************************************************************************************************************************************
 ************************************************************************************************************************************************************

 IMPLEMENTATION

 ************************************************************************************************************************************************************
 ************************************************************************************************************************************************************/
#if defined(DR_WAV_IMPLEMENTATION) || defined(DRWAV_IMPLEMENTATION)
#ifndef dr_wav_c
#define dr_wav_c

#include <stdlib.h>
#include <string.h> /* For memcpy(), memset() */
#include <limits.h> /* For INT_MAX */

#ifndef DR_WAV_NO_STDIO
#include <stdio.h>
#include <wchar.h>
#endif

/*
Standard library stuff. Every allocation/copy primitive is routed through a macro so the host
application can override it (e.g. with a custom allocator) by #define-ing it before including
the implementation.
*/
#ifndef DRWAV_ASSERT
#include <assert.h>
#define DRWAV_ASSERT(expression)           assert(expression)
#endif
#ifndef DRWAV_MALLOC
#define DRWAV_MALLOC(sz)                   malloc((sz))
#endif
#ifndef DRWAV_REALLOC
#define DRWAV_REALLOC(p, sz)               realloc((p), (sz))
#endif
#ifndef DRWAV_FREE
#define DRWAV_FREE(p)                      free((p))
#endif
#ifndef DRWAV_COPY_MEMORY
#define DRWAV_COPY_MEMORY(dst, src, sz)    memcpy((dst), (src), (sz))
#endif
#ifndef DRWAV_ZERO_MEMORY
#define DRWAV_ZERO_MEMORY(p, sz)           memset((p), 0, (sz))
#endif
#ifndef DRWAV_ZERO_OBJECT
#define DRWAV_ZERO_OBJECT(p)               DRWAV_ZERO_MEMORY((p), sizeof(*p))
#endif

/* Small utility macros. Arguments are fully parenthesized so these are safe with expressions. */
#define drwav_countof(x)                   (sizeof(x) / sizeof(x[0]))
#define drwav_align(x, a)                  ((((x) + (a) - 1) / (a)) * (a))   /* Round x up to a multiple of a. */
#define drwav_min(a, b)                    (((a) < (b)) ? (a) : (b))
#define drwav_max(a, b)                    (((a) > (b)) ? (a) : (b))
#define drwav_clamp(x, lo, hi)             (drwav_max((lo), drwav_min((hi), (x))))

#define DRWAV_MAX_SIMD_VECTOR_SIZE         64  /* 64 for AVX-512 in the future. */

/* CPU architecture. */
#if defined(__x86_64__) || defined(_M_X64)
    #define DRWAV_X64
#elif defined(__i386) || defined(_M_IX86)
    #define DRWAV_X86
#elif defined(__arm__) || defined(_M_ARM)
    #define DRWAV_ARM
#endif

/* Force-inlining, per compiler. */
#ifdef _MSC_VER
    #define DRWAV_INLINE __forceinline
#elif defined(__GNUC__)
    /*
    I've had a bug report where GCC is emitting warnings about functions possibly not being inlineable. This warning happens when
    the __attribute__((always_inline)) attribute is defined without an "inline" statement. I think therefore there must be some
    case where "__inline__" is not always defined, thus the compiler emitting these warnings. When using -std=c89 or -ansi on the
    command line, we cannot use the "inline" keyword and instead need to use "__inline__". In an attempt to work around this issue
    I am using "__inline__" only when we're compiling in strict ANSI mode.
    */
    #if defined(__STRICT_ANSI__)
        #define DRWAV_INLINE __inline__ __attribute__((always_inline))
    #else
        #define DRWAV_INLINE inline __attribute__((always_inline))
    #endif
#elif defined(__WATCOMC__)
    #define DRWAV_INLINE __inline
#else
    #define DRWAV_INLINE
#endif

/* Largest value representable by size_t; used for bounds checks on 64-bit -> size_t narrowing. */
#if defined(SIZE_MAX)
    #define DRWAV_SIZE_MAX  SIZE_MAX
#else
    #if defined(_WIN64) || defined(_LP64) || defined(__LP64__)
        #define DRWAV_SIZE_MAX  ((drwav_uint64)0xFFFFFFFFFFFFFFFF)
    #else
        #define DRWAV_SIZE_MAX  0xFFFFFFFF
    #endif
#endif

/* Detect availability of byte-swap compiler intrinsics (used by drwav__bswap16/32/64 below). */
#if defined(_MSC_VER) && _MSC_VER >= 1400
    #define DRWAV_HAS_BYTESWAP16_INTRINSIC
    #define DRWAV_HAS_BYTESWAP32_INTRINSIC
    #define DRWAV_HAS_BYTESWAP64_INTRINSIC
#elif defined(__clang__)
    #if defined(__has_builtin)
        #if __has_builtin(__builtin_bswap16)
            #define DRWAV_HAS_BYTESWAP16_INTRINSIC
        #endif
        #if __has_builtin(__builtin_bswap32)
            #define DRWAV_HAS_BYTESWAP32_INTRINSIC
        #endif
        #if __has_builtin(__builtin_bswap64)
            #define DRWAV_HAS_BYTESWAP64_INTRINSIC
        #endif
    #endif
#elif defined(__GNUC__)
    #if ((__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
        #define DRWAV_HAS_BYTESWAP32_INTRINSIC
        #define DRWAV_HAS_BYTESWAP64_INTRINSIC
    #endif
    #if ((__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8))
        #define DRWAV_HAS_BYTESWAP16_INTRINSIC
    #endif
#endif

/* Writes the compiled-in library version. Any of the output pointers may be NULL. */
DRWAV_API void drwav_version(drwav_uint32* pMajor, drwav_uint32* pMinor, drwav_uint32* pRevision)
{
    if (pMajor) {
        *pMajor = DRWAV_VERSION_MAJOR;
    }
    if (pMinor) {
        *pMinor = DRWAV_VERSION_MINOR;
    }
    if (pRevision) {
        *pRevision = DRWAV_VERSION_REVISION;
    }
}

/* Returns the compiled-in version as a static string; the caller must not free it. */
DRWAV_API const char* drwav_version_string(void)
{
    return DRWAV_VERSION_STRING;
}

/*
These limits are used for basic validation when initializing the decoder. If you exceed these limits, first of all: what on Earth are
you doing?! (Let me know, I'd be curious!) Second, you can adjust these by #define-ing them before the dr_wav implementation.
*/
#ifndef DRWAV_MAX_SAMPLE_RATE
#define DRWAV_MAX_SAMPLE_RATE       384000
#endif
#ifndef DRWAV_MAX_CHANNELS
#define DRWAV_MAX_CHANNELS          256
#endif
#ifndef DRWAV_MAX_BITS_PER_SAMPLE
#define DRWAV_MAX_BITS_PER_SAMPLE  64
#endif

/*
Wave64 (W64) chunk identifier GUIDs, stored in on-disk byte order. The first four bytes of each
GUID are the corresponding RIFF FourCC ("riff", "wave", "fmt ", ...).
*/
static const drwav_uint8 drwavGUID_W64_RIFF[16] = {0x72,0x69,0x66,0x66, 0x2E,0x91, 0xCF,0x11, 0xA5,0xD6, 0x28,0xDB,0x04,0xC1,0x00,0x00};    /* 66666972-912E-11CF-A5D6-28DB04C10000 */
static const drwav_uint8 drwavGUID_W64_WAVE[16] = {0x77,0x61,0x76,0x65, 0xF3,0xAC, 0xD3,0x11, 0x8C,0xD1, 0x00,0xC0,0x4F,0x8E,0xDB,0x8A};    /* 65766177-ACF3-11D3-8CD1-00C04F8EDB8A */
/*static const drwav_uint8 drwavGUID_W64_JUNK[16] = {0x6A,0x75,0x6E,0x6B, 0xF3,0xAC, 0xD3,0x11, 0x8C,0xD1, 0x00,0xC0,0x4F,0x8E,0xDB,0x8A};*/    /* 6B6E756A-ACF3-11D3-8CD1-00C04F8EDB8A */
static const drwav_uint8 drwavGUID_W64_FMT [16] = {0x66,0x6D,0x74,0x20, 0xF3,0xAC, 0xD3,0x11, 0x8C,0xD1, 0x00,0xC0,0x4F,0x8E,0xDB,0x8A};    /* 20746D66-ACF3-11D3-8CD1-00C04F8EDB8A */
static const drwav_uint8 drwavGUID_W64_FACT[16] = {0x66,0x61,0x63,0x74, 0xF3,0xAC, 0xD3,0x11, 0x8C,0xD1, 0x00,0xC0,0x4F,0x8E,0xDB,0x8A};    /* 74636166-ACF3-11D3-8CD1-00C04F8EDB8A */
static const drwav_uint8 drwavGUID_W64_DATA[16] = {0x64,0x61,0x74,0x61, 0xF3,0xAC, 0xD3,0x11, 0x8C,0xD1, 0x00,0xC0,0x4F,0x8E,0xDB,0x8A};    /* 61746164-ACF3-11D3-8CD1-00C04F8EDB8A */
static const drwav_uint8 drwavGUID_W64_SMPL[16] = {0x73,0x6D,0x70,0x6C, 0xF3,0xAC, 0xD3,0x11, 0x8C,0xD1, 0x00,0xC0,0x4F,0x8E,0xDB,0x8A};    /* 6C706D73-ACF3-11D3-8CD1-00C04F8EDB8A */

/* Byte-wise comparison of two 16-byte GUIDs. Returns DRWAV_TRUE when all 16 bytes match. */
static DRWAV_INLINE drwav_bool32 drwav__guid_equal(const drwav_uint8 a[16], const drwav_uint8 b[16])
{
    int i;
    for (i = 0; i < 16; i += 1) {
        if (a[i] != b[i]) {
            return DRWAV_FALSE;
        }
    }

    return DRWAV_TRUE;
}

/* Compares a chunk's FourCC bytes against a 4-character code. Exactly four bytes of b are read. */
static DRWAV_INLINE drwav_bool32 drwav__fourcc_equal(const drwav_uint8* a, const char* b)
{
    return
        a[0] == b[0] &&
        a[1] == b[1] &&
        a[2] == b[2] &&
        a[3] == b[3];
}

/*
Endianness check: answered at compile time on x86/x64 (and on platforms that expose __BYTE_ORDER),
otherwise determined at runtime by inspecting the first byte of an int set to 1.
*/
static DRWAV_INLINE int drwav__is_little_endian(void)
{
#if defined(DRWAV_X86) || defined(DRWAV_X64)
    return DRWAV_TRUE;
#elif defined(__BYTE_ORDER) && defined(__LITTLE_ENDIAN) && __BYTE_ORDER == __LITTLE_ENDIAN
    return DRWAV_TRUE;
#else
    int n = 1;
    return (*(char*)&n) == 1;
#endif
}

/*
Little-endian byte readers. These assemble integers byte-by-byte (data[0] is the least significant
byte), so they are safe on unaligned input and independent of host endianness.
*/
static DRWAV_INLINE drwav_uint16 drwav__bytes_to_u16(const drwav_uint8* data)
{
    return (data[0] << 0) | (data[1] << 8);
}

static DRWAV_INLINE drwav_int16 drwav__bytes_to_s16(const drwav_uint8* data)
{
    return (short)drwav__bytes_to_u16(data);    /* NOTE(review): casts via "short" rather than drwav_int16 — same width in practice, but inconsistent with the other readers. */
}

static DRWAV_INLINE drwav_uint32 drwav__bytes_to_u32(const drwav_uint8* data)
{
    return (data[0] << 0) | (data[1] << 8) | (data[2] << 16) | (data[3] << 24);
}

static DRWAV_INLINE drwav_int32 drwav__bytes_to_s32(const drwav_uint8* data)
{
    return (drwav_int32)drwav__bytes_to_u32(data);
}

static DRWAV_INLINE drwav_uint64 drwav__bytes_to_u64(const drwav_uint8* data)
{
    return
        ((drwav_uint64)data[0] <<  0) | ((drwav_uint64)data[1] <<  8) | ((drwav_uint64)data[2] << 16) | ((drwav_uint64)data[3] << 24) |
        ((drwav_uint64)data[4] << 32) | ((drwav_uint64)data[5] << 40) | ((drwav_uint64)data[6] << 48) | ((drwav_uint64)data[7] << 56);
}

static DRWAV_INLINE drwav_int64 drwav__bytes_to_s64(const drwav_uint8* data)
{
    return (drwav_int64)drwav__bytes_to_u64(data);
}

/* Copies a raw 16-byte GUID out of a byte stream. */
static DRWAV_INLINE void drwav__bytes_to_guid(const drwav_uint8* data, drwav_uint8* guid)
{
    int i;
    for (i = 0; i < 16; ++i) {
        guid[i] = data[i];
    }
}

/*
Byte-swap helpers. Each uses the compiler intrinsic when one was detected above (see the
DRWAV_HAS_BYTESWAP*_INTRINSIC detection block), falling back to portable shift/mask code otherwise.
*/
static DRWAV_INLINE drwav_uint16 drwav__bswap16(drwav_uint16 n)
{
#ifdef DRWAV_HAS_BYTESWAP16_INTRINSIC
    #if defined(_MSC_VER)
        return _byteswap_ushort(n);
    #elif defined(__GNUC__) || defined(__clang__)
        return __builtin_bswap16(n);
    #else
        #error "This compiler does not support the byte swap intrinsic."
    #endif
#else
    return ((n & 0xFF00) >> 8) |
           ((n & 0x00FF) << 8);
#endif
}

static DRWAV_INLINE drwav_uint32 drwav__bswap32(drwav_uint32 n)
{
#ifdef DRWAV_HAS_BYTESWAP32_INTRINSIC
    #if defined(_MSC_VER)
        return _byteswap_ulong(n);
    #elif defined(__GNUC__) || defined(__clang__)
        #if defined(DRWAV_ARM) && (defined(__ARM_ARCH) && __ARM_ARCH >= 6) && !defined(DRWAV_64BIT)   /* <-- 64-bit inline assembly has not been tested, so disabling for now. */
            /* Inline assembly optimized implementation for ARM. In my testing, GCC does not generate optimized code with __builtin_bswap32(). */
            drwav_uint32 r;
            __asm__ __volatile__ (
            #if defined(DRWAV_64BIT)
                "rev %w[out], %w[in]" : [out]"=r"(r) : [in]"r"(n)   /* <-- This is untested. If someone in the community could test this, that would be appreciated! */
            #else
                "rev %[out], %[in]" : [out]"=r"(r) : [in]"r"(n)
            #endif
            );
            return r;
        #else
            return __builtin_bswap32(n);
        #endif
    #else
        #error "This compiler does not support the byte swap intrinsic."
    #endif
#else
    return ((n & 0xFF000000) >> 24) |
           ((n & 0x00FF0000) >>  8) |
           ((n & 0x0000FF00) <<  8) |
           ((n & 0x000000FF) << 24);
#endif
}

static DRWAV_INLINE drwav_uint64 drwav__bswap64(drwav_uint64 n)
{
#ifdef DRWAV_HAS_BYTESWAP64_INTRINSIC
    #if defined(_MSC_VER)
        return _byteswap_uint64(n);
    #elif defined(__GNUC__) || defined(__clang__)
        return __builtin_bswap64(n);
    #else
        #error "This compiler does not support the byte swap intrinsic."
    #endif
#else
    /* Weird "<< 32" bitshift is required for C89 because it doesn't support 64-bit constants. Should be optimized out by a good compiler.
*/ return ((n & ((drwav_uint64)0xFF000000 << 32)) >> 56) | ((n & ((drwav_uint64)0x00FF0000 << 32)) >> 40) | ((n & ((drwav_uint64)0x0000FF00 << 32)) >> 24) | ((n & ((drwav_uint64)0x000000FF << 32)) >> 8) | ((n & ((drwav_uint64)0xFF000000 )) << 8) | ((n & ((drwav_uint64)0x00FF0000 )) << 24) | ((n & ((drwav_uint64)0x0000FF00 )) << 40) | ((n & ((drwav_uint64)0x000000FF )) << 56); #endif } static DRWAV_INLINE drwav_int16 drwav__bswap_s16(drwav_int16 n) { return (drwav_int16)drwav__bswap16((drwav_uint16)n); } static DRWAV_INLINE void drwav__bswap_samples_s16(drwav_int16* pSamples, drwav_uint64 sampleCount) { drwav_uint64 iSample; for (iSample = 0; iSample < sampleCount; iSample += 1) { pSamples[iSample] = drwav__bswap_s16(pSamples[iSample]); } } static DRWAV_INLINE void drwav__bswap_s24(drwav_uint8* p) { drwav_uint8 t; t = p[0]; p[0] = p[2]; p[2] = t; } static DRWAV_INLINE void drwav__bswap_samples_s24(drwav_uint8* pSamples, drwav_uint64 sampleCount) { drwav_uint64 iSample; for (iSample = 0; iSample < sampleCount; iSample += 1) { drwav_uint8* pSample = pSamples + (iSample*3); drwav__bswap_s24(pSample); } } static DRWAV_INLINE drwav_int32 drwav__bswap_s32(drwav_int32 n) { return (drwav_int32)drwav__bswap32((drwav_uint32)n); } static DRWAV_INLINE void drwav__bswap_samples_s32(drwav_int32* pSamples, drwav_uint64 sampleCount) { drwav_uint64 iSample; for (iSample = 0; iSample < sampleCount; iSample += 1) { pSamples[iSample] = drwav__bswap_s32(pSamples[iSample]); } } static DRWAV_INLINE float drwav__bswap_f32(float n) { union { drwav_uint32 i; float f; } x; x.f = n; x.i = drwav__bswap32(x.i); return x.f; } static DRWAV_INLINE void drwav__bswap_samples_f32(float* pSamples, drwav_uint64 sampleCount) { drwav_uint64 iSample; for (iSample = 0; iSample < sampleCount; iSample += 1) { pSamples[iSample] = drwav__bswap_f32(pSamples[iSample]); } } static DRWAV_INLINE double drwav__bswap_f64(double n) { union { drwav_uint64 i; double f; } x; x.f = n; x.i = drwav__bswap64(x.i); return x.f; 
}

static DRWAV_INLINE void drwav__bswap_samples_f64(double* pSamples, drwav_uint64 sampleCount)
{
    drwav_uint64 iSample;
    for (iSample = 0; iSample < sampleCount; iSample += 1) {
        pSamples[iSample] = drwav__bswap_f64(pSamples[iSample]);
    }
}

/* Dispatches the in-place byte swap for integer PCM based on the sample size in bytes. */
static DRWAV_INLINE void drwav__bswap_samples_pcm(void* pSamples, drwav_uint64 sampleCount, drwav_uint32 bytesPerSample)
{
    /* Assumes integer PCM. Floating point PCM is done in drwav__bswap_samples_ieee(). */
    switch (bytesPerSample)
    {
        case 2: /* s16, s12 (loosely packed) */
        {
            drwav__bswap_samples_s16((drwav_int16*)pSamples, sampleCount);
        } break;
        case 3: /* s24 */
        {
            drwav__bswap_samples_s24((drwav_uint8*)pSamples, sampleCount);
        } break;
        case 4: /* s32 */
        {
            drwav__bswap_samples_s32((drwav_int32*)pSamples, sampleCount);
        } break;
        default:
        {
            /* Unsupported format. */
            DRWAV_ASSERT(DRWAV_FALSE);
        } break;
    }
}

/* Dispatches the in-place byte swap for IEEE floating-point samples (f32/f64). */
static DRWAV_INLINE void drwav__bswap_samples_ieee(void* pSamples, drwav_uint64 sampleCount, drwav_uint32 bytesPerSample)
{
    switch (bytesPerSample)
    {
    #if 0   /* Contributions welcome for f16 support. */
        case 2: /* f16 */
        {
            drwav__bswap_samples_f16((drwav_float16*)pSamples, sampleCount);
        } break;
    #endif
        case 4: /* f32 */
        {
            drwav__bswap_samples_f32((float*)pSamples, sampleCount);
        } break;
        case 8: /* f64 */
        {
            drwav__bswap_samples_f64((double*)pSamples, sampleCount);
        } break;
        default:
        {
            /* Unsupported format. */
            DRWAV_ASSERT(DRWAV_FALSE);
        } break;
    }
}

/* Top-level byte-swap dispatcher keyed on the WAV format tag. A-law/mu-law samples are 16-bit after decoding, hence the s16 swap. */
static DRWAV_INLINE void drwav__bswap_samples(void* pSamples, drwav_uint64 sampleCount, drwav_uint32 bytesPerSample, drwav_uint16 format)
{
    switch (format)
    {
        case DR_WAVE_FORMAT_PCM:
        {
            drwav__bswap_samples_pcm(pSamples, sampleCount, bytesPerSample);
        } break;

        case DR_WAVE_FORMAT_IEEE_FLOAT:
        {
            drwav__bswap_samples_ieee(pSamples, sampleCount, bytesPerSample);
        } break;

        case DR_WAVE_FORMAT_ALAW:
        case DR_WAVE_FORMAT_MULAW:
        {
            drwav__bswap_samples_s16((drwav_int16*)pSamples, sampleCount);
        } break;

        case DR_WAVE_FORMAT_ADPCM:
        case DR_WAVE_FORMAT_DVI_ADPCM:
        default:
        {
            /* Unsupported format. */
            DRWAV_ASSERT(DRWAV_FALSE);
        } break;
    }
}

/* Default allocation callbacks, used when the caller passes NULL callbacks. They forward to the DRWAV_MALLOC/REALLOC/FREE macros. */
static void* drwav__malloc_default(size_t sz, void* pUserData)
{
    (void)pUserData;
    return DRWAV_MALLOC(sz);
}

static void* drwav__realloc_default(void* p, size_t sz, void* pUserData)
{
    (void)pUserData;
    return DRWAV_REALLOC(p, sz);
}

static void drwav__free_default(void* p, void* pUserData)
{
    (void)pUserData;
    DRWAV_FREE(p);
}

/* Allocates via the user-supplied callbacks, preferring onMalloc and falling back to onRealloc(NULL, ...). */
static void* drwav__malloc_from_callbacks(size_t sz, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (pAllocationCallbacks == NULL) {
        return NULL;
    }

    if (pAllocationCallbacks->onMalloc != NULL) {
        return pAllocationCallbacks->onMalloc(sz, pAllocationCallbacks->pUserData);
    }

    /* Try using realloc(). */
    if (pAllocationCallbacks->onRealloc != NULL) {
        return pAllocationCallbacks->onRealloc(NULL, sz, pAllocationCallbacks->pUserData);
    }

    return NULL;
}

/*
Reallocates via the user-supplied callbacks. szOld is needed for the malloc+copy+free emulation path
when the callbacks do not provide onRealloc.
*/
static void* drwav__realloc_from_callbacks(void* p, size_t szNew, size_t szOld, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (pAllocationCallbacks == NULL) {
        return NULL;
    }

    if (pAllocationCallbacks->onRealloc != NULL) {
        return pAllocationCallbacks->onRealloc(p, szNew, pAllocationCallbacks->pUserData);
    }

    /* Try emulating realloc() in terms of malloc()/free(). */
    if (pAllocationCallbacks->onMalloc != NULL && pAllocationCallbacks->onFree != NULL) {
        void* p2;

        p2 = pAllocationCallbacks->onMalloc(szNew, pAllocationCallbacks->pUserData);
        if (p2 == NULL) {
            return NULL;
        }

        if (p != NULL) {
            DRWAV_COPY_MEMORY(p2, p, szOld);
            pAllocationCallbacks->onFree(p, pAllocationCallbacks->pUserData);
        }

        return p2;
    }

    return NULL;
}

/* Frees via the user-supplied callbacks. NULL pointer and NULL callbacks are both no-ops. */
static void drwav__free_from_callbacks(void* p, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (p == NULL || pAllocationCallbacks == NULL) {
        return;
    }

    if (pAllocationCallbacks->onFree != NULL) {
        pAllocationCallbacks->onFree(p, pAllocationCallbacks->pUserData);
    }
}

/* Returns a copy of the caller's callbacks, or the drwav__*_default set when the caller passed NULL. */
static drwav_allocation_callbacks drwav_copy_allocation_callbacks_or_defaults(const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (pAllocationCallbacks != NULL) {
        /* Copy. */
        return *pAllocationCallbacks;
    } else {
        /* Defaults. */
        drwav_allocation_callbacks allocationCallbacks;
        allocationCallbacks.pUserData = NULL;
        allocationCallbacks.onMalloc = drwav__malloc_default;
        allocationCallbacks.onRealloc = drwav__realloc_default;
        allocationCallbacks.onFree = drwav__free_default;
        return allocationCallbacks;
    }
}

/* Only MS-ADPCM and IMA/DVI-ADPCM are treated as compressed by this library. */
static DRWAV_INLINE drwav_bool32 drwav__is_compressed_format_tag(drwav_uint16 formatTag)
{
    return
        formatTag == DR_WAVE_FORMAT_ADPCM ||
        formatTag == DR_WAVE_FORMAT_DVI_ADPCM;
}

/* RIFF chunks are padded to 2-byte boundaries; W64 chunks to 8-byte boundaries. */
static unsigned int drwav__chunk_padding_size_riff(drwav_uint64 chunkSize)
{
    return (unsigned int)(chunkSize % 2);
}

static unsigned int drwav__chunk_padding_size_w64(drwav_uint64 chunkSize)
{
    return (unsigned int)(chunkSize % 8);
}

/* Forward declarations for the compressed-format readers and the sequential write initializer defined later in the file. */
static drwav_uint64 drwav_read_pcm_frames_s16__msadpcm(drwav* pWav, drwav_uint64 samplesToRead, drwav_int16* pBufferOut);
static drwav_uint64 drwav_read_pcm_frames_s16__ima(drwav* pWav, drwav_uint64 samplesToRead, drwav_int16* pBufferOut);
static drwav_bool32 drwav_init_write__internal(drwav* pWav, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount);

/*
Reads the next chunk header (fourcc/GUID + size) from the stream and advances *pRunningBytesReadOut.
Returns DRWAV_AT_END when the id read fails, DRWAV_INVALID_FILE when the size read fails.
*/
static drwav_result drwav__read_chunk_header(drwav_read_proc onRead,
void* pUserData, drwav_container container, drwav_uint64* pRunningBytesReadOut, drwav_chunk_header* pHeaderOut)
{
    if (container == drwav_container_riff || container == drwav_container_rf64) {
        /* RIFF/RF64 header: 4-byte fourcc + 4-byte little-endian size. */
        drwav_uint8 sizeInBytes[4];

        if (onRead(pUserData, pHeaderOut->id.fourcc, 4) != 4) {
            return DRWAV_AT_END;
        }

        if (onRead(pUserData, sizeInBytes, 4) != 4) {
            return DRWAV_INVALID_FILE;
        }

        pHeaderOut->sizeInBytes = drwav__bytes_to_u32(sizeInBytes);
        pHeaderOut->paddingSize = drwav__chunk_padding_size_riff(pHeaderOut->sizeInBytes);
        *pRunningBytesReadOut += 8;
    } else {
        /* W64 header: 16-byte GUID + 8-byte little-endian size that includes the header itself. */
        drwav_uint8 sizeInBytes[8];

        if (onRead(pUserData, pHeaderOut->id.guid, 16) != 16) {
            return DRWAV_AT_END;
        }

        if (onRead(pUserData, sizeInBytes, 8) != 8) {
            return DRWAV_INVALID_FILE;
        }

        pHeaderOut->sizeInBytes = drwav__bytes_to_u64(sizeInBytes) - 24;    /* <-- Subtract 24 because w64 includes the size of the header. */
        pHeaderOut->paddingSize = drwav__chunk_padding_size_w64(pHeaderOut->sizeInBytes);
        *pRunningBytesReadOut += 24;
    }

    return DRWAV_SUCCESS;
}

/* Seeks forward by `offset` bytes, issuing multiple relative seeks because onSeek takes a 32-bit signed offset. */
static drwav_bool32 drwav__seek_forward(drwav_seek_proc onSeek, drwav_uint64 offset, void* pUserData)
{
    drwav_uint64 bytesRemainingToSeek = offset;
    while (bytesRemainingToSeek > 0) {
        if (bytesRemainingToSeek > 0x7FFFFFFF) {
            if (!onSeek(pUserData, 0x7FFFFFFF, drwav_seek_origin_current)) {
                return DRWAV_FALSE;
            }
            bytesRemainingToSeek -= 0x7FFFFFFF;
        } else {
            if (!onSeek(pUserData, (int)bytesRemainingToSeek, drwav_seek_origin_current)) {
                return DRWAV_FALSE;
            }
            bytesRemainingToSeek = 0;
        }
    }

    return DRWAV_TRUE;
}

/* Seeks to an absolute 64-bit offset: one absolute seek, then relative seeks for whatever exceeds 32 bits. */
static drwav_bool32 drwav__seek_from_start(drwav_seek_proc onSeek, drwav_uint64 offset, void* pUserData)
{
    if (offset <= 0x7FFFFFFF) {
        return onSeek(pUserData, (int)offset, drwav_seek_origin_start);
    }

    /* Larger than 32-bit seek. */
    if (!onSeek(pUserData, 0x7FFFFFFF, drwav_seek_origin_start)) {
        return DRWAV_FALSE;
    }
    offset -= 0x7FFFFFFF;

    for (;;) {
        if (offset <= 0x7FFFFFFF) {
            return onSeek(pUserData, (int)offset, drwav_seek_origin_current);
        }

        if (!onSeek(pUserData, 0x7FFFFFFF, drwav_seek_origin_current)) {
            return DRWAV_FALSE;
        }
        offset -= 0x7FFFFFFF;
    }

    /* Should never get here. */
    /*return DRWAV_TRUE; */
}

/*
Scans forward to the "fmt " chunk, skipping unrelated chunks, and decodes its fields into fmtOut.
Also consumes any WAVEFORMATEX/WAVEFORMATEXTENSIBLE extension data and trailing padding.
*/
static drwav_bool32 drwav__read_fmt(drwav_read_proc onRead, drwav_seek_proc onSeek, void* pUserData, drwav_container container, drwav_uint64* pRunningBytesReadOut, drwav_fmt* fmtOut)
{
    drwav_chunk_header header;
    drwav_uint8 fmt[16];

    if (drwav__read_chunk_header(onRead, pUserData, container, pRunningBytesReadOut, &header) != DRWAV_SUCCESS) {
        return DRWAV_FALSE;
    }

    /* Skip non-fmt chunks. */
    while (((container == drwav_container_riff || container == drwav_container_rf64) && !drwav__fourcc_equal(header.id.fourcc, "fmt ")) || (container == drwav_container_w64 && !drwav__guid_equal(header.id.guid, drwavGUID_W64_FMT))) {
        if (!drwav__seek_forward(onSeek, header.sizeInBytes + header.paddingSize, pUserData)) {
            return DRWAV_FALSE;
        }
        *pRunningBytesReadOut += header.sizeInBytes + header.paddingSize;

        /* Try the next header. */
        if (drwav__read_chunk_header(onRead, pUserData, container, pRunningBytesReadOut, &header) != DRWAV_SUCCESS) {
            return DRWAV_FALSE;
        }
    }

    /* Validation. */
    if (container == drwav_container_riff || container == drwav_container_rf64) {
        if (!drwav__fourcc_equal(header.id.fourcc, "fmt ")) {
            return DRWAV_FALSE;
        }
    } else {
        if (!drwav__guid_equal(header.id.guid, drwavGUID_W64_FMT)) {
            return DRWAV_FALSE;
        }
    }

    /* The core 16-byte WAVEFORMAT structure. */
    if (onRead(pUserData, fmt, sizeof(fmt)) != sizeof(fmt)) {
        return DRWAV_FALSE;
    }
    *pRunningBytesReadOut += sizeof(fmt);

    fmtOut->formatTag = drwav__bytes_to_u16(fmt + 0);
    fmtOut->channels = drwav__bytes_to_u16(fmt + 2);
    fmtOut->sampleRate = drwav__bytes_to_u32(fmt + 4);
    fmtOut->avgBytesPerSec = drwav__bytes_to_u32(fmt + 8);
    fmtOut->blockAlign = drwav__bytes_to_u16(fmt + 12);
    fmtOut->bitsPerSample = drwav__bytes_to_u16(fmt + 14);

    fmtOut->extendedSize = 0;
    fmtOut->validBitsPerSample = 0;
    fmtOut->channelMask = 0;
    memset(fmtOut->subFormat, 0, sizeof(fmtOut->subFormat));

    if (header.sizeInBytes > 16) {
        /* WAVEFORMATEX: a 2-byte cbSize field follows, optionally with extension data. */
        drwav_uint8 fmt_cbSize[2];
        int bytesReadSoFar = 0;

        if (onRead(pUserData, fmt_cbSize, sizeof(fmt_cbSize)) != sizeof(fmt_cbSize)) {
            return DRWAV_FALSE; /* Expecting more data. */
        }
        *pRunningBytesReadOut += sizeof(fmt_cbSize);

        bytesReadSoFar = 18;

        fmtOut->extendedSize = drwav__bytes_to_u16(fmt_cbSize);
        if (fmtOut->extendedSize > 0) {
            /* Simple validation. */
            if (fmtOut->formatTag == DR_WAVE_FORMAT_EXTENSIBLE) {
                if (fmtOut->extendedSize != 22) {
                    return DRWAV_FALSE;
                }
            }

            if (fmtOut->formatTag == DR_WAVE_FORMAT_EXTENSIBLE) {
                /* WAVEFORMATEXTENSIBLE: valid bits, channel mask and the sub-format GUID. */
                drwav_uint8 fmtext[22];
                if (onRead(pUserData, fmtext, fmtOut->extendedSize) != fmtOut->extendedSize) {
                    return DRWAV_FALSE; /* Expecting more data. */
                }

                fmtOut->validBitsPerSample = drwav__bytes_to_u16(fmtext + 0);
                fmtOut->channelMask = drwav__bytes_to_u32(fmtext + 2);
                drwav__bytes_to_guid(fmtext + 6, fmtOut->subFormat);
            } else {
                if (!onSeek(pUserData, fmtOut->extendedSize, drwav_seek_origin_current)) {
                    return DRWAV_FALSE;
                }
            }
            *pRunningBytesReadOut += fmtOut->extendedSize;

            bytesReadSoFar += fmtOut->extendedSize;
        }

        /* Seek past any leftover bytes. For w64 the leftover will be defined based on the chunk size. */
        /* NOTE(review): if header.sizeInBytes < bytesReadSoFar this cast produces a negative relative seek - presumably intentional rewind, but verify against malformed files. */
        if (!onSeek(pUserData, (int)(header.sizeInBytes - bytesReadSoFar), drwav_seek_origin_current)) {
            return DRWAV_FALSE;
        }
        *pRunningBytesReadOut += (header.sizeInBytes - bytesReadSoFar);
    }

    if (header.paddingSize > 0) {
        if (!onSeek(pUserData, header.paddingSize, drwav_seek_origin_current)) {
            return DRWAV_FALSE;
        }
        *pRunningBytesReadOut += header.paddingSize;
    }

    return DRWAV_TRUE;
}

/* Thin wrapper around onRead that also advances the caller's running cursor by the number of bytes actually read. */
static size_t drwav__on_read(drwav_read_proc onRead, void* pUserData, void* pBufferOut, size_t bytesToRead, drwav_uint64* pCursor)
{
    size_t bytesRead;

    DRWAV_ASSERT(onRead != NULL);
    DRWAV_ASSERT(pCursor != NULL);

    bytesRead = onRead(pUserData, pBufferOut, bytesToRead);
    *pCursor += bytesRead;
    return bytesRead;
}

#if 0
static drwav_bool32 drwav__on_seek(drwav_seek_proc onSeek, void* pUserData, int offset, drwav_seek_origin origin, drwav_uint64* pCursor)
{
    DRWAV_ASSERT(onSeek != NULL);
    DRWAV_ASSERT(pCursor != NULL);

    if (!onSeek(pUserData, offset, origin)) {
        return DRWAV_FALSE;
    }

    if (origin == drwav_seek_origin_start) {
        *pCursor = offset;
    } else {
        *pCursor += offset;
    }

    return DRWAV_TRUE;
}
#endif

static drwav_uint32 drwav_get_bytes_per_pcm_frame(drwav* pWav)
{
    /*
    The bytes per frame is a bit ambiguous. It can be either be based on the bits per sample, or the block align. The way I'm doing it here
    is that if the bits per sample is a multiple of 8, use floor(bitsPerSample*channels/8), otherwise fall back to the block align.
    */
    if ((pWav->bitsPerSample & 0x7) == 0) {
        /* Bits per sample is a multiple of 8. */
        return (pWav->bitsPerSample * pWav->fmt.channels) >> 3;
    } else {
        return pWav->fmt.blockAlign;
    }
}

/* Resolves the effective format tag: for WAVE_FORMAT_EXTENSIBLE the real tag lives in the first two bytes of the sub-format GUID. */
DRWAV_API drwav_uint16 drwav_fmt_get_format(const drwav_fmt* pFMT)
{
    if (pFMT == NULL) {
        return 0;
    }

    if (pFMT->formatTag != DR_WAVE_FORMAT_EXTENSIBLE) {
        return pFMT->formatTag;
    } else {
        return drwav__bytes_to_u16(pFMT->subFormat);    /* Only the first two bytes are required.
*/
    }
}

/* Validates the user callbacks, zeroes the drwav struct and stores the callbacks. Must be called before drwav_init__internal(). */
static drwav_bool32 drwav_preinit(drwav* pWav, drwav_read_proc onRead, drwav_seek_proc onSeek, void* pReadSeekUserData, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (pWav == NULL || onRead == NULL || onSeek == NULL) {
        return DRWAV_FALSE;
    }

    DRWAV_ZERO_MEMORY(pWav, sizeof(*pWav));
    pWav->onRead = onRead;
    pWav->onSeek = onSeek;
    pWav->pUserData = pReadSeekUserData;
    pWav->allocationCallbacks = drwav_copy_allocation_callbacks_or_defaults(pAllocationCallbacks);

    if (pWav->allocationCallbacks.onFree == NULL || (pWav->allocationCallbacks.onMalloc == NULL && pWav->allocationCallbacks.onRealloc == NULL)) {
        return DRWAV_FALSE; /* Invalid allocation callbacks. */
    }

    return DRWAV_TRUE;
}

/*
Core read-path initializer: detects the container (RIFF/W64/RF64), reads the "fmt " chunk, walks all chunks
(reporting them to onChunk unless in sequential mode), locates the "data" chunk, and computes the total
PCM frame count. Leaves the stream positioned at the first byte of audio data.
*/
static drwav_bool32 drwav_init__internal(drwav* pWav, drwav_chunk_proc onChunk, void* pChunkUserData, drwav_uint32 flags)
{
    /* This function assumes drwav_preinit() has been called beforehand. */

    drwav_uint64 cursor;    /* <-- Keeps track of the byte position so we can seek to specific locations. */
    drwav_bool32 sequential;
    drwav_uint8 riff[4];
    drwav_fmt fmt;
    unsigned short translatedFormatTag;
    drwav_bool32 foundDataChunk;
    drwav_uint64 dataChunkSize = 0; /* <-- Important! Don't explicitly set this to 0 anywhere else. Calculation of the size of the data chunk is performed in different paths depending on the container. */
    drwav_uint64 sampleCountFromFactChunk = 0;  /* Same as dataChunkSize - make sure this is the only place this is initialized to 0. */
    drwav_uint64 chunkSize;

    cursor = 0;
    sequential = (flags & DRWAV_SEQUENTIAL) != 0;

    /* The first 4 bytes should be the RIFF identifier. */
    if (drwav__on_read(pWav->onRead, pWav->pUserData, riff, sizeof(riff), &cursor) != sizeof(riff)) {
        return DRWAV_FALSE;
    }

    /*
    The first 4 bytes can be used to identify the container. For RIFF files it will start with "RIFF" and for
    w64 it will start with "riff".
    */
    if (drwav__fourcc_equal(riff, "RIFF")) {
        pWav->container = drwav_container_riff;
    } else if (drwav__fourcc_equal(riff, "riff")) {
        int i;
        drwav_uint8 riff2[12];

        pWav->container = drwav_container_w64;

        /* Check the rest of the GUID for validity. */
        if (drwav__on_read(pWav->onRead, pWav->pUserData, riff2, sizeof(riff2), &cursor) != sizeof(riff2)) {
            return DRWAV_FALSE;
        }

        for (i = 0; i < 12; ++i) {
            if (riff2[i] != drwavGUID_W64_RIFF[i+4]) {
                return DRWAV_FALSE;
            }
        }
    } else if (drwav__fourcc_equal(riff, "RF64")) {
        pWav->container = drwav_container_rf64;
    } else {
        return DRWAV_FALSE; /* Unknown or unsupported container. */
    }

    if (pWav->container == drwav_container_riff || pWav->container == drwav_container_rf64) {
        drwav_uint8 chunkSizeBytes[4];
        drwav_uint8 wave[4];

        /* RIFF/WAVE */
        if (drwav__on_read(pWav->onRead, pWav->pUserData, chunkSizeBytes, sizeof(chunkSizeBytes), &cursor) != sizeof(chunkSizeBytes)) {
            return DRWAV_FALSE;
        }

        if (pWav->container == drwav_container_riff) {
            if (drwav__bytes_to_u32(chunkSizeBytes) < 36) {
                return DRWAV_FALSE; /* Chunk size should always be at least 36 bytes. */
            }
        } else {
            if (drwav__bytes_to_u32(chunkSizeBytes) != 0xFFFFFFFF) {
                return DRWAV_FALSE; /* Chunk size should always be set to -1/0xFFFFFFFF for RF64. The actual size is retrieved later. */
            }
        }

        if (drwav__on_read(pWav->onRead, pWav->pUserData, wave, sizeof(wave), &cursor) != sizeof(wave)) {
            return DRWAV_FALSE;
        }

        if (!drwav__fourcc_equal(wave, "WAVE")) {
            return DRWAV_FALSE; /* Expecting "WAVE". */
        }
    } else {
        drwav_uint8 chunkSizeBytes[8];
        drwav_uint8 wave[16];

        /* W64 */
        if (drwav__on_read(pWav->onRead, pWav->pUserData, chunkSizeBytes, sizeof(chunkSizeBytes), &cursor) != sizeof(chunkSizeBytes)) {
            return DRWAV_FALSE;
        }

        if (drwav__bytes_to_u64(chunkSizeBytes) < 80) {
            return DRWAV_FALSE;
        }

        if (drwav__on_read(pWav->onRead, pWav->pUserData, wave, sizeof(wave), &cursor) != sizeof(wave)) {
            return DRWAV_FALSE;
        }

        if (!drwav__guid_equal(wave, drwavGUID_W64_WAVE)) {
            return DRWAV_FALSE;
        }
    }

    /* For RF64, the "ds64" chunk must come next, before the "fmt " chunk. It carries the real 64-bit RIFF/data/sample sizes. */
    if (pWav->container == drwav_container_rf64) {
        drwav_uint8 sizeBytes[8];
        drwav_uint64 bytesRemainingInChunk;
        drwav_chunk_header header;
        drwav_result result = drwav__read_chunk_header(pWav->onRead, pWav->pUserData, pWav->container, &cursor, &header);
        if (result != DRWAV_SUCCESS) {
            return DRWAV_FALSE;
        }

        if (!drwav__fourcc_equal(header.id.fourcc, "ds64")) {
            return DRWAV_FALSE; /* Expecting "ds64". */
        }

        bytesRemainingInChunk = header.sizeInBytes + header.paddingSize;

        /* We don't care about the size of the RIFF chunk - skip it. */
        if (!drwav__seek_forward(pWav->onSeek, 8, pWav->pUserData)) {
            return DRWAV_FALSE;
        }
        bytesRemainingInChunk -= 8;
        cursor += 8;

        /* Next 8 bytes is the size of the "data" chunk. */
        if (drwav__on_read(pWav->onRead, pWav->pUserData, sizeBytes, sizeof(sizeBytes), &cursor) != sizeof(sizeBytes)) {
            return DRWAV_FALSE;
        }
        bytesRemainingInChunk -= 8;
        dataChunkSize = drwav__bytes_to_u64(sizeBytes);

        /* Next 8 bytes is the same count which we would usually derived from the FACT chunk if it was available. */
        if (drwav__on_read(pWav->onRead, pWav->pUserData, sizeBytes, sizeof(sizeBytes), &cursor) != sizeof(sizeBytes)) {
            return DRWAV_FALSE;
        }
        bytesRemainingInChunk -= 8;
        sampleCountFromFactChunk = drwav__bytes_to_u64(sizeBytes);

        /* Skip over everything else. */
        if (!drwav__seek_forward(pWav->onSeek, bytesRemainingInChunk, pWav->pUserData)) {
            return DRWAV_FALSE;
        }
        cursor += bytesRemainingInChunk;
    }

    /* The next bytes should be the "fmt " chunk. */
    if (!drwav__read_fmt(pWav->onRead, pWav->onSeek, pWav->pUserData, pWav->container, &cursor, &fmt)) {
        return DRWAV_FALSE; /* Failed to read the "fmt " chunk. */
    }

    /* Basic validation. */
    if ((fmt.sampleRate == 0 || fmt.sampleRate > DRWAV_MAX_SAMPLE_RATE) ||
        (fmt.channels == 0 || fmt.channels > DRWAV_MAX_CHANNELS) ||
        (fmt.bitsPerSample == 0 || fmt.bitsPerSample > DRWAV_MAX_BITS_PER_SAMPLE) ||
        fmt.blockAlign == 0) {
        return DRWAV_FALSE; /* Probably an invalid WAV file. */
    }

    /* Translate the internal format. */
    translatedFormatTag = fmt.formatTag;
    if (translatedFormatTag == DR_WAVE_FORMAT_EXTENSIBLE) {
        translatedFormatTag = drwav__bytes_to_u16(fmt.subFormat + 0);
    }

    /*
    We need to enumerate over each chunk for two reasons:
      1) The "data" chunk may not be the next one
      2) We may want to report each chunk back to the client

    In order to correctly report each chunk back to the client we will need to keep looping until the end of the file.
    */
    foundDataChunk = DRWAV_FALSE;

    /* The next chunk we care about is the "data" chunk. This is not necessarily the next chunk so we'll need to loop. */
    for (;;)
    {
        drwav_chunk_header header;
        drwav_result result = drwav__read_chunk_header(pWav->onRead, pWav->pUserData, pWav->container, &cursor, &header);
        if (result != DRWAV_SUCCESS) {
            if (!foundDataChunk) {
                return DRWAV_FALSE;
            } else {
                break;  /* Probably at the end of the file. Get out of the loop. */
            }
        }

        /* Tell the client about this chunk. */
        if (!sequential && onChunk != NULL) {
            drwav_uint64 callbackBytesRead = onChunk(pChunkUserData, pWav->onRead, pWav->onSeek, pWav->pUserData, &header, pWav->container, &fmt);

            /*
            dr_wav may need to read the contents of the chunk, so we now need to seek back to the position before
            we called the callback.
            */
            if (callbackBytesRead > 0) {
                if (!drwav__seek_from_start(pWav->onSeek, cursor, pWav->pUserData)) {
                    return DRWAV_FALSE;
                }
            }
        }

        if (!foundDataChunk) {
            pWav->dataChunkDataPos = cursor;
        }

        chunkSize = header.sizeInBytes;
        if (pWav->container == drwav_container_riff || pWav->container == drwav_container_rf64) {
            if (drwav__fourcc_equal(header.id.fourcc, "data")) {
                foundDataChunk = DRWAV_TRUE;
                if (pWav->container != drwav_container_rf64) {  /* The data chunk size for RF64 will always be set to 0xFFFFFFFF here. It was set to it's true value earlier. */
                    dataChunkSize = chunkSize;
                }
            }
        } else {
            if (drwav__guid_equal(header.id.guid, drwavGUID_W64_DATA)) {
                foundDataChunk = DRWAV_TRUE;
                dataChunkSize = chunkSize;
            }
        }

        /*
        If at this point we have found the data chunk and we're running in sequential mode, we need to break out of this loop. The reason for
        this is that we would otherwise require a backwards seek which sequential mode forbids.
        */
        if (foundDataChunk && sequential) {
            break;
        }

        /* Optional. Get the total sample count from the FACT chunk. This is useful for compressed formats. */
        if (pWav->container == drwav_container_riff) {
            if (drwav__fourcc_equal(header.id.fourcc, "fact")) {
                drwav_uint32 sampleCount;
                if (drwav__on_read(pWav->onRead, pWav->pUserData, &sampleCount, 4, &cursor) != 4) {
                    return DRWAV_FALSE;
                }
                chunkSize -= 4;

                if (!foundDataChunk) {
                    pWav->dataChunkDataPos = cursor;
                }

                /*
                The sample count in the "fact" chunk is either unreliable, or I'm not understanding it properly. For now I am only enabling this
                for Microsoft ADPCM formats.
                */
                /* NOTE(review): pWav->translatedFormatTag is only assigned near the end of this function; here it still holds the zero from DRWAV_ZERO_MEMORY in drwav_preinit(), not the local translatedFormatTag - confirm this is intended. */
                if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ADPCM) {
                    sampleCountFromFactChunk = sampleCount;
                } else {
                    sampleCountFromFactChunk = 0;
                }
            }
        } else if (pWav->container == drwav_container_w64) {
            if (drwav__guid_equal(header.id.guid, drwavGUID_W64_FACT)) {
                if (drwav__on_read(pWav->onRead, pWav->pUserData, &sampleCountFromFactChunk, 8, &cursor) != 8) {
                    return DRWAV_FALSE;
                }
                chunkSize -= 8;

                if (!foundDataChunk) {
                    pWav->dataChunkDataPos = cursor;
                }
            }
        } else if (pWav->container == drwav_container_rf64) {
            /* We retrieved the sample count from the ds64 chunk earlier so no need to do that here. */
        }

        /* "smpl" chunk. */
        if (pWav->container == drwav_container_riff || pWav->container == drwav_container_rf64) {
            if (drwav__fourcc_equal(header.id.fourcc, "smpl")) {
                drwav_uint8 smplHeaderData[36];    /* 36 = size of the smpl header section, not including the loop data. */
                if (chunkSize >= sizeof(smplHeaderData)) {
                    drwav_uint64 bytesJustRead = drwav__on_read(pWav->onRead, pWav->pUserData, smplHeaderData, sizeof(smplHeaderData), &cursor);
                    chunkSize -= bytesJustRead;

                    if (bytesJustRead == sizeof(smplHeaderData)) {
                        drwav_uint32 iLoop;

                        pWav->smpl.manufacturer = drwav__bytes_to_u32(smplHeaderData+0);
                        pWav->smpl.product = drwav__bytes_to_u32(smplHeaderData+4);
                        pWav->smpl.samplePeriod = drwav__bytes_to_u32(smplHeaderData+8);
                        pWav->smpl.midiUnityNotes = drwav__bytes_to_u32(smplHeaderData+12);
                        pWav->smpl.midiPitchFraction = drwav__bytes_to_u32(smplHeaderData+16);
                        pWav->smpl.smpteFormat = drwav__bytes_to_u32(smplHeaderData+20);
                        pWav->smpl.smpteOffset = drwav__bytes_to_u32(smplHeaderData+24);
                        pWav->smpl.numSampleLoops = drwav__bytes_to_u32(smplHeaderData+28);
                        pWav->smpl.samplerData = drwav__bytes_to_u32(smplHeaderData+32);

                        for (iLoop = 0; iLoop < pWav->smpl.numSampleLoops && iLoop < drwav_countof(pWav->smpl.loops); ++iLoop) {
                            drwav_uint8 smplLoopData[24];  /* 24 = size of a loop section in the smpl chunk. */
                            bytesJustRead = drwav__on_read(pWav->onRead, pWav->pUserData, smplLoopData, sizeof(smplLoopData), &cursor);
                            chunkSize -= bytesJustRead;

                            if (bytesJustRead == sizeof(smplLoopData)) {
                                pWav->smpl.loops[iLoop].cuePointId = drwav__bytes_to_u32(smplLoopData+0);
                                pWav->smpl.loops[iLoop].type = drwav__bytes_to_u32(smplLoopData+4);
                                pWav->smpl.loops[iLoop].start = drwav__bytes_to_u32(smplLoopData+8);
                                pWav->smpl.loops[iLoop].end = drwav__bytes_to_u32(smplLoopData+12);
                                pWav->smpl.loops[iLoop].fraction = drwav__bytes_to_u32(smplLoopData+16);
                                pWav->smpl.loops[iLoop].playCount = drwav__bytes_to_u32(smplLoopData+20);
                            } else {
                                break;  /* Break from the smpl loop for loop. */
                            }
                        }
                    }
                } else {
                    /* Looks like invalid data. Ignore the chunk. */
                }
            }
        } else {
            if (drwav__guid_equal(header.id.guid, drwavGUID_W64_SMPL)) {
                /*
                This path will be hit when a W64 WAV file contains a smpl chunk. I don't have a sample file to test this path, so a contribution
                is welcome to add support for this.
                */
            }
        }

        /* Make sure we seek past the padding. */
        chunkSize += header.paddingSize;
        if (!drwav__seek_forward(pWav->onSeek, chunkSize, pWav->pUserData)) {
            break;
        }
        cursor += chunkSize;

        if (!foundDataChunk) {
            pWav->dataChunkDataPos = cursor;
        }
    }

    /* If we haven't found a data chunk, return an error. */
    if (!foundDataChunk) {
        return DRWAV_FALSE;
    }

    /* We may have moved passed the data chunk. If so we need to move back. If running in sequential mode we can assume we are already sitting on the data chunk. */
    if (!sequential) {
        if (!drwav__seek_from_start(pWav->onSeek, pWav->dataChunkDataPos, pWav->pUserData)) {
            return DRWAV_FALSE;
        }
        cursor = pWav->dataChunkDataPos;
    }

    /* At this point we should be sitting on the first byte of the raw audio data. */

    pWav->fmt = fmt;
    pWav->sampleRate = fmt.sampleRate;
    pWav->channels = fmt.channels;
    pWav->bitsPerSample = fmt.bitsPerSample;
    pWav->bytesRemaining = dataChunkSize;
    pWav->translatedFormatTag = translatedFormatTag;
    pWav->dataChunkDataSize = dataChunkSize;

    if (sampleCountFromFactChunk != 0) {
        pWav->totalPCMFrameCount = sampleCountFromFactChunk;
    } else {
        pWav->totalPCMFrameCount = dataChunkSize / drwav_get_bytes_per_pcm_frame(pWav);

        if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ADPCM) {
            drwav_uint64 totalBlockHeaderSizeInBytes;
            drwav_uint64 blockCount = dataChunkSize / fmt.blockAlign;

            /* Make sure any trailing partial block is accounted for. */
            if ((blockCount * fmt.blockAlign) < dataChunkSize) {
                blockCount += 1;
            }

            /* We decode two samples per byte. There will be blockCount headers in the data chunk. This is enough to know how to calculate the total PCM frame count. */
            totalBlockHeaderSizeInBytes = blockCount * (6*fmt.channels);
            pWav->totalPCMFrameCount = ((dataChunkSize - totalBlockHeaderSizeInBytes) * 2) / fmt.channels;
        }
        if (pWav->translatedFormatTag == DR_WAVE_FORMAT_DVI_ADPCM) {
            drwav_uint64 totalBlockHeaderSizeInBytes;
            drwav_uint64 blockCount = dataChunkSize / fmt.blockAlign;

            /* Make sure any trailing partial block is accounted for. */
            if ((blockCount * fmt.blockAlign) < dataChunkSize) {
                blockCount += 1;
            }

            /* We decode two samples per byte. There will be blockCount headers in the data chunk. This is enough to know how to calculate the total PCM frame count. */
            totalBlockHeaderSizeInBytes = blockCount * (4*fmt.channels);
            pWav->totalPCMFrameCount = ((dataChunkSize - totalBlockHeaderSizeInBytes) * 2) / fmt.channels;

            /* The header includes a decoded sample for each channel which acts as the initial predictor sample. */
            pWav->totalPCMFrameCount += blockCount;
        }
    }

    /* Some formats only support a certain number of channels. */
    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ADPCM || pWav->translatedFormatTag == DR_WAVE_FORMAT_DVI_ADPCM) {
        if (pWav->channels > 2) {
            return DRWAV_FALSE;
        }
    }

#ifdef DR_WAV_LIBSNDFILE_COMPAT
    /*
    I use libsndfile as a benchmark for testing, however in the version I'm using (from the Windows installer on the libsndfile website),
    it appears the total sample count libsndfile uses for MS-ADPCM is incorrect. It would seem they are computing the total sample count
    from the number of blocks, however this results in the inclusion of extra silent samples at the end of the last block. The correct
    way to know the total sample count is to inspect the "fact" chunk, which should always be present for compressed formats, and
    should always include the sample count. This little block of code below is only used to emulate the libsndfile logic so I can
    properly run my correctness tests against libsndfile, and is disabled by default.
    */
    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ADPCM) {
        drwav_uint64 blockCount = dataChunkSize / fmt.blockAlign;
        pWav->totalPCMFrameCount = (((blockCount * (fmt.blockAlign - (6*pWav->channels))) * 2)) / fmt.channels;  /* x2 because two samples per byte. */
    }
    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_DVI_ADPCM) {
        drwav_uint64 blockCount = dataChunkSize / fmt.blockAlign;
        pWav->totalPCMFrameCount = (((blockCount * (fmt.blockAlign - (4*pWav->channels))) * 2) + (blockCount * pWav->channels)) / fmt.channels;
    }
#endif

    return DRWAV_TRUE;
}

/* Public initializer without chunk callback. See drwav_init_ex(). */
DRWAV_API drwav_bool32 drwav_init(drwav* pWav, drwav_read_proc onRead, drwav_seek_proc onSeek, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_ex(pWav, onRead, onSeek, NULL, pUserData, NULL, 0, pAllocationCallbacks);
}

/* Public initializer with optional per-chunk callback and flags (e.g. DRWAV_SEQUENTIAL). */
DRWAV_API drwav_bool32 drwav_init_ex(drwav* pWav, drwav_read_proc onRead, drwav_seek_proc onSeek, drwav_chunk_proc onChunk, void* pReadSeekUserData, void* pChunkUserData, drwav_uint32 flags, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (!drwav_preinit(pWav, onRead, onSeek, pReadSeekUserData, pAllocationCallbacks)) {
        return DRWAV_FALSE;
    }

    return drwav_init__internal(pWav, onChunk, pChunkUserData, flags);
}

/* Size written into the "RIFF" header for a standard RIFF file, clamped to 32 bits. */
static drwav_uint32 drwav__riff_chunk_size_riff(drwav_uint64 dataChunkSize)
{
    drwav_uint64 chunkSize = 4 + 24 + dataChunkSize + drwav__chunk_padding_size_riff(dataChunkSize); /* 4 = "WAVE". 24 = "fmt " chunk. */
    if (chunkSize > 0xFFFFFFFFUL) {
        chunkSize = 0xFFFFFFFFUL;
    }

    return (drwav_uint32)chunkSize; /* Safe cast due to the clamp above. */
}

/* Size written into the "data" chunk header for a standard RIFF file, clamped to 32 bits. */
static drwav_uint32 drwav__data_chunk_size_riff(drwav_uint64 dataChunkSize)
{
    if (dataChunkSize <= 0xFFFFFFFFUL) {
        return (drwav_uint32)dataChunkSize;
    } else {
        return 0xFFFFFFFFUL;
    }
}

static drwav_uint64 drwav__riff_chunk_size_w64(drwav_uint64 dataChunkSize)
{
    drwav_uint64 dataSubchunkPaddingSize = drwav__chunk_padding_size_w64(dataChunkSize);

    return 80 + 24 + dataChunkSize + dataSubchunkPaddingSize;   /* +24 because W64 includes the size of the GUID and size fields. */
}

static drwav_uint64 drwav__data_chunk_size_w64(drwav_uint64 dataChunkSize)
{
    return 24 + dataChunkSize;  /* +24 because W64 includes the size of the GUID and size fields.
*/ } static drwav_uint64 drwav__riff_chunk_size_rf64(drwav_uint64 dataChunkSize) { drwav_uint64 chunkSize = 4 + 36 + 24 + dataChunkSize + drwav__chunk_padding_size_riff(dataChunkSize); /* 4 = "WAVE". 36 = "ds64" chunk. 24 = "fmt " chunk. */ if (chunkSize > 0xFFFFFFFFUL) { chunkSize = 0xFFFFFFFFUL; } return chunkSize; } static drwav_uint64 drwav__data_chunk_size_rf64(drwav_uint64 dataChunkSize) { return dataChunkSize; } static size_t drwav__write(drwav* pWav, const void* pData, size_t dataSize) { DRWAV_ASSERT(pWav != NULL); DRWAV_ASSERT(pWav->onWrite != NULL); /* Generic write. Assumes no byte reordering required. */ return pWav->onWrite(pWav->pUserData, pData, dataSize); } static size_t drwav__write_u16ne_to_le(drwav* pWav, drwav_uint16 value) { DRWAV_ASSERT(pWav != NULL); DRWAV_ASSERT(pWav->onWrite != NULL); if (!drwav__is_little_endian()) { value = drwav__bswap16(value); } return drwav__write(pWav, &value, 2); } static size_t drwav__write_u32ne_to_le(drwav* pWav, drwav_uint32 value) { DRWAV_ASSERT(pWav != NULL); DRWAV_ASSERT(pWav->onWrite != NULL); if (!drwav__is_little_endian()) { value = drwav__bswap32(value); } return drwav__write(pWav, &value, 4); } static size_t drwav__write_u64ne_to_le(drwav* pWav, drwav_uint64 value) { DRWAV_ASSERT(pWav != NULL); DRWAV_ASSERT(pWav->onWrite != NULL); if (!drwav__is_little_endian()) { value = drwav__bswap64(value); } return drwav__write(pWav, &value, 8); } static drwav_bool32 drwav_preinit_write(drwav* pWav, const drwav_data_format* pFormat, drwav_bool32 isSequential, drwav_write_proc onWrite, drwav_seek_proc onSeek, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks) { if (pWav == NULL || onWrite == NULL) { return DRWAV_FALSE; } if (!isSequential && onSeek == NULL) { return DRWAV_FALSE; /* <-- onSeek is required when in non-sequential mode. */ } /* Not currently supporting compressed formats. Will need to add support for the "fact" chunk before we enable this. 
*/
    if (pFormat->format == DR_WAVE_FORMAT_EXTENSIBLE) {
        return DRWAV_FALSE;
    }
    if (pFormat->format == DR_WAVE_FORMAT_ADPCM || pFormat->format == DR_WAVE_FORMAT_DVI_ADPCM) {
        return DRWAV_FALSE;
    }

    DRWAV_ZERO_MEMORY(pWav, sizeof(*pWav));
    pWav->onWrite   = onWrite;
    pWav->onSeek    = onSeek;
    pWav->pUserData = pUserData;
    pWav->allocationCallbacks = drwav_copy_allocation_callbacks_or_defaults(pAllocationCallbacks);

    if (pWav->allocationCallbacks.onFree == NULL || (pWav->allocationCallbacks.onMalloc == NULL && pWav->allocationCallbacks.onRealloc == NULL)) {
        return DRWAV_FALSE;    /* Invalid allocation callbacks. */
    }

    /* Derive the "fmt " chunk fields from the requested data format. */
    pWav->fmt.formatTag      = (drwav_uint16)pFormat->format;
    pWav->fmt.channels       = (drwav_uint16)pFormat->channels;
    pWav->fmt.sampleRate     = pFormat->sampleRate;
    pWav->fmt.avgBytesPerSec = (drwav_uint32)((pFormat->bitsPerSample * pFormat->sampleRate * pFormat->channels) / 8);
    pWav->fmt.blockAlign     = (drwav_uint16)((pFormat->channels * pFormat->bitsPerSample) / 8);
    pWav->fmt.bitsPerSample  = (drwav_uint16)pFormat->bitsPerSample;
    pWav->fmt.extendedSize   = 0;
    pWav->isSequentialWrite  = isSequential;

    return DRWAV_TRUE;
}

/*
Writes the container headers ("RIFF"/"RF64"/W64 header, "ds64" for RF64, "fmt ", and the "data" chunk header).
In sequential mode the chunk sizes are final; in non-sequential mode they are placeholders fixed up later by a
backwards seek in drwav_uninit(). Returns DRWAV_FALSE if the requested size cannot fit the container.
*/
static drwav_bool32 drwav_init_write__internal(drwav* pWav, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount)
{
    /* The function assumes drwav_preinit_write() was called beforehand. */

    size_t runningPos = 0;
    drwav_uint64 initialDataChunkSize = 0;
    drwav_uint64 chunkSizeFMT;

    /*
    The initial values for the "RIFF" and "data" chunks depends on whether or not we are initializing in sequential mode or not. In
    sequential mode we set this to its final values straight away since they can be calculated from the total sample count. In non-
    sequential mode we initialize it all to zero and fill it out in drwav_uninit() using a backwards seek.
    */
    if (pWav->isSequentialWrite) {
        initialDataChunkSize = (totalSampleCount * pWav->fmt.bitsPerSample) / 8;

        /*
        The RIFF container has a limit on the number of samples. drwav is not allowing this. There's no practical limits for Wave64
        so for the sake of simplicity I'm not doing any validation for that.
        */
        if (pFormat->container == drwav_container_riff) {
            if (initialDataChunkSize > (0xFFFFFFFFUL - 36)) {
                return DRWAV_FALSE; /* Not enough room to store every sample. */
            }
        }
    }

    pWav->dataChunkDataSizeTargetWrite = initialDataChunkSize;

    /* "RIFF" chunk. */
    if (pFormat->container == drwav_container_riff) {
        drwav_uint32 chunkSizeRIFF = 28 + (drwav_uint32)initialDataChunkSize;   /* +28 = "WAVE" + [sizeof "fmt " chunk] */
        runningPos += drwav__write(pWav, "RIFF", 4);
        runningPos += drwav__write_u32ne_to_le(pWav, chunkSizeRIFF);
        runningPos += drwav__write(pWav, "WAVE", 4);
    } else if (pFormat->container == drwav_container_w64) {
        drwav_uint64 chunkSizeRIFF = 80 + 24 + initialDataChunkSize;            /* +24 because W64 includes the size of the GUID and size fields. */
        runningPos += drwav__write(pWav, drwavGUID_W64_RIFF, 16);
        runningPos += drwav__write_u64ne_to_le(pWav, chunkSizeRIFF);
        runningPos += drwav__write(pWav, drwavGUID_W64_WAVE, 16);
    } else if (pFormat->container == drwav_container_rf64) {
        runningPos += drwav__write(pWav, "RF64", 4);
        runningPos += drwav__write_u32ne_to_le(pWav, 0xFFFFFFFF);               /* Always 0xFFFFFFFF for RF64. Set to a proper value in the "ds64" chunk. */
        runningPos += drwav__write(pWav, "WAVE", 4);
    }

    /* "ds64" chunk (RF64 only). */
    if (pFormat->container == drwav_container_rf64) {
        drwav_uint32 initialds64ChunkSize = 28;                                 /* 28 = [Size of RIFF (8 bytes)] + [Size of DATA (8 bytes)] + [Sample Count (8 bytes)] + [Table Length (4 bytes)]. Table length always set to 0. */
        drwav_uint64 initialRiffChunkSize = 8 + initialds64ChunkSize + initialDataChunkSize;    /* +8 for the ds64 header. */

        runningPos += drwav__write(pWav, "ds64", 4);
        runningPos += drwav__write_u32ne_to_le(pWav, initialds64ChunkSize);     /* Size of ds64. */
        runningPos += drwav__write_u64ne_to_le(pWav, initialRiffChunkSize);     /* Size of RIFF. Set to true value at the end. */
        runningPos += drwav__write_u64ne_to_le(pWav, initialDataChunkSize);     /* Size of DATA. Set to true value at the end. */
        runningPos += drwav__write_u64ne_to_le(pWav, totalSampleCount);         /* Sample count. */
        runningPos += drwav__write_u32ne_to_le(pWav, 0);                        /* Table length. Always set to zero in our case since we're not doing any other chunks than "DATA". */
    }

    /* "fmt " chunk. */
    if (pFormat->container == drwav_container_riff || pFormat->container == drwav_container_rf64) {
        chunkSizeFMT = 16;
        runningPos += drwav__write(pWav, "fmt ", 4);
        runningPos += drwav__write_u32ne_to_le(pWav, (drwav_uint32)chunkSizeFMT);
    } else if (pFormat->container == drwav_container_w64) {
        chunkSizeFMT = 40;
        runningPos += drwav__write(pWav, drwavGUID_W64_FMT, 16);
        runningPos += drwav__write_u64ne_to_le(pWav, chunkSizeFMT);
    }

    runningPos += drwav__write_u16ne_to_le(pWav, pWav->fmt.formatTag);
    runningPos += drwav__write_u16ne_to_le(pWav, pWav->fmt.channels);
    runningPos += drwav__write_u32ne_to_le(pWav, pWav->fmt.sampleRate);
    runningPos += drwav__write_u32ne_to_le(pWav, pWav->fmt.avgBytesPerSec);
    runningPos += drwav__write_u16ne_to_le(pWav, pWav->fmt.blockAlign);
    runningPos += drwav__write_u16ne_to_le(pWav, pWav->fmt.bitsPerSample);

    /* Record where the "data" chunk header starts so drwav_uninit() can seek back to patch sizes. */
    pWav->dataChunkDataPos = runningPos;

    /* "data" chunk. */
    if (pFormat->container == drwav_container_riff) {
        drwav_uint32 chunkSizeDATA = (drwav_uint32)initialDataChunkSize;
        runningPos += drwav__write(pWav, "data", 4);
        runningPos += drwav__write_u32ne_to_le(pWav, chunkSizeDATA);
    } else if (pFormat->container == drwav_container_w64) {
        drwav_uint64 chunkSizeDATA = 24 + initialDataChunkSize;     /* +24 because W64 includes the size of the GUID and size fields. */
        runningPos += drwav__write(pWav, drwavGUID_W64_DATA, 16);
        runningPos += drwav__write_u64ne_to_le(pWav, chunkSizeDATA);
    } else if (pFormat->container == drwav_container_rf64) {
        runningPos += drwav__write(pWav, "data", 4);
        runningPos += drwav__write_u32ne_to_le(pWav, 0xFFFFFFFF);   /* Always set to 0xFFFFFFFF for RF64. The true size of the data chunk is specified in the ds64 chunk. */
    }

    /*
    The runningPos variable is incremented in the section above but is left unused which is causing some static analysis tools to
    detect it as a dead store. I'm leaving this as-is for safety just in case I want to expand this function later to include other
    tags and want to keep track of the running position for whatever reason. The line below should silence the static analysis tools.
    */
    (void)runningPos;

    /* Set some properties for the client's convenience. */
    pWav->container = pFormat->container;
    pWav->channels = (drwav_uint16)pFormat->channels;
    pWav->sampleRate = pFormat->sampleRate;
    pWav->bitsPerSample = (drwav_uint16)pFormat->bitsPerSample;
    pWav->translatedFormatTag = (drwav_uint16)pFormat->format;

    return DRWAV_TRUE;
}

/* Initializes a non-sequential writer. Chunk sizes are written as placeholders and fixed up at uninit time via onSeek. */
DRWAV_API drwav_bool32 drwav_init_write(drwav* pWav, const drwav_data_format* pFormat, drwav_write_proc onWrite, drwav_seek_proc onSeek, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (!drwav_preinit_write(pWav, pFormat, DRWAV_FALSE, onWrite, onSeek, pUserData, pAllocationCallbacks)) {
        return DRWAV_FALSE;
    }

    return drwav_init_write__internal(pWav, pFormat, 0);                /* DRWAV_FALSE = Not Sequential */
}

/*
Initializes a sequential (no-seek) writer. totalSampleCount is the total number of individual samples (not frames);
it is required up front so the chunk sizes can be written as final values.
*/
DRWAV_API drwav_bool32 drwav_init_write_sequential(drwav* pWav, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, drwav_write_proc onWrite, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (!drwav_preinit_write(pWav, pFormat, DRWAV_TRUE, onWrite, NULL, pUserData, pAllocationCallbacks)) {
        return DRWAV_FALSE;
    }

    return drwav_init_write__internal(pWav, pFormat, totalSampleCount); /* DRWAV_TRUE = Sequential */
}

/* Convenience wrapper over drwav_init_write_sequential() taking a PCM frame count instead of a raw sample count. */
DRWAV_API drwav_bool32 drwav_init_write_sequential_pcm_frames(drwav* pWav, const drwav_data_format* pFormat, drwav_uint64 totalPCMFrameCount, drwav_write_proc onWrite, void* pUserData, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (pFormat == NULL) {
        return DRWAV_FALSE;
    }

    return drwav_init_write_sequential(pWav, pFormat, totalPCMFrameCount*pFormat->channels, onWrite, pUserData, pAllocationCallbacks);
}

/* Predicts the total output file size in bytes for the given format and sample count, including all headers. */
DRWAV_API drwav_uint64 drwav_target_write_size_bytes(const drwav_data_format* pFormat, drwav_uint64 totalSampleCount)
{
    /* Casting totalSampleCount to drwav_int64 for VC6 compatibility. No issues in practice because nobody is going to exhaust the whole 63 bits. */
    drwav_uint64 targetDataSizeBytes = (drwav_uint64)((drwav_int64)totalSampleCount * pFormat->channels * pFormat->bitsPerSample/8.0);
    drwav_uint64 riffChunkSizeBytes;
    drwav_uint64 fileSizeBytes = 0;

    if (pFormat->container == drwav_container_riff) {
        riffChunkSizeBytes = drwav__riff_chunk_size_riff(targetDataSizeBytes);
        fileSizeBytes = (8 + riffChunkSizeBytes);   /* +8 because WAV doesn't include the size of the ChunkID and ChunkSize fields. */
    } else if (pFormat->container == drwav_container_w64) {
        riffChunkSizeBytes = drwav__riff_chunk_size_w64(targetDataSizeBytes);
        fileSizeBytes = riffChunkSizeBytes;
    } else if (pFormat->container == drwav_container_rf64) {
        riffChunkSizeBytes = drwav__riff_chunk_size_rf64(targetDataSizeBytes);
        fileSizeBytes = (8 + riffChunkSizeBytes);   /* +8 because WAV doesn't include the size of the ChunkID and ChunkSize fields. */
    }

    return fileSizeBytes;
}


#ifndef DR_WAV_NO_STDIO
/* drwav_result_from_errno() is only used for fopen() and wfopen() so putting it inside DR_WAV_NO_STDIO for now. If something else needs this later we can move it out.
*/ #include <errno.h> static drwav_result drwav_result_from_errno(int e) { switch (e) { case 0: return DRWAV_SUCCESS; #ifdef EPERM case EPERM: return DRWAV_INVALID_OPERATION; #endif #ifdef ENOENT case ENOENT: return DRWAV_DOES_NOT_EXIST; #endif #ifdef ESRCH case ESRCH: return DRWAV_DOES_NOT_EXIST; #endif #ifdef EINTR case EINTR: return DRWAV_INTERRUPT; #endif #ifdef EIO case EIO: return DRWAV_IO_ERROR; #endif #ifdef ENXIO case ENXIO: return DRWAV_DOES_NOT_EXIST; #endif #ifdef E2BIG case E2BIG: return DRWAV_INVALID_ARGS; #endif #ifdef ENOEXEC case ENOEXEC: return DRWAV_INVALID_FILE; #endif #ifdef EBADF case EBADF: return DRWAV_INVALID_FILE; #endif #ifdef ECHILD case ECHILD: return DRWAV_ERROR; #endif #ifdef EAGAIN case EAGAIN: return DRWAV_UNAVAILABLE; #endif #ifdef ENOMEM case ENOMEM: return DRWAV_OUT_OF_MEMORY; #endif #ifdef EACCES case EACCES: return DRWAV_ACCESS_DENIED; #endif #ifdef EFAULT case EFAULT: return DRWAV_BAD_ADDRESS; #endif #ifdef ENOTBLK case ENOTBLK: return DRWAV_ERROR; #endif #ifdef EBUSY case EBUSY: return DRWAV_BUSY; #endif #ifdef EEXIST case EEXIST: return DRWAV_ALREADY_EXISTS; #endif #ifdef EXDEV case EXDEV: return DRWAV_ERROR; #endif #ifdef ENODEV case ENODEV: return DRWAV_DOES_NOT_EXIST; #endif #ifdef ENOTDIR case ENOTDIR: return DRWAV_NOT_DIRECTORY; #endif #ifdef EISDIR case EISDIR: return DRWAV_IS_DIRECTORY; #endif #ifdef EINVAL case EINVAL: return DRWAV_INVALID_ARGS; #endif #ifdef ENFILE case ENFILE: return DRWAV_TOO_MANY_OPEN_FILES; #endif #ifdef EMFILE case EMFILE: return DRWAV_TOO_MANY_OPEN_FILES; #endif #ifdef ENOTTY case ENOTTY: return DRWAV_INVALID_OPERATION; #endif #ifdef ETXTBSY case ETXTBSY: return DRWAV_BUSY; #endif #ifdef EFBIG case EFBIG: return DRWAV_TOO_BIG; #endif #ifdef ENOSPC case ENOSPC: return DRWAV_NO_SPACE; #endif #ifdef ESPIPE case ESPIPE: return DRWAV_BAD_SEEK; #endif #ifdef EROFS case EROFS: return DRWAV_ACCESS_DENIED; #endif #ifdef EMLINK case EMLINK: return DRWAV_TOO_MANY_LINKS; #endif #ifdef EPIPE case EPIPE: 
return DRWAV_BAD_PIPE; #endif #ifdef EDOM case EDOM: return DRWAV_OUT_OF_RANGE; #endif #ifdef ERANGE case ERANGE: return DRWAV_OUT_OF_RANGE; #endif #ifdef EDEADLK case EDEADLK: return DRWAV_DEADLOCK; #endif #ifdef ENAMETOOLONG case ENAMETOOLONG: return DRWAV_PATH_TOO_LONG; #endif #ifdef ENOLCK case ENOLCK: return DRWAV_ERROR; #endif #ifdef ENOSYS case ENOSYS: return DRWAV_NOT_IMPLEMENTED; #endif #ifdef ENOTEMPTY case ENOTEMPTY: return DRWAV_DIRECTORY_NOT_EMPTY; #endif #ifdef ELOOP case ELOOP: return DRWAV_TOO_MANY_LINKS; #endif #ifdef ENOMSG case ENOMSG: return DRWAV_NO_MESSAGE; #endif #ifdef EIDRM case EIDRM: return DRWAV_ERROR; #endif #ifdef ECHRNG case ECHRNG: return DRWAV_ERROR; #endif #ifdef EL2NSYNC case EL2NSYNC: return DRWAV_ERROR; #endif #ifdef EL3HLT case EL3HLT: return DRWAV_ERROR; #endif #ifdef EL3RST case EL3RST: return DRWAV_ERROR; #endif #ifdef ELNRNG case ELNRNG: return DRWAV_OUT_OF_RANGE; #endif #ifdef EUNATCH case EUNATCH: return DRWAV_ERROR; #endif #ifdef ENOCSI case ENOCSI: return DRWAV_ERROR; #endif #ifdef EL2HLT case EL2HLT: return DRWAV_ERROR; #endif #ifdef EBADE case EBADE: return DRWAV_ERROR; #endif #ifdef EBADR case EBADR: return DRWAV_ERROR; #endif #ifdef EXFULL case EXFULL: return DRWAV_ERROR; #endif #ifdef ENOANO case ENOANO: return DRWAV_ERROR; #endif #ifdef EBADRQC case EBADRQC: return DRWAV_ERROR; #endif #ifdef EBADSLT case EBADSLT: return DRWAV_ERROR; #endif #ifdef EBFONT case EBFONT: return DRWAV_INVALID_FILE; #endif #ifdef ENOSTR case ENOSTR: return DRWAV_ERROR; #endif #ifdef ENODATA case ENODATA: return DRWAV_NO_DATA_AVAILABLE; #endif #ifdef ETIME case ETIME: return DRWAV_TIMEOUT; #endif #ifdef ENOSR case ENOSR: return DRWAV_NO_DATA_AVAILABLE; #endif #ifdef ENONET case ENONET: return DRWAV_NO_NETWORK; #endif #ifdef ENOPKG case ENOPKG: return DRWAV_ERROR; #endif #ifdef EREMOTE case EREMOTE: return DRWAV_ERROR; #endif #ifdef ENOLINK case ENOLINK: return DRWAV_ERROR; #endif #ifdef EADV case EADV: return DRWAV_ERROR; #endif #ifdef 
ESRMNT case ESRMNT: return DRWAV_ERROR; #endif #ifdef ECOMM case ECOMM: return DRWAV_ERROR; #endif #ifdef EPROTO case EPROTO: return DRWAV_ERROR; #endif #ifdef EMULTIHOP case EMULTIHOP: return DRWAV_ERROR; #endif #ifdef EDOTDOT case EDOTDOT: return DRWAV_ERROR; #endif #ifdef EBADMSG case EBADMSG: return DRWAV_BAD_MESSAGE; #endif #ifdef EOVERFLOW case EOVERFLOW: return DRWAV_TOO_BIG; #endif #ifdef ENOTUNIQ case ENOTUNIQ: return DRWAV_NOT_UNIQUE; #endif #ifdef EBADFD case EBADFD: return DRWAV_ERROR; #endif #ifdef EREMCHG case EREMCHG: return DRWAV_ERROR; #endif #ifdef ELIBACC case ELIBACC: return DRWAV_ACCESS_DENIED; #endif #ifdef ELIBBAD case ELIBBAD: return DRWAV_INVALID_FILE; #endif #ifdef ELIBSCN case ELIBSCN: return DRWAV_INVALID_FILE; #endif #ifdef ELIBMAX case ELIBMAX: return DRWAV_ERROR; #endif #ifdef ELIBEXEC case ELIBEXEC: return DRWAV_ERROR; #endif #ifdef EILSEQ case EILSEQ: return DRWAV_INVALID_DATA; #endif #ifdef ERESTART case ERESTART: return DRWAV_ERROR; #endif #ifdef ESTRPIPE case ESTRPIPE: return DRWAV_ERROR; #endif #ifdef EUSERS case EUSERS: return DRWAV_ERROR; #endif #ifdef ENOTSOCK case ENOTSOCK: return DRWAV_NOT_SOCKET; #endif #ifdef EDESTADDRREQ case EDESTADDRREQ: return DRWAV_NO_ADDRESS; #endif #ifdef EMSGSIZE case EMSGSIZE: return DRWAV_TOO_BIG; #endif #ifdef EPROTOTYPE case EPROTOTYPE: return DRWAV_BAD_PROTOCOL; #endif #ifdef ENOPROTOOPT case ENOPROTOOPT: return DRWAV_PROTOCOL_UNAVAILABLE; #endif #ifdef EPROTONOSUPPORT case EPROTONOSUPPORT: return DRWAV_PROTOCOL_NOT_SUPPORTED; #endif #ifdef ESOCKTNOSUPPORT case ESOCKTNOSUPPORT: return DRWAV_SOCKET_NOT_SUPPORTED; #endif #ifdef EOPNOTSUPP case EOPNOTSUPP: return DRWAV_INVALID_OPERATION; #endif #ifdef EPFNOSUPPORT case EPFNOSUPPORT: return DRWAV_PROTOCOL_FAMILY_NOT_SUPPORTED; #endif #ifdef EAFNOSUPPORT case EAFNOSUPPORT: return DRWAV_ADDRESS_FAMILY_NOT_SUPPORTED; #endif #ifdef EADDRINUSE case EADDRINUSE: return DRWAV_ALREADY_IN_USE; #endif #ifdef EADDRNOTAVAIL case EADDRNOTAVAIL: return 
DRWAV_ERROR; #endif #ifdef ENETDOWN case ENETDOWN: return DRWAV_NO_NETWORK; #endif #ifdef ENETUNREACH case ENETUNREACH: return DRWAV_NO_NETWORK; #endif #ifdef ENETRESET case ENETRESET: return DRWAV_NO_NETWORK; #endif #ifdef ECONNABORTED case ECONNABORTED: return DRWAV_NO_NETWORK; #endif #ifdef ECONNRESET case ECONNRESET: return DRWAV_CONNECTION_RESET; #endif #ifdef ENOBUFS case ENOBUFS: return DRWAV_NO_SPACE; #endif #ifdef EISCONN case EISCONN: return DRWAV_ALREADY_CONNECTED; #endif #ifdef ENOTCONN case ENOTCONN: return DRWAV_NOT_CONNECTED; #endif #ifdef ESHUTDOWN case ESHUTDOWN: return DRWAV_ERROR; #endif #ifdef ETOOMANYREFS case ETOOMANYREFS: return DRWAV_ERROR; #endif #ifdef ETIMEDOUT case ETIMEDOUT: return DRWAV_TIMEOUT; #endif #ifdef ECONNREFUSED case ECONNREFUSED: return DRWAV_CONNECTION_REFUSED; #endif #ifdef EHOSTDOWN case EHOSTDOWN: return DRWAV_NO_HOST; #endif #ifdef EHOSTUNREACH case EHOSTUNREACH: return DRWAV_NO_HOST; #endif #ifdef EALREADY case EALREADY: return DRWAV_IN_PROGRESS; #endif #ifdef EINPROGRESS case EINPROGRESS: return DRWAV_IN_PROGRESS; #endif #ifdef ESTALE case ESTALE: return DRWAV_INVALID_FILE; #endif #ifdef EUCLEAN case EUCLEAN: return DRWAV_ERROR; #endif #ifdef ENOTNAM case ENOTNAM: return DRWAV_ERROR; #endif #ifdef ENAVAIL case ENAVAIL: return DRWAV_ERROR; #endif #ifdef EISNAM case EISNAM: return DRWAV_ERROR; #endif #ifdef EREMOTEIO case EREMOTEIO: return DRWAV_IO_ERROR; #endif #ifdef EDQUOT case EDQUOT: return DRWAV_NO_SPACE; #endif #ifdef ENOMEDIUM case ENOMEDIUM: return DRWAV_DOES_NOT_EXIST; #endif #ifdef EMEDIUMTYPE case EMEDIUMTYPE: return DRWAV_ERROR; #endif #ifdef ECANCELED case ECANCELED: return DRWAV_CANCELLED; #endif #ifdef ENOKEY case ENOKEY: return DRWAV_ERROR; #endif #ifdef EKEYEXPIRED case EKEYEXPIRED: return DRWAV_ERROR; #endif #ifdef EKEYREVOKED case EKEYREVOKED: return DRWAV_ERROR; #endif #ifdef EKEYREJECTED case EKEYREJECTED: return DRWAV_ERROR; #endif #ifdef EOWNERDEAD case EOWNERDEAD: return DRWAV_ERROR; #endif 
#ifdef ENOTRECOVERABLE
    case ENOTRECOVERABLE: return DRWAV_ERROR;
#endif
#ifdef ERFKILL
    case ERFKILL: return DRWAV_ERROR;
#endif
#ifdef EHWPOISON
    case EHWPOISON: return DRWAV_ERROR;
#endif
    default: return DRWAV_ERROR;
    }
}

/* Portable fopen() wrapper. Uses fopen_s() on MSVC 2005+, fopen64() on LFS-enabled POSIX, plain fopen() otherwise.
Returns DRWAV_SUCCESS or a drwav_result translated from errno. *ppFile is NULLed on entry for safety. */
static drwav_result drwav_fopen(FILE** ppFile, const char* pFilePath, const char* pOpenMode)
{
#if _MSC_VER && _MSC_VER >= 1400
    errno_t err;
#endif

    if (ppFile != NULL) {
        *ppFile = NULL;  /* Safety. */
    }

    if (pFilePath == NULL || pOpenMode == NULL || ppFile == NULL) {
        return DRWAV_INVALID_ARGS;
    }

#if _MSC_VER && _MSC_VER >= 1400
    err = fopen_s(ppFile, pFilePath, pOpenMode);
    if (err != 0) {
        return drwav_result_from_errno(err);
    }
#else
#if defined(_WIN32) || defined(__APPLE__)
    *ppFile = fopen(pFilePath, pOpenMode);
#else
    #if defined(_FILE_OFFSET_BITS) && _FILE_OFFSET_BITS == 64 && defined(_LARGEFILE64_SOURCE)
        *ppFile = fopen64(pFilePath, pOpenMode);
    #else
        *ppFile = fopen(pFilePath, pOpenMode);
    #endif
#endif
    if (*ppFile == NULL) {
        drwav_result result = drwav_result_from_errno(errno);
        if (result == DRWAV_SUCCESS) {
            result = DRWAV_ERROR;   /* Just a safety check to make sure we never ever return success when pFile == NULL. */
        }

        return result;
    }
#endif

    return DRWAV_SUCCESS;
}

/*
_wfopen() isn't always available in all compilation environments.

    * Windows only.
    * MSVC seems to support it universally as far back as VC6 from what I can tell (haven't checked further back).
    * MinGW-64 (both 32- and 64-bit) seems to support it.
    * MinGW wraps it in !defined(__STRICT_ANSI__).
    * OpenWatcom wraps it in !defined(_NO_EXT_KEYS).

This can be reviewed as compatibility issues arise. The preference is to use _wfopen_s() and _wfopen() as opposed to the wcsrtombs()
fallback, so if you notice your compiler not detecting this properly I'm happy to look at adding support.
*/
#if defined(_WIN32)
    #if defined(_MSC_VER) || defined(__MINGW64__) || (!defined(__STRICT_ANSI__) && !defined(_NO_EXT_KEYS))
        #define DRWAV_HAS_WFOPEN
    #endif
#endif

/* Wide-character fopen(). Uses _wfopen()/_wfopen_s() where available (see DRWAV_HAS_WFOPEN above); otherwise
converts the path with wcsrtombs() — allocating the multibyte buffer through pAllocationCallbacks — and calls fopen(). */
static drwav_result drwav_wfopen(FILE** ppFile, const wchar_t* pFilePath, const wchar_t* pOpenMode, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (ppFile != NULL) {
        *ppFile = NULL;  /* Safety. */
    }

    if (pFilePath == NULL || pOpenMode == NULL || ppFile == NULL) {
        return DRWAV_INVALID_ARGS;
    }

#if defined(DRWAV_HAS_WFOPEN)
    {
        /* Use _wfopen() on Windows. */
    #if defined(_MSC_VER) && _MSC_VER >= 1400
        errno_t err = _wfopen_s(ppFile, pFilePath, pOpenMode);
        if (err != 0) {
            return drwav_result_from_errno(err);
        }
    #else
        *ppFile = _wfopen(pFilePath, pOpenMode);
        if (*ppFile == NULL) {
            return drwav_result_from_errno(errno);
        }
    #endif

        (void)pAllocationCallbacks;
    }
#else
    /*
    Use fopen() on anything other than Windows. Requires a conversion. This is annoying because fopen() is locale specific. The only real way I can
    think of to do this is with wcsrtombs(). Note that wcstombs() is apparently not thread-safe because it uses a static global mbstate_t object for
    maintaining state. I've checked this with -std=c89 and it works, but if somebody get's a compiler error I'll look into improving compatibility.
    */
    {
        mbstate_t mbs;
        size_t lenMB;
        const wchar_t* pFilePathTemp = pFilePath;
        char* pFilePathMB = NULL;
        char pOpenModeMB[32] = {0};

        /* Get the length first. */
        DRWAV_ZERO_OBJECT(&mbs);
        lenMB = wcsrtombs(NULL, &pFilePathTemp, 0, &mbs);
        if (lenMB == (size_t)-1) {
            return drwav_result_from_errno(errno);
        }

        pFilePathMB = (char*)drwav__malloc_from_callbacks(lenMB + 1, pAllocationCallbacks);
        if (pFilePathMB == NULL) {
            return DRWAV_OUT_OF_MEMORY;
        }

        pFilePathTemp = pFilePath;
        DRWAV_ZERO_OBJECT(&mbs);
        wcsrtombs(pFilePathMB, &pFilePathTemp, lenMB + 1, &mbs);

        /* The open mode should always consist of ASCII characters so we should be able to do a trivial conversion. */
        {
            size_t i = 0;
            for (;;) {
                if (pOpenMode[i] == 0) {
                    pOpenModeMB[i] = '\0';
                    break;
                }

                pOpenModeMB[i] = (char)pOpenMode[i];
                i += 1;
            }
        }

        *ppFile = fopen(pFilePathMB, pOpenModeMB);

        drwav__free_from_callbacks(pFilePathMB, pAllocationCallbacks);
    }

    if (*ppFile == NULL) {
        return DRWAV_ERROR;
    }
#endif

    return DRWAV_SUCCESS;
}


/* stdio-backed onRead callback. pUserData is the FILE*. */
static size_t drwav__on_read_stdio(void* pUserData, void* pBufferOut, size_t bytesToRead)
{
    return fread(pBufferOut, 1, bytesToRead, (FILE*)pUserData);
}

/* stdio-backed onWrite callback. pUserData is the FILE*. */
static size_t drwav__on_write_stdio(void* pUserData, const void* pData, size_t bytesToWrite)
{
    return fwrite(pData, 1, bytesToWrite, (FILE*)pUserData);
}

/* stdio-backed onSeek callback. Note: only "current" and "start" origins exist in drwav_seek_origin. */
static drwav_bool32 drwav__on_seek_stdio(void* pUserData, int offset, drwav_seek_origin origin)
{
    return fseek((FILE*)pUserData, offset, (origin == drwav_seek_origin_current) ? SEEK_CUR : SEEK_SET) == 0;
}

DRWAV_API drwav_bool32 drwav_init_file(drwav* pWav, const char* filename, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_file_ex(pWav, filename, NULL, NULL, 0, pAllocationCallbacks);
}

/* Shared body of the file-based reader initializers. Takes ownership of pFile and closes it on any failure path. */
static drwav_bool32 drwav_init_file__internal_FILE(drwav* pWav, FILE* pFile, drwav_chunk_proc onChunk, void* pChunkUserData, drwav_uint32 flags, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    drwav_bool32 result;

    result = drwav_preinit(pWav, drwav__on_read_stdio, drwav__on_seek_stdio, (void*)pFile, pAllocationCallbacks);
    if (result != DRWAV_TRUE) {
        fclose(pFile);
        return result;
    }

    result = drwav_init__internal(pWav, onChunk, pChunkUserData, flags);
    if (result != DRWAV_TRUE) {
        fclose(pFile);
        return result;
    }

    return DRWAV_TRUE;
}

DRWAV_API drwav_bool32 drwav_init_file_ex(drwav* pWav, const char* filename, drwav_chunk_proc onChunk, void* pChunkUserData, drwav_uint32 flags, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    FILE* pFile;
    if (drwav_fopen(&pFile, filename, "rb") != DRWAV_SUCCESS) {
        return DRWAV_FALSE;
    }

    /* This takes ownership of the FILE* object. */
    return drwav_init_file__internal_FILE(pWav, pFile, onChunk, pChunkUserData, flags, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_file_w(drwav* pWav, const wchar_t* filename, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_file_ex_w(pWav, filename, NULL, NULL, 0, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_file_ex_w(drwav* pWav, const wchar_t* filename, drwav_chunk_proc onChunk, void* pChunkUserData, drwav_uint32 flags, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    FILE* pFile;
    if (drwav_wfopen(&pFile, filename, L"rb", pAllocationCallbacks) != DRWAV_SUCCESS) {
        return DRWAV_FALSE;
    }

    /* This takes ownership of the FILE* object. */
    return drwav_init_file__internal_FILE(pWav, pFile, onChunk, pChunkUserData, flags, pAllocationCallbacks);
}

/* Shared body of the file-based writer initializers. Takes ownership of pFile and closes it on any failure path. */
static drwav_bool32 drwav_init_file_write__internal_FILE(drwav* pWav, FILE* pFile, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, drwav_bool32 isSequential, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    drwav_bool32 result;

    result = drwav_preinit_write(pWav, pFormat, isSequential, drwav__on_write_stdio, drwav__on_seek_stdio, (void*)pFile, pAllocationCallbacks);
    if (result != DRWAV_TRUE) {
        fclose(pFile);
        return result;
    }

    result = drwav_init_write__internal(pWav, pFormat, totalSampleCount);
    if (result != DRWAV_TRUE) {
        fclose(pFile);
        return result;
    }

    return DRWAV_TRUE;
}

static drwav_bool32 drwav_init_file_write__internal(drwav* pWav, const char* filename, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, drwav_bool32 isSequential, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    FILE* pFile;
    if (drwav_fopen(&pFile, filename, "wb") != DRWAV_SUCCESS) {
        return DRWAV_FALSE;
    }

    /* This takes ownership of the FILE* object. */
    return drwav_init_file_write__internal_FILE(pWav, pFile, pFormat, totalSampleCount, isSequential, pAllocationCallbacks);
}

/* Wide-character variant of drwav_init_file_write__internal(). */
static drwav_bool32 drwav_init_file_write_w__internal(drwav* pWav, const wchar_t* filename, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, drwav_bool32 isSequential, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    FILE* pFile;
    if (drwav_wfopen(&pFile, filename, L"wb", pAllocationCallbacks) != DRWAV_SUCCESS) {
        return DRWAV_FALSE;
    }

    /* This takes ownership of the FILE* object. */
    return drwav_init_file_write__internal_FILE(pWav, pFile, pFormat, totalSampleCount, isSequential, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_file_write(drwav* pWav, const char* filename, const drwav_data_format* pFormat, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_file_write__internal(pWav, filename, pFormat, 0, DRWAV_FALSE, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_file_write_sequential(drwav* pWav, const char* filename, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_file_write__internal(pWav, filename, pFormat, totalSampleCount, DRWAV_TRUE, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_file_write_sequential_pcm_frames(drwav* pWav, const char* filename, const drwav_data_format* pFormat, drwav_uint64 totalPCMFrameCount, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (pFormat == NULL) {
        return DRWAV_FALSE;
    }

    /* The sequential writer works in samples, not frames, hence the multiply by channel count. */
    return drwav_init_file_write_sequential(pWav, filename, pFormat, totalPCMFrameCount*pFormat->channels, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_file_write_w(drwav* pWav, const wchar_t* filename, const drwav_data_format* pFormat, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_file_write_w__internal(pWav, filename, pFormat, 0, DRWAV_FALSE, pAllocationCallbacks);
}

DRWAV_API drwav_bool32
drwav_init_file_write_sequential_w(drwav* pWav, const wchar_t* filename, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_file_write_w__internal(pWav, filename, pFormat, totalSampleCount, DRWAV_TRUE, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_file_write_sequential_pcm_frames_w(drwav* pWav, const wchar_t* filename, const drwav_data_format* pFormat, drwav_uint64 totalPCMFrameCount, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (pFormat == NULL) {
        return DRWAV_FALSE;
    }

    /* The sequential writer works in samples, not frames, hence the multiply by channel count. */
    return drwav_init_file_write_sequential_w(pWav, filename, pFormat, totalPCMFrameCount*pFormat->channels, pAllocationCallbacks);
}
#endif  /* DR_WAV_NO_STDIO */


/* Memory-backed onRead callback. pUserData is the drwav itself; reads from pWav->memoryStream and clamps to the bytes remaining. */
static size_t drwav__on_read_memory(void* pUserData, void* pBufferOut, size_t bytesToRead)
{
    drwav* pWav = (drwav*)pUserData;
    size_t bytesRemaining;

    DRWAV_ASSERT(pWav != NULL);
    DRWAV_ASSERT(pWav->memoryStream.dataSize >= pWav->memoryStream.currentReadPos);

    bytesRemaining = pWav->memoryStream.dataSize - pWav->memoryStream.currentReadPos;
    if (bytesToRead > bytesRemaining) {
        bytesToRead = bytesRemaining;
    }

    if (bytesToRead > 0) {
        DRWAV_COPY_MEMORY(pBufferOut, pWav->memoryStream.data + pWav->memoryStream.currentReadPos, bytesToRead);
        pWav->memoryStream.currentReadPos += bytesToRead;
    }

    return bytesToRead;
}

/* Memory-backed onSeek callback for the read path. Fails (returns DRWAV_FALSE) rather than clamping when the target is out of bounds. */
static drwav_bool32 drwav__on_seek_memory(void* pUserData, int offset, drwav_seek_origin origin)
{
    drwav* pWav = (drwav*)pUserData;
    DRWAV_ASSERT(pWav != NULL);

    if (origin == drwav_seek_origin_current) {
        if (offset > 0) {
            if (pWav->memoryStream.currentReadPos + offset > pWav->memoryStream.dataSize) {
                return DRWAV_FALSE; /* Trying to seek too far forward. */
            }
        } else {
            if (pWav->memoryStream.currentReadPos < (size_t)-offset) {
                return DRWAV_FALSE; /* Trying to seek too far backwards. */
            }
        }

        /* This will never underflow thanks to the clamps above. */
        pWav->memoryStream.currentReadPos += offset;
    } else {
        if ((drwav_uint32)offset <= pWav->memoryStream.dataSize) {
            pWav->memoryStream.currentReadPos = offset;
        } else {
            return DRWAV_FALSE; /* Trying to seek too far forward. */
        }
    }

    return DRWAV_TRUE;
}

/* Memory-backed onWrite callback. Grows the caller-owned buffer (via *ppData) by doubling, or to the exact
required size if doubling is not enough, and mirrors the running size into *pDataSize. Returns 0 on allocation failure. */
static size_t drwav__on_write_memory(void* pUserData, const void* pDataIn, size_t bytesToWrite)
{
    drwav* pWav = (drwav*)pUserData;
    size_t bytesRemaining;

    DRWAV_ASSERT(pWav != NULL);
    DRWAV_ASSERT(pWav->memoryStreamWrite.dataCapacity >= pWav->memoryStreamWrite.currentWritePos);

    bytesRemaining = pWav->memoryStreamWrite.dataCapacity - pWav->memoryStreamWrite.currentWritePos;
    if (bytesRemaining < bytesToWrite) {
        /* Need to reallocate. */
        void* pNewData;
        size_t newDataCapacity = (pWav->memoryStreamWrite.dataCapacity == 0) ? 256 : pWav->memoryStreamWrite.dataCapacity * 2;

        /* If doubling wasn't enough, just make it the minimum required size to write the data. */
        if ((newDataCapacity - pWav->memoryStreamWrite.currentWritePos) < bytesToWrite) {
            newDataCapacity = pWav->memoryStreamWrite.currentWritePos + bytesToWrite;
        }

        pNewData = drwav__realloc_from_callbacks(*pWav->memoryStreamWrite.ppData, newDataCapacity, pWav->memoryStreamWrite.dataCapacity, &pWav->allocationCallbacks);
        if (pNewData == NULL) {
            return 0;
        }

        *pWav->memoryStreamWrite.ppData = pNewData;
        pWav->memoryStreamWrite.dataCapacity = newDataCapacity;
    }

    DRWAV_COPY_MEMORY(((drwav_uint8*)(*pWav->memoryStreamWrite.ppData)) + pWav->memoryStreamWrite.currentWritePos, pDataIn, bytesToWrite);

    pWav->memoryStreamWrite.currentWritePos += bytesToWrite;
    if (pWav->memoryStreamWrite.dataSize < pWav->memoryStreamWrite.currentWritePos) {
        pWav->memoryStreamWrite.dataSize = pWav->memoryStreamWrite.currentWritePos;
    }

    *pWav->memoryStreamWrite.pDataSize = pWav->memoryStreamWrite.dataSize;

    return bytesToWrite;
}

/* Memory-backed onSeek callback for the write path. Unlike the read-path seek, out-of-range targets are
clamped to the valid range instead of failing. */
static drwav_bool32 drwav__on_seek_memory_write(void* pUserData, int offset, drwav_seek_origin origin)
{
    drwav* pWav = (drwav*)pUserData;
    DRWAV_ASSERT(pWav != NULL);

    if (origin ==
drwav_seek_origin_current) {
        if (offset > 0) {
            if (pWav->memoryStreamWrite.currentWritePos + offset > pWav->memoryStreamWrite.dataSize) {
                offset = (int)(pWav->memoryStreamWrite.dataSize - pWav->memoryStreamWrite.currentWritePos);  /* Trying to seek too far forward. */
            }
        } else {
            if (pWav->memoryStreamWrite.currentWritePos < (size_t)-offset) {
                offset = -(int)pWav->memoryStreamWrite.currentWritePos;  /* Trying to seek too far backwards. */
            }
        }

        /* This will never underflow thanks to the clamps above. */
        pWav->memoryStreamWrite.currentWritePos += offset;
    } else {
        if ((drwav_uint32)offset <= pWav->memoryStreamWrite.dataSize) {
            pWav->memoryStreamWrite.currentWritePos = offset;
        } else {
            pWav->memoryStreamWrite.currentWritePos = pWav->memoryStreamWrite.dataSize;  /* Trying to seek too far forward. */
        }
    }

    return DRWAV_TRUE;
}

DRWAV_API drwav_bool32 drwav_init_memory(drwav* pWav, const void* data, size_t dataSize, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_memory_ex(pWav, data, dataSize, NULL, NULL, 0, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_memory_ex(drwav* pWav, const void* data, size_t dataSize, drwav_chunk_proc onChunk, void* pChunkUserData, drwav_uint32 flags, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (data == NULL || dataSize == 0) {
        return DRWAV_FALSE;
    }

    if (!drwav_preinit(pWav, drwav__on_read_memory, drwav__on_seek_memory, pWav, pAllocationCallbacks)) {
        return DRWAV_FALSE;
    }

    /* The memory stream borrows the caller's buffer; drwav does not take ownership of `data`. */
    pWav->memoryStream.data = (const drwav_uint8*)data;
    pWav->memoryStream.dataSize = dataSize;
    pWav->memoryStream.currentReadPos = 0;

    return drwav_init__internal(pWav, onChunk, pChunkUserData, flags);
}

/* Shared body of the memory-writer initializers. The output buffer (*ppData) is owned by the caller and
grown by drwav__on_write_memory(); its running size is reported through *pDataSize. */
static drwav_bool32 drwav_init_memory_write__internal(drwav* pWav, void** ppData, size_t* pDataSize, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, drwav_bool32 isSequential, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (ppData == NULL || pDataSize == NULL) {
        return DRWAV_FALSE;
    }

    *ppData = NULL; /* Important because we're using realloc()! */
    *pDataSize = 0;

    if (!drwav_preinit_write(pWav, pFormat, isSequential, drwav__on_write_memory, drwav__on_seek_memory_write, pWav, pAllocationCallbacks)) {
        return DRWAV_FALSE;
    }

    pWav->memoryStreamWrite.ppData = ppData;
    pWav->memoryStreamWrite.pDataSize = pDataSize;
    pWav->memoryStreamWrite.dataSize = 0;
    pWav->memoryStreamWrite.dataCapacity = 0;
    pWav->memoryStreamWrite.currentWritePos = 0;

    return drwav_init_write__internal(pWav, pFormat, totalSampleCount);
}

DRWAV_API drwav_bool32 drwav_init_memory_write(drwav* pWav, void** ppData, size_t* pDataSize, const drwav_data_format* pFormat, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_memory_write__internal(pWav, ppData, pDataSize, pFormat, 0, DRWAV_FALSE, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_memory_write_sequential(drwav* pWav, void** ppData, size_t* pDataSize, const drwav_data_format* pFormat, drwav_uint64 totalSampleCount, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    return drwav_init_memory_write__internal(pWav, ppData, pDataSize, pFormat, totalSampleCount, DRWAV_TRUE, pAllocationCallbacks);
}

DRWAV_API drwav_bool32 drwav_init_memory_write_sequential_pcm_frames(drwav* pWav, void** ppData, size_t* pDataSize, const drwav_data_format* pFormat, drwav_uint64 totalPCMFrameCount, const drwav_allocation_callbacks* pAllocationCallbacks)
{
    if (pFormat == NULL) {
        return DRWAV_FALSE;
    }

    /* The sequential writer works in samples, not frames, hence the multiply by channel count. */
    return drwav_init_memory_write_sequential(pWav, ppData, pDataSize, pFormat, totalPCMFrameCount*pFormat->channels, pAllocationCallbacks);
}


DRWAV_API drwav_result drwav_uninit(drwav* pWav)
{
    drwav_result result = DRWAV_SUCCESS;

    if (pWav == NULL) {
        return DRWAV_INVALID_ARGS;
    }

    /*
    If the drwav object was opened in write mode we'll need to finalize a few things:
      - Make sure the "data" chunk is aligned to 16-bits for RIFF containers, or 64 bits for W64 containers.
      - Set the size of the "data" chunk.
    */
    if (pWav->onWrite != NULL) {
        drwav_uint32 paddingSize = 0;

        /* Padding. Do not adjust pWav->dataChunkDataSize - this should not include the padding. */
        if (pWav->container == drwav_container_riff || pWav->container == drwav_container_rf64) {
            paddingSize = drwav__chunk_padding_size_riff(pWav->dataChunkDataSize);
        } else {
            paddingSize = drwav__chunk_padding_size_w64(pWav->dataChunkDataSize);
        }

        if (paddingSize > 0) {
            drwav_uint64 paddingData = 0;
            drwav__write(pWav, &paddingData, paddingSize);  /* Byte order does not matter for this. */
        }

        /*
        Chunk sizes. When using sequential mode, these will have been filled in at initialization time. We only need
        to do this when using non-sequential mode.
        */
        if (pWav->onSeek && !pWav->isSequentialWrite) {
            if (pWav->container == drwav_container_riff) {
                /* The "RIFF" chunk size. */
                if (pWav->onSeek(pWav->pUserData, 4, drwav_seek_origin_start)) {
                    drwav_uint32 riffChunkSize = drwav__riff_chunk_size_riff(pWav->dataChunkDataSize);
                    drwav__write_u32ne_to_le(pWav, riffChunkSize);
                }

                /* the "data" chunk size. */
                if (pWav->onSeek(pWav->pUserData, (int)pWav->dataChunkDataPos + 4, drwav_seek_origin_start)) {
                    drwav_uint32 dataChunkSize = drwav__data_chunk_size_riff(pWav->dataChunkDataSize);
                    drwav__write_u32ne_to_le(pWav, dataChunkSize);
                }
            } else if (pWav->container == drwav_container_w64) {
                /* The "RIFF" chunk size. */
                if (pWav->onSeek(pWav->pUserData, 16, drwav_seek_origin_start)) {
                    drwav_uint64 riffChunkSize = drwav__riff_chunk_size_w64(pWav->dataChunkDataSize);
                    drwav__write_u64ne_to_le(pWav, riffChunkSize);
                }

                /* The "data" chunk size. */
                if (pWav->onSeek(pWav->pUserData, (int)pWav->dataChunkDataPos + 16, drwav_seek_origin_start)) {
                    drwav_uint64 dataChunkSize = drwav__data_chunk_size_w64(pWav->dataChunkDataSize);
                    drwav__write_u64ne_to_le(pWav, dataChunkSize);
                }
            } else if (pWav->container == drwav_container_rf64) {
                /* We only need to update the ds64 chunk. The "RIFF" and "data" chunks always have their sizes set to 0xFFFFFFFF for RF64. */
                int ds64BodyPos = 12 + 8;

                /* The "RIFF" chunk size. */
                if (pWav->onSeek(pWav->pUserData, ds64BodyPos + 0, drwav_seek_origin_start)) {
                    drwav_uint64 riffChunkSize = drwav__riff_chunk_size_rf64(pWav->dataChunkDataSize);
                    drwav__write_u64ne_to_le(pWav, riffChunkSize);
                }

                /* The "data" chunk size. */
                if (pWav->onSeek(pWav->pUserData, ds64BodyPos + 8, drwav_seek_origin_start)) {
                    drwav_uint64 dataChunkSize = drwav__data_chunk_size_rf64(pWav->dataChunkDataSize);
                    drwav__write_u64ne_to_le(pWav, dataChunkSize);
                }
            }
        }

        /* Validation for sequential mode. */
        if (pWav->isSequentialWrite) {
            if (pWav->dataChunkDataSize != pWav->dataChunkDataSizeTargetWrite) {
                result = DRWAV_INVALID_FILE;
            }
        }
    }

#ifndef DR_WAV_NO_STDIO
    /*
    If we opened the file with drwav_open_file() we will want to close the file handle. We can know whether or not drwav_open_file()
    was used by looking at the onRead and onSeek callbacks.
    */
    if (pWav->onRead == drwav__on_read_stdio || pWav->onWrite == drwav__on_write_stdio) {
        fclose((FILE*)pWav->pUserData);
    }
#endif

    return result;
}


DRWAV_API size_t drwav_read_raw(drwav* pWav, size_t bytesToRead, void* pBufferOut)
{
    size_t bytesRead;

    if (pWav == NULL || bytesToRead == 0) {
        return 0;
    }

    if (bytesToRead > pWav->bytesRemaining) {
        bytesToRead = (size_t)pWav->bytesRemaining;
    }

    if (pBufferOut != NULL) {
        bytesRead = pWav->onRead(pWav->pUserData, pBufferOut, bytesToRead);
    } else {
        /* We need to seek. If we fail, we need to read-and-discard to make sure we get a good byte count. */
        bytesRead = 0;
        while (bytesRead < bytesToRead) {
            size_t bytesToSeek = (bytesToRead - bytesRead);
            if (bytesToSeek > 0x7FFFFFFF) {
                bytesToSeek = 0x7FFFFFFF;   /* onSeek() takes an int, so cap each step. */
            }

            if (pWav->onSeek(pWav->pUserData, (int)bytesToSeek, drwav_seek_origin_current) == DRWAV_FALSE) {
                break;
            }

            bytesRead += bytesToSeek;
        }

        /* When we get here we may need to read-and-discard some data.
*/
        while (bytesRead < bytesToRead) {
            drwav_uint8 buffer[4096];
            size_t bytesSeeked;
            size_t bytesToSeek = (bytesToRead - bytesRead);
            if (bytesToSeek > sizeof(buffer)) {
                bytesToSeek = sizeof(buffer);
            }

            bytesSeeked = pWav->onRead(pWav->pUserData, buffer, bytesToSeek);
            bytesRead += bytesSeeked;

            if (bytesSeeked < bytesToSeek) {
                break;  /* Reached the end. */
            }
        }
    }

    pWav->bytesRemaining -= bytesRead;
    return bytesRead;
}


DRWAV_API drwav_uint64 drwav_read_pcm_frames_le(drwav* pWav, drwav_uint64 framesToRead, void* pBufferOut)
{
    drwav_uint32 bytesPerFrame;
    drwav_uint64 bytesToRead;   /* Intentionally uint64 instead of size_t so we can do a check that we're not reading too much on 32-bit builds. */

    if (pWav == NULL || framesToRead == 0) {
        return 0;
    }

    /* Cannot use this function for compressed formats. */
    if (drwav__is_compressed_format_tag(pWav->translatedFormatTag)) {
        return 0;
    }

    bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav);
    if (bytesPerFrame == 0) {
        return 0;
    }

    /* Don't try to read more samples than can potentially fit in the output buffer. */
    bytesToRead = framesToRead * bytesPerFrame;
    if (bytesToRead > DRWAV_SIZE_MAX) {
        bytesToRead = (DRWAV_SIZE_MAX / bytesPerFrame) * bytesPerFrame; /* Round the number of bytes to read to a clean frame boundary. */
    }

    /*
    Doing an explicit check here just to make it clear that we don't want to be attempt to read anything if there's no bytes to read. There
    *could* be a time where it evaluates to 0 due to overflowing.
    */
    if (bytesToRead == 0) {
        return 0;
    }

    return drwav_read_raw(pWav, (size_t)bytesToRead, pBufferOut) / bytesPerFrame;
}

DRWAV_API drwav_uint64 drwav_read_pcm_frames_be(drwav* pWav, drwav_uint64 framesToRead, void* pBufferOut)
{
    /* WAV data is little-endian on disk; read it, then byte swap in place for big-endian consumers. */
    drwav_uint64 framesRead = drwav_read_pcm_frames_le(pWav, framesToRead, pBufferOut);

    if (pBufferOut != NULL) {
        drwav__bswap_samples(pBufferOut, framesRead*pWav->channels, drwav_get_bytes_per_pcm_frame(pWav)/pWav->channels, pWav->translatedFormatTag);
    }

    return framesRead;
}

DRWAV_API drwav_uint64 drwav_read_pcm_frames(drwav* pWav, drwav_uint64 framesToRead, void* pBufferOut)
{
    /* Dispatch on host endianness so the caller always receives native-endian samples. */
    if (drwav__is_little_endian()) {
        return drwav_read_pcm_frames_le(pWav, framesToRead, pBufferOut);
    } else {
        return drwav_read_pcm_frames_be(pWav, framesToRead, pBufferOut);
    }
}



DRWAV_API drwav_bool32 drwav_seek_to_first_pcm_frame(drwav* pWav)
{
    if (pWav->onWrite != NULL) {
        return DRWAV_FALSE; /* No seeking in write mode. */
    }

    if (!pWav->onSeek(pWav->pUserData, (int)pWav->dataChunkDataPos, drwav_seek_origin_start)) {
        return DRWAV_FALSE;
    }

    if (drwav__is_compressed_format_tag(pWav->translatedFormatTag)) {
        pWav->compressed.iCurrentPCMFrame = 0;

        /* Cached data needs to be cleared for compressed formats. */
        if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ADPCM) {
            DRWAV_ZERO_OBJECT(&pWav->msadpcm);
        } else if (pWav->translatedFormatTag == DR_WAVE_FORMAT_DVI_ADPCM) {
            DRWAV_ZERO_OBJECT(&pWav->ima);
        } else {
            DRWAV_ASSERT(DRWAV_FALSE);  /* If this assertion is triggered it means I've implemented a new compressed format but forgot to add a branch for it here. */
        }
    }

    pWav->bytesRemaining = pWav->dataChunkDataSize;
    return DRWAV_TRUE;
}

DRWAV_API drwav_bool32 drwav_seek_to_pcm_frame(drwav* pWav, drwav_uint64 targetFrameIndex)
{
    /* Seeking should be compatible with wave files > 2GB. */

    if (pWav == NULL || pWav->onSeek == NULL) {
        return DRWAV_FALSE;
    }

    /* No seeking in write mode. */
    if (pWav->onWrite != NULL) {
        return DRWAV_FALSE;
    }

    /* If there are no samples, just return DRWAV_TRUE without doing anything. */
    if (pWav->totalPCMFrameCount == 0) {
        return DRWAV_TRUE;
    }

    /* Make sure the sample is clamped. */
    if (targetFrameIndex >= pWav->totalPCMFrameCount) {
        targetFrameIndex  = pWav->totalPCMFrameCount - 1;
    }

    /*
    For compressed formats we just use a slow generic seek. If we are seeking forward we just seek forward. If we are going backwards we need to seek back to the start.
    */
    if (drwav__is_compressed_format_tag(pWav->translatedFormatTag)) {
        /* TODO: This can be optimized. */

        /*
        If we're seeking forward it's simple - just keep reading samples until we hit the sample we're requesting. If we're seeking backwards, we first need to seek back to the start
        and then just do the same thing as a forward seek.
        */
        if (targetFrameIndex < pWav->compressed.iCurrentPCMFrame) {
            if (!drwav_seek_to_first_pcm_frame(pWav)) {
                return DRWAV_FALSE;
            }
        }

        if (targetFrameIndex > pWav->compressed.iCurrentPCMFrame) {
            drwav_uint64 offsetInFrames = targetFrameIndex - pWav->compressed.iCurrentPCMFrame;

            /* Frames are decoded into devnull and discarded until the target position is reached. */
            drwav_int16 devnull[2048];
            while (offsetInFrames > 0) {
                drwav_uint64 framesRead = 0;
                drwav_uint64 framesToRead = offsetInFrames;
                if (framesToRead > drwav_countof(devnull)/pWav->channels) {
                    framesToRead = drwav_countof(devnull)/pWav->channels;
                }

                if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ADPCM) {
                    framesRead = drwav_read_pcm_frames_s16__msadpcm(pWav, framesToRead, devnull);
                } else if (pWav->translatedFormatTag == DR_WAVE_FORMAT_DVI_ADPCM) {
                    framesRead = drwav_read_pcm_frames_s16__ima(pWav, framesToRead, devnull);
                } else {
                    DRWAV_ASSERT(DRWAV_FALSE);  /* If this assertion is triggered it means I've implemented a new compressed format but forgot to add a branch for it here. */
                }

                if (framesRead != framesToRead) {
                    return DRWAV_FALSE;
                }

                offsetInFrames -= framesRead;
            }
        }
    } else {
        drwav_uint64 totalSizeInBytes;
        drwav_uint64 currentBytePos;
        drwav_uint64 targetBytePos;
        drwav_uint64 offset;

        totalSizeInBytes = pWav->totalPCMFrameCount * drwav_get_bytes_per_pcm_frame(pWav);
        DRWAV_ASSERT(totalSizeInBytes >= pWav->bytesRemaining);

        currentBytePos = totalSizeInBytes - pWav->bytesRemaining;
        targetBytePos  = targetFrameIndex * drwav_get_bytes_per_pcm_frame(pWav);

        if (currentBytePos < targetBytePos) {
            /* Offset forwards. */
            offset = (targetBytePos - currentBytePos);
        } else {
            /* Offset backwards. */
            if (!drwav_seek_to_first_pcm_frame(pWav)) {
                return DRWAV_FALSE;
            }
            offset = targetBytePos;
        }

        while (offset > 0) {
            /* onSeek() takes an int, so step in at most INT_MAX-byte increments. */
            int offset32 = ((offset > INT_MAX) ? INT_MAX : (int)offset);
            if (!pWav->onSeek(pWav->pUserData, offset32, drwav_seek_origin_current)) {
                return DRWAV_FALSE;
            }

            pWav->bytesRemaining -= offset32;
            offset -= offset32;
        }
    }

    return DRWAV_TRUE;
}


DRWAV_API size_t drwav_write_raw(drwav* pWav, size_t bytesToWrite, const void* pData)
{
    size_t bytesWritten;

    if (pWav == NULL || bytesToWrite == 0 || pData == NULL) {
        return 0;
    }

    bytesWritten = pWav->onWrite(pWav->pUserData, pData, bytesToWrite);
    pWav->dataChunkDataSize += bytesWritten;

    return bytesWritten;
}


DRWAV_API drwav_uint64 drwav_write_pcm_frames_le(drwav* pWav, drwav_uint64 framesToWrite, const void* pData)
{
    drwav_uint64 bytesToWrite;
    drwav_uint64 bytesWritten;
    const drwav_uint8* pRunningData;

    if (pWav == NULL || framesToWrite == 0 || pData == NULL) {
        return 0;
    }

    bytesToWrite = ((framesToWrite * pWav->channels * pWav->bitsPerSample) / 8);
    if (bytesToWrite > DRWAV_SIZE_MAX) {
        return 0;
    }

    bytesWritten = 0;
    pRunningData = (const drwav_uint8*)pData;

    while (bytesToWrite > 0) {
        size_t bytesJustWritten;
        drwav_uint64 bytesToWriteThisIteration;

        bytesToWriteThisIteration = bytesToWrite;
        DRWAV_ASSERT(bytesToWriteThisIteration <= DRWAV_SIZE_MAX);  /* <-- This is checked above. */

        bytesJustWritten = drwav_write_raw(pWav, (size_t)bytesToWriteThisIteration, pRunningData);
        if (bytesJustWritten == 0) {
            break;
        }

        bytesToWrite -= bytesJustWritten;
        bytesWritten += bytesJustWritten;
        pRunningData += bytesJustWritten;
    }

    /* Convert the byte count back to whole PCM frames. */
    return (bytesWritten * 8) / pWav->bitsPerSample / pWav->channels;
}

DRWAV_API drwav_uint64 drwav_write_pcm_frames_be(drwav* pWav, drwav_uint64 framesToWrite, const void* pData)
{
    drwav_uint64 bytesToWrite;
    drwav_uint64 bytesWritten;
    drwav_uint32 bytesPerSample;
    const drwav_uint8* pRunningData;

    if (pWav == NULL || framesToWrite == 0 || pData == NULL) {
        return 0;
    }

    bytesToWrite = ((framesToWrite * pWav->channels * pWav->bitsPerSample) / 8);
    if (bytesToWrite > DRWAV_SIZE_MAX) {
        return 0;
    }

    bytesWritten = 0;
    pRunningData = (const drwav_uint8*)pData;

    bytesPerSample = drwav_get_bytes_per_pcm_frame(pWav) / pWav->channels;

    while (bytesToWrite > 0) {
        drwav_uint8 temp[4096];
        drwav_uint32 sampleCount;
        size_t bytesJustWritten;
        drwav_uint64 bytesToWriteThisIteration;

        bytesToWriteThisIteration = bytesToWrite;
        DRWAV_ASSERT(bytesToWriteThisIteration <= DRWAV_SIZE_MAX);  /* <-- This is checked above. */

        /*
        WAV files are always little-endian. We need to byte swap on big-endian architectures. Since our input buffer is read-only we need
        to use an intermediary buffer for the conversion.
*/ sampleCount = sizeof(temp)/bytesPerSample; if (bytesToWriteThisIteration > ((drwav_uint64)sampleCount)*bytesPerSample) { bytesToWriteThisIteration = ((drwav_uint64)sampleCount)*bytesPerSample; } DRWAV_COPY_MEMORY(temp, pRunningData, (size_t)bytesToWriteThisIteration); drwav__bswap_samples(temp, sampleCount, bytesPerSample, pWav->translatedFormatTag); bytesJustWritten = drwav_write_raw(pWav, (size_t)bytesToWriteThisIteration, temp); if (bytesJustWritten == 0) { break; } bytesToWrite -= bytesJustWritten; bytesWritten += bytesJustWritten; pRunningData += bytesJustWritten; } return (bytesWritten * 8) / pWav->bitsPerSample / pWav->channels; } DRWAV_API drwav_uint64 drwav_write_pcm_frames(drwav* pWav, drwav_uint64 framesToWrite, const void* pData) { if (drwav__is_little_endian()) { return drwav_write_pcm_frames_le(pWav, framesToWrite, pData); } else { return drwav_write_pcm_frames_be(pWav, framesToWrite, pData); } } static drwav_uint64 drwav_read_pcm_frames_s16__msadpcm(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut) { drwav_uint64 totalFramesRead = 0; DRWAV_ASSERT(pWav != NULL); DRWAV_ASSERT(framesToRead > 0); /* TODO: Lots of room for optimization here. */ while (framesToRead > 0 && pWav->compressed.iCurrentPCMFrame < pWav->totalPCMFrameCount) { /* If there are no cached frames we need to load a new block. */ if (pWav->msadpcm.cachedFrameCount == 0 && pWav->msadpcm.bytesRemainingInBlock == 0) { if (pWav->channels == 1) { /* Mono. 
*/ drwav_uint8 header[7]; if (pWav->onRead(pWav->pUserData, header, sizeof(header)) != sizeof(header)) { return totalFramesRead; } pWav->msadpcm.bytesRemainingInBlock = pWav->fmt.blockAlign - sizeof(header); pWav->msadpcm.predictor[0] = header[0]; pWav->msadpcm.delta[0] = drwav__bytes_to_s16(header + 1); pWav->msadpcm.prevFrames[0][1] = (drwav_int32)drwav__bytes_to_s16(header + 3); pWav->msadpcm.prevFrames[0][0] = (drwav_int32)drwav__bytes_to_s16(header + 5); pWav->msadpcm.cachedFrames[2] = pWav->msadpcm.prevFrames[0][0]; pWav->msadpcm.cachedFrames[3] = pWav->msadpcm.prevFrames[0][1]; pWav->msadpcm.cachedFrameCount = 2; } else { /* Stereo. */ drwav_uint8 header[14]; if (pWav->onRead(pWav->pUserData, header, sizeof(header)) != sizeof(header)) { return totalFramesRead; } pWav->msadpcm.bytesRemainingInBlock = pWav->fmt.blockAlign - sizeof(header); pWav->msadpcm.predictor[0] = header[0]; pWav->msadpcm.predictor[1] = header[1]; pWav->msadpcm.delta[0] = drwav__bytes_to_s16(header + 2); pWav->msadpcm.delta[1] = drwav__bytes_to_s16(header + 4); pWav->msadpcm.prevFrames[0][1] = (drwav_int32)drwav__bytes_to_s16(header + 6); pWav->msadpcm.prevFrames[1][1] = (drwav_int32)drwav__bytes_to_s16(header + 8); pWav->msadpcm.prevFrames[0][0] = (drwav_int32)drwav__bytes_to_s16(header + 10); pWav->msadpcm.prevFrames[1][0] = (drwav_int32)drwav__bytes_to_s16(header + 12); pWav->msadpcm.cachedFrames[0] = pWav->msadpcm.prevFrames[0][0]; pWav->msadpcm.cachedFrames[1] = pWav->msadpcm.prevFrames[1][0]; pWav->msadpcm.cachedFrames[2] = pWav->msadpcm.prevFrames[0][1]; pWav->msadpcm.cachedFrames[3] = pWav->msadpcm.prevFrames[1][1]; pWav->msadpcm.cachedFrameCount = 2; } } /* Output anything that's cached. 
*/ while (framesToRead > 0 && pWav->msadpcm.cachedFrameCount > 0 && pWav->compressed.iCurrentPCMFrame < pWav->totalPCMFrameCount) { if (pBufferOut != NULL) { drwav_uint32 iSample = 0; for (iSample = 0; iSample < pWav->channels; iSample += 1) { pBufferOut[iSample] = (drwav_int16)pWav->msadpcm.cachedFrames[(drwav_countof(pWav->msadpcm.cachedFrames) - (pWav->msadpcm.cachedFrameCount*pWav->channels)) + iSample]; } pBufferOut += pWav->channels; } framesToRead -= 1; totalFramesRead += 1; pWav->compressed.iCurrentPCMFrame += 1; pWav->msadpcm.cachedFrameCount -= 1; } if (framesToRead == 0) { return totalFramesRead; } /* If there's nothing left in the cache, just go ahead and load more. If there's nothing left to load in the current block we just continue to the next loop iteration which will trigger the loading of a new block. */ if (pWav->msadpcm.cachedFrameCount == 0) { if (pWav->msadpcm.bytesRemainingInBlock == 0) { continue; } else { static drwav_int32 adaptationTable[] = { 230, 230, 230, 230, 307, 409, 512, 614, 768, 614, 512, 409, 307, 230, 230, 230 }; static drwav_int32 coeff1Table[] = { 256, 512, 0, 192, 240, 460, 392 }; static drwav_int32 coeff2Table[] = { 0, -256, 0, 64, 0, -208, -232 }; drwav_uint8 nibbles; drwav_int32 nibble0; drwav_int32 nibble1; if (pWav->onRead(pWav->pUserData, &nibbles, 1) != 1) { return totalFramesRead; } pWav->msadpcm.bytesRemainingInBlock -= 1; /* TODO: Optimize away these if statements. */ nibble0 = ((nibbles & 0xF0) >> 4); if ((nibbles & 0x80)) { nibble0 |= 0xFFFFFFF0UL; } nibble1 = ((nibbles & 0x0F) >> 0); if ((nibbles & 0x08)) { nibble1 |= 0xFFFFFFF0UL; } if (pWav->channels == 1) { /* Mono. 
*/ drwav_int32 newSample0; drwav_int32 newSample1; newSample0 = ((pWav->msadpcm.prevFrames[0][1] * coeff1Table[pWav->msadpcm.predictor[0]]) + (pWav->msadpcm.prevFrames[0][0] * coeff2Table[pWav->msadpcm.predictor[0]])) >> 8; newSample0 += nibble0 * pWav->msadpcm.delta[0]; newSample0 = drwav_clamp(newSample0, -32768, 32767); pWav->msadpcm.delta[0] = (adaptationTable[((nibbles & 0xF0) >> 4)] * pWav->msadpcm.delta[0]) >> 8; if (pWav->msadpcm.delta[0] < 16) { pWav->msadpcm.delta[0] = 16; } pWav->msadpcm.prevFrames[0][0] = pWav->msadpcm.prevFrames[0][1]; pWav->msadpcm.prevFrames[0][1] = newSample0; newSample1 = ((pWav->msadpcm.prevFrames[0][1] * coeff1Table[pWav->msadpcm.predictor[0]]) + (pWav->msadpcm.prevFrames[0][0] * coeff2Table[pWav->msadpcm.predictor[0]])) >> 8; newSample1 += nibble1 * pWav->msadpcm.delta[0]; newSample1 = drwav_clamp(newSample1, -32768, 32767); pWav->msadpcm.delta[0] = (adaptationTable[((nibbles & 0x0F) >> 0)] * pWav->msadpcm.delta[0]) >> 8; if (pWav->msadpcm.delta[0] < 16) { pWav->msadpcm.delta[0] = 16; } pWav->msadpcm.prevFrames[0][0] = pWav->msadpcm.prevFrames[0][1]; pWav->msadpcm.prevFrames[0][1] = newSample1; pWav->msadpcm.cachedFrames[2] = newSample0; pWav->msadpcm.cachedFrames[3] = newSample1; pWav->msadpcm.cachedFrameCount = 2; } else { /* Stereo. */ drwav_int32 newSample0; drwav_int32 newSample1; /* Left. */ newSample0 = ((pWav->msadpcm.prevFrames[0][1] * coeff1Table[pWav->msadpcm.predictor[0]]) + (pWav->msadpcm.prevFrames[0][0] * coeff2Table[pWav->msadpcm.predictor[0]])) >> 8; newSample0 += nibble0 * pWav->msadpcm.delta[0]; newSample0 = drwav_clamp(newSample0, -32768, 32767); pWav->msadpcm.delta[0] = (adaptationTable[((nibbles & 0xF0) >> 4)] * pWav->msadpcm.delta[0]) >> 8; if (pWav->msadpcm.delta[0] < 16) { pWav->msadpcm.delta[0] = 16; } pWav->msadpcm.prevFrames[0][0] = pWav->msadpcm.prevFrames[0][1]; pWav->msadpcm.prevFrames[0][1] = newSample0; /* Right. 
*/ newSample1 = ((pWav->msadpcm.prevFrames[1][1] * coeff1Table[pWav->msadpcm.predictor[1]]) + (pWav->msadpcm.prevFrames[1][0] * coeff2Table[pWav->msadpcm.predictor[1]])) >> 8; newSample1 += nibble1 * pWav->msadpcm.delta[1]; newSample1 = drwav_clamp(newSample1, -32768, 32767); pWav->msadpcm.delta[1] = (adaptationTable[((nibbles & 0x0F) >> 0)] * pWav->msadpcm.delta[1]) >> 8; if (pWav->msadpcm.delta[1] < 16) { pWav->msadpcm.delta[1] = 16; } pWav->msadpcm.prevFrames[1][0] = pWav->msadpcm.prevFrames[1][1]; pWav->msadpcm.prevFrames[1][1] = newSample1; pWav->msadpcm.cachedFrames[2] = newSample0; pWav->msadpcm.cachedFrames[3] = newSample1; pWav->msadpcm.cachedFrameCount = 1; } } } } return totalFramesRead; } static drwav_uint64 drwav_read_pcm_frames_s16__ima(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut) { drwav_uint64 totalFramesRead = 0; drwav_uint32 iChannel; static drwav_int32 indexTable[16] = { -1, -1, -1, -1, 2, 4, 6, 8, -1, -1, -1, -1, 2, 4, 6, 8 }; static drwav_int32 stepTable[89] = { 7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 19, 21, 23, 25, 28, 31, 34, 37, 41, 45, 50, 55, 60, 66, 73, 80, 88, 97, 107, 118, 130, 143, 157, 173, 190, 209, 230, 253, 279, 307, 337, 371, 408, 449, 494, 544, 598, 658, 724, 796, 876, 963, 1060, 1166, 1282, 1411, 1552, 1707, 1878, 2066, 2272, 2499, 2749, 3024, 3327, 3660, 4026, 4428, 4871, 5358, 5894, 6484, 7132, 7845, 8630, 9493, 10442, 11487, 12635, 13899, 15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794, 32767 }; DRWAV_ASSERT(pWav != NULL); DRWAV_ASSERT(framesToRead > 0); /* TODO: Lots of room for optimization here. */ while (framesToRead > 0 && pWav->compressed.iCurrentPCMFrame < pWav->totalPCMFrameCount) { /* If there are no cached samples we need to load a new block. */ if (pWav->ima.cachedFrameCount == 0 && pWav->ima.bytesRemainingInBlock == 0) { if (pWav->channels == 1) { /* Mono. 
*/ drwav_uint8 header[4]; if (pWav->onRead(pWav->pUserData, header, sizeof(header)) != sizeof(header)) { return totalFramesRead; } pWav->ima.bytesRemainingInBlock = pWav->fmt.blockAlign - sizeof(header); if (header[2] >= drwav_countof(stepTable)) { pWav->onSeek(pWav->pUserData, pWav->ima.bytesRemainingInBlock, drwav_seek_origin_current); pWav->ima.bytesRemainingInBlock = 0; return totalFramesRead; /* Invalid data. */ } pWav->ima.predictor[0] = drwav__bytes_to_s16(header + 0); pWav->ima.stepIndex[0] = header[2]; pWav->ima.cachedFrames[drwav_countof(pWav->ima.cachedFrames) - 1] = pWav->ima.predictor[0]; pWav->ima.cachedFrameCount = 1; } else { /* Stereo. */ drwav_uint8 header[8]; if (pWav->onRead(pWav->pUserData, header, sizeof(header)) != sizeof(header)) { return totalFramesRead; } pWav->ima.bytesRemainingInBlock = pWav->fmt.blockAlign - sizeof(header); if (header[2] >= drwav_countof(stepTable) || header[6] >= drwav_countof(stepTable)) { pWav->onSeek(pWav->pUserData, pWav->ima.bytesRemainingInBlock, drwav_seek_origin_current); pWav->ima.bytesRemainingInBlock = 0; return totalFramesRead; /* Invalid data. */ } pWav->ima.predictor[0] = drwav__bytes_to_s16(header + 0); pWav->ima.stepIndex[0] = header[2]; pWav->ima.predictor[1] = drwav__bytes_to_s16(header + 4); pWav->ima.stepIndex[1] = header[6]; pWav->ima.cachedFrames[drwav_countof(pWav->ima.cachedFrames) - 2] = pWav->ima.predictor[0]; pWav->ima.cachedFrames[drwav_countof(pWav->ima.cachedFrames) - 1] = pWav->ima.predictor[1]; pWav->ima.cachedFrameCount = 1; } } /* Output anything that's cached. 
*/ while (framesToRead > 0 && pWav->ima.cachedFrameCount > 0 && pWav->compressed.iCurrentPCMFrame < pWav->totalPCMFrameCount) { if (pBufferOut != NULL) { drwav_uint32 iSample; for (iSample = 0; iSample < pWav->channels; iSample += 1) { pBufferOut[iSample] = (drwav_int16)pWav->ima.cachedFrames[(drwav_countof(pWav->ima.cachedFrames) - (pWav->ima.cachedFrameCount*pWav->channels)) + iSample]; } pBufferOut += pWav->channels; } framesToRead -= 1; totalFramesRead += 1; pWav->compressed.iCurrentPCMFrame += 1; pWav->ima.cachedFrameCount -= 1; } if (framesToRead == 0) { return totalFramesRead; } /* If there's nothing left in the cache, just go ahead and load more. If there's nothing left to load in the current block we just continue to the next loop iteration which will trigger the loading of a new block. */ if (pWav->ima.cachedFrameCount == 0) { if (pWav->ima.bytesRemainingInBlock == 0) { continue; } else { /* From what I can tell with stereo streams, it looks like every 4 bytes (8 samples) is for one channel. So it goes 4 bytes for the left channel, 4 bytes for the right channel. 
*/ pWav->ima.cachedFrameCount = 8; for (iChannel = 0; iChannel < pWav->channels; ++iChannel) { drwav_uint32 iByte; drwav_uint8 nibbles[4]; if (pWav->onRead(pWav->pUserData, &nibbles, 4) != 4) { pWav->ima.cachedFrameCount = 0; return totalFramesRead; } pWav->ima.bytesRemainingInBlock -= 4; for (iByte = 0; iByte < 4; ++iByte) { drwav_uint8 nibble0 = ((nibbles[iByte] & 0x0F) >> 0); drwav_uint8 nibble1 = ((nibbles[iByte] & 0xF0) >> 4); drwav_int32 step = stepTable[pWav->ima.stepIndex[iChannel]]; drwav_int32 predictor = pWav->ima.predictor[iChannel]; drwav_int32 diff = step >> 3; if (nibble0 & 1) diff += step >> 2; if (nibble0 & 2) diff += step >> 1; if (nibble0 & 4) diff += step; if (nibble0 & 8) diff = -diff; predictor = drwav_clamp(predictor + diff, -32768, 32767); pWav->ima.predictor[iChannel] = predictor; pWav->ima.stepIndex[iChannel] = drwav_clamp(pWav->ima.stepIndex[iChannel] + indexTable[nibble0], 0, (drwav_int32)drwav_countof(stepTable)-1); pWav->ima.cachedFrames[(drwav_countof(pWav->ima.cachedFrames) - (pWav->ima.cachedFrameCount*pWav->channels)) + (iByte*2+0)*pWav->channels + iChannel] = predictor; step = stepTable[pWav->ima.stepIndex[iChannel]]; predictor = pWav->ima.predictor[iChannel]; diff = step >> 3; if (nibble1 & 1) diff += step >> 2; if (nibble1 & 2) diff += step >> 1; if (nibble1 & 4) diff += step; if (nibble1 & 8) diff = -diff; predictor = drwav_clamp(predictor + diff, -32768, 32767); pWav->ima.predictor[iChannel] = predictor; pWav->ima.stepIndex[iChannel] = drwav_clamp(pWav->ima.stepIndex[iChannel] + indexTable[nibble1], 0, (drwav_int32)drwav_countof(stepTable)-1); pWav->ima.cachedFrames[(drwav_countof(pWav->ima.cachedFrames) - (pWav->ima.cachedFrameCount*pWav->channels)) + (iByte*2+1)*pWav->channels + iChannel] = predictor; } } } } } return totalFramesRead; } #ifndef DR_WAV_NO_CONVERSION_API static unsigned short g_drwavAlawTable[256] = { 0xEA80, 0xEB80, 0xE880, 0xE980, 0xEE80, 0xEF80, 0xEC80, 0xED80, 0xE280, 0xE380, 0xE080, 0xE180, 0xE680, 
0xE780, 0xE480, 0xE580, 0xF540, 0xF5C0, 0xF440, 0xF4C0, 0xF740, 0xF7C0, 0xF640, 0xF6C0, 0xF140, 0xF1C0, 0xF040, 0xF0C0, 0xF340, 0xF3C0, 0xF240, 0xF2C0, 0xAA00, 0xAE00, 0xA200, 0xA600, 0xBA00, 0xBE00, 0xB200, 0xB600, 0x8A00, 0x8E00, 0x8200, 0x8600, 0x9A00, 0x9E00, 0x9200, 0x9600, 0xD500, 0xD700, 0xD100, 0xD300, 0xDD00, 0xDF00, 0xD900, 0xDB00, 0xC500, 0xC700, 0xC100, 0xC300, 0xCD00, 0xCF00, 0xC900, 0xCB00, 0xFEA8, 0xFEB8, 0xFE88, 0xFE98, 0xFEE8, 0xFEF8, 0xFEC8, 0xFED8, 0xFE28, 0xFE38, 0xFE08, 0xFE18, 0xFE68, 0xFE78, 0xFE48, 0xFE58, 0xFFA8, 0xFFB8, 0xFF88, 0xFF98, 0xFFE8, 0xFFF8, 0xFFC8, 0xFFD8, 0xFF28, 0xFF38, 0xFF08, 0xFF18, 0xFF68, 0xFF78, 0xFF48, 0xFF58, 0xFAA0, 0xFAE0, 0xFA20, 0xFA60, 0xFBA0, 0xFBE0, 0xFB20, 0xFB60, 0xF8A0, 0xF8E0, 0xF820, 0xF860, 0xF9A0, 0xF9E0, 0xF920, 0xF960, 0xFD50, 0xFD70, 0xFD10, 0xFD30, 0xFDD0, 0xFDF0, 0xFD90, 0xFDB0, 0xFC50, 0xFC70, 0xFC10, 0xFC30, 0xFCD0, 0xFCF0, 0xFC90, 0xFCB0, 0x1580, 0x1480, 0x1780, 0x1680, 0x1180, 0x1080, 0x1380, 0x1280, 0x1D80, 0x1C80, 0x1F80, 0x1E80, 0x1980, 0x1880, 0x1B80, 0x1A80, 0x0AC0, 0x0A40, 0x0BC0, 0x0B40, 0x08C0, 0x0840, 0x09C0, 0x0940, 0x0EC0, 0x0E40, 0x0FC0, 0x0F40, 0x0CC0, 0x0C40, 0x0DC0, 0x0D40, 0x5600, 0x5200, 0x5E00, 0x5A00, 0x4600, 0x4200, 0x4E00, 0x4A00, 0x7600, 0x7200, 0x7E00, 0x7A00, 0x6600, 0x6200, 0x6E00, 0x6A00, 0x2B00, 0x2900, 0x2F00, 0x2D00, 0x2300, 0x2100, 0x2700, 0x2500, 0x3B00, 0x3900, 0x3F00, 0x3D00, 0x3300, 0x3100, 0x3700, 0x3500, 0x0158, 0x0148, 0x0178, 0x0168, 0x0118, 0x0108, 0x0138, 0x0128, 0x01D8, 0x01C8, 0x01F8, 0x01E8, 0x0198, 0x0188, 0x01B8, 0x01A8, 0x0058, 0x0048, 0x0078, 0x0068, 0x0018, 0x0008, 0x0038, 0x0028, 0x00D8, 0x00C8, 0x00F8, 0x00E8, 0x0098, 0x0088, 0x00B8, 0x00A8, 0x0560, 0x0520, 0x05E0, 0x05A0, 0x0460, 0x0420, 0x04E0, 0x04A0, 0x0760, 0x0720, 0x07E0, 0x07A0, 0x0660, 0x0620, 0x06E0, 0x06A0, 0x02B0, 0x0290, 0x02F0, 0x02D0, 0x0230, 0x0210, 0x0270, 0x0250, 0x03B0, 0x0390, 0x03F0, 0x03D0, 0x0330, 0x0310, 0x0370, 0x0350 }; static unsigned short g_drwavMulawTable[256] = { 
0x8284, 0x8684, 0x8A84, 0x8E84, 0x9284, 0x9684, 0x9A84, 0x9E84, 0xA284, 0xA684, 0xAA84, 0xAE84, 0xB284, 0xB684, 0xBA84, 0xBE84, 0xC184, 0xC384, 0xC584, 0xC784, 0xC984, 0xCB84, 0xCD84, 0xCF84, 0xD184, 0xD384, 0xD584, 0xD784, 0xD984, 0xDB84, 0xDD84, 0xDF84, 0xE104, 0xE204, 0xE304, 0xE404, 0xE504, 0xE604, 0xE704, 0xE804, 0xE904, 0xEA04, 0xEB04, 0xEC04, 0xED04, 0xEE04, 0xEF04, 0xF004, 0xF0C4, 0xF144, 0xF1C4, 0xF244, 0xF2C4, 0xF344, 0xF3C4, 0xF444, 0xF4C4, 0xF544, 0xF5C4, 0xF644, 0xF6C4, 0xF744, 0xF7C4, 0xF844, 0xF8A4, 0xF8E4, 0xF924, 0xF964, 0xF9A4, 0xF9E4, 0xFA24, 0xFA64, 0xFAA4, 0xFAE4, 0xFB24, 0xFB64, 0xFBA4, 0xFBE4, 0xFC24, 0xFC64, 0xFC94, 0xFCB4, 0xFCD4, 0xFCF4, 0xFD14, 0xFD34, 0xFD54, 0xFD74, 0xFD94, 0xFDB4, 0xFDD4, 0xFDF4, 0xFE14, 0xFE34, 0xFE54, 0xFE74, 0xFE8C, 0xFE9C, 0xFEAC, 0xFEBC, 0xFECC, 0xFEDC, 0xFEEC, 0xFEFC, 0xFF0C, 0xFF1C, 0xFF2C, 0xFF3C, 0xFF4C, 0xFF5C, 0xFF6C, 0xFF7C, 0xFF88, 0xFF90, 0xFF98, 0xFFA0, 0xFFA8, 0xFFB0, 0xFFB8, 0xFFC0, 0xFFC8, 0xFFD0, 0xFFD8, 0xFFE0, 0xFFE8, 0xFFF0, 0xFFF8, 0x0000, 0x7D7C, 0x797C, 0x757C, 0x717C, 0x6D7C, 0x697C, 0x657C, 0x617C, 0x5D7C, 0x597C, 0x557C, 0x517C, 0x4D7C, 0x497C, 0x457C, 0x417C, 0x3E7C, 0x3C7C, 0x3A7C, 0x387C, 0x367C, 0x347C, 0x327C, 0x307C, 0x2E7C, 0x2C7C, 0x2A7C, 0x287C, 0x267C, 0x247C, 0x227C, 0x207C, 0x1EFC, 0x1DFC, 0x1CFC, 0x1BFC, 0x1AFC, 0x19FC, 0x18FC, 0x17FC, 0x16FC, 0x15FC, 0x14FC, 0x13FC, 0x12FC, 0x11FC, 0x10FC, 0x0FFC, 0x0F3C, 0x0EBC, 0x0E3C, 0x0DBC, 0x0D3C, 0x0CBC, 0x0C3C, 0x0BBC, 0x0B3C, 0x0ABC, 0x0A3C, 0x09BC, 0x093C, 0x08BC, 0x083C, 0x07BC, 0x075C, 0x071C, 0x06DC, 0x069C, 0x065C, 0x061C, 0x05DC, 0x059C, 0x055C, 0x051C, 0x04DC, 0x049C, 0x045C, 0x041C, 0x03DC, 0x039C, 0x036C, 0x034C, 0x032C, 0x030C, 0x02EC, 0x02CC, 0x02AC, 0x028C, 0x026C, 0x024C, 0x022C, 0x020C, 0x01EC, 0x01CC, 0x01AC, 0x018C, 0x0174, 0x0164, 0x0154, 0x0144, 0x0134, 0x0124, 0x0114, 0x0104, 0x00F4, 0x00E4, 0x00D4, 0x00C4, 0x00B4, 0x00A4, 0x0094, 0x0084, 0x0078, 0x0070, 0x0068, 0x0060, 0x0058, 0x0050, 0x0048, 0x0040, 0x0038, 0x0030, 
0x0028, 0x0020, 0x0018, 0x0010, 0x0008, 0x0000 }; static DRWAV_INLINE drwav_int16 drwav__alaw_to_s16(drwav_uint8 sampleIn) { return (short)g_drwavAlawTable[sampleIn]; } static DRWAV_INLINE drwav_int16 drwav__mulaw_to_s16(drwav_uint8 sampleIn) { return (short)g_drwavMulawTable[sampleIn]; } static void drwav__pcm_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t totalSampleCount, unsigned int bytesPerSample) { unsigned int i; /* Special case for 8-bit sample data because it's treated as unsigned. */ if (bytesPerSample == 1) { drwav_u8_to_s16(pOut, pIn, totalSampleCount); return; } /* Slightly more optimal implementation for common formats. */ if (bytesPerSample == 2) { for (i = 0; i < totalSampleCount; ++i) { *pOut++ = ((const drwav_int16*)pIn)[i]; } return; } if (bytesPerSample == 3) { drwav_s24_to_s16(pOut, pIn, totalSampleCount); return; } if (bytesPerSample == 4) { drwav_s32_to_s16(pOut, (const drwav_int32*)pIn, totalSampleCount); return; } /* Anything more than 64 bits per sample is not supported. */ if (bytesPerSample > 8) { DRWAV_ZERO_MEMORY(pOut, totalSampleCount * sizeof(*pOut)); return; } /* Generic, slow converter. */ for (i = 0; i < totalSampleCount; ++i) { drwav_uint64 sample = 0; unsigned int shift = (8 - bytesPerSample) * 8; unsigned int j; for (j = 0; j < bytesPerSample; j += 1) { DRWAV_ASSERT(j < 8); sample |= (drwav_uint64)(pIn[j]) << shift; shift += 8; } pIn += j; *pOut++ = (drwav_int16)((drwav_int64)sample >> 48); } } static void drwav__ieee_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t totalSampleCount, unsigned int bytesPerSample) { if (bytesPerSample == 4) { drwav_f32_to_s16(pOut, (const float*)pIn, totalSampleCount); return; } else if (bytesPerSample == 8) { drwav_f64_to_s16(pOut, (const double*)pIn, totalSampleCount); return; } else { /* Only supporting 32- and 64-bit float. Output silence in all other cases. Contributions welcome for 16-bit float. 
*/
        /* Unsupported float width: emit silence rather than uninitialized data. */
        DRWAV_ZERO_MEMORY(pOut, totalSampleCount * sizeof(*pOut));
        return;
    }
}


/* Reads PCM frames and converts them to signed 16-bit. Decodes into a small stack buffer and
converts chunk-by-chunk so no heap allocation is required. */
static drwav_uint64 drwav_read_pcm_frames_s16__pcm(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut)
{
    drwav_uint32 bytesPerFrame;
    drwav_uint64 totalFramesRead;
    drwav_uint8 sampleData[4096];

    /* Fast path: source is already 16-bit PCM, or the caller passed NULL to skip frames. */
    if ((pWav->translatedFormatTag == DR_WAVE_FORMAT_PCM && pWav->bitsPerSample == 16) || pBufferOut == NULL) {
        return drwav_read_pcm_frames(pWav, framesToRead, pBufferOut);
    }

    bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav);
    if (bytesPerFrame == 0) {
        return 0;
    }

    totalFramesRead = 0;

    while (framesToRead > 0) {
        drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData);
        if (framesRead == 0) {
            break;
        }

        drwav__pcm_to_s16(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels), bytesPerFrame/pWav->channels);

        pBufferOut      += framesRead*pWav->channels;
        framesToRead    -= framesRead;
        totalFramesRead += framesRead;
    }

    return totalFramesRead;
}

/* Reads IEEE-float frames and converts them to signed 16-bit via drwav__ieee_to_s16(). */
static drwav_uint64 drwav_read_pcm_frames_s16__ieee(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut)
{
    drwav_uint64 totalFramesRead;
    drwav_uint8 sampleData[4096];
    drwav_uint32 bytesPerFrame;

    /* NULL output buffer means the caller only wants to advance the cursor. */
    if (pBufferOut == NULL) {
        return drwav_read_pcm_frames(pWav, framesToRead, NULL);
    }

    bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav);
    if (bytesPerFrame == 0) {
        return 0;
    }

    totalFramesRead = 0;

    while (framesToRead > 0) {
        drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData);
        if (framesRead == 0) {
            break;
        }

        drwav__ieee_to_s16(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels), bytesPerFrame/pWav->channels);

        pBufferOut      += framesRead*pWav->channels;
        framesToRead    -= framesRead;
        totalFramesRead += framesRead;
    }

    return totalFramesRead;
}

/* Reads A-law companded frames and expands them to signed 16-bit. */
static drwav_uint64 drwav_read_pcm_frames_s16__alaw(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut)
{
    drwav_uint64 totalFramesRead;
    drwav_uint8 sampleData[4096];
    drwav_uint32 bytesPerFrame;

    if (pBufferOut == NULL) {
        return drwav_read_pcm_frames(pWav, framesToRead, NULL);
    }

    bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav);
    if (bytesPerFrame == 0) {
        return 0;
    }

    totalFramesRead = 0;

    while (framesToRead > 0) {
        drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData);
        if (framesRead == 0) {
            break;
        }

        drwav_alaw_to_s16(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels));

        pBufferOut      += framesRead*pWav->channels;
        framesToRead    -= framesRead;
        totalFramesRead += framesRead;
    }

    return totalFramesRead;
}

/* Reads mu-law companded frames and expands them to signed 16-bit. */
static drwav_uint64 drwav_read_pcm_frames_s16__mulaw(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut)
{
    drwav_uint64 totalFramesRead;
    drwav_uint8 sampleData[4096];
    drwav_uint32 bytesPerFrame;

    if (pBufferOut == NULL) {
        return drwav_read_pcm_frames(pWav, framesToRead, NULL);
    }

    bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav);
    if (bytesPerFrame == 0) {
        return 0;
    }

    totalFramesRead = 0;

    while (framesToRead > 0) {
        drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData);
        if (framesRead == 0) {
            break;
        }

        drwav_mulaw_to_s16(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels));

        pBufferOut      += framesRead*pWav->channels;
        framesToRead    -= framesRead;
        totalFramesRead += framesRead;
    }

    return totalFramesRead;
}

/* Public entry point: reads PCM frames converted to signed 16-bit, dispatching on the
stream's translated format tag. */
DRWAV_API drwav_uint64 drwav_read_pcm_frames_s16(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut)
{
    if (pWav == NULL || framesToRead == 0) {
        return 0;
    }

    if (pBufferOut == NULL) {
        return drwav_read_pcm_frames(pWav, framesToRead, NULL);
    }

    /* Don't try to read more samples than can potentially fit in the output buffer.
*/
    if (framesToRead * pWav->channels * sizeof(drwav_int16) > DRWAV_SIZE_MAX) {
        framesToRead = DRWAV_SIZE_MAX / sizeof(drwav_int16) / pWav->channels;
    }

    /* Dispatch to the format-specific reader. Unrecognized formats read zero frames. */
    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_PCM) {
        return drwav_read_pcm_frames_s16__pcm(pWav, framesToRead, pBufferOut);
    }

    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_IEEE_FLOAT) {
        return drwav_read_pcm_frames_s16__ieee(pWav, framesToRead, pBufferOut);
    }

    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ALAW) {
        return drwav_read_pcm_frames_s16__alaw(pWav, framesToRead, pBufferOut);
    }

    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_MULAW) {
        return drwav_read_pcm_frames_s16__mulaw(pWav, framesToRead, pBufferOut);
    }

    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ADPCM) {
        return drwav_read_pcm_frames_s16__msadpcm(pWav, framesToRead, pBufferOut);
    }

    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_DVI_ADPCM) {
        return drwav_read_pcm_frames_s16__ima(pWav, framesToRead, pBufferOut);
    }

    return 0;
}

/* Reads s16 frames and guarantees little-endian byte order in the output buffer. */
DRWAV_API drwav_uint64 drwav_read_pcm_frames_s16le(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut)
{
    drwav_uint64 framesRead = drwav_read_pcm_frames_s16(pWav, framesToRead, pBufferOut);
    if (pBufferOut != NULL && drwav__is_little_endian() == DRWAV_FALSE) {
        drwav__bswap_samples_s16(pBufferOut, framesRead*pWav->channels);
    }

    return framesRead;
}

/* Reads s16 frames and guarantees big-endian byte order in the output buffer. */
DRWAV_API drwav_uint64 drwav_read_pcm_frames_s16be(drwav* pWav, drwav_uint64 framesToRead, drwav_int16* pBufferOut)
{
    drwav_uint64 framesRead = drwav_read_pcm_frames_s16(pWav, framesToRead, pBufferOut);
    if (pBufferOut != NULL && drwav__is_little_endian() == DRWAV_TRUE) {
        drwav__bswap_samples_s16(pBufferOut, framesRead*pWav->channels);
    }

    return framesRead;
}

/* Unsigned 8-bit PCM is offset-binary: shift into the 16-bit range, then re-centre around zero. */
DRWAV_API void drwav_u8_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t sampleCount)
{
    int r;
    size_t i;
    for (i = 0; i < sampleCount; ++i) {
        int x = pIn[i];
        r = x << 8;
        r = r - 32768;

        pOut[i] = (short)r;
    }
}

/* Packs the little-endian 24-bit sample into the top 3 bytes of an int so the arithmetic
shift performs sign extension, then keeps the most significant 16 bits. */
DRWAV_API void drwav_s24_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t sampleCount)
{
    int r;
    size_t i;
    for (i = 0; i < sampleCount; ++i) {
        int x = ((int)(((unsigned int)(((const drwav_uint8*)pIn)[i*3+0]) << 8) | ((unsigned int)(((const drwav_uint8*)pIn)[i*3+1]) << 16) | ((unsigned int)(((const drwav_uint8*)pIn)[i*3+2])) << 24)) >> 8;
        r = x >> 8;

        pOut[i] = (short)r;
    }
}

/* 32-bit to 16-bit: simply truncate to the most significant 16 bits. */
DRWAV_API void drwav_s32_to_s16(drwav_int16* pOut, const drwav_int32* pIn, size_t sampleCount)
{
    int r;
    size_t i;
    for (i = 0; i < sampleCount; ++i) {
        int x = pIn[i];
        r = x >> 16;

        pOut[i] = (short)r;
    }
}

/* Float to 16-bit: clamp to [-1,1], bias to [0,2], scale into [0,65535], then re-centre. */
DRWAV_API void drwav_f32_to_s16(drwav_int16* pOut, const float* pIn, size_t sampleCount)
{
    int r;
    size_t i;
    for (i = 0; i < sampleCount; ++i) {
        float x = pIn[i];
        float c;
        c = ((x < -1) ? -1 : ((x > 1) ? 1 : x));    /* Clamp to [-1, 1]. */
        c = c + 1;
        r = (int)(c * 32767.5f);
        r = r - 32768;

        pOut[i] = (short)r;
    }
}

/* Double to 16-bit: same clamp/bias/scale scheme as the f32 variant. */
DRWAV_API void drwav_f64_to_s16(drwav_int16* pOut, const double* pIn, size_t sampleCount)
{
    int r;
    size_t i;
    for (i = 0; i < sampleCount; ++i) {
        double x = pIn[i];
        double c;
        c = ((x < -1) ? -1 : ((x > 1) ? 1 : x));
        c = c + 1;
        r = (int)(c * 32767.5);
        r = r - 32768;

        pOut[i] = (short)r;
    }
}

/* A-law expansion to 16-bit via the precomputed lookup table. */
DRWAV_API void drwav_alaw_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t sampleCount)
{
    size_t i;
    for (i = 0; i < sampleCount; ++i) {
        pOut[i] = drwav__alaw_to_s16(pIn[i]);
    }
}

/* mu-law expansion to 16-bit via the precomputed lookup table. */
DRWAV_API void drwav_mulaw_to_s16(drwav_int16* pOut, const drwav_uint8* pIn, size_t sampleCount)
{
    size_t i;
    for (i = 0; i < sampleCount; ++i) {
        pOut[i] = drwav__mulaw_to_s16(pIn[i]);
    }
}

/* Converts raw integer PCM of the given byte width to 32-bit float in [-1, 1]. */
static void drwav__pcm_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount, unsigned int bytesPerSample)
{
    unsigned int i;

    /* Special case for 8-bit sample data because it's treated as unsigned. */
    if (bytesPerSample == 1) {
        drwav_u8_to_f32(pOut, pIn, sampleCount);
        return;
    }

    /* Slightly more optimal implementation for common formats.
*/ if (bytesPerSample == 2) { drwav_s16_to_f32(pOut, (const drwav_int16*)pIn, sampleCount); return; } if (bytesPerSample == 3) { drwav_s24_to_f32(pOut, pIn, sampleCount); return; } if (bytesPerSample == 4) { drwav_s32_to_f32(pOut, (const drwav_int32*)pIn, sampleCount); return; } /* Anything more than 64 bits per sample is not supported. */ if (bytesPerSample > 8) { DRWAV_ZERO_MEMORY(pOut, sampleCount * sizeof(*pOut)); return; } /* Generic, slow converter. */ for (i = 0; i < sampleCount; ++i) { drwav_uint64 sample = 0; unsigned int shift = (8 - bytesPerSample) * 8; unsigned int j; for (j = 0; j < bytesPerSample; j += 1) { DRWAV_ASSERT(j < 8); sample |= (drwav_uint64)(pIn[j]) << shift; shift += 8; } pIn += j; *pOut++ = (float)((drwav_int64)sample / 9223372036854775807.0); } } static void drwav__ieee_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount, unsigned int bytesPerSample) { if (bytesPerSample == 4) { unsigned int i; for (i = 0; i < sampleCount; ++i) { *pOut++ = ((const float*)pIn)[i]; } return; } else if (bytesPerSample == 8) { drwav_f64_to_f32(pOut, (const double*)pIn, sampleCount); return; } else { /* Only supporting 32- and 64-bit float. Output silence in all other cases. Contributions welcome for 16-bit float. 
*/ DRWAV_ZERO_MEMORY(pOut, sampleCount * sizeof(*pOut)); return; } } static drwav_uint64 drwav_read_pcm_frames_f32__pcm(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut) { drwav_uint64 totalFramesRead; drwav_uint8 sampleData[4096]; drwav_uint32 bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav); if (bytesPerFrame == 0) { return 0; } totalFramesRead = 0; while (framesToRead > 0) { drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData); if (framesRead == 0) { break; } drwav__pcm_to_f32(pBufferOut, sampleData, (size_t)framesRead*pWav->channels, bytesPerFrame/pWav->channels); pBufferOut += framesRead*pWav->channels; framesToRead -= framesRead; totalFramesRead += framesRead; } return totalFramesRead; } static drwav_uint64 drwav_read_pcm_frames_f32__msadpcm(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut) { /* We're just going to borrow the implementation from the drwav_read_s16() since ADPCM is a little bit more complicated than other formats and I don't want to duplicate that code. */ drwav_uint64 totalFramesRead = 0; drwav_int16 samples16[2048]; while (framesToRead > 0) { drwav_uint64 framesRead = drwav_read_pcm_frames_s16(pWav, drwav_min(framesToRead, drwav_countof(samples16)/pWav->channels), samples16); if (framesRead == 0) { break; } drwav_s16_to_f32(pBufferOut, samples16, (size_t)(framesRead*pWav->channels)); /* <-- Safe cast because we're clamping to 2048. */ pBufferOut += framesRead*pWav->channels; framesToRead -= framesRead; totalFramesRead += framesRead; } return totalFramesRead; } static drwav_uint64 drwav_read_pcm_frames_f32__ima(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut) { /* We're just going to borrow the implementation from the drwav_read_s16() since IMA-ADPCM is a little bit more complicated than other formats and I don't want to duplicate that code. 
*/
    drwav_uint64 totalFramesRead = 0;
    drwav_int16 samples16[2048];
    while (framesToRead > 0) {
        drwav_uint64 framesRead = drwav_read_pcm_frames_s16(pWav, drwav_min(framesToRead, drwav_countof(samples16)/pWav->channels), samples16);
        if (framesRead == 0) {
            break;
        }

        drwav_s16_to_f32(pBufferOut, samples16, (size_t)(framesRead*pWav->channels));   /* <-- Safe cast because we're clamping to 2048. */

        pBufferOut      += framesRead*pWav->channels;
        framesToRead    -= framesRead;
        totalFramesRead += framesRead;
    }

    return totalFramesRead;
}

/* Reads IEEE-float frames as 32-bit float, converting from 64-bit where necessary. */
static drwav_uint64 drwav_read_pcm_frames_f32__ieee(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut)
{
    drwav_uint64 totalFramesRead;
    drwav_uint8 sampleData[4096];
    drwav_uint32 bytesPerFrame;

    /* Fast path: the stream is already 32-bit float, so no conversion step is needed. */
    if (pWav->translatedFormatTag == DR_WAVE_FORMAT_IEEE_FLOAT && pWav->bitsPerSample == 32) {
        return drwav_read_pcm_frames(pWav, framesToRead, pBufferOut);
    }

    bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav);
    if (bytesPerFrame == 0) {
        return 0;
    }

    totalFramesRead = 0;

    while (framesToRead > 0) {
        drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData);
        if (framesRead == 0) {
            break;
        }

        drwav__ieee_to_f32(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels), bytesPerFrame/pWav->channels);

        pBufferOut      += framesRead*pWav->channels;
        framesToRead    -= framesRead;
        totalFramesRead += framesRead;
    }

    return totalFramesRead;
}

/* Reads A-law companded frames and expands them directly to 32-bit float. */
static drwav_uint64 drwav_read_pcm_frames_f32__alaw(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut)
{
    drwav_uint64 totalFramesRead;
    drwav_uint8 sampleData[4096];
    drwav_uint32 bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav);
    if (bytesPerFrame == 0) {
        return 0;
    }

    totalFramesRead = 0;

    while (framesToRead > 0) {
        drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData);
        if (framesRead == 0) {
            break;
        }

        drwav_alaw_to_f32(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels));

        pBufferOut      += framesRead*pWav->channels;
        framesToRead    -= framesRead;
        totalFramesRead += framesRead;
    }

    return totalFramesRead;
}

/* Reads mu-law companded frames and expands them directly to 32-bit float. */
static drwav_uint64 drwav_read_pcm_frames_f32__mulaw(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut)
{
    drwav_uint64 totalFramesRead;
    drwav_uint8 sampleData[4096];
    drwav_uint32 bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav);
    if (bytesPerFrame == 0) {
        return 0;
    }

    totalFramesRead = 0;

    while (framesToRead > 0) {
        drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData);
        if (framesRead == 0) {
            break;
        }

        drwav_mulaw_to_f32(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels));

        pBufferOut      += framesRead*pWav->channels;
        framesToRead    -= framesRead;
        totalFramesRead += framesRead;
    }

    return totalFramesRead;
}

/* Public entry point: reads PCM frames converted to 32-bit float, dispatching on the
stream's translated format tag. */
DRWAV_API drwav_uint64 drwav_read_pcm_frames_f32(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut)
{
    if (pWav == NULL || framesToRead == 0) {
        return 0;
    }

    if (pBufferOut == NULL) {
        return drwav_read_pcm_frames(pWav, framesToRead, NULL);
    }

    /* Don't try to read more samples than can potentially fit in the output buffer.
*/ if (framesToRead * pWav->channels * sizeof(float) > DRWAV_SIZE_MAX) { framesToRead = DRWAV_SIZE_MAX / sizeof(float) / pWav->channels; } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_PCM) { return drwav_read_pcm_frames_f32__pcm(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ADPCM) { return drwav_read_pcm_frames_f32__msadpcm(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_IEEE_FLOAT) { return drwav_read_pcm_frames_f32__ieee(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ALAW) { return drwav_read_pcm_frames_f32__alaw(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_MULAW) { return drwav_read_pcm_frames_f32__mulaw(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_DVI_ADPCM) { return drwav_read_pcm_frames_f32__ima(pWav, framesToRead, pBufferOut); } return 0; } DRWAV_API drwav_uint64 drwav_read_pcm_frames_f32le(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut) { drwav_uint64 framesRead = drwav_read_pcm_frames_f32(pWav, framesToRead, pBufferOut); if (pBufferOut != NULL && drwav__is_little_endian() == DRWAV_FALSE) { drwav__bswap_samples_f32(pBufferOut, framesRead*pWav->channels); } return framesRead; } DRWAV_API drwav_uint64 drwav_read_pcm_frames_f32be(drwav* pWav, drwav_uint64 framesToRead, float* pBufferOut) { drwav_uint64 framesRead = drwav_read_pcm_frames_f32(pWav, framesToRead, pBufferOut); if (pBufferOut != NULL && drwav__is_little_endian() == DRWAV_TRUE) { drwav__bswap_samples_f32(pBufferOut, framesRead*pWav->channels); } return framesRead; } DRWAV_API void drwav_u8_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount) { size_t i; if (pOut == NULL || pIn == NULL) { return; } #ifdef DR_WAV_LIBSNDFILE_COMPAT /* It appears libsndfile uses slightly different logic for the u8 -> f32 conversion to dr_wav, which in my opinion is incorrect. 
It appears libsndfile performs the conversion something like "f32 = (u8 / 256) * 2 - 1", however I think it should be "f32 = (u8 / 255) * 2 - 1" (note the divisor of 256 vs
    255). I use libsndfile as a benchmark for testing, so I'm therefore leaving this block here just for my automated correctness testing. This is disabled by default.
    */
    for (i = 0; i < sampleCount; ++i) {
        *pOut++ = (pIn[i] / 256.0f) * 2 - 1;
    }
#else
    for (i = 0; i < sampleCount; ++i) {
        float x = pIn[i];
        x = x * 0.00784313725490196078f;    /* 0..255 to 0..2 */
        x = x - 1;                          /* 0..2 to -1..1 */

        *pOut++ = x;
    }
#endif
}

/* s16 to float: scale by 1/32768. */
DRWAV_API void drwav_s16_to_f32(float* pOut, const drwav_int16* pIn, size_t sampleCount)
{
    size_t i;

    if (pOut == NULL || pIn == NULL) {
        return;
    }

    for (i = 0; i < sampleCount; ++i) {
        *pOut++ = pIn[i] * 0.000030517578125f;
    }
}

/* s24 to float: assemble into the top 3 bytes for sign extension, then scale by 1/2^23. */
DRWAV_API void drwav_s24_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount)
{
    size_t i;

    if (pOut == NULL || pIn == NULL) {
        return;
    }

    for (i = 0; i < sampleCount; ++i) {
        double x;
        drwav_uint32 a = ((drwav_uint32)(pIn[i*3+0]) <<  8);
        drwav_uint32 b = ((drwav_uint32)(pIn[i*3+1]) << 16);
        drwav_uint32 c = ((drwav_uint32)(pIn[i*3+2]) << 24);

        x = (double)((drwav_int32)(a | b | c) >> 8);
        *pOut++ = (float)(x * 0.00000011920928955078125);
    }
}

/* s32 to float: scale by 1/2^31. */
DRWAV_API void drwav_s32_to_f32(float* pOut, const drwav_int32* pIn, size_t sampleCount)
{
    size_t i;

    if (pOut == NULL || pIn == NULL) {
        return;
    }

    for (i = 0; i < sampleCount; ++i) {
        *pOut++ = (float)(pIn[i] / 2147483648.0);
    }
}

/* f64 to f32: simple narrowing conversion. */
DRWAV_API void drwav_f64_to_f32(float* pOut, const double* pIn, size_t sampleCount)
{
    size_t i;

    if (pOut == NULL || pIn == NULL) {
        return;
    }

    for (i = 0; i < sampleCount; ++i) {
        *pOut++ = (float)pIn[i];
    }
}

/* A-law to float: expand via the lookup table, then normalize by 1/32768. */
DRWAV_API void drwav_alaw_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount)
{
    size_t i;

    if (pOut == NULL || pIn == NULL) {
        return;
    }

    for (i = 0; i < sampleCount; ++i) {
        *pOut++ = drwav__alaw_to_s16(pIn[i]) / 32768.0f;
    }
}

/* mu-law to float: expand via the lookup table, then normalize by 1/32768. */
DRWAV_API void drwav_mulaw_to_f32(float* pOut, const drwav_uint8* pIn, size_t sampleCount)
{
    size_t i;

    if (pOut == NULL || pIn == NULL) {
        return;
    }

    for (i = 0; i < sampleCount; ++i) {
        *pOut++ = drwav__mulaw_to_s16(pIn[i]) / 32768.0f;
    }
}

/* Converts raw integer PCM of the given byte width to signed 32-bit. */
static void drwav__pcm_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t totalSampleCount, unsigned int bytesPerSample)
{
    unsigned int i;

    /* Special case for 8-bit sample data because it's treated as unsigned. */
    if (bytesPerSample == 1) {
        drwav_u8_to_s32(pOut, pIn, totalSampleCount);
        return;
    }

    /* Slightly more optimal implementation for common formats. */
    if (bytesPerSample == 2) {
        drwav_s16_to_s32(pOut, (const drwav_int16*)pIn, totalSampleCount);
        return;
    }
    if (bytesPerSample == 3) {
        drwav_s24_to_s32(pOut, pIn, totalSampleCount);
        return;
    }
    if (bytesPerSample == 4) {
        /* Already s32: straight copy. */
        for (i = 0; i < totalSampleCount; ++i) {
            *pOut++ = ((const drwav_int32*)pIn)[i];
        }
        return;
    }


    /* Anything more than 64 bits per sample is not supported. */
    if (bytesPerSample > 8) {
        DRWAV_ZERO_MEMORY(pOut, totalSampleCount * sizeof(*pOut));
        return;
    }


    /* Generic, slow converter: left-justify the sample bytes into a 64-bit integer, then
    keep the most significant 32 bits. */
    for (i = 0; i < totalSampleCount; ++i) {
        drwav_uint64 sample = 0;
        unsigned int shift  = (8 - bytesPerSample) * 8;

        unsigned int j;
        for (j = 0; j < bytesPerSample; j += 1) {
            DRWAV_ASSERT(j < 8);
            sample |= (drwav_uint64)(pIn[j]) << shift;
            shift  += 8;
        }

        pIn += j;
        *pOut++ = (drwav_int32)((drwav_int64)sample >> 32);
    }
}

/* Converts raw IEEE-float samples (32- or 64-bit) to signed 32-bit. */
static void drwav__ieee_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t totalSampleCount, unsigned int bytesPerSample)
{
    if (bytesPerSample == 4) {
        drwav_f32_to_s32(pOut, (const float*)pIn, totalSampleCount);
        return;
    } else if (bytesPerSample == 8) {
        drwav_f64_to_s32(pOut, (const double*)pIn, totalSampleCount);
        return;
    } else {
        /* Only supporting 32- and 64-bit float. Output silence in all other cases. Contributions welcome for 16-bit float.
*/ DRWAV_ZERO_MEMORY(pOut, totalSampleCount * sizeof(*pOut)); return; } } static drwav_uint64 drwav_read_pcm_frames_s32__pcm(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut) { drwav_uint64 totalFramesRead; drwav_uint8 sampleData[4096]; drwav_uint32 bytesPerFrame; /* Fast path. */ if (pWav->translatedFormatTag == DR_WAVE_FORMAT_PCM && pWav->bitsPerSample == 32) { return drwav_read_pcm_frames(pWav, framesToRead, pBufferOut); } bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav); if (bytesPerFrame == 0) { return 0; } totalFramesRead = 0; while (framesToRead > 0) { drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData); if (framesRead == 0) { break; } drwav__pcm_to_s32(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels), bytesPerFrame/pWav->channels); pBufferOut += framesRead*pWav->channels; framesToRead -= framesRead; totalFramesRead += framesRead; } return totalFramesRead; } static drwav_uint64 drwav_read_pcm_frames_s32__msadpcm(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut) { /* We're just going to borrow the implementation from the drwav_read_s16() since ADPCM is a little bit more complicated than other formats and I don't want to duplicate that code. */ drwav_uint64 totalFramesRead = 0; drwav_int16 samples16[2048]; while (framesToRead > 0) { drwav_uint64 framesRead = drwav_read_pcm_frames_s16(pWav, drwav_min(framesToRead, drwav_countof(samples16)/pWav->channels), samples16); if (framesRead == 0) { break; } drwav_s16_to_s32(pBufferOut, samples16, (size_t)(framesRead*pWav->channels)); /* <-- Safe cast because we're clamping to 2048. 
*/ pBufferOut += framesRead*pWav->channels; framesToRead -= framesRead; totalFramesRead += framesRead; } return totalFramesRead; } static drwav_uint64 drwav_read_pcm_frames_s32__ima(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut) { /* We're just going to borrow the implementation from the drwav_read_s16() since IMA-ADPCM is a little bit more complicated than other formats and I don't want to duplicate that code. */ drwav_uint64 totalFramesRead = 0; drwav_int16 samples16[2048]; while (framesToRead > 0) { drwav_uint64 framesRead = drwav_read_pcm_frames_s16(pWav, drwav_min(framesToRead, drwav_countof(samples16)/pWav->channels), samples16); if (framesRead == 0) { break; } drwav_s16_to_s32(pBufferOut, samples16, (size_t)(framesRead*pWav->channels)); /* <-- Safe cast because we're clamping to 2048. */ pBufferOut += framesRead*pWav->channels; framesToRead -= framesRead; totalFramesRead += framesRead; } return totalFramesRead; } static drwav_uint64 drwav_read_pcm_frames_s32__ieee(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut) { drwav_uint64 totalFramesRead; drwav_uint8 sampleData[4096]; drwav_uint32 bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav); if (bytesPerFrame == 0) { return 0; } totalFramesRead = 0; while (framesToRead > 0) { drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData); if (framesRead == 0) { break; } drwav__ieee_to_s32(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels), bytesPerFrame/pWav->channels); pBufferOut += framesRead*pWav->channels; framesToRead -= framesRead; totalFramesRead += framesRead; } return totalFramesRead; } static drwav_uint64 drwav_read_pcm_frames_s32__alaw(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut) { drwav_uint64 totalFramesRead; drwav_uint8 sampleData[4096]; drwav_uint32 bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav); if (bytesPerFrame == 0) { return 0; } totalFramesRead = 0; while 
(framesToRead > 0) { drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData); if (framesRead == 0) { break; } drwav_alaw_to_s32(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels)); pBufferOut += framesRead*pWav->channels; framesToRead -= framesRead; totalFramesRead += framesRead; } return totalFramesRead; } static drwav_uint64 drwav_read_pcm_frames_s32__mulaw(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut) { drwav_uint64 totalFramesRead; drwav_uint8 sampleData[4096]; drwav_uint32 bytesPerFrame = drwav_get_bytes_per_pcm_frame(pWav); if (bytesPerFrame == 0) { return 0; } totalFramesRead = 0; while (framesToRead > 0) { drwav_uint64 framesRead = drwav_read_pcm_frames(pWav, drwav_min(framesToRead, sizeof(sampleData)/bytesPerFrame), sampleData); if (framesRead == 0) { break; } drwav_mulaw_to_s32(pBufferOut, sampleData, (size_t)(framesRead*pWav->channels)); pBufferOut += framesRead*pWav->channels; framesToRead -= framesRead; totalFramesRead += framesRead; } return totalFramesRead; } DRWAV_API drwav_uint64 drwav_read_pcm_frames_s32(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut) { if (pWav == NULL || framesToRead == 0) { return 0; } if (pBufferOut == NULL) { return drwav_read_pcm_frames(pWav, framesToRead, NULL); } /* Don't try to read more samples than can potentially fit in the output buffer. 
*/ if (framesToRead * pWav->channels * sizeof(drwav_int32) > DRWAV_SIZE_MAX) { framesToRead = DRWAV_SIZE_MAX / sizeof(drwav_int32) / pWav->channels; } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_PCM) { return drwav_read_pcm_frames_s32__pcm(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ADPCM) { return drwav_read_pcm_frames_s32__msadpcm(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_IEEE_FLOAT) { return drwav_read_pcm_frames_s32__ieee(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_ALAW) { return drwav_read_pcm_frames_s32__alaw(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_MULAW) { return drwav_read_pcm_frames_s32__mulaw(pWav, framesToRead, pBufferOut); } if (pWav->translatedFormatTag == DR_WAVE_FORMAT_DVI_ADPCM) { return drwav_read_pcm_frames_s32__ima(pWav, framesToRead, pBufferOut); } return 0; } DRWAV_API drwav_uint64 drwav_read_pcm_frames_s32le(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut) { drwav_uint64 framesRead = drwav_read_pcm_frames_s32(pWav, framesToRead, pBufferOut); if (pBufferOut != NULL && drwav__is_little_endian() == DRWAV_FALSE) { drwav__bswap_samples_s32(pBufferOut, framesRead*pWav->channels); } return framesRead; } DRWAV_API drwav_uint64 drwav_read_pcm_frames_s32be(drwav* pWav, drwav_uint64 framesToRead, drwav_int32* pBufferOut) { drwav_uint64 framesRead = drwav_read_pcm_frames_s32(pWav, framesToRead, pBufferOut); if (pBufferOut != NULL && drwav__is_little_endian() == DRWAV_TRUE) { drwav__bswap_samples_s32(pBufferOut, framesRead*pWav->channels); } return framesRead; } DRWAV_API void drwav_u8_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t sampleCount) { size_t i; if (pOut == NULL || pIn == NULL) { return; } for (i = 0; i < sampleCount; ++i) { *pOut++ = ((int)pIn[i] - 128) << 24; } } DRWAV_API void drwav_s16_to_s32(drwav_int32* pOut, const drwav_int16* pIn, size_t 
sampleCount) { size_t i; if (pOut == NULL || pIn == NULL) { return; } for (i = 0; i < sampleCount; ++i) { *pOut++ = pIn[i] << 16; } } DRWAV_API void drwav_s24_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t sampleCount) { size_t i; if (pOut == NULL || pIn == NULL) { return; } for (i = 0; i < sampleCount; ++i) { unsigned int s0 = pIn[i*3 + 0]; unsigned int s1 = pIn[i*3 + 1]; unsigned int s2 = pIn[i*3 + 2]; drwav_int32 sample32 = (drwav_int32)((s0 << 8) | (s1 << 16) | (s2 << 24)); *pOut++ = sample32; } } DRWAV_API void drwav_f32_to_s32(drwav_int32* pOut, const float* pIn, size_t sampleCount) { size_t i; if (pOut == NULL || pIn == NULL) { return; } for (i = 0; i < sampleCount; ++i) { *pOut++ = (drwav_int32)(2147483648.0 * pIn[i]); } } DRWAV_API void drwav_f64_to_s32(drwav_int32* pOut, const double* pIn, size_t sampleCount) { size_t i; if (pOut == NULL || pIn == NULL) { return; } for (i = 0; i < sampleCount; ++i) { *pOut++ = (drwav_int32)(2147483648.0 * pIn[i]); } } DRWAV_API void drwav_alaw_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t sampleCount) { size_t i; if (pOut == NULL || pIn == NULL) { return; } for (i = 0; i < sampleCount; ++i) { *pOut++ = ((drwav_int32)drwav__alaw_to_s16(pIn[i])) << 16; } } DRWAV_API void drwav_mulaw_to_s32(drwav_int32* pOut, const drwav_uint8* pIn, size_t sampleCount) { size_t i; if (pOut == NULL || pIn == NULL) { return; } for (i= 0; i < sampleCount; ++i) { *pOut++ = ((drwav_int32)drwav__mulaw_to_s16(pIn[i])) << 16; } } static drwav_int16* drwav__read_pcm_frames_and_close_s16(drwav* pWav, unsigned int* channels, unsigned int* sampleRate, drwav_uint64* totalFrameCount) { drwav_uint64 sampleDataSize; drwav_int16* pSampleData; drwav_uint64 framesRead; DRWAV_ASSERT(pWav != NULL); sampleDataSize = pWav->totalPCMFrameCount * pWav->channels * sizeof(drwav_int16); if (sampleDataSize > DRWAV_SIZE_MAX) { drwav_uninit(pWav); return NULL; /* File's too big. 
*/ } pSampleData = (drwav_int16*)drwav__malloc_from_callbacks((size_t)sampleDataSize, &pWav->allocationCallbacks); /* <-- Safe cast due to the check above. */ if (pSampleData == NULL) { drwav_uninit(pWav); return NULL; /* Failed to allocate memory. */ } framesRead = drwav_read_pcm_frames_s16(pWav, (size_t)pWav->totalPCMFrameCount, pSampleData); if (framesRead != pWav->totalPCMFrameCount) { drwav__free_from_callbacks(pSampleData, &pWav->allocationCallbacks); drwav_uninit(pWav); return NULL; /* There was an error reading the samples. */ } drwav_uninit(pWav); if (sampleRate) { *sampleRate = pWav->sampleRate; } if (channels) { *channels = pWav->channels; } if (totalFrameCount) { *totalFrameCount = pWav->totalPCMFrameCount; } return pSampleData; } static float* drwav__read_pcm_frames_and_close_f32(drwav* pWav, unsigned int* channels, unsigned int* sampleRate, drwav_uint64* totalFrameCount) { drwav_uint64 sampleDataSize; float* pSampleData; drwav_uint64 framesRead; DRWAV_ASSERT(pWav != NULL); sampleDataSize = pWav->totalPCMFrameCount * pWav->channels * sizeof(float); if (sampleDataSize > DRWAV_SIZE_MAX) { drwav_uninit(pWav); return NULL; /* File's too big. */ } pSampleData = (float*)drwav__malloc_from_callbacks((size_t)sampleDataSize, &pWav->allocationCallbacks); /* <-- Safe cast due to the check above. */ if (pSampleData == NULL) { drwav_uninit(pWav); return NULL; /* Failed to allocate memory. */ } framesRead = drwav_read_pcm_frames_f32(pWav, (size_t)pWav->totalPCMFrameCount, pSampleData); if (framesRead != pWav->totalPCMFrameCount) { drwav__free_from_callbacks(pSampleData, &pWav->allocationCallbacks); drwav_uninit(pWav); return NULL; /* There was an error reading the samples. 
*/ } drwav_uninit(pWav); if (sampleRate) { *sampleRate = pWav->sampleRate; } if (channels) { *channels = pWav->channels; } if (totalFrameCount) { *totalFrameCount = pWav->totalPCMFrameCount; } return pSampleData; } static drwav_int32* drwav__read_pcm_frames_and_close_s32(drwav* pWav, unsigned int* channels, unsigned int* sampleRate, drwav_uint64* totalFrameCount) { drwav_uint64 sampleDataSize; drwav_int32* pSampleData; drwav_uint64 framesRead; DRWAV_ASSERT(pWav != NULL); sampleDataSize = pWav->totalPCMFrameCount * pWav->channels * sizeof(drwav_int32); if (sampleDataSize > DRWAV_SIZE_MAX) { drwav_uninit(pWav); return NULL; /* File's too big. */ } pSampleData = (drwav_int32*)drwav__malloc_from_callbacks((size_t)sampleDataSize, &pWav->allocationCallbacks); /* <-- Safe cast due to the check above. */ if (pSampleData == NULL) { drwav_uninit(pWav); return NULL; /* Failed to allocate memory. */ } framesRead = drwav_read_pcm_frames_s32(pWav, (size_t)pWav->totalPCMFrameCount, pSampleData); if (framesRead != pWav->totalPCMFrameCount) { drwav__free_from_callbacks(pSampleData, &pWav->allocationCallbacks); drwav_uninit(pWav); return NULL; /* There was an error reading the samples. 
*/ } drwav_uninit(pWav); if (sampleRate) { *sampleRate = pWav->sampleRate; } if (channels) { *channels = pWav->channels; } if (totalFrameCount) { *totalFrameCount = pWav->totalPCMFrameCount; } return pSampleData; } DRWAV_API drwav_int16* drwav_open_and_read_pcm_frames_s16(drwav_read_proc onRead, drwav_seek_proc onSeek, void* pUserData, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (channelsOut) { *channelsOut = 0; } if (sampleRateOut) { *sampleRateOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init(&wav, onRead, onSeek, pUserData, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_s16(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } DRWAV_API float* drwav_open_and_read_pcm_frames_f32(drwav_read_proc onRead, drwav_seek_proc onSeek, void* pUserData, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (channelsOut) { *channelsOut = 0; } if (sampleRateOut) { *sampleRateOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init(&wav, onRead, onSeek, pUserData, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_f32(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } DRWAV_API drwav_int32* drwav_open_and_read_pcm_frames_s32(drwav_read_proc onRead, drwav_seek_proc onSeek, void* pUserData, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (channelsOut) { *channelsOut = 0; } if (sampleRateOut) { *sampleRateOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init(&wav, onRead, onSeek, pUserData, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_s32(&wav, channelsOut, 
sampleRateOut, totalFrameCountOut); } #ifndef DR_WAV_NO_STDIO DRWAV_API drwav_int16* drwav_open_file_and_read_pcm_frames_s16(const char* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (channelsOut) { *channelsOut = 0; } if (sampleRateOut) { *sampleRateOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init_file(&wav, filename, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_s16(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } DRWAV_API float* drwav_open_file_and_read_pcm_frames_f32(const char* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (channelsOut) { *channelsOut = 0; } if (sampleRateOut) { *sampleRateOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init_file(&wav, filename, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_f32(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } DRWAV_API drwav_int32* drwav_open_file_and_read_pcm_frames_s32(const char* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (channelsOut) { *channelsOut = 0; } if (sampleRateOut) { *sampleRateOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init_file(&wav, filename, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_s32(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } DRWAV_API drwav_int16* drwav_open_file_and_read_pcm_frames_s16_w(const wchar_t* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (sampleRateOut) { 
*sampleRateOut = 0; } if (channelsOut) { *channelsOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init_file_w(&wav, filename, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_s16(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } DRWAV_API float* drwav_open_file_and_read_pcm_frames_f32_w(const wchar_t* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (sampleRateOut) { *sampleRateOut = 0; } if (channelsOut) { *channelsOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init_file_w(&wav, filename, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_f32(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } DRWAV_API drwav_int32* drwav_open_file_and_read_pcm_frames_s32_w(const wchar_t* filename, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (sampleRateOut) { *sampleRateOut = 0; } if (channelsOut) { *channelsOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init_file_w(&wav, filename, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_s32(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } #endif DRWAV_API drwav_int16* drwav_open_memory_and_read_pcm_frames_s16(const void* data, size_t dataSize, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (channelsOut) { *channelsOut = 0; } if (sampleRateOut) { *sampleRateOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init_memory(&wav, data, dataSize, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_s16(&wav, channelsOut, sampleRateOut, 
totalFrameCountOut); } DRWAV_API float* drwav_open_memory_and_read_pcm_frames_f32(const void* data, size_t dataSize, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (channelsOut) { *channelsOut = 0; } if (sampleRateOut) { *sampleRateOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init_memory(&wav, data, dataSize, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_f32(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } DRWAV_API drwav_int32* drwav_open_memory_and_read_pcm_frames_s32(const void* data, size_t dataSize, unsigned int* channelsOut, unsigned int* sampleRateOut, drwav_uint64* totalFrameCountOut, const drwav_allocation_callbacks* pAllocationCallbacks) { drwav wav; if (channelsOut) { *channelsOut = 0; } if (sampleRateOut) { *sampleRateOut = 0; } if (totalFrameCountOut) { *totalFrameCountOut = 0; } if (!drwav_init_memory(&wav, data, dataSize, pAllocationCallbacks)) { return NULL; } return drwav__read_pcm_frames_and_close_s32(&wav, channelsOut, sampleRateOut, totalFrameCountOut); } #endif /* DR_WAV_NO_CONVERSION_API */ DRWAV_API void drwav_free(void* p, const drwav_allocation_callbacks* pAllocationCallbacks) { if (pAllocationCallbacks != NULL) { drwav__free_from_callbacks(p, pAllocationCallbacks); } else { drwav__free_default(p, NULL); } } DRWAV_API drwav_uint16 drwav_bytes_to_u16(const drwav_uint8* data) { return drwav__bytes_to_u16(data); } DRWAV_API drwav_int16 drwav_bytes_to_s16(const drwav_uint8* data) { return drwav__bytes_to_s16(data); } DRWAV_API drwav_uint32 drwav_bytes_to_u32(const drwav_uint8* data) { return drwav__bytes_to_u32(data); } DRWAV_API drwav_int32 drwav_bytes_to_s32(const drwav_uint8* data) { return drwav__bytes_to_s32(data); } DRWAV_API drwav_uint64 drwav_bytes_to_u64(const drwav_uint8* data) { return drwav__bytes_to_u64(data); } DRWAV_API drwav_int64 
drwav_bytes_to_s64(const drwav_uint8* data) { return drwav__bytes_to_s64(data); } DRWAV_API drwav_bool32 drwav_guid_equal(const drwav_uint8 a[16], const drwav_uint8 b[16]) { return drwav__guid_equal(a, b); } DRWAV_API drwav_bool32 drwav_fourcc_equal(const drwav_uint8* a, const char* b) { return drwav__fourcc_equal(a, b); } #endif /* dr_wav_c */ #endif /* DR_WAV_IMPLEMENTATION */ /* RELEASE NOTES - v0.11.0 ======================= Version 0.11.0 has breaking API changes. Improved Client-Defined Memory Allocation ----------------------------------------- The main change with this release is the addition of a more flexible way of implementing custom memory allocation routines. The existing system of DRWAV_MALLOC, DRWAV_REALLOC and DRWAV_FREE are still in place and will be used by default when no custom allocation callbacks are specified. To use the new system, you pass in a pointer to a drwav_allocation_callbacks object to drwav_init() and family, like this: void* my_malloc(size_t sz, void* pUserData) { return malloc(sz); } void* my_realloc(void* p, size_t sz, void* pUserData) { return realloc(p, sz); } void my_free(void* p, void* pUserData) { free(p); } ... drwav_allocation_callbacks allocationCallbacks; allocationCallbacks.pUserData = &myData; allocationCallbacks.onMalloc = my_malloc; allocationCallbacks.onRealloc = my_realloc; allocationCallbacks.onFree = my_free; drwav_init_file(&wav, "my_file.wav", &allocationCallbacks); The advantage of this new system is that it allows you to specify user data which will be passed in to the allocation routines. Passing in null for the allocation callbacks object will cause dr_wav to use defaults which is the same as DRWAV_MALLOC, DRWAV_REALLOC and DRWAV_FREE and the equivalent of how it worked in previous versions. Every API that opens a drwav object now takes this extra parameter. 
These include the following: drwav_init() drwav_init_ex() drwav_init_file() drwav_init_file_ex() drwav_init_file_w() drwav_init_file_w_ex() drwav_init_memory() drwav_init_memory_ex() drwav_init_write() drwav_init_write_sequential() drwav_init_write_sequential_pcm_frames() drwav_init_file_write() drwav_init_file_write_sequential() drwav_init_file_write_sequential_pcm_frames() drwav_init_file_write_w() drwav_init_file_write_sequential_w() drwav_init_file_write_sequential_pcm_frames_w() drwav_init_memory_write() drwav_init_memory_write_sequential() drwav_init_memory_write_sequential_pcm_frames() drwav_open_and_read_pcm_frames_s16() drwav_open_and_read_pcm_frames_f32() drwav_open_and_read_pcm_frames_s32() drwav_open_file_and_read_pcm_frames_s16() drwav_open_file_and_read_pcm_frames_f32() drwav_open_file_and_read_pcm_frames_s32() drwav_open_file_and_read_pcm_frames_s16_w() drwav_open_file_and_read_pcm_frames_f32_w() drwav_open_file_and_read_pcm_frames_s32_w() drwav_open_memory_and_read_pcm_frames_s16() drwav_open_memory_and_read_pcm_frames_f32() drwav_open_memory_and_read_pcm_frames_s32() Endian Improvements ------------------- Previously, the following APIs returned little-endian audio data. These now return native-endian data. This improves compatibility on big-endian architectures. 
drwav_read_pcm_frames() drwav_read_pcm_frames_s16() drwav_read_pcm_frames_s32() drwav_read_pcm_frames_f32() drwav_open_and_read_pcm_frames_s16() drwav_open_and_read_pcm_frames_s32() drwav_open_and_read_pcm_frames_f32() drwav_open_file_and_read_pcm_frames_s16() drwav_open_file_and_read_pcm_frames_s32() drwav_open_file_and_read_pcm_frames_f32() drwav_open_file_and_read_pcm_frames_s16_w() drwav_open_file_and_read_pcm_frames_s32_w() drwav_open_file_and_read_pcm_frames_f32_w() drwav_open_memory_and_read_pcm_frames_s16() drwav_open_memory_and_read_pcm_frames_s32() drwav_open_memory_and_read_pcm_frames_f32() APIs have been added to give you explicit control over whether or not audio data is read or written in big- or little-endian byte order: drwav_read_pcm_frames_le() drwav_read_pcm_frames_be() drwav_read_pcm_frames_s16le() drwav_read_pcm_frames_s16be() drwav_read_pcm_frames_f32le() drwav_read_pcm_frames_f32be() drwav_read_pcm_frames_s32le() drwav_read_pcm_frames_s32be() drwav_write_pcm_frames_le() drwav_write_pcm_frames_be() Removed APIs ------------ The following APIs were deprecated in version 0.10.0 and have now been removed: drwav_open() drwav_open_ex() drwav_open_write() drwav_open_write_sequential() drwav_open_file() drwav_open_file_ex() drwav_open_file_write() drwav_open_file_write_sequential() drwav_open_memory() drwav_open_memory_ex() drwav_open_memory_write() drwav_open_memory_write_sequential() drwav_close() RELEASE NOTES - v0.10.0 ======================= Version 0.10.0 has breaking API changes. There are no significant bug fixes in this release, so if you are affected you do not need to upgrade. 
Removed APIs
------------
The following APIs were deprecated in version 0.9.0 and have been completely removed in version 0.10.0:

    drwav_read()
    drwav_read_s16()
    drwav_read_f32()
    drwav_read_s32()
    drwav_seek_to_sample()
    drwav_write()
    drwav_open_and_read_s16()
    drwav_open_and_read_f32()
    drwav_open_and_read_s32()
    drwav_open_file_and_read_s16()
    drwav_open_file_and_read_f32()
    drwav_open_file_and_read_s32()
    drwav_open_memory_and_read_s16()
    drwav_open_memory_and_read_f32()
    drwav_open_memory_and_read_s32()
    drwav::totalSampleCount

See release notes for version 0.9.0 at the bottom of this file for replacement APIs.

Deprecated APIs
---------------
The following APIs have been deprecated. There is a confusing and completely arbitrary difference between drwav_init*()
and drwav_open*(), where drwav_init*() initializes a pre-allocated drwav object, whereas drwav_open*() will first
allocate a drwav object on the heap and then initialize it. drwav_open*() has been deprecated which means you must now
use a pre-allocated drwav object with drwav_init*(). If you need the previous functionality, you can just do a malloc()
followed by a call to one of the drwav_init*() APIs.

    drwav_open()
    drwav_open_ex()
    drwav_open_write()
    drwav_open_write_sequential()
    drwav_open_file()
    drwav_open_file_ex()
    drwav_open_file_write()
    drwav_open_file_write_sequential()
    drwav_open_memory()
    drwav_open_memory_ex()
    drwav_open_memory_write()
    drwav_open_memory_write_sequential()
    drwav_close()

These APIs will be removed completely in a future version. The rationale for this change is to remove confusion between
the two different ways to initialize a drwav object.
*/

/*
REVISION HISTORY
================
v0.12.16 - 2020-12-02
  - Fix a bug when trying to read more bytes than can fit in a size_t.

v0.12.15 - 2020-11-21
  - Fix compilation with OpenWatcom.

v0.12.14 - 2020-11-13
  - Minor code clean up.

v0.12.13 - 2020-11-01
  - Improve compiler support for older versions of GCC.

v0.12.12 - 2020-09-28
  - Add support for RF64.
- Fix a bug in writing mode where the size of the RIFF chunk incorrectly includes the header section. v0.12.11 - 2020-09-08 - Fix a compilation error on older compilers. v0.12.10 - 2020-08-24 - Fix a bug when seeking with ADPCM formats. v0.12.9 - 2020-08-02 - Simplify sized types. v0.12.8 - 2020-07-25 - Fix a compilation warning. v0.12.7 - 2020-07-15 - Fix some bugs on big-endian architectures. - Fix an error in s24 to f32 conversion. v0.12.6 - 2020-06-23 - Change drwav_read_*() to allow NULL to be passed in as the output buffer which is equivalent to a forward seek. - Fix a buffer overflow when trying to decode invalid IMA-ADPCM files. - Add include guard for the implementation section. v0.12.5 - 2020-05-27 - Minor documentation fix. v0.12.4 - 2020-05-16 - Replace assert() with DRWAV_ASSERT(). - Add compile-time and run-time version querying. - DRWAV_VERSION_MINOR - DRWAV_VERSION_MAJOR - DRWAV_VERSION_REVISION - DRWAV_VERSION_STRING - drwav_version() - drwav_version_string() v0.12.3 - 2020-04-30 - Fix compilation errors with VC6. v0.12.2 - 2020-04-21 - Fix a bug where drwav_init_file() does not close the file handle after attempting to load an erroneous file. v0.12.1 - 2020-04-13 - Fix some pedantic warnings. v0.12.0 - 2020-04-04 - API CHANGE: Add container and format parameters to the chunk callback. - Minor documentation updates. v0.11.5 - 2020-03-07 - Fix compilation error with Visual Studio .NET 2003. v0.11.4 - 2020-01-29 - Fix some static analysis warnings. - Fix a bug when reading f32 samples from an A-law encoded stream. v0.11.3 - 2020-01-12 - Minor changes to some f32 format conversion routines. - Minor bug fix for ADPCM conversion when end of file is reached. v0.11.2 - 2019-12-02 - Fix a possible crash when using custom memory allocators without a custom realloc() implementation. - Fix an integer overflow bug. - Fix a null pointer dereference bug. - Add limits to sample rate, channels and bits per sample to tighten up some validation. 
v0.11.1 - 2019-10-07 - Internal code clean up. v0.11.0 - 2019-10-06 - API CHANGE: Add support for user defined memory allocation routines. This system allows the program to specify their own memory allocation routines with a user data pointer for client-specific contextual data. This adds an extra parameter to the end of the following APIs: - drwav_init() - drwav_init_ex() - drwav_init_file() - drwav_init_file_ex() - drwav_init_file_w() - drwav_init_file_w_ex() - drwav_init_memory() - drwav_init_memory_ex() - drwav_init_write() - drwav_init_write_sequential() - drwav_init_write_sequential_pcm_frames() - drwav_init_file_write() - drwav_init_file_write_sequential() - drwav_init_file_write_sequential_pcm_frames() - drwav_init_file_write_w() - drwav_init_file_write_sequential_w() - drwav_init_file_write_sequential_pcm_frames_w() - drwav_init_memory_write() - drwav_init_memory_write_sequential() - drwav_init_memory_write_sequential_pcm_frames() - drwav_open_and_read_pcm_frames_s16() - drwav_open_and_read_pcm_frames_f32() - drwav_open_and_read_pcm_frames_s32() - drwav_open_file_and_read_pcm_frames_s16() - drwav_open_file_and_read_pcm_frames_f32() - drwav_open_file_and_read_pcm_frames_s32() - drwav_open_file_and_read_pcm_frames_s16_w() - drwav_open_file_and_read_pcm_frames_f32_w() - drwav_open_file_and_read_pcm_frames_s32_w() - drwav_open_memory_and_read_pcm_frames_s16() - drwav_open_memory_and_read_pcm_frames_f32() - drwav_open_memory_and_read_pcm_frames_s32() Set this extra parameter to NULL to use defaults which is the same as the previous behaviour. Setting this NULL will use DRWAV_MALLOC, DRWAV_REALLOC and DRWAV_FREE. - Add support for reading and writing PCM frames in an explicit endianness. 
New APIs: - drwav_read_pcm_frames_le() - drwav_read_pcm_frames_be() - drwav_read_pcm_frames_s16le() - drwav_read_pcm_frames_s16be() - drwav_read_pcm_frames_f32le() - drwav_read_pcm_frames_f32be() - drwav_read_pcm_frames_s32le() - drwav_read_pcm_frames_s32be() - drwav_write_pcm_frames_le() - drwav_write_pcm_frames_be() - Remove deprecated APIs. - API CHANGE: The following APIs now return native-endian data. Previously they returned little-endian data. - drwav_read_pcm_frames() - drwav_read_pcm_frames_s16() - drwav_read_pcm_frames_s32() - drwav_read_pcm_frames_f32() - drwav_open_and_read_pcm_frames_s16() - drwav_open_and_read_pcm_frames_s32() - drwav_open_and_read_pcm_frames_f32() - drwav_open_file_and_read_pcm_frames_s16() - drwav_open_file_and_read_pcm_frames_s32() - drwav_open_file_and_read_pcm_frames_f32() - drwav_open_file_and_read_pcm_frames_s16_w() - drwav_open_file_and_read_pcm_frames_s32_w() - drwav_open_file_and_read_pcm_frames_f32_w() - drwav_open_memory_and_read_pcm_frames_s16() - drwav_open_memory_and_read_pcm_frames_s32() - drwav_open_memory_and_read_pcm_frames_f32() v0.10.1 - 2019-08-31 - Correctly handle partial trailing ADPCM blocks. v0.10.0 - 2019-08-04 - Remove deprecated APIs. - Add wchar_t variants for file loading APIs: drwav_init_file_w() drwav_init_file_ex_w() drwav_init_file_write_w() drwav_init_file_write_sequential_w() - Add drwav_target_write_size_bytes() which calculates the total size in bytes of a WAV file given a format and sample count. 
- Add APIs for specifying the PCM frame count instead of the sample count when opening in sequential write mode: drwav_init_write_sequential_pcm_frames() drwav_init_file_write_sequential_pcm_frames() drwav_init_file_write_sequential_pcm_frames_w() drwav_init_memory_write_sequential_pcm_frames() - Deprecate drwav_open*() and drwav_close(): drwav_open() drwav_open_ex() drwav_open_write() drwav_open_write_sequential() drwav_open_file() drwav_open_file_ex() drwav_open_file_write() drwav_open_file_write_sequential() drwav_open_memory() drwav_open_memory_ex() drwav_open_memory_write() drwav_open_memory_write_sequential() drwav_close() - Minor documentation updates. v0.9.2 - 2019-05-21 - Fix warnings. v0.9.1 - 2019-05-05 - Add support for C89. - Change license to choice of public domain or MIT-0. v0.9.0 - 2018-12-16 - API CHANGE: Add new reading APIs for reading by PCM frames instead of samples. Old APIs have been deprecated and will be removed in v0.10.0. Deprecated APIs and their replacements: drwav_read() -> drwav_read_pcm_frames() drwav_read_s16() -> drwav_read_pcm_frames_s16() drwav_read_f32() -> drwav_read_pcm_frames_f32() drwav_read_s32() -> drwav_read_pcm_frames_s32() drwav_seek_to_sample() -> drwav_seek_to_pcm_frame() drwav_write() -> drwav_write_pcm_frames() drwav_open_and_read_s16() -> drwav_open_and_read_pcm_frames_s16() drwav_open_and_read_f32() -> drwav_open_and_read_pcm_frames_f32() drwav_open_and_read_s32() -> drwav_open_and_read_pcm_frames_s32() drwav_open_file_and_read_s16() -> drwav_open_file_and_read_pcm_frames_s16() drwav_open_file_and_read_f32() -> drwav_open_file_and_read_pcm_frames_f32() drwav_open_file_and_read_s32() -> drwav_open_file_and_read_pcm_frames_s32() drwav_open_memory_and_read_s16() -> drwav_open_memory_and_read_pcm_frames_s16() drwav_open_memory_and_read_f32() -> drwav_open_memory_and_read_pcm_frames_f32() drwav_open_memory_and_read_s32() -> drwav_open_memory_and_read_pcm_frames_s32() drwav::totalSampleCount -> 
drwav::totalPCMFrameCount - API CHANGE: Rename drwav_open_and_read_file_*() to drwav_open_file_and_read_*(). - API CHANGE: Rename drwav_open_and_read_memory_*() to drwav_open_memory_and_read_*(). - Add built-in support for smpl chunks. - Add support for firing a callback for each chunk in the file at initialization time. - This is enabled through the drwav_init_ex(), etc. family of APIs. - Handle invalid FMT chunks more robustly. v0.8.5 - 2018-09-11 - Const correctness. - Fix a potential stack overflow. v0.8.4 - 2018-08-07 - Improve 64-bit detection. v0.8.3 - 2018-08-05 - Fix C++ build on older versions of GCC. v0.8.2 - 2018-08-02 - Fix some big-endian bugs. v0.8.1 - 2018-06-29 - Add support for sequential writing APIs. - Disable seeking in write mode. - Fix bugs with Wave64. - Fix typos. v0.8 - 2018-04-27 - Bug fix. - Start using major.minor.revision versioning. v0.7f - 2018-02-05 - Restrict ADPCM formats to a maximum of 2 channels. v0.7e - 2018-02-02 - Fix a crash. v0.7d - 2018-02-01 - Fix a crash. v0.7c - 2018-02-01 - Set drwav.bytesPerSample to 0 for all compressed formats. - Fix a crash when reading 16-bit floating point WAV files. In this case dr_wav will output silence for all format conversion reading APIs (*_s16, *_s32, *_f32 APIs). - Fix some divide-by-zero errors. v0.7b - 2018-01-22 - Fix errors with seeking of compressed formats. - Fix compilation error when DR_WAV_NO_CONVERSION_API v0.7a - 2017-11-17 - Fix some GCC warnings. v0.7 - 2017-11-04 - Add writing APIs. v0.6 - 2017-08-16 - API CHANGE: Rename dr_* types to drwav_*. - Add support for custom implementations of malloc(), realloc(), etc. - Add support for Microsoft ADPCM. - Add support for IMA ADPCM (DVI, format code 0x11). - Optimizations to drwav_read_s16(). - Bug fixes. v0.5g - 2017-07-16 - Change underlying type for booleans to unsigned. v0.5f - 2017-04-04 - Fix a minor bug with drwav_open_and_read_s16() and family. 
v0.5e - 2016-12-29 - Added support for reading samples as signed 16-bit integers. Use the _s16() family of APIs for this. - Minor fixes to documentation. v0.5d - 2016-12-28 - Use drwav_int* and drwav_uint* sized types to improve compiler support. v0.5c - 2016-11-11 - Properly handle JUNK chunks that come before the FMT chunk. v0.5b - 2016-10-23 - A minor change to drwav_bool8 and drwav_bool32 types. v0.5a - 2016-10-11 - Fixed a bug with drwav_open_and_read() and family due to incorrect argument ordering. - Improve A-law and mu-law efficiency. v0.5 - 2016-09-29 - API CHANGE. Swap the order of "channels" and "sampleRate" parameters in drwav_open_and_read*(). Rationale for this is to keep it consistent with dr_audio and dr_flac. v0.4b - 2016-09-18 - Fixed a typo in documentation. v0.4a - 2016-09-18 - Fixed a typo. - Change date format to ISO 8601 (YYYY-MM-DD) v0.4 - 2016-07-13 - API CHANGE. Make onSeek consistent with dr_flac. - API CHANGE. Rename drwav_seek() to drwav_seek_to_sample() for clarity and consistency with dr_flac. - Added support for Sony Wave64. v0.3a - 2016-05-28 - API CHANGE. Return drwav_bool32 instead of int in onSeek callback. - Fixed a memory leak. v0.3 - 2016-05-22 - Lots of API changes for consistency. v0.2a - 2016-05-16 - Fixed Linux/GCC build. v0.2 - 2016-05-11 - Added support for reading data as signed 32-bit PCM for consistency with dr_flac. v0.1a - 2016-05-07 - Fixed a bug in drwav_open_file() where the file handle would not be closed if the loader failed to initialize. v0.1 - 2016-05-04 - Initial versioned release. */ /* This software is available as a choice of the following licenses. Choose whichever you prefer. =============================================================================== ALTERNATIVE 1 - Public Domain (www.unlicense.org) =============================================================================== This is free and unencumbered software released into the public domain. 
Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. For more information, please refer to <http://unlicense.org/> =============================================================================== ALTERNATIVE 2 - MIT No Attribution =============================================================================== Copyright 2020 David Reid Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
241,358
C++
.h
5,266
39.350741
269
0.662126
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,789
Logger.h
Const-me_Whisper/Examples/OldMain/Utils/Logger.h
#pragma once #ifdef __cplusplus extern "C" { #endif struct ggml_tensor; void logError( const char8_t* pszFormat, ... ); void logWarning( const char8_t* pszFormat, ... ); void logInfo( const char8_t* pszFormat, ... ); void logDebug( const char8_t* pszFormat, ... ); #ifdef __cplusplus } namespace Tracing { struct ItemName { ItemName( const char* str ) { } ItemName( const char* str, uint32_t a0 ) { } ItemName( const char* str, int a0 ) { } }; inline void tensor( const ItemName& name, const ggml_tensor* tensor ) { } inline void delayTensor( const ItemName& name, const ggml_tensor* tensor ) { } inline void vector( const ItemName& name, const std::vector<float>& vec ) { } inline void writeDelayedTensors() { } } #endif
742
C++
.h
25
27.96
79
0.703652
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,790
params.h
Const-me_Whisper/Examples/main/params.h
#pragma once #include <vector> #include <string> // command-line parameters struct whisper_params { uint32_t n_threads; uint32_t n_processors = 1; uint32_t offset_t_ms = 0; uint32_t offset_n = 0; uint32_t duration_ms = 0; uint32_t max_context = UINT_MAX; uint32_t max_len = 0; float word_thold = 0.01f; bool speed_up = false; bool translate = false; bool diarize = false; bool output_txt = false; bool output_vtt = false; bool output_srt = false; bool output_wts = false; bool print_special = false; bool print_colors = true; bool no_timestamps = false; std::string language = "en"; std::wstring model = L"models/ggml-base.en.bin"; std::wstring gpu; std::string prompt; std::vector<std::wstring> fname_inp; whisper_params(); bool parse( int argc, wchar_t* argv[] ); }; void whisper_print_usage( int argc, wchar_t** argv, const whisper_params& params );
884
C++
.h
33
24.848485
83
0.72071
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,791
miscUtils.h
Const-me_Whisper/Examples/main/miscUtils.h
#pragma once #include <string> std::string utf8( const std::wstring& utf16 ); std::wstring utf16( const std::string& u8 ); using HRESULT = long; void printError( const char* what, HRESULT hr );
196
C++
.h
6
31.333333
48
0.739362
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,792
textWriter.h
Const-me_Whisper/Examples/main/textWriter.h
#pragma once #include "../../Whisper/API/iContext.cl.h" // These functions print output segments into text files of various formats HRESULT writeText( Whisper::iContext* context, LPCTSTR audioPath, bool timestamps ); HRESULT writeSubRip( Whisper::iContext* context, LPCTSTR audioPath ); HRESULT writeWebVTT( Whisper::iContext* context, LPCTSTR audioPath );
357
C++
.h
6
58.5
84
0.797721
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,793
languageCodez.inl
Const-me_Whisper/Whisper/Whisper/languageCodez.inl
// This file is generated by a tool, from the `languageCodez.tsv` file in this repository Lang{ 0x6661, 68, "afrikaans" }, Lang{ 0x7173, 58, "albanian" }, Lang{ 0x6D61, 75, "amharic" }, Lang{ 0x7261, 13, "arabic" }, Lang{ 0x7968, 53, "armenian" }, Lang{ 0x7361, 91, "assamese" }, Lang{ 0x7A61, 45, "azerbaijani" }, Lang{ 0x6162, 96, "bashkir" }, Lang{ 0x7565, 51, "basque" }, Lang{ 0x6562, 71, "belarusian" }, Lang{ 0x6E62, 43, "bengali" }, Lang{ 0x7362, 56, "bosnian" }, Lang{ 0x7262, 50, "breton" }, Lang{ 0x6762, 33, "bulgarian" }, Lang{ 0x6163, 11, "catalan" }, Lang{ 0x687A, 1, "chinese" }, Lang{ 0x7268, 32, "croatian" }, Lang{ 0x7363, 24, "czech" }, Lang{ 0x6164, 26, "danish" }, Lang{ 0x6C6E, 12, "dutch" }, Lang{ 0x6E65, 0, "english" }, Lang{ 0x7465, 48, "estonian" }, Lang{ 0x6F66, 79, "faroese" }, Lang{ 0x6966, 18, "finnish" }, Lang{ 0x7266, 6, "french" }, Lang{ 0x6C67, 60, "galician" }, Lang{ 0x616B, 70, "georgian" }, Lang{ 0x6564, 2, "german" }, Lang{ 0x6C65, 22, "greek" }, Lang{ 0x7567, 74, "gujarati" }, Lang{ 0x7468, 80, "haitian creole" }, Lang{ 0x6168, 95, "hausa" }, Lang{ 0x776168, 93, "hawaiian" }, Lang{ 0x7769, 20, "hebrew" }, Lang{ 0x6968, 17, "hindi" }, Lang{ 0x7568, 27, "hungarian" }, Lang{ 0x7369, 52, "icelandic" }, Lang{ 0x6469, 16, "indonesian" }, Lang{ 0x7469, 15, "italian" }, Lang{ 0x616A, 7, "japanese" }, Lang{ 0x776A, 97, "javanese" }, Lang{ 0x6E6B, 47, "kannada" }, Lang{ 0x6B6B, 57, "kazakh" }, Lang{ 0x6D6B, 64, "khmer" }, Lang{ 0x6F6B, 5, "korean" }, Lang{ 0x6F6C, 77, "lao" }, Lang{ 0x616C, 35, "latin" }, Lang{ 0x766C, 42, "latvian" }, Lang{ 0x6E6C, 94, "lingala" }, Lang{ 0x746C, 34, "lithuanian" }, Lang{ 0x626C, 86, "luxembourgish" }, Lang{ 0x6B6D, 49, "macedonian" }, Lang{ 0x676D, 90, "malagasy" }, Lang{ 0x736D, 23, "malay" }, Lang{ 0x6C6D, 37, "malayalam" }, Lang{ 0x746D, 84, "maltese" }, Lang{ 0x696D, 36, "maori" }, Lang{ 0x726D, 61, "marathi" }, Lang{ 0x6E6D, 55, "mongolian" }, Lang{ 0x796D, 87, "myanmar" }, Lang{ 0x656E, 54, "nepali" }, 
Lang{ 0x6F6E, 29, "norwegian" }, Lang{ 0x6E6E, 83, "nynorsk" }, Lang{ 0x636F, 69, "occitan" }, Lang{ 0x7370, 81, "pashto" }, Lang{ 0x6166, 41, "persian" }, Lang{ 0x6C70, 10, "polish" }, Lang{ 0x7470, 8, "portuguese" }, Lang{ 0x6170, 62, "punjabi" }, Lang{ 0x6F72, 25, "romanian" }, Lang{ 0x7572, 4, "russian" }, Lang{ 0x6173, 85, "sanskrit" }, Lang{ 0x7273, 44, "serbian" }, Lang{ 0x6E73, 65, "shona" }, Lang{ 0x6473, 73, "sindhi" }, Lang{ 0x6973, 63, "sinhala" }, Lang{ 0x6B73, 39, "slovak" }, Lang{ 0x6C73, 46, "slovenian" }, Lang{ 0x6F73, 67, "somali" }, Lang{ 0x7365, 3, "spanish" }, Lang{ 0x7573, 98, "sundanese" }, Lang{ 0x7773, 59, "swahili" }, Lang{ 0x7673, 14, "swedish" }, Lang{ 0x6C74, 89, "tagalog" }, Lang{ 0x6774, 72, "tajik" }, Lang{ 0x6174, 28, "tamil" }, Lang{ 0x7474, 92, "tatar" }, Lang{ 0x6574, 40, "telugu" }, Lang{ 0x6874, 30, "thai" }, Lang{ 0x6F62, 88, "tibetan" }, Lang{ 0x7274, 9, "turkish" }, Lang{ 0x6B74, 82, "turkmen" }, Lang{ 0x6B75, 21, "ukrainian" }, Lang{ 0x7275, 31, "urdu" }, Lang{ 0x7A75, 78, "uzbek" }, Lang{ 0x6976, 19, "vietnamese" }, Lang{ 0x7963, 38, "welsh" }, Lang{ 0x6979, 76, "yiddish" }, Lang{ 0x6F79, 66, "yoruba" },
3,163
C++
.inl
100
30.63
89
0.63761
Const-me/Whisper
8,145
691
142
MPL-2.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,795
timeline-tests.cpp
olive-editor_olive/tests/timeline/timeline-tests.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "core.h" #include "node/block/clip/clip.h" #include "node/block/transition/crossdissolve/crossdissolvetransition.h" #include "node/math/math/math.h" #include "node/math/merge/merge.h" #include "node/project.h" #include "node/project/sequence/sequence.h" #include "testutil.h" #include "timeline/timelineundogeneral.h" #include "timeline/timelineundopointer.h" #include "undo/undocommand.h" namespace olive { #define TIMELINE_TEST_START \ ColorManager::SetUpDefaultConfig(); \ Project project; \ Sequence sequence; \ sequence.setParent(&project) OLIVE_ADD_TEST(AddTrack) { TIMELINE_TEST_START; Track *first_video_track, *first_audio_track; { // Test creating initial video track first_video_track = TimelineAddTrackCommand::RunImmediately(sequence.track_list(Track::kVideo)); OLIVE_ASSERT(sequence.GetConnectedOutput(Sequence::kTextureInput) == first_video_track); OLIVE_ASSERT(sequence.track_list(Track::kVideo)->GetTrackCount() == 1); OLIVE_ASSERT(sequence.track_list(Track::kVideo)->GetTrackAt(0) == first_video_track); } { // Test creating initial audio track first_audio_track = TimelineAddTrackCommand::RunImmediately(sequence.track_list(Track::kAudio)); OLIVE_ASSERT(sequence.GetConnectedOutput(Sequence::kSamplesInput) == first_audio_track); 
OLIVE_ASSERT(sequence.track_list(Track::kAudio)->GetTrackCount() == 1); OLIVE_ASSERT(sequence.track_list(Track::kAudio)->GetTrackAt(0) == first_audio_track); } { // Test creating second video track with merge Track* second_video_track = TimelineAddTrackCommand::RunImmediately(sequence.track_list(Track::kVideo), true); OLIVE_ASSERT(sequence.GetConnectedOutput(Sequence::kTextureInput) != first_video_track); OLIVE_ASSERT(sequence.GetConnectedOutput(Sequence::kTextureInput) != second_video_track); OLIVE_ASSERT(sequence.track_list(Track::kVideo)->GetTrackCount() == 2); OLIVE_ASSERT(sequence.track_list(Track::kVideo)->GetTrackAt(1) == second_video_track); MergeNode* merge = dynamic_cast<MergeNode*>(sequence.GetConnectedOutput(Sequence::kTextureInput)); OLIVE_ASSERT(merge); OLIVE_ASSERT(merge->GetConnectedOutput(MergeNode::kBaseIn) == first_video_track); OLIVE_ASSERT(merge->GetConnectedOutput(MergeNode::kBlendIn) == second_video_track); } { // Test creating second audio track with merge Track* second_audio_track = TimelineAddTrackCommand::RunImmediately(sequence.track_list(Track::kAudio), true); OLIVE_ASSERT(sequence.GetConnectedOutput(Sequence::kSamplesInput) != first_audio_track); OLIVE_ASSERT(sequence.GetConnectedOutput(Sequence::kSamplesInput) != second_audio_track); OLIVE_ASSERT(sequence.track_list(Track::kAudio)->GetTrackCount() == 2); OLIVE_ASSERT(sequence.track_list(Track::kAudio)->GetTrackAt(1) == second_audio_track); MathNode* merge = dynamic_cast<MathNode*>(sequence.GetConnectedOutput(Sequence::kSamplesInput)); OLIVE_ASSERT(merge); OLIVE_ASSERT(merge->GetConnectedOutput(MathNode::kParamAIn) == first_audio_track); OLIVE_ASSERT(merge->GetConnectedOutput(MathNode::kParamBIn) == second_audio_track); } OLIVE_TEST_END; } OLIVE_ADD_TEST(SequenceDefaults) { TIMELINE_TEST_START; sequence.add_default_nodes(); OLIVE_ASSERT(sequence.GetTracks().size() == 2); Track* tex_connect = dynamic_cast<Track*>(sequence.GetConnectedTextureOutput()); OLIVE_ASSERT(tex_connect); Track* 
smp_connect = dynamic_cast<Track*>(sequence.GetConnectedSampleOutput()); OLIVE_ASSERT(smp_connect); OLIVE_ASSERT(tex_connect != smp_connect); OLIVE_ASSERT(sequence.GetTracks().contains(tex_connect)); OLIVE_ASSERT(sequence.GetTracks().contains(smp_connect)); OLIVE_TEST_END; } OLIVE_ADD_TEST(Trim) { TIMELINE_TEST_START; sequence.add_default_nodes(); Track* track = sequence.GetTracks().first(); ClipBlock* block1 = new ClipBlock(); block1->set_length_and_media_out(2); block1->setParent(&project); track->AppendBlock(block1); ClipBlock* block2 = new ClipBlock(); block2->set_length_and_media_out(2); block2->setParent(&project); track->AppendBlock(block2); // There should be two blocks right now OLIVE_ASSERT(track->Blocks().size() == 2); { // Trim out point of second block BlockTrimCommand command(track, block2, 1, Timeline::kTrimOut); command.redo_now(); // No block should have been added OLIVE_ASSERT(track->Blocks().size() == 2); OLIVE_ASSERT(block2->length() == 1); OLIVE_ASSERT(block1->length() == 2); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 2); OLIVE_ASSERT(block2->length() == 2); OLIVE_ASSERT(block1->length() == 2); } { // Trim in point of second block BlockTrimCommand command(track, block2, 1, Timeline::kTrimIn); command.redo_now(); // Gap should be inserted in between OLIVE_ASSERT(track->Blocks().size() == 3); GapBlock* gap = dynamic_cast<GapBlock*>(track->Blocks().at(1)); OLIVE_ASSERT(gap); OLIVE_ASSERT(gap->length() == 1); OLIVE_ASSERT(block2->length() == 1); OLIVE_ASSERT(block1->length() == 2); OLIVE_ASSERT(block1->next() == gap); OLIVE_ASSERT(block2->previous() == gap); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 2); OLIVE_ASSERT(block2->length() == 2); OLIVE_ASSERT(block1->length() == 2); } { // Trim out point of first block BlockTrimCommand command(track, block1, 1, Timeline::kTrimOut); command.redo_now(); // Gap should be inserted in between OLIVE_ASSERT(track->Blocks().size() == 3); GapBlock* gap = 
dynamic_cast<GapBlock*>(track->Blocks().at(1)); OLIVE_ASSERT(gap); OLIVE_ASSERT(gap->length() == 1); OLIVE_ASSERT(block1->length() == 1); OLIVE_ASSERT(block2->length() == 2); OLIVE_ASSERT(block1->next() == gap); OLIVE_ASSERT(block2->previous() == gap); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 2); OLIVE_ASSERT(block2->length() == 2); OLIVE_ASSERT(block1->length() == 2); } { // Trim in point of first block BlockTrimCommand command(track, block1, 1, Timeline::kTrimIn); command.redo_now(); // Gap should be prepended to the start OLIVE_ASSERT(track->Blocks().size() == 3); GapBlock* gap = dynamic_cast<GapBlock*>(track->Blocks().at(0)); OLIVE_ASSERT(gap); OLIVE_ASSERT(gap->length() == 1); OLIVE_ASSERT(block1->length() == 1); OLIVE_ASSERT(block2->length() == 2); OLIVE_ASSERT(block1->next() == block2); OLIVE_ASSERT(block1->previous() == gap); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 2); OLIVE_ASSERT(block2->length() == 2); OLIVE_ASSERT(block1->length() == 2); } OLIVE_TEST_END; } OLIVE_ADD_TEST(ReplaceBlockWithGap_ClipsOnly) { TIMELINE_TEST_START; // Create a track that goes clip -> clip -> clip sequence.add_default_nodes(); Track* track = sequence.track_list(Track::kVideo)->GetTracks().first(); ClipBlock* a = new ClipBlock(); a->setParent(&project); track->AppendBlock(a); ClipBlock* b = new ClipBlock(); b->setParent(&project); track->AppendBlock(b); ClipBlock* c = new ClipBlock(); c->setParent(&project); track->AppendBlock(c); { // Replace clip C with a gap TrackReplaceBlockWithGapCommand command(track, c); command.redo_now(); // Clip should be removed without any gap actually taking its place, since the clip is at the // end of the track OLIVE_ASSERT(track->Blocks().size() == 2); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); 
OLIVE_ASSERT(track->Blocks().at(2) == c); } { // Replace clip B with a gap TrackReplaceBlockWithGapCommand command(track, b); command.redo_now(); // B should be replaced with a gap OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) != b); OLIVE_ASSERT(dynamic_cast<GapBlock*>(track->Blocks().at(1))); OLIVE_ASSERT(track->Blocks().at(1)->length() == b->length()); OLIVE_ASSERT(track->Blocks().at(2) == c); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); } OLIVE_TEST_END; } OLIVE_ADD_TEST(ReplaceBlockWithGap_ClipsAndGaps) { TIMELINE_TEST_START; // Create a track that goes clip -> gap -> clip -> clip -> gap -> clip sequence.add_default_nodes(); Track* track = sequence.track_list(Track::kVideo)->GetTracks().first(); ClipBlock* a = new ClipBlock(); a->setParent(&project); track->AppendBlock(a); GapBlock* b = new GapBlock(); b->setParent(&project); track->AppendBlock(b); ClipBlock* c = new ClipBlock(); c->setParent(&project); track->AppendBlock(c); GapBlock* d = new GapBlock(); d->setParent(&project); track->AppendBlock(d); ClipBlock* e = new ClipBlock(); e->setParent(&project); track->AppendBlock(e); { // Replace clip E with a gap TrackReplaceBlockWithGapCommand command(track, e); command.redo_now(); // Both clips D and E should be removed because this command should remove any trailing gaps OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); // Test undo command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 5); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); OLIVE_ASSERT(track->Blocks().at(3) == d); OLIVE_ASSERT(track->Blocks().at(4) == e); } { // Replace clip A with a 
gap rational original_length_of_a = a->length(); rational original_length_of_b = b->length(); TrackReplaceBlockWithGapCommand command(track, a); command.redo_now(); // A should be removed and B should take its place OLIVE_ASSERT(track->Blocks().size() == 4); OLIVE_ASSERT(track->Blocks().at(0) == b); OLIVE_ASSERT(track->Blocks().at(1) == c); OLIVE_ASSERT(track->Blocks().at(2) == d); OLIVE_ASSERT(track->Blocks().at(3) == e); OLIVE_ASSERT(b->length() == original_length_of_a + original_length_of_b); // Test undo command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 5); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); OLIVE_ASSERT(track->Blocks().at(3) == d); OLIVE_ASSERT(track->Blocks().at(4) == e); OLIVE_ASSERT(a->length() == original_length_of_a); OLIVE_ASSERT(b->length() == original_length_of_b); } { // Replace clip C with a gap rational original_length_of_b = b->length(); rational original_length_of_c = c->length(); rational original_length_of_d = d->length(); TrackReplaceBlockWithGapCommand command(track, c); command.redo_now(); // C and D should be removed, and B should take both of their places OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == e); OLIVE_ASSERT(b->length() == original_length_of_b + original_length_of_c + original_length_of_d); // Test undo command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 5); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); OLIVE_ASSERT(track->Blocks().at(3) == d); OLIVE_ASSERT(track->Blocks().at(4) == e); OLIVE_ASSERT(b->length() == original_length_of_b); OLIVE_ASSERT(c->length() == original_length_of_c); OLIVE_ASSERT(d->length() == original_length_of_d); } { // Add a fourth clip at the end of the track ClipBlock* f = new ClipBlock(); 
f->setParent(&project); track->AppendBlock(f); // Try replacing E with a block again TrackReplaceBlockWithGapCommand command(track, e); rational original_length_of_d = d->length(); rational original_length_of_e = e->length(); command.redo_now(); // E should be removed and D should have taken its place OLIVE_ASSERT(track->Blocks().size() == 5); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); OLIVE_ASSERT(track->Blocks().at(3) == d); OLIVE_ASSERT(track->Blocks().at(4) == f); OLIVE_ASSERT(d->length() == original_length_of_d + original_length_of_e); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 6); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); OLIVE_ASSERT(track->Blocks().at(3) == d); OLIVE_ASSERT(track->Blocks().at(4) == e); OLIVE_ASSERT(track->Blocks().at(5) == f); OLIVE_ASSERT(d->length() == original_length_of_d); OLIVE_ASSERT(e->length() == original_length_of_e); } OLIVE_TEST_END; } #define UsingTransition CrossDissolveTransition OLIVE_ADD_TEST(ReplaceBlockWithGap_ClipsAndTransitions) { TIMELINE_TEST_START; // Create a track that goes clip -> gap -> clip -> clip -> gap -> clip sequence.add_default_nodes(); Track* track = sequence.track_list(Track::kVideo)->GetTracks().first(); UsingTransition* a_in = new UsingTransition(); a_in->setParent(&project); track->AppendBlock(a_in); ClipBlock* a = new ClipBlock(); a->setParent(&project); track->AppendBlock(a); UsingTransition* a_to_b = new UsingTransition(); a_to_b->setParent(&project); track->AppendBlock(a_to_b); ClipBlock* b = new ClipBlock(); b->setParent(&project); track->AppendBlock(b); UsingTransition* b_out = new UsingTransition(); b_out->setParent(&project); track->AppendBlock(b_out); Node::ConnectEdge(a, NodeInput(a_in, UsingTransition::kInBlockInput)); Node::ConnectEdge(a, NodeInput(a_to_b, UsingTransition::kOutBlockInput)); Node::ConnectEdge(b, 
NodeInput(a_to_b, UsingTransition::kInBlockInput)); Node::ConnectEdge(b, NodeInput(b_out, UsingTransition::kOutBlockInput)); { // Replace A with gap TrackReplaceBlockWithGapCommand command(track, a); command.redo_now(); // A should be replaced with a gap and so should A_IN since A was the only clip connected to it. // Also A_TO_B should only be connected to B now OLIVE_ASSERT(track->Blocks().size() == 4); OLIVE_ASSERT(dynamic_cast<GapBlock*>(track->Blocks().at(0))); OLIVE_ASSERT(track->Blocks().at(1) == a_to_b); OLIVE_ASSERT(track->Blocks().at(2) == b); OLIVE_ASSERT(track->Blocks().at(3) == b_out); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 5); OLIVE_ASSERT(track->Blocks().at(0) == a_in); OLIVE_ASSERT(track->Blocks().at(1) == a); OLIVE_ASSERT(track->Blocks().at(2) == a_to_b); OLIVE_ASSERT(track->Blocks().at(3) == b); OLIVE_ASSERT(track->Blocks().at(4) == b_out); } OLIVE_TEST_END; } OLIVE_ADD_TEST(InsertGaps_SingleTrack) { TIMELINE_TEST_START; sequence.add_default_nodes(); TrackList *list = sequence.track_list(Track::kVideo); Track *track = list->GetTracks().first(); ClipBlock *a = new ClipBlock(); a->set_length_and_media_out(1); a->setParent(&project); track->AppendBlock(a); ClipBlock *b = new ClipBlock(); b->set_length_and_media_out(1); b->setParent(&project); track->AppendBlock(b); ClipBlock *c = new ClipBlock(); c->set_length_and_media_out(1); c->setParent(&project); track->AppendBlock(c); OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); { // Insert gap at the start of the track, all blocks should be unsplit and shifted to the right TrackListInsertGaps command(list, 0, 2); command.redo_now(); OLIVE_ASSERT(track->Blocks().size() == 4); OLIVE_ASSERT(dynamic_cast<GapBlock *>(track->Blocks().at(0))); OLIVE_ASSERT(track->Blocks().at(0)->length() == 2); OLIVE_ASSERT(track->Blocks().at(1) == a); OLIVE_ASSERT(track->Blocks().at(2) == 
b); OLIVE_ASSERT(track->Blocks().at(3) == c); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); } { // Insert gap in the middle of block A, block A should be halved with a copy at 2 and the gap at 1 TrackListInsertGaps command(list, rational(1, 2), 2); command.redo_now(); OLIVE_ASSERT(track->Blocks().size() == 5); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(0)->length() == rational(1, 2)); OLIVE_ASSERT(dynamic_cast<GapBlock *>(track->Blocks().at(1))); OLIVE_ASSERT(dynamic_cast<ClipBlock*>(track->Blocks().at(2))); OLIVE_ASSERT(track->Blocks().at(3) == b); OLIVE_ASSERT(track->Blocks().at(4) == c); command.undo_now(); OLIVE_ASSERT_EQUAL(track->Blocks().size(), 3); OLIVE_ASSERT_EQUAL(track->Blocks().at(0), a); OLIVE_ASSERT_EQUAL(track->Blocks().at(0)->length(), 1); OLIVE_ASSERT_EQUAL(track->Blocks().at(1), b); OLIVE_ASSERT_EQUAL(track->Blocks().at(2), c); } { // Insert gap between block A and B, blocks should be unsplit with a gap at 1 TrackListInsertGaps command(list, 1, 2); command.redo_now(); OLIVE_ASSERT(track->Blocks().size() == 4); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(dynamic_cast<GapBlock *>(track->Blocks().at(1))); OLIVE_ASSERT(track->Blocks().at(2) == b); OLIVE_ASSERT(track->Blocks().at(3) == c); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); } { // Insert gap at end, nothing should be added TrackListInsertGaps command(list, 3, 2); command.redo_now(); OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); command.undo_now(); OLIVE_ASSERT(track->Blocks().size() == 3); OLIVE_ASSERT(track->Blocks().at(0) == a); 
OLIVE_ASSERT(track->Blocks().at(1) == b); OLIVE_ASSERT(track->Blocks().at(2) == c); } OLIVE_TEST_END; } }
19,502
C++
.cpp
476
36.697479
114
0.671394
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,796
compositing-tests.cpp
olive-editor_olive/tests/compositing/compositing-tests.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "testutil.h" #include "node/distort/crop/cropdistortnode.h" #include "node/distort/transform/transformdistortnode.h" #include "node/generator/solid/solid.h" #include "node/math/merge/merge.h" #include "node/project.h" #include "render/rendermanager.h" namespace olive { }
996
C++
.cpp
23
40.869565
71
0.78112
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,797
common-tests.cpp
olive-editor_olive/tests/general/common-tests.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "testutil.h" #include "common/digit.h" namespace olive { OLIVE_ADD_TEST(DigitTest) { OLIVE_ASSERT(GetDigitCount(1) == 1); OLIVE_ASSERT(GetDigitCount(69) == 2); OLIVE_ASSERT(GetDigitCount(420) == 3); OLIVE_ASSERT(GetDigitCount(1337) == 4); OLIVE_ASSERT(GetDigitCount(80085) == 5); OLIVE_ASSERT(GetDigitCount(555555) == 6); OLIVE_ASSERT(GetDigitCount(8675309) == 7); OLIVE_ASSERT(GetDigitCount(78956423) == 8); OLIVE_ASSERT(GetDigitCount(148497523) == 9); OLIVE_ASSERT(GetDigitCount(4845821233) == 10); OLIVE_ASSERT(GetDigitCount(18002738255) == 11); OLIVE_ASSERT(GetDigitCount(180027382556) == 12); OLIVE_ASSERT(GetDigitCount(1800273825568) == 13); OLIVE_ASSERT(GetDigitCount(18002738255685) == 14); OLIVE_ASSERT(GetDigitCount(180027382556857) == 15); OLIVE_ASSERT(GetDigitCount(1800273825564857) == 16); OLIVE_TEST_END; } }
1,584
C++
.cpp
38
38.868421
71
0.755049
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,798
main.cpp
olive-editor_olive/app/main.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ /** \mainpage Olive Video Editor - Code Documentation * * This documentation is a primarily a developer resource. For information on using Olive, visit the website * https://www.olivevideoeditor.org/ * * Use the navigation above to find documentation on classes or source files. */ extern "C" { #include <libavformat/avformat.h> #include <libavfilter/avfilter.h> } #include <csignal> #include <QApplication> #include <QCommandLineParser> #include <QMessageBox> #include <QSurfaceFormat> #include "core.h" #include "common/commandlineparser.h" #include "common/debug.h" #include "node/project/serializer/serializer.h" #include "version.h" #ifdef _WIN32 #include <QOffscreenSurface> #include <QOpenGLContext> #include <QOpenGLFunctions> #include <Windows.h> #endif #ifdef USE_CRASHPAD #include "common/crashpadinterface.h" #endif // USE_CRASHPAD int decompress_project(const QString &project) { if (project.isEmpty()) { printf("%s\n", QCoreApplication::translate("main", "No project filename set to decompress").toUtf8().constData()); return 1; } QFile project_file(project); if (!project_file.open(QFile::ReadOnly)) { printf("%s\n", QCoreApplication::translate("main", "Failed to open file \"%1\"").arg(project).toUtf8().constData()); return 1; } printf("%s\n", QCoreApplication::translate("main", "Decompressing 
project...").toUtf8().constData()); if (!olive::ProjectSerializer::CheckCompressedID(&project_file)) { printf("%s\n", QCoreApplication::translate("main", "Failed to decompress, project may be corrupt").toUtf8().constData()); return 1; } QByteArray b = project_file.readAll(); project_file.close(); QByteArray decompressed = qUncompress(b); if (decompressed.isEmpty()) { printf("%s\n", QCoreApplication::translate("main", "Failed to decompress, project may be corrupt").toUtf8().constData()); return 1; } QFileInfo info(project); QString filename; QString append; int append_num = 0; do { filename = info.dir().filePath(info.completeBaseName().append(append).append(QStringLiteral(".ovexml"))); append_num++; append = QStringLiteral("-%1").arg(append_num); } while(QFileInfo::exists(filename)); printf("%s\n", QCoreApplication::translate("main", "Outputting to file \"%1\"").arg(filename).toUtf8().constData()); QFile out(filename); if (!out.open(QFile::WriteOnly)) { printf("%s\n", QCoreApplication::translate("main", "Failed to open output file \"%1\"").arg(filename).toUtf8().constData()); return 1; } out.write(decompressed); out.close(); printf("%s\n", QCoreApplication::translate("main", "Decompressed successfully").toUtf8().constData()); return 0; } int main(int argc, char *argv[]) { // Set up debug handler qInstallMessageHandler(olive::DebugHandler); // Set application metadata QCoreApplication::setOrganizationName("olivevideoeditor.org"); QCoreApplication::setOrganizationDomain("olivevideoeditor.org"); QCoreApplication::setApplicationName("Olive"); QGuiApplication::setDesktopFileName("org.olivevideoeditor.Olive"); QCoreApplication::setApplicationVersion(olive::kAppVersionLong); // // Parse command line arguments // QVector<QString> args; #if defined(_WIN32) && defined(UNICODE) int wargc; LPWSTR *wargv = CommandLineToArgvW(GetCommandLineW(), &wargc); args.resize(wargc); for (int i=0; i<wargc; i++) { args[i] = QString::fromWCharArray(wargv[i]); } LocalFree(wargv); #else 
args.resize(argc); for (int i=0; i<argc; i++) { args[i] = QString::fromLocal8Bit(argv[i]); } #endif olive::Core::CoreParams startup_params; CommandLineParser parser; // Our options auto help_option = parser.AddOption({QStringLiteral("h"), QStringLiteral("-help")}, QCoreApplication::translate("main", "Show this help text")); auto version_option = parser.AddOption({QStringLiteral("v"), QStringLiteral("-version")}, QCoreApplication::translate("main", "Show application version")); auto fullscreen_option = parser.AddOption({QStringLiteral("f"), QStringLiteral("-fullscreen")}, QCoreApplication::translate("main", "Start in full-screen mode")); auto export_option = parser.AddOption({QStringLiteral("x"), QStringLiteral("-export")}, QCoreApplication::translate("main", "Export only (No GUI)")); auto ts_option = parser.AddOption({QStringLiteral("-ts")}, QCoreApplication::translate("main", "Override language with file"), true, QCoreApplication::translate("main", "qm-file")); auto decompress_option = parser.AddOption({QStringLiteral("d"), QStringLiteral("-decompress")}, QCoreApplication::translate("main", "Decompress project file (No GUI)")); #ifdef _WIN32 auto console_option = parser.AddOption({QStringLiteral("c"), QStringLiteral("-console")}, QCoreApplication::translate("main", "Launch with debug console")); #endif // _WIN32 auto project_argument = parser.AddPositionalArgument(QStringLiteral("project"), QCoreApplication::translate("main", "Project to open on startup")); // Qt options re-implemented (add to this as necessary) // // Because we don't use QCommandLineParser, we must filter out Qt's arguments ourselves. Here, // we create them so they're recognized, but never use and also hide them in the "help" text. 
parser.AddOption({QStringLiteral("platform")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("platformpluginpath")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("platformtheme")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("plugin")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("qmljsdebugger")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("qwindowgeometry")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("qwindowicon")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("qwindowtitle")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("reverse")}, QString(), false, QString(), true); parser.AddOption({QStringLiteral("session")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("style")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("stylesheet")}, QString(), true, QString(), true); parser.AddOption({QStringLiteral("widgetcount")}, QString(), false, QString(), true); // Hidden crash option for debugging the crash handling auto crash_option = parser.AddOption({QStringLiteral("-crash")}, QString(), true, QString(), true); parser.Process(args); if (help_option->IsSet()) { // Show help parser.PrintHelp(argv[0]); return 0; } if (version_option->IsSet()) { // Print version printf("%s\n", QCoreApplication::applicationVersion().toUtf8().constData()); return 0; } if (decompress_option->IsSet()) { return decompress_project(project_argument->GetSetting()); } if (export_option->IsSet()) { startup_params.set_run_mode(olive::Core::CoreParams::kHeadlessExport); } if (ts_option->IsSet()) { if (ts_option->GetSetting().isEmpty()) { qWarning() << "--ts was set but no translation file was provided"; } else { startup_params.set_startup_language(ts_option->GetSetting()); } } if (crash_option->IsSet()) { startup_params.set_crash_on_startup(true); } 
startup_params.set_fullscreen(fullscreen_option->IsSet()); startup_params.set_startup_project(project_argument->GetSetting()); // Set OpenGL display profile QSurfaceFormat format; // Tries to cover all bases. If drivers don't support 3.2, they should fallback to the closest // alternative. Unfortunately Qt doesn't support 3.0-3.1 without DeprecatedFunctions, so we // declare that too. We also force Qt to not use ANGLE because I've had a lot of problems with it // so far. // // https://bugreports.qt.io/browse/QTBUG-46140 QCoreApplication::setAttribute(Qt::AA_UseDesktopOpenGL); format.setVersion(3, 2); format.setProfile(QSurfaceFormat::CoreProfile); format.setDepthBufferSize(24); QSurfaceFormat::setDefaultFormat(format); // Enable application automatically using higher resolution images from icons QCoreApplication::setAttribute(Qt::AA_UseHighDpiPixmaps); QCoreApplication::setAttribute(Qt::AA_ShareOpenGLContexts); // Create application instance std::unique_ptr<QCoreApplication> a; if (startup_params.run_mode() == olive::Core::CoreParams::kRunNormal) { #ifdef _WIN32 // Since Olive is linked with the console subsystem (for better POSIX compatibility), a console // is created by default. If the user didn't request one, we free it here. if (!console_option->IsSet()) { FreeConsole(); } #endif // _WIN32 a.reset(new QApplication(argc, argv)); } else { a.reset(new QCoreApplication(argc, argv)); } #ifdef _WIN32 // On Windows, users seem to frequently run into a crash caused by their graphics driver not // supporting framebuffers, which we require. I personally have only been able to recreate this // when no driver is installed (e.g. when using the Microsoft Basic Display Adapter). Whether // that's true for all users or not is still up in the air, but what we do know is it's a driver // issue and users should know what to do rather than simply receive a cryptic crash report. 
QOpenGLContext ctx; ctx.create(); QOffscreenSurface surface; surface.create(); ctx.makeCurrent(&surface); bool has_proc_address = wglGetProcAddress("glGenFramebuffers"); std::string gpu_vendor = reinterpret_cast<const char*>(ctx.functions()->glGetString(GL_VENDOR)); std::string gpu_renderer = reinterpret_cast<const char*>(ctx.functions()->glGetString(GL_RENDERER)); std::string gpu_version = reinterpret_cast<const char*>(ctx.functions()->glGetString(GL_VERSION)); ctx.doneCurrent(); surface.destroy(); if (!has_proc_address) { QString msg = QCoreApplication::translate("main", "Your computer's graphics driver does not appear to support framebuffers. " "This most likely means either your graphics driver is not up-to-date or your graphics card is too old to run Olive.\n\n" "Please update your graphics driver to the latest version and try again.\n\n" "Current driver information: %1 %2 %3").arg(QString::fromStdString(gpu_vendor), QString::fromStdString(gpu_renderer), QString::fromStdString(gpu_version)); if (dynamic_cast<QGuiApplication*>(a.get())) { QMessageBox::critical(nullptr, QString(), msg); } else { qCritical().noquote() << msg; } return 1; } #endif // Register FFmpeg codecs and filters (deprecated in 4.0+) #if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100) av_register_all(); #endif #if LIBAVFILTER_VERSION_INT < AV_VERSION_INT(7, 14, 100) avfilter_register_all(); #endif // Enable Google Crashpad if compiled with it #ifdef USE_CRASHPAD if (!InitializeCrashpad()) { qWarning() << "Failed to initialize Crashpad handler"; } #endif // USE_CRASHPAD // Start core olive::Core c(startup_params); c.Start(); int ret = a->exec(); // Clear core memory c.Stop(); return ret; }
12,176
C++
.cpp
276
39.884058
161
0.713138
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,799
version.cpp
olive-editor_olive/app/version.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "version.h" namespace olive { QString kAppVersion = QStringLiteral(APPVERSION); QString kAppVersionLong = QStringLiteral(APPVERSIONLONG); }
864
C++
.cpp
19
42.736842
71
0.783493
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,800
core.cpp
olive-editor_olive/app/core.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "core.h" #include <QApplication> #include <QClipboard> #include <QDebug> #include <QFileDialog> #include <QFileInfo> #include <QHBoxLayout> #include <QInputDialog> #include <QMessageBox> #include <QStyleFactory> #include "window/mainwindow/mainwindowundo.h" #ifdef Q_OS_WINDOWS #include <QtPlatformHeaders/QWindowsWindowFunctions> #endif #include "audio/audiomanager.h" #include "cli/clitask/clitaskdialog.h" #include "codec/conformmanager.h" #include "common/filefunctions.h" #include "common/xmlutils.h" #include "config/config.h" #include "dialog/about/about.h" #include "dialog/autorecovery/autorecoverydialog.h" #include "dialog/export/export.h" #include "dialog/footagerelink/footagerelinkdialog.h" #ifdef USE_OTIO #include "dialog/otioproperties/otiopropertiesdialog.h" #endif #include "dialog/projectproperties/projectproperties.h" #include "dialog/sequence/sequence.h" #include "dialog/task/task.h" #include "dialog/preferences/preferences.h" #include "node/color/colormanager/colormanager.h" #include "node/factory.h" #include "node/nodeundo.h" #include "node/project/serializer/serializer.h" #include "panel/panelmanager.h" #include "panel/project/project.h" #include "panel/viewer/viewer.h" #include "render/diskmanager.h" #include "render/framemanager.h" #include "render/rendermanager.h" #ifdef 
USE_OTIO #include "task/project/loadotio/loadotio.h" #include "task/project/saveotio/saveotio.h" #endif #include "task/project/import/import.h" #include "task/project/import/importerrordialog.h" #include "task/project/load/load.h" #include "task/project/save/save.h" #include "task/taskmanager.h" #include "ui/style/style.h" #include "undo/undostack.h" #include "widget/menu/menushared.h" #include "widget/taskview/taskviewitem.h" #include "widget/viewer/viewer.h" #include "window/mainwindow/mainstatusbar.h" #include "window/mainwindow/mainwindow.h" namespace olive { Core* Core::instance_ = nullptr; Core::Core(const CoreParams& params) : main_window_(nullptr), open_project_(nullptr), tool_(Tool::kPointer), addable_object_(Tool::kAddableEmpty), snapping_(true), core_params_(params), magic_(false), pixel_sampling_users_(0), shown_cache_full_warning_(false) { // Store reference to this object, making the assumption that Core will only ever be made in // main(). This will obviously break if not. 
instance_ = this; translator_ = new QTranslator(this); } Core *Core::instance() { return instance_; } void Core::DeclareTypesForQt() { qRegisterMetaType<olive::core::rational>(); qRegisterMetaType<NodeValue>(); qRegisterMetaType<NodeValueTable>(); qRegisterMetaType<NodeValueDatabase>(); qRegisterMetaType<FramePtr>(); qRegisterMetaType<SampleBuffer>(); qRegisterMetaType<AudioParams>(); qRegisterMetaType<NodeKeyframe::Type>(); qRegisterMetaType<Decoder::RetrieveState>(); qRegisterMetaType<olive::core::TimeRange>(); qRegisterMetaType<olive::core::Color>(); qRegisterMetaType<olive::AudioVisualWaveform>(); qRegisterMetaType<olive::VideoParams>(); qRegisterMetaType<olive::VideoParams::Interlacing>(); qRegisterMetaType<olive::MainWindowLayoutInfo>(); qRegisterMetaType<olive::RenderTicketPtr>(); } void Core::Start() { // Load application config Config::Load(); // Set locale based on either startup arg, config, or auto-detect SetStartupLocale(); // Declare custom types for Qt signal/slot system DeclareTypesForQt(); // Set up node factory/library NodeFactory::Initialize(); // Set up color manager's default config ColorManager::SetUpDefaultConfig(); // Initialize task manager TaskManager::CreateInstance(); // Initialize ConformManager ConformManager::CreateInstance(); // Initialize RenderManager RenderManager::CreateInstance(); // Initialize FrameManager FrameManager::CreateInstance(); // Initialize project serializers ProjectSerializer::Initialize(); // // Start application // qInfo() << "Using Qt version:" << qVersion(); switch (core_params_.run_mode()) { case CoreParams::kRunNormal: // Start GUI StartGUI(core_params_.fullscreen()); // If we have a startup QMetaObject::invokeMethod(this, "OpenStartupProject", Qt::QueuedConnection); break; case CoreParams::kHeadlessExport: qInfo() << "Headless export is not fully implemented yet"; break; case CoreParams::kHeadlessPreCache: qInfo() << "Headless pre-cache is not fully implemented yet"; break; } // Manual crash triggering if 
(core_params_.crash_on_startup()) { const int interval = 5000; qInfo() << "Manual crash was triggered. Application will crash in" << interval << "ms"; QTimer *crash_timer = new QTimer(this); crash_timer->setInterval(interval); connect(crash_timer, &QTimer::timeout, this, []{ abort(); }); crash_timer->start(); } } void Core::Stop() { // Assume all projects have closed gracefully and no auto-recovery is necessary autorecovered_projects_.clear(); SaveUnrecoveredList(); // Save Config Config::Save(); ProjectSerializer::Destroy(); ConformManager::DestroyInstance(); FrameManager::DestroyInstance(); RenderManager::DestroyInstance(); MenuShared::DestroyInstance(); TaskManager::DestroyInstance(); PanelManager::DestroyInstance(); AudioManager::DestroyInstance(); DiskManager::DestroyInstance(); NodeFactory::Destroy(); delete main_window_; main_window_ = nullptr; } MainWindow *Core::main_window() { return main_window_; } UndoStack *Core::undo_stack() { return &undo_stack_; } void Core::ImportFiles(const QStringList &urls, Folder* parent) { if (urls.isEmpty()) { QMessageBox::critical(main_window_, tr("Import error"), tr("Nothing to import")); return; } ProjectImportTask* pim = new ProjectImportTask(parent, urls); if (!pim->GetFileCount()) { // No files to import delete pim; return; } TaskDialog* task_dialog = new TaskDialog(pim, tr("Importing..."), main_window()); connect(task_dialog, &TaskDialog::TaskSucceeded, this, &Core::ImportTaskComplete); task_dialog->open(); } const Tool::Item &Core::tool() const { return tool_; } const Tool::AddableObject &Core::GetSelectedAddableObject() const { return addable_object_; } const QString &Core::GetSelectedTransition() const { return selected_transition_; } void Core::SetSelectedAddableObject(const Tool::AddableObject &obj) { addable_object_ = obj; emit AddableObjectChanged(addable_object_); } void Core::SetSelectedTransitionObject(const QString &obj) { selected_transition_ = obj; } void Core::ClearOpenRecentList() { 
recent_projects_.clear(); SaveRecentProjectsList(); emit OpenRecentListChanged(); } void Core::CreateNewProject() { // If we already have an empty/new project, switch to it if (CloseProject(false)) { Project *p = new Project(); p->Initialize(); AddOpenProject(p); } } const bool &Core::snapping() const { return snapping_; } const QStringList &Core::GetRecentProjects() const { return recent_projects_; } void Core::SetTool(const Tool::Item &tool) { tool_ = tool; emit ToolChanged(tool_); } void Core::SetSnapping(const bool &b) { snapping_ = b; emit SnappingChanged(snapping_); } void Core::DialogAboutShow() { AboutDialog a(false, main_window_); a.exec(); } void Core::DialogImportShow() { // Open dialog for user to select files QStringList files = QFileDialog::getOpenFileNames(main_window_, tr("Import footage...")); // Check if the user actually selected files to import if (!files.isEmpty()) { // Locate the most recently focused Project panel (assume that's the panel the user wants to import into) ProjectPanel* active_project_panel = PanelManager::instance()->MostRecentlyFocused<ProjectPanel>(); Project* active_project; if (active_project_panel == nullptr // Check that we found a Project panel || (active_project = active_project_panel->project()) == nullptr) { // and that we could find an active Project QMessageBox::critical(main_window_, tr("Failed to import footage"), tr("Failed to find active Project panel")); return; } // Get the selected folder in this panel Folder* folder = active_project_panel->GetSelectedFolder(); ImportFiles(files, folder); } } void Core::DialogPreferencesShow() { PreferencesDialog pd(main_window_); pd.exec(); } void Core::DialogProjectPropertiesShow() { Project *proj = GetActiveProject(); if (proj) { ProjectPropertiesDialog ppd(proj, main_window_); ppd.exec(); } else { QMessageBox::critical(main_window_, tr("No Active Project"), tr("No project is currently open to set the properties for"), QMessageBox::Ok); } } void Core::DialogExportShow() { 
if (ViewerOutput* viewer = GetSequenceToExport()) { OpenExportDialogForViewer(viewer, false); } } #ifdef USE_OTIO bool Core::DialogImportOTIOShow(const QList<Sequence*>& sequences) { Project* active_project = GetActiveProject(); OTIOPropertiesDialog opd(sequences, active_project); return opd.exec() == QDialog::Accepted; } #endif void Core::CreateNewFolder() { // Locate the most recently focused Project panel (assume that's the panel the user wants to import into) ProjectPanel* active_project_panel = PanelManager::instance()->MostRecentlyFocused<ProjectPanel>(); Project* active_project; if (active_project_panel == nullptr // Check that we found a Project panel || (active_project = active_project_panel->project()) == nullptr) { // and that we could find an active Project QMessageBox::critical(main_window_, tr("Failed to create new folder"), tr("Failed to find active project")); return; } // Get the selected folder in this panel Folder* folder = active_project_panel->GetSelectedFolder(); // Create new folder Folder* new_folder = new Folder(); // Set a default name new_folder->SetLabel(tr("New Folder")); // Create an undoable command MultiUndoCommand* command = new MultiUndoCommand(); command->add_child(new NodeAddCommand(active_project, new_folder)); command->add_child(new FolderAddChild(folder, new_folder)); Core::instance()->undo_stack()->push(command, tr("Created New Folder")); // Trigger an automatic rename so users can enter the folder name active_project_panel->Edit(new_folder); } void Core::CreateNewSequence() { Project* active_project = GetActiveProject(); if (!active_project) { QMessageBox::critical(main_window_, tr("Failed to create new sequence"), tr("Failed to find active project")); return; } // Create new sequence Sequence* new_sequence = CreateNewSequenceForProject(active_project); SequenceDialog sd(new_sequence, SequenceDialog::kNew, main_window_); // Make sure SequenceDialog doesn't make an undo command for editing the sequence, since we make an undo 
command for // adding it later on sd.SetUndoable(false); if (sd.exec() == QDialog::Accepted) { // Create an undoable command MultiUndoCommand* command = new MultiUndoCommand(); command->add_child(new NodeAddCommand(active_project, new_sequence)); command->add_child(new FolderAddChild(GetSelectedFolderInActiveProject(), new_sequence)); command->add_child(new NodeSetPositionCommand(new_sequence, new_sequence, Node::Position())); command->add_child(new OpenSequenceCommand(new_sequence)); // Create and connect default nodes to new sequence new_sequence->add_default_nodes(command); Core::instance()->undo_stack()->push(command, tr("Created New Sequence")); } else { // If the dialog was accepted, ownership goes to the AddItemCommand. But if we get here, just delete delete new_sequence; } } void Core::AddOpenProject(Project* p, bool add_to_recents) { // Ensure project is not open at the moment if (open_project_ == p) { return; } // If we currently have an empty project, close it first if (open_project_) { CloseProject(false); } SetActiveProject(p); if (!p->filename().isEmpty() && add_to_recents) { PushRecentlyOpenedProject(p->filename()); } } bool Core::AddOpenProjectFromTask(Task *task, bool add_to_recents) { ProjectLoadBaseTask* load_task = static_cast<ProjectLoadBaseTask*>(task); if (!load_task->IsCancelled()) { Project* project = load_task->GetLoadedProject(); if (ValidateFootageInLoadedProject(project, project->GetSavedURL())) { AddOpenProject(project, add_to_recents); main_window_->LoadLayout(load_task->GetLoadedLayout()); return true; } else { delete project; CreateNewProject(); } } return false; } void Core::SetActiveProject(Project *p) { if (open_project_) { disconnect(open_project_, &Project::ModifiedChanged, this, &Core::ProjectWasModified); } open_project_ = p; RenderManager::instance()->SetProject(p); main_window_->SetProject(p); if (open_project_) { connect(open_project_, &Project::ModifiedChanged, this, &Core::ProjectWasModified); } } void 
Core::ImportTaskComplete(Task* task) { ProjectImportTask* import_task = static_cast<ProjectImportTask*>(task); MultiUndoCommand *command = import_task->GetCommand(); foreach (Footage *f, import_task->GetImportedFootage()) { // Look for multi-layer images if (f->GetAudioStreamCount() == 0 && f->GetVideoStreamCount() > 1) { bool all_stills = true; for (int i=0; i<f->GetVideoStreamCount(); i++) { const VideoParams &vs = f->GetVideoParams(i); if (!(vs.video_type() == VideoParams::kVideoTypeStill && vs.enabled() == (i == 0))) { all_stills = false; } } if (all_stills) { QMessageBox d(main_window()); d.setIcon(QMessageBox::Question); d.setWindowTitle(tr("Multi-Layer Image")); d.setText(tr("The file '%1' has multiple layers. Would you like these layers to be " "separated across multiple tracks or merged into a single image?").arg(f->filename())); auto multi_btn = d.addButton(tr("Multiple Layers"), QMessageBox::YesRole); auto single_btn = d.addButton(tr("Single Layer"), QMessageBox::NoRole); auto cancel_btn = d.addButton(QMessageBox::Cancel); d.exec(); if (d.clickedButton() == multi_btn) { for (int i=0; i<f->GetVideoStreamCount(); i++) { VideoParams vs = f->GetVideoParams(i); vs.set_enabled(!vs.enabled()); f->SetVideoParams(vs, i); } } else if (d.clickedButton() == single_btn) { // Do nothing, footage will already be set up this way } else if (d.clickedButton() == cancel_btn) { // Cancel import delete command; return; } } } } if (import_task->HasInvalidFiles()) { ProjectImportErrorDialog d(import_task->GetInvalidFiles(), main_window_); d.exec(); } undo_stack_.push(command, tr("Imported %1 File(s)").arg(import_task->GetImportedFootage().size())); main_window_->SelectFootage(import_task->GetImportedFootage()); } bool Core::ConfirmImageSequence(const QString& filename) { QMessageBox mb(main_window_); mb.setIcon(QMessageBox::Question); mb.setWindowTitle(tr("Possible image sequence detected")); mb.setText(tr("The file '%1' looks like it might be part of an image " "sequence. 
Would you like to import it as such?").arg(filename)); mb.addButton(QMessageBox::Yes); mb.addButton(QMessageBox::No); return (mb.exec() == QMessageBox::Yes); } void Core::ProjectWasModified(bool e) { main_window_->setWindowModified(e); } bool Core::StartHeadlessExport() { const QString& startup_project = core_params_.startup_project(); if (startup_project.isEmpty()) { qCritical().noquote() << tr("You must specify a project file to export"); return false; } if (!QFileInfo::exists(startup_project)) { qCritical().noquote() << tr("Specified project does not exist"); return false; } // Start a load task and try running it ProjectLoadTask plm(startup_project); CLITaskDialog task_dialog(&plm); /* if (task_dialog.Run()) { std::unique_ptr<Project> p = std::unique_ptr<Project>(plm.GetLoadedProject()); QVector<Item*> items = p->get_items_of_type(Item::kSequence); // Check if this project contains sequences if (items.isEmpty()) { qCritical().noquote() << tr("Project contains no sequences, nothing to export"); return false; } Sequence* sequence = nullptr; // Check if this project contains multiple sequences if (items.size() > 1) { qInfo().noquote() << tr("This project has multiple sequences. 
Which do you wish to export?"); for (int i=0;i<items.size();i++) { std::cout << "[" << i << "] " << items.at(i)->GetLabel().toStdString(); } QTextStream stream(stdin); QString sequence_read; int sequence_index = -1; QString quit_code = QStringLiteral("q"); std::string prompt = tr("Enter number (or %1 to cancel): ").arg(quit_code).toStdString(); forever { std::cout << prompt; stream.readLineInto(&sequence_read); if (!QString::compare(sequence_read, quit_code, Qt::CaseInsensitive)) { return false; } bool ok; sequence_index = sequence_read.toInt(&ok); if (ok && sequence_index >= 0 && sequence_index < items.size()) { break; } else { qCritical().noquote() << tr("Invalid sequence number"); } } sequence = static_cast<Sequence*>(items.at(sequence_index)); } else { sequence = static_cast<Sequence*>(items.first()); } ExportParams params; ExportTask export_task(sequence->viewer_output(), p->color_manager(), params); CLITaskDialog export_dialog(&export_task); if (export_dialog.Run()) { qInfo().noquote() << tr("Export succeeded"); return true; } else { qInfo().noquote() << tr("Export failed: %1").arg(export_task.GetError()); return false; } } else { qCritical().noquote() << tr("Project failed to load: %1").arg(plm.GetError()); return false; } */ return false; } void Core::OpenStartupProject() { const QString& startup_project = core_params_.startup_project(); bool startup_project_exists = !startup_project.isEmpty() && QFileInfo::exists(startup_project); // Load startup project if (!startup_project_exists && !startup_project.isEmpty()) { QMessageBox::warning(main_window_, tr("Failed to open startup file"), tr("The project \"%1\" doesn't exist. 
" "A new project will be started instead.").arg(startup_project), QMessageBox::Ok); } if (startup_project_exists) { // If a startup project was set and exists, open it now OpenProjectInternal(startup_project); } else { // If no load project is set, create a new one on open CreateNewProject(); } } void Core::AddRecoveryProjectFromTask(Task *task) { if (AddOpenProjectFromTask(task, false)) { ProjectLoadBaseTask* load_task = static_cast<ProjectLoadBaseTask*>(task); Project* project = load_task->GetLoadedProject(); // Clearing the filename will force the user to re-save it somewhere else project->set_filename(QString()); // Forcing a UUID regeneration will prevent it from saving auto-recoveries in the same place // the original project did project->RegenerateUuid(); // Setting modified will ensure that the program doesn't close and lose the project without // prompting the user first project->set_modified(true); } } void Core::StartGUI(bool full_screen) { // Set UI style StyleManager::Init(); // Set up shared menus MenuShared::CreateInstance(); // Since we're starting GUI mode, create a PanelFocusManager (auto-deletes with QObject) PanelManager::CreateInstance(); // Initialize audio service AudioManager::CreateInstance(); // Initialize disk service DiskManager::CreateInstance(); // Connect the PanelFocusManager to the application's focus change signal connect(qApp, &QApplication::focusChanged, PanelManager::instance(), &PanelManager::FocusChanged); // Set KDDockWidgets flags auto &config = KDDockWidgets::Config::self(); auto flags = config.flags(); flags |= KDDockWidgets::Config::Flag_TabsHaveCloseButton; flags |= KDDockWidgets::Config::Flag_HideTitleBarWhenTabsVisible; flags |= KDDockWidgets::Config::Flag_AlwaysShowTabs; flags |= KDDockWidgets::Config::Flag_AllowReorderTabs; config.setFlags(flags); config.setAbsoluteWidgetMinSize(QSize(1, 1)); // Create main window and open it main_window_ = new MainWindow(); if (full_screen) { main_window_->showFullScreen(); } else { 
main_window_->showMaximized(); } #ifdef Q_OS_WINDOWS // Workaround for Qt bug where menus don't appear in full screen mode // See: https://doc.qt.io/qt-5/windows-issues.html QWindowsWindowFunctions::setHasBorderInFullScreen(main_window_->windowHandle(), true); #endif // Start autorecovery timer using the config value as its interval SetAutorecoveryInterval(OLIVE_CONFIG("AutorecoveryInterval").toInt()); connect(&autorecovery_timer_, &QTimer::timeout, this, &Core::SaveAutorecovery); autorecovery_timer_.start(); // Load recently opened projects list { QFile recent_projects_file(GetRecentProjectsFilePath()); if (recent_projects_file.open(QFile::ReadOnly | QFile::Text)) { QString r = QString::fromUtf8(recent_projects_file.readAll()); if (!r.isEmpty()) { recent_projects_ = r.split('\n'); } recent_projects_file.close(); } emit OpenRecentListChanged(); } } void Core::SaveProjectInternal(const QString& override_filename) { // Create save manager Task* psm; if (open_project_->filename().endsWith(QStringLiteral(".otio"), Qt::CaseInsensitive)) { #ifdef USE_OTIO psm = new SaveOTIOTask(open_project_); #else QMessageBox::critical(main_window_, tr("Missing OpenTimelineIO Libraries"), tr("This build was compiled without OpenTimelineIO and therefore " "cannot open OpenTimelineIO files.")); return; #endif } else { bool use_compression = !open_project_->filename().endsWith(QStringLiteral(".ovexml"), Qt::CaseInsensitive); psm = new ProjectSaveTask(open_project_, use_compression); static_cast<ProjectSaveTask*>(psm)->SetLayout(main_window_->SaveLayout()); if (!override_filename.isEmpty()) { // Set override filename if provided static_cast<ProjectSaveTask*>(psm)->SetOverrideFilename(override_filename); } } // We don't use a TaskDialog here because a model save dialog is annoying, particularly when // saving auto-recoveries that the user can't anticipate. 
Doing this in the main thread will // cause a brief (but often unnoticeable) pause in the GUI, which, while not ideal, is not that // different from what already happened (modal dialog preventing use of the GUI) and in many ways // less annoying (doesn't disrupt any current actions or pull focus from elsewhere). // // Ideally we could do this in a background thread and show progress in the status bar like // Microsoft Word, but that would be far more complex. If it becomes necessary in the future, // we will look into an approach like that. if (psm->Start()) { if (override_filename.isEmpty()) { ProjectSaveSucceeded(psm); } } psm->deleteLater(); } ViewerOutput *Core::GetSequenceToExport() { // First try the most recently focused time based window TimeBasedPanel* time_panel = PanelManager::instance()->MostRecentlyFocused<TimeBasedPanel>(); // If that fails try defaulting to the first timeline (i.e. if a project has just been loaded). if (!time_panel->GetConnectedViewer()) { // Safe to assume there will always be one timeline. time_panel = PanelManager::instance()->GetPanelsOfType<TimelinePanel>().first(); } if (time_panel && time_panel->GetConnectedViewer()) { if (time_panel->GetConnectedViewer()->GetLength() == 0) { QMessageBox::critical(main_window_, tr("Error"), tr("This Sequence is empty. 
There is nothing to export."), QMessageBox::Ok); } else { return time_panel->GetConnectedViewer(); } } else { QMessageBox::critical(main_window_, tr("Error"), tr("No valid sequence detected.\n\nMake sure a sequence is loaded and it has a connected Viewer node."), QMessageBox::Ok); } return nullptr; } QString Core::GetAutoRecoveryIndexFilename() { return QDir(QStandardPaths::writableLocation(QStandardPaths::AppLocalDataLocation)).filePath(QStringLiteral("unrecovered")); } void Core::SaveUnrecoveredList() { QFile autorecovery_index(GetAutoRecoveryIndexFilename()); if (autorecovered_projects_.isEmpty()) { // Recovery list is empty, delete file if exists if (autorecovery_index.exists()) { autorecovery_index.remove(); } } else if (autorecovery_index.open(QFile::WriteOnly)) { // Overwrite recovery list with current list QTextStream ts(&autorecovery_index); bool first = true; foreach (const QUuid& uuid, autorecovered_projects_) { if (first) { first = false; } else { ts << QStringLiteral("\n"); } ts << uuid.toString(); } autorecovery_index.close(); } else { qWarning() << "Failed to save unrecovered list"; } } bool Core::RevertProjectInternal(bool by_opening_existing) { if (open_project_->filename().isEmpty()) { QMessageBox::critical(main_window_, tr("Revert"), tr("This project has not yet been saved, therefore there is no last saved state to revert to.")); } else { QString msg; if (by_opening_existing) { msg = tr("The project \"%1\" is already open. By re-opening it, the project will revert to " "its last saved state. Any unsaved changes will be lost. Do you wish to continue?").arg(open_project_->filename()); } else { msg = tr("This will revert the project \"%1\" back to its last saved state. " "All unsaved changes will be lost. 
Do you wish to continue?").arg(open_project_->name()); } if (QMessageBox::question(main_window_, tr("Revert"), msg, QMessageBox::Ok | QMessageBox::Cancel) == QMessageBox::Ok) { // Copy filename because CloseProject is going to delete `p` QString filename = open_project_->filename(); // Close project without prompting to save it CloseProject(false, true); // NOTE: `open_project_` will be deleted now, so don't try accessing it // Re-open project at the filename OpenProjectInternal(filename); return true; } } return false; } void Core::SaveRecentProjectsList() { // Save recently opened projects QFile recent_projects_file(GetRecentProjectsFilePath()); if (recent_projects_file.open(QFile::WriteOnly | QFile::Text)) { recent_projects_file.write(recent_projects_.join('\n').toUtf8()); recent_projects_file.close(); } } void Core::SaveAutorecovery() { if (OLIVE_CONFIG("AutorecoveryEnabled").toBool()) { if (open_project_ && !open_project_->has_autorecovery_been_saved()) { QDir project_autorecovery_dir(QDir(FileFunctions::GetAutoRecoveryRoot()).filePath(open_project_->GetUuid().toString())); if (FileFunctions::DirectoryIsValid(project_autorecovery_dir)) { QString this_autorecovery_path = project_autorecovery_dir.filePath(QStringLiteral("%1.ove").arg(QString::number(QDateTime::currentSecsSinceEpoch()))); SaveProjectInternal(this_autorecovery_path); open_project_->set_autorecovery_saved(true); // Keep track of projects that where the "newest" save is the recovery project if (!autorecovered_projects_.contains(open_project_->GetUuid())) { autorecovered_projects_.append(open_project_->GetUuid()); } qDebug() << "Saved auto-recovery to:" << this_autorecovery_path; // Write human-readable real name so it's not just a UUID { QFile realname_file(project_autorecovery_dir.filePath(QStringLiteral("realname.txt"))); realname_file.open(QFile::WriteOnly); realname_file.write(open_project_->pretty_filename().toUtf8()); realname_file.close(); } int64_t max_recoveries_per_file = 
OLIVE_CONFIG("AutorecoveryMaximum").toLongLong(); // Since we write an extra file, increment total allowed files by 1 max_recoveries_per_file++; // Delete old entries QStringList recovery_files = project_autorecovery_dir.entryList(QDir::Files | QDir::NoDotAndDotDot, QDir::Name); while (recovery_files.size() > max_recoveries_per_file) { bool deleted = false; for (int i=0; i<recovery_files.size(); i++) { const QString& f = recovery_files.at(i); if (f.endsWith(QStringLiteral(".ove"), Qt::CaseInsensitive)) { QString delete_full_path = project_autorecovery_dir.filePath(f); qDebug() << "Deleted old recovery:" << delete_full_path; QFile::remove(delete_full_path); recovery_files.removeAt(i); deleted = true; break; } } if (!deleted) { // For some reason none of the files were deletable. Break so we don't end up in // an infinite loop. break; } } } else { QMessageBox::critical(main_window_, tr("Auto-Recovery Error"), tr("Failed to save auto-recovery to \"%1\". " "Olive may not have permission to this directory.") .arg(project_autorecovery_dir.absolutePath())); } } // Save index SaveUnrecoveredList(); } } void Core::ProjectSaveSucceeded(Task* task) { Project* p = static_cast<ProjectSaveTask*>(task)->GetProject(); PushRecentlyOpenedProject(p->filename()); p->set_modified(false); autorecovered_projects_.removeOne(p->GetUuid()); SaveUnrecoveredList(); ShowStatusBarMessage(tr("Saved to \"%1\" successfully").arg(p->filename())); } Project* Core::GetActiveProject() const { return open_project_; } Folder *Core::GetSelectedFolderInActiveProject() const { ProjectPanel* active_project_panel = PanelManager::instance()->MostRecentlyFocused<ProjectPanel>(); if (active_project_panel) { return active_project_panel->GetSelectedFolder(); } else { return nullptr; } } Timecode::Display Core::GetTimecodeDisplay() const { return static_cast<Timecode::Display>(OLIVE_CONFIG("TimecodeDisplay").toInt()); } void Core::SetTimecodeDisplay(Timecode::Display d) { OLIVE_CONFIG("TimecodeDisplay") = d; emit 
TimecodeDisplayChanged(d); } void Core::SetAutorecoveryInterval(int minutes) { // Convert minutes to milliseconds autorecovery_timer_.setInterval(minutes * 60000); } void Core::CopyStringToClipboard(const QString &s) { QGuiApplication::clipboard()->setText(s); } QString Core::PasteStringFromClipboard() { return QGuiApplication::clipboard()->text(); } QString Core::GetProjectFilter(bool include_any_filter) { static const QVector< QPair<QString, QString> > FILTERS = { // Standard compressed Olive project {tr("Olive Project"), QStringLiteral("ove")}, // Uncompressed XML Olive project {tr("Olive Project (Uncompressed XML)"), QStringLiteral("ovexml")}, // OpenTimelineIO project, if available #ifdef USE_OTIO {tr("OpenTimelineIO"), QStringLiteral("otio")} #endif }; QStringList filters; filters.reserve(FILTERS.size() + 1); if (include_any_filter) { QStringList combined; for (auto it=FILTERS.cbegin(); it!=FILTERS.cend(); it++) { combined.append(QStringLiteral("*.%1").arg(it->second)); } filters.append(QStringLiteral("%1 (%2)").arg(tr("All Supported Projects"), combined.join(' '))); } for (auto it=FILTERS.cbegin(); it!=FILTERS.cend(); it++) { filters.append(QStringLiteral("%1 (*.%2)").arg(it->first, it->second)); } return filters.join(QStringLiteral(";;")); } QString Core::GetRecentProjectsFilePath() { return QDir(FileFunctions::GetConfigurationLocation()).filePath(QStringLiteral("recent")); } void Core::SetStartupLocale() { // Set language if (!core_params_.startup_language().isEmpty()) { if (translator_->load(core_params_.startup_language()) && QApplication::installTranslator(translator_)) { return; } else { qWarning() << "Failed to load translation file. 
Falling back to defaults."; } } QString use_locale = OLIVE_CONFIG("Language").toString(); if (use_locale.isEmpty()) { // No configured locale, auto-detect the system's locale use_locale = QLocale::system().name(); } if (!SetLanguage(use_locale)) { qWarning() << "Trying to use locale" << use_locale << "but couldn't find a translation for it"; } } bool Core::SaveProject() { if (open_project_->filename().isEmpty()) { return SaveProjectAs(); } else { SaveProjectInternal(); return true; } } void Core::ShowStatusBarMessage(const QString &s, int timeout) { main_window_->statusBar()->showMessage(s, timeout); } void Core::ClearStatusBarMessage() { main_window_->statusBar()->clearMessage(); } void Core::OpenRecoveryProject(const QString &filename) { OpenProjectInternal(filename, true); } void Core::OpenNodeInViewer(ViewerOutput *viewer) { main_window_->OpenNodeInViewer(viewer); } void Core::OpenExportDialogForViewer(ViewerOutput *viewer, bool start_still_image) { ExportDialog* ed = new ExportDialog(viewer, start_still_image, main_window_); connect(ed, &ExportDialog::finished, ed, &ExportDialog::deleteLater); ed->open(); connect(ed, &ExportDialog::RequestImportFile, this, &Core::ImportSingleFile); } void Core::CheckForAutoRecoveries() { QFile autorecovery_index(GetAutoRecoveryIndexFilename()); if (autorecovery_index.exists()) { // Uh-oh, we have auto-recoveries to prompt if (autorecovery_index.open(QFile::ReadOnly)) { QStringList recovery_filenames = QString::fromUtf8(autorecovery_index.readAll()).split('\n'); AutoRecoveryDialog ard(tr("The following projects had unsaved changes when Olive " "forcefully quit. Would you like to load them?"), recovery_filenames, true, main_window_); ard.exec(); autorecovery_index.close(); // Delete recovery index since we don't need it anymore QFile::remove(GetAutoRecoveryIndexFilename()); } else { QMessageBox::critical(main_window_, tr("Auto-Recovery Error"), tr("Found auto-recoveries but failed to load the auto-recovery index. 
" "Auto-recover projects will have to be opened manually.\n\n" "Your recoverable projects are still available at: %1").arg(FileFunctions::GetAutoRecoveryRoot())); } } } void Core::BrowseAutoRecoveries() { // List all auto-recovery entries AutoRecoveryDialog ard(tr("The following project versions have been auto-saved:"), QDir(FileFunctions::GetAutoRecoveryRoot()).entryList(QDir::Dirs | QDir::NoDotAndDotDot), false, main_window_); ard.exec(); } void Core::RequestPixelSamplingInViewers(bool e) { if (e) { if (pixel_sampling_users_ == 0) { // Signal to start pixel sampling emit ColorPickerEnabled(true); } pixel_sampling_users_++; } else { pixel_sampling_users_--; if (pixel_sampling_users_ == 0) { // Signal to end pixel sampling emit ColorPickerEnabled(false); } } } void Core::WarnCacheFull() { if (!shown_cache_full_warning_ && main_window_) { shown_cache_full_warning_ = true; QMessageBox::warning(main_window_, tr("Disk Cache Full"), tr("The disk cache is currently full and Olive is having to delete old " "frames to keep it within the limits set in the Disk preferences. This " "will result in SIGNIFICANTLY reduced cache performance.\n\n" "To remedy this, please do one of the following:\n\n" "1. Manually clear the disk cache in Disk preferences.\n" "2. Increase the maximum disk cache size in Disk preferences.\n" "3. Reduce usage of the disk cache (e.g. 
disable auto-cache or only cache specific sections of your sequence).")); } } bool Core::SaveProjectAs() { QFileDialog fd(main_window_, tr("Save Project As")); fd.setAcceptMode(QFileDialog::AcceptSave); fd.setNameFilter(GetProjectFilter(false)); if (fd.exec() == QDialog::Accepted) { QString fn = fd.selectedFiles().first(); // Somewhat hacky method of extracting the extension from the name filter const QString& name_filter = fd.selectedNameFilter(); int ext_index = name_filter.indexOf(QStringLiteral("(*.")) + 3; QString extension = name_filter.mid(ext_index, name_filter.size() - ext_index - 1); fn = FileFunctions::EnsureFilenameExtension(fn, extension); open_project_->set_filename(fn); SaveProjectInternal(); return true; } return false; } void Core::RevertProject() { RevertProjectInternal(false); } void Core::PushRecentlyOpenedProject(const QString& s) { if (s.isEmpty()) { return; } int existing_index = recent_projects_.indexOf(s); if (existing_index >= 0) { recent_projects_.move(existing_index, 0); } else { recent_projects_.prepend(s); const int kMaximumRecentProjects = 10; while (recent_projects_.size() > kMaximumRecentProjects) { recent_projects_.removeLast(); } } SaveRecentProjectsList(); emit OpenRecentListChanged(); } void Core::OpenProjectInternal(const QString &filename, bool recovery_project) { if (open_project_) { // Comparing QFileInfos will handle case insensitivity and both slash directions on platforms // where this is necessary (not naming any names *cough* Windows) if (QFileInfo(open_project_->filename()) == QFileInfo(filename)) { // This project is already open bool reverted = RevertProjectInternal(true); if (!reverted) { // Calling this will focus attention to the project that the user just tried to re-open AddOpenProject(open_project_); } // Don't do anything else return; } } Task* load_task; if (filename.endsWith(QStringLiteral(".otio"), Qt::CaseInsensitive)) { // Load OpenTimelineIO project #ifdef USE_OTIO load_task = new LoadOTIOTask(filename); 
#else QMessageBox::critical(main_window_, tr("Missing OpenTimelineIO Libraries"), tr("This build was compiled without OpenTimelineIO and therefore " "cannot open OpenTimelineIO files.")); return; #endif } else { // Fallback to regular OVE project load_task = new ProjectLoadTask(filename); } TaskDialog* task_dialog = new TaskDialog(load_task, tr("Load Project"), main_window()); if (recovery_project) { connect(task_dialog, &TaskDialog::TaskSucceeded, this, &Core::AddRecoveryProjectFromTask); } else { connect(task_dialog, &TaskDialog::TaskSucceeded, this, &Core::AddOpenProjectFromTaskAndAddToRecents); } task_dialog->open(); } void Core::ImportSingleFile(const QString &f) { if (Project *p = GetActiveProject()) { ImportFiles({f}, p->root()); } } int Core::CountFilesInFileList(const QFileInfoList &filenames) { int file_count = 0; foreach (const QFileInfo& f, filenames) { // For some reason QDir::NoDotAndDotDot doesn't work with entryInfoList, so we have to check manually if (f.fileName() == "." 
|| f.fileName() == "..") { continue; } else if (f.isDir()) { QFileInfoList info_list = QDir(f.absoluteFilePath()).entryInfoList(); file_count += CountFilesInFileList(info_list); } else { file_count++; } } return file_count; } bool Core::LabelNodes(const QVector<Node *> &nodes, MultiUndoCommand *parent) { if (nodes.isEmpty()) { return false; } bool ok; QString start_label = nodes.first()->GetLabel(); for (int i=1; i<nodes.size(); i++) { if (nodes.at(i)->GetLabel() != start_label) { // Not all the nodes share the same name, so we'll start with a blank one start_label.clear(); break; } } QString s = QInputDialog::getText(main_window_, tr("Label Node"), tr("Set node label"), QLineEdit::Normal, start_label, &ok); if (ok) { NodeRenameCommand* rename_command = new NodeRenameCommand(); foreach (Node* n, nodes) { rename_command->AddNode(n, s); } if (parent) { parent->add_child(rename_command); } else { undo_stack_.push(rename_command, tr("Renamed %1 Node(s)").arg(nodes.size())); } return true; } return false; } Sequence *Core::CreateNewSequenceForProject(const QString &format, Project* project) { Sequence* new_sequence = new Sequence(); // Get default name for this sequence (in the format "Sequence N", the first that doesn't exist) int sequence_number = 1; QString sequence_name; do { sequence_name = format.arg(sequence_number); sequence_number++; } while (project->root()->ChildExistsWithName(sequence_name)); new_sequence->SetLabel(sequence_name); return new_sequence; } void Core::OpenProjectFromRecentList(int index) { const QString& open_fn = recent_projects_.at(index); if (QFileInfo::exists(open_fn)) { OpenProjectInternal(open_fn); } else if (QMessageBox::information(main_window(), tr("Cannot open recent project"), tr("The project \"%1\" doesn't exist. 
Would you like to remove this file from the recent list?").arg(open_fn), QMessageBox::Yes | QMessageBox::No) == QMessageBox::Yes) { recent_projects_.removeAt(index); SaveRecentProjectsList(); emit OpenRecentListChanged(); } } bool Core::CloseProject(bool auto_open_new, bool ignore_modified) { if (open_project_) { if (open_project_->is_modified() && !ignore_modified) { QMessageBox mb(main_window_); mb.setWindowModality(Qt::WindowModal); mb.setIcon(QMessageBox::Question); mb.setWindowTitle(tr("Unsaved Changes")); mb.setText(tr("The project '%1' has unsaved changes. Would you like to save them?") .arg(open_project_->name())); QPushButton* yes_btn = mb.addButton(tr("Save"), QMessageBox::YesRole); mb.addButton(tr("Don't Save"), QMessageBox::NoRole); QPushButton* cancel_btn = mb.addButton(QMessageBox::Cancel); mb.exec(); if (mb.clickedButton() == cancel_btn) { // Stop closing projects if the user clicked cancel return false; } if (mb.clickedButton() == yes_btn && !SaveProject()) { // The save failed, stop closing projects return false; } } // For safety, the undo stack is cleared so no commands try to affect a freed project undo_stack_.clear(); Project *tmp = open_project_; SetActiveProject(nullptr); delete tmp; } // Ensure a project is always active if (auto_open_new) { CreateNewProject(); } return true; } void Core::CacheActiveSequence(bool in_out_only) { TimeBasedPanel* p = PanelManager::instance()->MostRecentlyFocused<TimeBasedPanel>(); if (p && p->GetConnectedViewer()) { // Hacky but works for now // Find Viewer attached to this TimeBasedPanel QList<ViewerPanel*> all_viewers = PanelManager::instance()->GetPanelsOfType<ViewerPanel>(); ViewerPanel* found_panel = nullptr; foreach (ViewerPanel* viewer, all_viewers) { if (viewer->GetConnectedViewer() == p->GetConnectedViewer()) { found_panel = viewer; break; } } if (found_panel) { if (in_out_only) { found_panel->CacheSequenceInOut(); } else { found_panel->CacheEntireSequence(); } } else { 
QMessageBox::critical(main_window_, tr("Failed to cache sequence"), tr("No active viewer found with this sequence."), QMessageBox::Ok); } } } QString StripWindowsDriveLetter(QString s) { // HACK: On Windows, absolute paths are saved with a drive letter (e.g. "C:\video.mp4"). Below, // we use Qt's relative path system to resolve when an entire project may be in a different // folder, but the files are all in the same place relatively to the project. Unfortunately, // Qt chooses not to understand paths from Windows on non-Windows platforms, which causes // this to break when a project is moving from Windows to non-Windows. To resolve that, if // we're on a non-Windows platform and we detect a Windows path (i.e. a path with a drive // letter at the start), we strip it off. We also convert any back-slashes to forward-slashes // because on Windows they are interchangeable and on non-Windows they are not. #ifndef Q_OS_WINDOWS if (s.size() >= 2) { if (s.at(0).isLetter() && s.at(1) == ':') { s = s.mid(2); s.replace('\\', '/'); } } #endif return s; } bool Core::ValidateFootageInLoadedProject(Project* project, const QString& project_saved_url) { QVector<Footage*> footage_we_couldnt_validate; for (Node *n : project->nodes()) { if (Footage *footage = dynamic_cast<Footage*>(n)) { QString footage_fn = StripWindowsDriveLetter(footage->filename()); QString project_fn = StripWindowsDriveLetter(project_saved_url); if (!QFileInfo::exists(footage_fn) && !project_saved_url.isEmpty()) { // If the footage doesn't exist, it might have moved with the project const QString& project_current_url = project->filename(); if (project_current_url != project_fn) { // Project has definitely moved, try to resolve relative paths QDir saved_dir(QFileInfo(project_fn).dir()); QDir true_dir(QFileInfo(project_current_url).dir()); QString relative_filename = saved_dir.relativeFilePath(footage_fn); QString transformed_abs_filename = true_dir.filePath(relative_filename); if 
(QFileInfo::exists(transformed_abs_filename)) { // Use this file instead qInfo() << "Resolved" << footage_fn << "relatively to" << transformed_abs_filename; footage->set_filename(transformed_abs_filename); } } } if (QFileInfo::exists(footage->filename())) { // Assume valid footage->SetValid(); } else { footage_we_couldnt_validate.append(footage); } } } if (!footage_we_couldnt_validate.isEmpty()) { FootageRelinkDialog frd(footage_we_couldnt_validate, main_window_); if (frd.exec() == QDialog::Rejected) { return false; } } return true; } bool Core::SetLanguage(const QString &locale) { QApplication::removeTranslator(translator_); QString resource_path = QStringLiteral(":/ts/%1").arg(locale); if (translator_->load(resource_path) && QApplication::installTranslator(translator_)) { return true; } return false; } void Core::OpenProject() { QString file = QFileDialog::getOpenFileName(main_window_, tr("Open Project"), QString(), GetProjectFilter(true)); if (!file.isEmpty()) { OpenProjectInternal(file); } } Core::CoreParams::CoreParams() : mode_(kRunNormal), run_fullscreen_(false), crash_(false) { } }
49,304
C++
.cpp
1,317
32.106302
158
0.680702
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,801
timelineundoripple.cpp
olive-editor_olive/app/timeline/timelineundoripple.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "timelineundoripple.h" #include "timelineundocommon.h" namespace olive { // // TrackRippleRemoveAreaCommand // TrackRippleRemoveAreaCommand::TrackRippleRemoveAreaCommand(Track* track, const TimeRange& range) : track_(track), range_(range), allow_splitting_gaps_(false), splice_split_command_(nullptr) { trim_out_.block = nullptr; trim_in_.block = nullptr; } TrackRippleRemoveAreaCommand::~TrackRippleRemoveAreaCommand() { delete splice_split_command_; qDeleteAll(remove_block_commands_); } void TrackRippleRemoveAreaCommand::prepare() { // Determine precisely what will be happening to these tracks Block* first_block = track_->NearestBlockBeforeOrAt(range_.in()); if (!first_block) { // No blocks at this time, nothing to be done on this track return; } // Determine if this first block is getting trimmed or removed bool first_block_is_out_trimmed = first_block->in() < range_.in(); bool first_block_is_in_trimmed = first_block->out() > range_.out(); // Set's the block that any insert command should insert AFTER. If the first block is not // getting out-trimmed, that means first block is either getting removed or in-trimmed, which // means any insert should happen before it insert_previous_ = first_block_is_out_trimmed ? 
first_block : first_block->previous(); // If it's getting trimmed, determine if it's actually getting spliced if (first_block_is_out_trimmed && first_block_is_in_trimmed) { if (!allow_splitting_gaps_ && dynamic_cast<GapBlock*>(first_block)) { // As a rule, we don't split gaps, so we just treat it as a trim of the range requested trim_out_ = {first_block, first_block->length(), first_block->length() - range_.length()}; } else { // This block is getting spliced, so we'll handle that later splice_split_command_ = new BlockSplitCommand(first_block, range_.in()); } } else { // It's just getting trimmed or removed, so we'll append that operation if (first_block_is_out_trimmed) { trim_out_ = {first_block, first_block->length(), first_block->length() - (first_block->out() - range_.in())}; } else if (first_block_is_in_trimmed) { // Block is getting in trimmed trim_in_ = {first_block, first_block->length(), first_block->length() - (range_.out() - first_block->in())}; } else { // We know for sure this block is within the range so it will be removed removals_.append(RemoveOperation({first_block, first_block->previous()})); } // If the first block is getting in trimmed, we're already at the end of our range if (!first_block_is_in_trimmed) { // Loop through the rest of the blocks and determine what to do with those for (Block* next=first_block->next(); next; next=next->next()) { bool trimming = (next->out() > range_.out()); if (trimming) { trim_in_ = {next, next->length(), next->length() - (range_.out() - next->in())}; break; } else { removals_.append(RemoveOperation({next, next->previous()})); if (next->out() == range_.out()) { break; } } } } } } void TrackRippleRemoveAreaCommand::redo() { if (splice_split_command_) { // We're just splicing splice_split_command_->redo_now(); // Trim the in of the split Block* split = splice_split_command_->new_block(); split->set_length_and_media_in(split->length() - (range_.out() - split->in())); } else { if (trim_out_.block) { 
trim_out_.block->set_length_and_media_out(trim_out_.new_length); } if (trim_in_.block) { trim_in_.block->set_length_and_media_in(trim_in_.new_length); } // Perform removals if (!removals_.isEmpty()) { foreach (auto op, removals_) { // Ripple remove them all first track_->RippleRemoveBlock(op.block); } // Create undo commands for node removals where possible if (remove_block_commands_.isEmpty()) { foreach (auto op, removals_) { if (NodeCanBeRemoved(op.block)) { remove_block_commands_.append(CreateRemoveCommand(op.block)); } } } foreach (UndoCommand* c, remove_block_commands_) { c->redo_now(); } } } } void TrackRippleRemoveAreaCommand::undo() { if (splice_split_command_) { splice_split_command_->undo_now(); } else { if (trim_out_.block) { trim_out_.block->set_length_and_media_out(trim_out_.old_length); } if (trim_in_.block) { trim_in_.block->set_length_and_media_in(trim_in_.old_length); } // Un-remove any blocks for (int i=remove_block_commands_.size()-1; i>=0; i--) { remove_block_commands_.at(i)->undo_now(); } foreach (auto op, removals_) { track_->InsertBlockAfter(op.block, op.before); } } } // // TrackListRippleRemoveAreaCommand // void TrackListRippleRemoveAreaCommand::prepare() { foreach (Track* track, list_->GetTracks()) { if (track->IsLocked()) { continue; } TrackRippleRemoveAreaCommand* c = new TrackRippleRemoveAreaCommand(track, range_); commands_.append(c); working_tracks_.append(track); } } void TrackListRippleRemoveAreaCommand::redo() { foreach (TrackRippleRemoveAreaCommand* c, commands_) { c->redo_now(); } } void TrackListRippleRemoveAreaCommand::undo() { foreach (TrackRippleRemoveAreaCommand* c, commands_) { c->undo_now(); } } // // TimelineRippleRemoveAreaCommand // TimelineRippleRemoveAreaCommand::TimelineRippleRemoveAreaCommand(Sequence* timeline, rational in, rational out) : timeline_(timeline) { for (int i=0; i<Track::kCount; i++) { add_child(new TrackListRippleRemoveAreaCommand(timeline->track_list(static_cast<Track::Type>(i)), in, out)); } } // 
// TrackListRippleToolCommand // TrackListRippleToolCommand::TrackListRippleToolCommand(TrackList* track_list, const QHash<Track*, RippleInfo>& info, const rational& ripple_movement, const Timeline::MovementMode& movement_mode) : track_list_(track_list), info_(info), ripple_movement_(ripple_movement), movement_mode_(movement_mode) { } void TrackListRippleToolCommand::ripple(bool redo) { if (info_.isEmpty()) { return; } // The following variables are used to determine how much of the cache to invalidate // If we can shift, we will shift from the latest out before the ripple to the latest out after, // since those sections will be unchanged by this ripple rational pre_latest_out = RATIONAL_MIN; rational post_latest_out = RATIONAL_MIN; // Make timeline changes for (auto it=info_.cbegin(); it!=info_.cend(); it++) { Track* track = it.key(); const RippleInfo& info = it.value(); WorkingData working_data = working_data_.value(track); Block* b = info.block; // Generate block length rational new_block_length; rational operation_movement = ripple_movement_; if (movement_mode_ == Timeline::kTrimIn) { operation_movement = -operation_movement; } if (!redo) { operation_movement = -operation_movement; } if (b) { new_block_length = b->length() + operation_movement; } rational pre_shift; rational post_shift; if (info.append_gap) { // Rather than rippling the referenced block, we'll insert a gap and ripple with that GapBlock* gap = working_data.created_gap; if (redo) { if (!gap) { gap = new GapBlock(); gap->set_length_and_media_out(qAbs(ripple_movement_)); working_data.created_gap = gap; } gap->setParent(track->parent()); track->InsertBlockBefore(gap, b); // As an insertion, we will shift from the gap's in to the gap's out pre_shift = gap->in(); post_shift = gap->out(); working_data.earliest_point_of_change = gap->in(); } else { // As a removal, we will shift from the gap's out to the gap's in pre_shift = gap->out(); post_shift = gap->in(); track->RippleRemoveBlock(gap); 
gap->setParent(&memory_manager_); } } else if ((redo && new_block_length.isNull()) || (!redo && !b->track())) { // The ripple is the length of this block. We assume that for this to happen, it must have // been a gap that we will now remove. if (redo) { // The earliest point changes will happen is at the start of this block working_data.earliest_point_of_change = b->in(); // As a removal, we will be shifting from the out point to the in point pre_shift = b->out(); post_shift = b->in(); // Remove gap from track and from graph working_data.removed_gap_after = b->previous(); track->RippleRemoveBlock(b); b->setParent(&memory_manager_); } else { // Restore gap to graph and track b->setParent(track->parent()); track->InsertBlockAfter(b, working_data.removed_gap_after); // The earliest point changes will happen is at the start of this block working_data.earliest_point_of_change = b->in(); // As an insert, we will be shifting from the block's in point to its out point pre_shift = b->in(); post_shift = b->out(); } } else { // Store old length working_data.old_length = b->length(); if (movement_mode_ == Timeline::kTrimIn) { // The earliest point changes will occur is in point of this bloc working_data.earliest_point_of_change = b->in(); // Undo the trim in inversion we do above, this will still be inverted accurately for // undoing where appropriate rational inverted = -operation_movement; if (inverted > 0) { pre_shift = b->in() + inverted; post_shift = b->in(); } else { pre_shift = b->in(); post_shift = b->in() - inverted; } // Update length b->set_length_and_media_in(new_block_length); } else { // The earliest point changes will occur is the out point if trimming out or the in point // if trimming in working_data.earliest_point_of_change = b->out(); // The latest out before the ripple is this block's current out point pre_shift = b->out(); // Update length b->set_length_and_media_out(new_block_length); // The latest out after the ripple is this block's out point after the 
length change post_shift = b->out(); } } working_data_.insert(it.key(), working_data); pre_latest_out = qMax(pre_latest_out, pre_shift); post_latest_out = qMax(post_latest_out, post_shift); } } // // TimelineRippleDeleteGapsAtRegionsCommand // void TimelineRippleDeleteGapsAtRegionsCommand::prepare() { int max_gaps = 0; QHash<Track*, QVector<RemovalRequest> > requested_gaps; // Convert regions to gaps for (const QPair<Track*, TimeRange> &region : qAsConst(regions_)) { Track *track = region.first; const TimeRange &range = region.second; GapBlock *gap = dynamic_cast<GapBlock*>(track->NearestBlockBeforeOrAt(range.in())); if (gap) { QVector<RemovalRequest> &gaps_on_track = requested_gaps[track]; RemovalRequest this_req = {gap, range}; // Insertion sort bool inserted = false; for (int i=0; i<gaps_on_track.size(); i++) { if (gaps_on_track.at(i).range.in() < range.in()) { gaps_on_track.insert(i, this_req); inserted = true; break; } } if (!inserted) { gaps_on_track.append(this_req); } max_gaps = qMax(max_gaps, gaps_on_track.size()); } else { qWarning() << "Failed to find corresponding gap to region"; } } // For each gap on each track, find a corresponding gap on every other track (which may include // a requested gap) to ripple in order to keep everything synchronized QHash<GapBlock*, rational> gap_lengths; for (int gap_index=0; gap_index<max_gaps; gap_index++) { rational earliest_point = RATIONAL_MAX; rational ripple_length = RATIONAL_MAX; rational latest_point = RATIONAL_MIN; foreach (const QVector<RemovalRequest> &gaps_on_track, requested_gaps) { if (gap_index < gaps_on_track.size()) { const RemovalRequest &gap = gaps_on_track.at(gap_index); earliest_point = qMin(earliest_point, gap.range.in()); ripple_length = qMin(ripple_length, gap.range.length()); latest_point = qMax(latest_point, gap.range.out()); } } // Determine which gaps will be involved in this operation QVector<GapBlock*> gaps; foreach (Track* track, timeline_->GetTracks()) { if (track->IsLocked()) { continue; 
} const QVector<RemovalRequest> &requested_gaps_on_track = requested_gaps.value(track); GapBlock *gap = nullptr; if (gap_index < requested_gaps_on_track.size()) { // A requested gap was at this index, use it gap = requested_gaps_on_track.at(gap_index).gap; } else { // No requested gap was at this index, find one Block *block = track->NearestBlockAfterOrAt(earliest_point); if (block) { // Found a block, test if it's a gap gap = dynamic_cast<GapBlock*>(block); if (!gap) { if (block->in() == earliest_point) { if (block->next()) { gap = dynamic_cast<GapBlock*>(block->next()); if (!gap) { ripple_length = 0; } } } else { gap = dynamic_cast<GapBlock*>(block->previous()); if (!gap) { ripple_length = 0; } } } } else { // Assume track finishes here and track won't be affected by this operation } } if (gap) { gaps.append(gap); if (!gap_lengths.contains(gap)) { gap_lengths.insert(gap, gap->length()); } ripple_length = qMin(ripple_length, gap_lengths.value(gap)); } if (ripple_length == 0) { break; } } if (ripple_length > 0) { foreach (GapBlock *gap, gaps) { if (gap_lengths.value(gap) == ripple_length) { commands_.append(new TrackRippleRemoveBlockCommand(gap->track(), gap)); } else { gap_lengths[gap] -= ripple_length; commands_.append(new BlockResizeCommand(gap, gap_lengths.value(gap))); } } } } } void TimelineRippleDeleteGapsAtRegionsCommand::redo() { for (auto it=commands_.cbegin(); it!=commands_.cend(); it++) { (*it)->redo_now(); } } void TimelineRippleDeleteGapsAtRegionsCommand::undo() { for (auto it=commands_.crbegin(); it!=commands_.crend(); it++) { (*it)->undo_now(); } } }
15,939
C++
.cpp
430
30.351163
113
0.627578
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,802
timelineworkarea.cpp
olive-editor_olive/app/timeline/timelineworkarea.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "timelineworkarea.h" #include "common/xmlutils.h" namespace olive { const rational TimelineWorkArea::kResetIn = 0; const rational TimelineWorkArea::kResetOut = RATIONAL_MAX; TimelineWorkArea::TimelineWorkArea(QObject *parent) : QObject(parent), workarea_enabled_(false) { } bool TimelineWorkArea::enabled() const { return workarea_enabled_; } void TimelineWorkArea::set_enabled(bool e) { workarea_enabled_ = e; emit EnabledChanged(workarea_enabled_); } const TimeRange &TimelineWorkArea::range() const { return workarea_range_; } void TimelineWorkArea::set_range(const TimeRange &range) { workarea_range_ = range; emit RangeChanged(workarea_range_); } bool TimelineWorkArea::load(QXmlStreamReader *reader) { rational range_in = this->in(); rational range_out = this->out(); uint version = 0; XMLAttributeLoop(reader, attr) { if (attr.name() == QStringLiteral("version")) { version = attr.value().toUInt(); } } Q_UNUSED(version) while (XMLReadNextStartElement(reader)) { if (reader->name() == QStringLiteral("enabled")) { this->set_enabled(reader->readElementText() != QStringLiteral("0")); } else if (reader->name() == QStringLiteral("in")) { range_in = rational::fromString(reader->readElementText().toStdString()); } else if (reader->name() == QStringLiteral("out")) { range_out = 
rational::fromString(reader->readElementText().toStdString()); } else { reader->skipCurrentElement(); } } TimeRange loaded_workarea(range_in, range_out); if (loaded_workarea != this->range()) { this->set_range(loaded_workarea); } return true; } void TimelineWorkArea::save(QXmlStreamWriter *writer) const { writer->writeAttribute(QStringLiteral("version"), QString::number(1)); writer->writeTextElement(QStringLiteral("enabled"), QString::number(this->enabled())); writer->writeTextElement(QStringLiteral("in"), QString::fromStdString(this->in().toString())); writer->writeTextElement(QStringLiteral("out"), QString::fromStdString(this->out().toString())); } const rational &TimelineWorkArea::in() const { return workarea_range_.in(); } const rational &TimelineWorkArea::out() const { return workarea_range_.out(); } const rational &TimelineWorkArea::length() const { return workarea_range_.length(); } }
3,045
C++
.cpp
90
30.988889
98
0.740184
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,803
timelineundosplit.cpp
olive-editor_olive/app/timeline/timelineundosplit.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "timelineundosplit.h" #include "node/block/clip/clip.h" #include "node/block/transition/transition.h" #include "node/nodeundo.h" namespace olive { // // BlockSplitCommand // void BlockSplitCommand::prepare() { reconnect_tree_command_ = new MultiUndoCommand(); new_block_ = static_cast<Block*>(Node::CopyNodeInGraph(block_, reconnect_tree_command_)); } void BlockSplitCommand::redo() { old_length_ = block_->length(); Q_ASSERT(point_ > block_->in() && point_ < block_->out()); reconnect_tree_command_->redo_now(); // Determine our new lengths rational new_length = point_ - block_->in(); rational new_part_length = block_->out() - point_; // Begin an operation Track* track = block_->track(); // Set lengths block_->set_length_and_media_out(new_length); new_block()->set_length_and_media_in(new_part_length); // Insert new block track->InsertBlockAfter(new_block(), block_); if (ClipBlock *new_clip = dynamic_cast<ClipBlock*>(new_block_)) { ClipBlock *old_clip = static_cast<ClipBlock*>(block_); new_clip->AddCachePassthroughFrom(old_clip); } // If the block had an out transition, we move it to the new block moved_transition_ = NodeInput(); TransitionBlock* potential_transition = dynamic_cast<TransitionBlock*>(new_block()->next()); if (potential_transition) { for (const Node::OutputConnection& output : 
block_->output_connections()) { if (output.second.node() == potential_transition) { moved_transition_ = NodeInput(potential_transition, TransitionBlock::kOutBlockInput); Node::DisconnectEdge(block_, moved_transition_); Node::ConnectEdge(new_block(), moved_transition_); break; } } } } void BlockSplitCommand::undo() { Track* track = block_->track(); if (moved_transition_.IsValid()) { Node::DisconnectEdge(new_block(), moved_transition_); Node::ConnectEdge(block_, moved_transition_); } block_->set_length_and_media_out(old_length_); track->RippleRemoveBlock(new_block()); // If we ran a reconnect command, disconnect now reconnect_tree_command_->undo_now(); } // // BlockSplitPreservingLinksCommand // Block *BlockSplitPreservingLinksCommand::GetSplit(Block *original, int time_index) const { if (time_index >= 0 && time_index < times_.size()) { int original_index = blocks_.indexOf(original); if (original_index != -1) { return splits_.at(time_index).at(original_index); } } return nullptr; } void BlockSplitPreservingLinksCommand::prepare() { splits_.resize(times_.size()); for (int i=0;i<times_.size();i++) { const rational& time = times_.at(i); // FIXME: I realize this isn't going to work if the times aren't ordered. I'm lazy so rather // than writing in a sorting algorithm here, I'll just put an assert as a reminder // if this ever becomes an issue. 
Q_ASSERT(i == 0 || time > times_.at(i-1)); QVector<Block*> splits(blocks_.size()); for (int j=0;j<blocks_.size();j++) { Block* b = blocks_.at(j); if (b->in() < time && b->out() > time) { BlockSplitCommand* split_command = new BlockSplitCommand(b, time); split_command->redo_now(); splits.replace(j, split_command->new_block()); commands_.append(split_command); } else { splits.replace(j, nullptr); } } splits_.replace(i, splits); } // Now that we've determined all the splits, we can relink everything for (int i=0;i<blocks_.size();i++) { Block* a = blocks_.at(i); for (int j=0;j<blocks_.size();j++) { if (i == j) { continue; } Block* b = blocks_.at(j); if (Block::AreLinked(a, b)) { // These blocks are linked, ensure all the splits are linked too foreach (const QVector<Block*>& split_list, splits_) { NodeLinkCommand* blc = new NodeLinkCommand(split_list.at(i), split_list.at(j), true); blc->redo_now(); commands_.append(blc); } } } } } // // TrackSplitAtTimeCommand // void TrackSplitAtTimeCommand::prepare() { // Find Block that contains this time Block* b = track_->BlockContainingTime(point_); if (b) { command_ = new BlockSplitCommand(b, point_); } } }
4,975
C++
.cpp
139
31.611511
96
0.676814
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,804
timelineundopointer.cpp
olive-editor_olive/app/timeline/timelineundopointer.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "timelineundopointer.h" #include "node/block/gap/gap.h" #include "node/block/transition/transition.h" #include "node/project.h" #include "timelineundocommon.h" namespace olive { // // BlockTrimCommand // void BlockTrimCommand::redo() { if (doing_nothing_) { return; } // Determine how much time to invalidate TimeRange invalidate_range; if (mode_ == Timeline::kTrimIn) { invalidate_range = TimeRange(block_->in(), block_->in() + trim_diff_); block_->set_length_and_media_in(new_length_); } else { invalidate_range = TimeRange(block_->out(), block_->out() - trim_diff_); block_->set_length_and_media_out(new_length_); } if (needs_adjacent_) { if (we_created_adjacent_) { // Add adjacent and insert it adjacent_->setParent(track_->parent()); if (mode_ == Timeline::kTrimIn) { track_->InsertBlockBefore(adjacent_, block_); } else { track_->InsertBlockAfter(adjacent_, block_); } } else if (we_removed_adjacent_) { track_->RippleRemoveBlock(adjacent_); // It no longer inputs/outputs anything, remove it if (remove_block_from_graph_ && NodeCanBeRemoved(adjacent_)) { if (!deleted_adjacent_command_) { deleted_adjacent_command_ = CreateAndRunRemoveCommand(adjacent_); } else { deleted_adjacent_command_->redo_now(); } } } else { rational adjacent_length = adjacent_->length() + trim_diff_; if (mode_ == 
Timeline::kTrimIn) { adjacent_->set_length_and_media_out(adjacent_length); } else { adjacent_->set_length_and_media_in(adjacent_length); } } } if (dynamic_cast<TransitionBlock*>(block_)) { // Whole transition needs to be invalidated invalidate_range = block_->range(); } } void BlockTrimCommand::undo() { if (doing_nothing_) { return; } // Will be POSITIVE if trimming shorter and NEGATIVE if trimming longer if (needs_adjacent_) { if (we_created_adjacent_) { // Adjacent is ours, just delete it track_->RippleRemoveBlock(adjacent_); adjacent_->setParent(&memory_manager_); } else { if (we_removed_adjacent_) { if (deleted_adjacent_command_) { // We deleted adjacent, restore it now deleted_adjacent_command_->undo_now(); } if (mode_ == Timeline::kTrimIn) { track_->InsertBlockBefore(adjacent_, block_); } else { track_->InsertBlockAfter(adjacent_, block_); } } else { rational adjacent_length = adjacent_->length() - trim_diff_; if (mode_ == Timeline::kTrimIn) { adjacent_->set_length_and_media_out(adjacent_length); } else { adjacent_->set_length_and_media_in(adjacent_length); } } } } TimeRange invalidate_range; if (mode_ == Timeline::kTrimIn) { block_->set_length_and_media_in(old_length_); invalidate_range = TimeRange(block_->in(), block_->in() + trim_diff_); } else { block_->set_length_and_media_out(old_length_); invalidate_range = TimeRange(block_->out(), block_->out() - trim_diff_); } if (dynamic_cast<TransitionBlock*>(block_)) { // Whole transition needs to be invalidated invalidate_range = block_->range(); } } void BlockTrimCommand::prepare() { // Store old length old_length_ = block_->length(); // Determine if the length isn't changing, in which case we set a flag to do nothing if ((doing_nothing_ = (old_length_ == new_length_))) { return; } // Will be POSITIVE if trimming shorter and NEGATIVE if trimming longer trim_diff_ = old_length_ - new_length_; // Retrieve our adjacent block (or nullptr if none) if (mode_ == Timeline::kTrimIn) { adjacent_ = block_->previous(); } 
else { adjacent_ = block_->next(); } // Ignore when trimming the out with no adjacent, because the user must have trimmed the end // of the last block in the track, so we don't need to do anything elses needs_adjacent_ = (mode_ == Timeline::kTrimIn || adjacent_); if (needs_adjacent_) { // If we're trimming shorter, we need an adjacent, so check if we have a viable one. we_created_adjacent_ = (trim_diff_ > 0 && (!adjacent_ || (!dynamic_cast<GapBlock*>(adjacent_) && !trim_is_a_roll_edit_))); if (we_created_adjacent_) { // We shortened but don't have a viable adjacent to lengthen, so we create one adjacent_ = new GapBlock(); adjacent_->set_length_and_media_out(trim_diff_); } else { // Determine if we're removing the adjacent rational adjacent_length = adjacent_->length() + trim_diff_; we_removed_adjacent_ = adjacent_length.isNull(); } } } // // TrackSlideCommand // void TrackSlideCommand::redo() { // Make sure all movement blocks' old positions are invalidated TimeRange invalidate_range(blocks_.first()->in(), blocks_.last()->out()); // We will always have an in adjacent if there was a valid slide if (we_created_in_adjacent_) { // We created in adjacent, so all we have to do is insert it in_adjacent_->setParent(track_->parent()); track_->InsertBlockBefore(in_adjacent_, blocks_.first()); } else if (-movement_ == in_adjacent_->length()) { // Movement will remove in adjacent track_->RippleRemoveBlock(in_adjacent_); if (NodeCanBeRemoved(in_adjacent_)) { if (!in_adjacent_remove_command_) { in_adjacent_remove_command_ = CreateRemoveCommand(in_adjacent_); } in_adjacent_remove_command_->redo_now(); } we_removed_in_adjacent_ = true; } else { // Simply resize adjacent in_adjacent_->set_length_and_media_out(in_adjacent_->length() + movement_); } // We may not have an out adjacent if the slide was at the end of the track if (out_adjacent_) { if (we_created_out_adjacent_) { // We created out adjacent, so we just have to insert it out_adjacent_->setParent(track_->parent()); 
track_->InsertBlockAfter(out_adjacent_, blocks_.last()); } else if (movement_ == out_adjacent_->length()) { // Movement will remove out adjacent track_->RippleRemoveBlock(out_adjacent_); if (NodeCanBeRemoved(out_adjacent_)) { if (!out_adjacent_remove_command_) { out_adjacent_remove_command_ = CreateRemoveCommand(out_adjacent_); } out_adjacent_remove_command_->redo_now(); } we_removed_out_adjacent_ = true; } else { // Simply resize adjacent out_adjacent_->set_length_and_media_in(out_adjacent_->length() - movement_); } } // Make sure all movement blocks' new positions are invalidated invalidate_range.set_range(qMin(invalidate_range.in(), blocks_.first()->in()), qMax(invalidate_range.out(), blocks_.last()->out())); } void TrackSlideCommand::undo() { // Make sure all movement blocks' old positions are invalidated TimeRange invalidate_range(blocks_.first()->in(), blocks_.last()->out()); if (we_created_in_adjacent_) { // We created this, so we can remove it now track_->RippleRemoveBlock(in_adjacent_); in_adjacent_->setParent(&memory_manager_); } else if (we_removed_in_adjacent_) { if (in_adjacent_remove_command_) { // We removed this, so we can restore it now in_adjacent_remove_command_->undo_now(); } track_->InsertBlockBefore(in_adjacent_, blocks_.first()); } else { // Simply resize adjacent in_adjacent_->set_length_and_media_out(in_adjacent_->length() - movement_); } if (out_adjacent_) { if (we_created_out_adjacent_) { // We created this, so we can remove it now track_->RippleRemoveBlock(out_adjacent_); out_adjacent_->setParent(&memory_manager_); } else if (we_removed_out_adjacent_) { if (out_adjacent_remove_command_) { out_adjacent_remove_command_->undo_now(); } track_->InsertBlockAfter(out_adjacent_, blocks_.last()); } else { out_adjacent_->set_length_and_media_in(out_adjacent_->length() + movement_); } } // Make sure all movement blocks' new positions are invalidated invalidate_range.set_range(qMin(invalidate_range.in(), blocks_.first()->in()), 
qMax(invalidate_range.out(), blocks_.last()->out())); } void TrackSlideCommand::prepare() { if (!in_adjacent_) { in_adjacent_ = new GapBlock(); in_adjacent_->set_length_and_media_out(movement_); in_adjacent_->setParent(&memory_manager_); we_created_in_adjacent_ = true; } else { we_created_in_adjacent_ = false; } if (!out_adjacent_ && blocks_.last()->next()) { out_adjacent_ = new GapBlock(); out_adjacent_->set_length_and_media_out(-movement_); out_adjacent_->setParent(&memory_manager_); we_created_out_adjacent_ = true; } else { we_created_out_adjacent_ = false; } } // // TrackPlaceBlockCommand // TrackPlaceBlockCommand::~TrackPlaceBlockCommand() { delete ripple_remove_command_; qDeleteAll(add_track_commands_); } void TrackPlaceBlockCommand::redo() { // Determine if we need to add tracks if (track_index_ >= timeline_->GetTracks().size()) { if (add_track_commands_.isEmpty()) { // First redo, create tracks now add_track_commands_.resize(track_index_ - timeline_->GetTracks().size() + 1); for (int i=0; i<add_track_commands_.size(); i++) { add_track_commands_[i] = new TimelineAddTrackCommand(timeline_); } } for (int i=0; i<add_track_commands_.size(); i++) { add_track_commands_.at(i)->redo_now(); } } Track* track = timeline_->GetTrackAt(track_index_); bool append = (in_ >= track->track_length()); // Check if the placement location is past the end of the timeline if (append) { if (in_ > track->track_length()) { // If so, insert a gap here if (!gap_) { gap_ = new GapBlock(); gap_->set_length_and_media_out(in_ - track->track_length()); } gap_->setParent(track->parent()); track->AppendBlock(gap_); } track->AppendBlock(insert_); } else { // Place the Block at this point if (!ripple_remove_command_) { ripple_remove_command_ = new TrackRippleRemoveAreaCommand(track, TimeRange(in_, in_ + insert_->length())); ripple_remove_command_->SetAllowSplittingGaps(true); } ripple_remove_command_->redo_now(); track->InsertBlockAfter(insert_, ripple_remove_command_->GetInsertionIndex()); } } 
void TrackPlaceBlockCommand::undo() { Track* t = timeline_->GetTrackAt(track_index_); TimeRange insert_range(insert_->in(), insert_->out()); // Firstly, remove our insert t->RippleRemoveBlock(insert_); if (ripple_remove_command_) { // If we ripple removed, just undo that ripple_remove_command_->undo_now(); } else if (gap_) { t->RippleRemoveBlock(gap_); gap_->setParent(&memory_manager_); } // Remove tracks if we added them for (int i=add_track_commands_.size()-1; i>=0; i--) { add_track_commands_.at(i)->undo_now(); } } }
11,764
C++
.cpp
320
31.74375
126
0.654684
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,805
timelinecoordinate.cpp
olive-editor_olive/app/timeline/timelinecoordinate.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "timelinecoordinate.h" namespace olive { TimelineCoordinate::TimelineCoordinate() : track_(Track::kNone, 0) { } TimelineCoordinate::TimelineCoordinate(const rational &frame, const Track::Reference &track) : frame_(frame), track_(track) { } TimelineCoordinate::TimelineCoordinate(const rational &frame, const Track::Type &track_type, const int &track_index) : frame_(frame), track_(track_type, track_index) { } const rational &TimelineCoordinate::GetFrame() const { return frame_; } const Track::Reference &TimelineCoordinate::GetTrack() const { return track_; } void TimelineCoordinate::SetFrame(const rational &frame) { frame_ = frame; } void TimelineCoordinate::SetTrack(const Track::Reference &track) { track_ = track; } }
1,474
C++
.cpp
47
29.148936
118
0.772663
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,806
timelineundotrack.cpp
olive-editor_olive/app/timeline/timelineundotrack.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "timelineundotrack.h" namespace olive { }
765
C++
.cpp
17
42.117647
71
0.774324
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,807
timelineundogeneral.cpp
olive-editor_olive/app/timeline/timelineundogeneral.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "timelineundogeneral.h" #include "node/block/clip/clip.h" #include "node/block/transition/transition.h" #include "node/factory.h" #include "node/math/math/math.h" #include "node/math/merge/merge.h" #include "timelineundocommon.h" #include "timelineundotrack.h" namespace olive { // // BlockResizeCommand // void BlockResizeCommand::redo() { old_length_ = block_->length(); block_->set_length_and_media_out(new_length_); } void BlockResizeCommand::undo() { block_->set_length_and_media_out(old_length_); } // // BlockResizeWithMediaInCommand // void BlockResizeWithMediaInCommand::redo() { old_length_ = block_->length(); block_->set_length_and_media_in(new_length_); } void BlockResizeWithMediaInCommand::undo() { block_->set_length_and_media_in(old_length_); } // // BlockSetMediaInCommand // void BlockSetMediaInCommand::redo() { old_media_in_ = block_->media_in(); block_->set_media_in(new_media_in_); } void BlockSetMediaInCommand::undo() { block_->set_media_in(old_media_in_); } // // TimelineAddTrackCommand // TimelineAddTrackCommand::TimelineAddTrackCommand(TrackList *timeline, bool automerge_tracks) : timeline_(timeline), merge_(nullptr), position_command_(nullptr) { // Create new track track_ = new Track(); track_->setParent(&memory_manager_); // Determine what input to connect it to QString 
relevant_input; if (timeline_->type() == Track::kVideo) { relevant_input = Sequence::kTextureInput; } else if (timeline_->type() == Track::kAudio) { relevant_input = Sequence::kSamplesInput; } // If we have an input to connect to, set it as our `direct` connection if (!relevant_input.isEmpty()) { direct_ = NodeInput(timeline_->parent(), relevant_input); // If we're automerging and something is already connected, determine if/how to merge it if (automerge_tracks && direct_.IsConnected()) { if (timeline_->type() == Track::kVideo) { // Use merge for video merge_ = new MergeNode(); base_ = NodeInput(merge_, MergeNode::kBaseIn); blend_ = NodeInput(merge_, MergeNode::kBlendIn); } else if (timeline_->type() == Track::kAudio) { // Use math (add) for audio merge_ = new MathNode(); base_ = NodeInput(merge_, MathNode::kParamAIn); blend_ = NodeInput(merge_, MathNode::kParamBIn); } if (merge_) { // If we got created a merge node, ensure it's parented merge_->setParent(&memory_manager_); } } } } void TimelineAddTrackCommand::redo() { // Get sequence Sequence* sequence = timeline_->parent(); // Add track to sequence track_->setParent(timeline_->GetParentGraph()); if (timeline_->GetTrackCount() > 0) { track_->SetTrackHeight(timeline_->GetTrackAt(timeline_->GetTrackCount()-1)->GetTrackHeight()); } timeline_->ArrayAppend(); Node::ConnectEdge(track_, timeline_->track_input(timeline_->ArraySize() - 1)); qreal position_factor = 0.5; if (timeline_->type() == Track::kVideo) { position_factor = -position_factor; } bool create_pos_command = (!position_command_ && (timeline_->type() == Track::kVideo || timeline_->type() == Track::kAudio)); if (create_pos_command) { position_command_ = new MultiUndoCommand(); } // Add merge if applicable if (merge_) { // Determine what was previously connected Node *previous_connection = direct_.GetConnectedOutput(); // Add merge to graph merge_->setParent(timeline_->GetParentGraph()); // Connect merge between what used to be here 
Node::DisconnectEdge(previous_connection, direct_); Node::ConnectEdge(merge_, direct_); Node::ConnectEdge(previous_connection, base_); Node::ConnectEdge(track_, blend_); if (create_pos_command) { position_command_->add_child(new NodeSetPositionCommand(track_, sequence, sequence->GetNodePositionInContext(sequence) + QPointF(-1, -position_factor))); position_command_->add_child(new NodeSetPositionCommand(merge_, sequence, sequence->GetNodePositionInContext(sequence))); position_command_->add_child(new NodeSetPositionAndDependenciesRecursivelyCommand(merge_, sequence, sequence->GetNodePositionInContext(sequence) + QPointF(-1, position_factor * timeline_->GetTrackCount()))); } } else if (direct_.IsValid() && !direct_.IsConnected()) { // If no merge, we have a direct connection, and nothing else is connected, connect this Node::ConnectEdge(track_, direct_); if (create_pos_command) { // Just position directly next to the context node position_command_->add_child(new NodeSetPositionCommand(track_, sequence, sequence->GetNodePositionInContext(sequence) + QPointF(-1, position_factor))); } } // Run position command if we created one if (position_command_) { position_command_->redo_now(); } } void TimelineAddTrackCommand::undo() { if (position_command_) { position_command_->undo_now(); } // Remove merge if applicable if (merge_) { Node *previous_connection = base_.GetConnectedOutput(); Node::DisconnectEdge(track_, blend_); Node::DisconnectEdge(previous_connection, base_); Node::DisconnectEdge(merge_, direct_); Node::ConnectEdge(previous_connection, direct_); merge_->setParent(&memory_manager_); } else if (direct_.IsValid() && direct_.GetConnectedOutput() == track_) { Node::DisconnectEdge(track_, direct_); } // Remove track Node::DisconnectEdge(track_, timeline_->track_input(timeline_->ArraySize() - 1)); timeline_->ArrayRemoveLast(); track_->setParent(&memory_manager_); } // // TransitionRemoveCommand // void TransitionRemoveCommand::redo() { track_ = block_->track(); 
out_block_ = block_->connected_out_block(); in_block_ = block_->connected_in_block(); Q_ASSERT(out_block_ || in_block_); TimeRange invalidate_range(block_->in(), block_->out()); if (in_block_) { in_block_->set_length_and_media_in(in_block_->length() + block_->in_offset()); } if (out_block_) { out_block_->set_length_and_media_out(out_block_->length() + block_->out_offset()); } if (in_block_) { Node::DisconnectEdge(in_block_, NodeInput(block_, TransitionBlock::kInBlockInput)); } if (out_block_) { Node::DisconnectEdge(out_block_, NodeInput(block_, TransitionBlock::kOutBlockInput)); } track_->RippleRemoveBlock(block_); if (remove_from_graph_) { if (!remove_command_) { remove_command_ = CreateRemoveCommand(block_); } remove_command_->redo_now(); } } void TransitionRemoveCommand::undo() { if (remove_from_graph_) { remove_command_->undo_now(); } if (in_block_) { track_->InsertBlockBefore(block_, in_block_); } else { track_->InsertBlockAfter(block_, out_block_); } if (in_block_) { Node::ConnectEdge(in_block_, NodeInput(block_, TransitionBlock::kInBlockInput)); } if (out_block_) { Node::ConnectEdge(out_block_, NodeInput(block_, TransitionBlock::kOutBlockInput)); } // These if statements must be separated because in_offset and out_offset report different things // if only one block is connected vs two. So we have to connect the blocks first before we have // an accurate return value from these offset functions. 
if (in_block_) { in_block_->set_length_and_media_in(in_block_->length() - block_->in_offset()); } if (out_block_) { out_block_->set_length_and_media_out(out_block_->length() - block_->out_offset()); } } // // TrackListInsertGaps // void TrackListInsertGaps::prepare() { // Determine if all tracks will be affected, which will allow us to make some optimizations foreach (Track* track, track_list_->GetTracks()) { if (track->IsLocked()) { continue; } working_tracks_.append(track); } QVector<Block*> blocks_to_split; QVector<Block*> blocks_to_append_gap_to; QVector<Track*> tracks_to_append_gap_to; for (Track* track : qAsConst(working_tracks_)) { for (Block* b : track->Blocks()) { if (dynamic_cast<GapBlock*>(b) && b->in() <= point_ && b->out() >= point_) { // Found a gap at the location gaps_to_extend_.append(b); break; } else if (dynamic_cast<ClipBlock*>(b) && b->out() >= point_) { bool append_gap = true; if (b->in() == point_) { // The only reason we should be here is if this block is at the start of the track, // in which case no split needs to occur b = nullptr; } else if (b->out() > point_) { // Block must be split as well as having a gap appended to it blocks_to_split.append(b); } else if (!b->next()) { // At the end of a track, no gap needs to be added at all append_gap = false; } if (append_gap) { tracks_to_append_gap_to.append(track); blocks_to_append_gap_to.append(b); } break; } } } if (!blocks_to_split.isEmpty()) { split_command_ = new BlockSplitPreservingLinksCommand(blocks_to_split, {point_}); } for (int i=0; i<blocks_to_append_gap_to.size(); i++) { GapBlock* gap = new GapBlock(); gap->set_length_and_media_out(length_); gap->setParent(&memory_manager_); gaps_added_.append({gap, blocks_to_append_gap_to.at(i), tracks_to_append_gap_to.at(i)}); } } void TrackListInsertGaps::redo() { foreach (Block* gap, gaps_to_extend_) { gap->set_length_and_media_out(gap->length() + length_); } if (split_command_) { split_command_->redo_now(); } foreach (auto add_gap, 
gaps_added_) { add_gap.gap->setParent(add_gap.track->parent()); add_gap.track->InsertBlockAfter(add_gap.gap, add_gap.before); } } void TrackListInsertGaps::undo() { // Remove added gaps foreach (auto add_gap, gaps_added_) { add_gap.gap->track()->RippleRemoveBlock(add_gap.gap); add_gap.gap->setParent(&memory_manager_); } // Un-split blocks if (split_command_) { split_command_->undo_now(); } // Restore original length of gaps foreach (Block* gap, gaps_to_extend_) { gap->set_length_and_media_out(gap->length() - length_); } } // // TrackReplaceBlockWithGapCommand // void TrackReplaceBlockWithGapCommand::redo() { // Determine if this block is connected to any transitions that should also be removed by this operation if (handle_transitions_ && transition_remove_commands_.isEmpty()) { CreateRemoveTransitionCommandIfNecessary(false); CreateRemoveTransitionCommandIfNecessary(true); } for (auto it=transition_remove_commands_.cbegin(); it!=transition_remove_commands_.cend(); it++) { (*it)->redo_now(); } if (block_->next()) { // Invalidate the range inhabited by this block TimeRange invalidate_range(block_->in(), block_->out()); // Block has a next, which means it's NOT at the end of the sequence and thus requires a gap rational new_gap_length = block_->length(); Block* previous = block_->previous(); Block* next = block_->next(); bool previous_is_a_gap = dynamic_cast<GapBlock*>(previous); bool next_is_a_gap = dynamic_cast<GapBlock*>(next); if (previous_is_a_gap && next_is_a_gap) { // Clip is preceded and followed by a gap, so we'll merge the two existing_gap_ = static_cast<GapBlock*>(previous); existing_merged_gap_ = static_cast<GapBlock*>(next); new_gap_length += existing_merged_gap_->length(); track_->RippleRemoveBlock(existing_merged_gap_); existing_merged_gap_->setParent(&memory_manager_); } else if (previous_is_a_gap) { // Extend this gap to fill space left by block existing_gap_ = static_cast<GapBlock*>(previous); } else if (next_is_a_gap) { // Extend this gap to fill 
space left by block existing_gap_ = static_cast<GapBlock*>(next); } if (existing_gap_) { // Extend an existing gap new_gap_length += existing_gap_->length(); existing_gap_->set_length_and_media_out(new_gap_length); track_->RippleRemoveBlock(block_); existing_gap_precedes_ = (existing_gap_ == previous); } else { // No gap exists to fill this space, create a new one and swap it in if (!our_gap_) { our_gap_ = new GapBlock(); our_gap_->set_length_and_media_out(new_gap_length); } our_gap_->setParent(track_->parent()); track_->ReplaceBlock(block_, our_gap_); } } else { // Block is at the end of the track, simply remove it Block* preceding = block_->previous(); track_->RippleRemoveBlock(block_); // Determine if it's preceded by a gap, and remove that gap if so if (dynamic_cast<GapBlock*>(preceding)) { track_->RippleRemoveBlock(preceding); preceding->setParent(&memory_manager_); existing_merged_gap_ = static_cast<GapBlock*>(preceding); } } } void TrackReplaceBlockWithGapCommand::undo() { if (our_gap_ || existing_gap_) { if (our_gap_) { // We made this gap, simply swap our gap back track_->ReplaceBlock(our_gap_, block_); our_gap_->setParent(&memory_manager_); } else { // If we're here, assume that we extended an existing gap rational original_gap_length = existing_gap_->length() - block_->length(); // If we merged two gaps together, restore the second one now if (existing_merged_gap_) { original_gap_length -= existing_merged_gap_->length(); existing_merged_gap_->setParent(track_->parent()); track_->InsertBlockAfter(existing_merged_gap_, existing_gap_); existing_merged_gap_ = nullptr; } // Restore original block if (existing_gap_precedes_) { track_->InsertBlockAfter(block_, existing_gap_); } else { track_->InsertBlockBefore(block_, existing_gap_); } // Restore gap's original length existing_gap_->set_length_and_media_out(original_gap_length); existing_gap_ = nullptr; } } else { // Our gap and existing gap were both null, our block must have been at the end and thus // 
required no gap extension/replacement // However, we may have removed an unnecessary gap that preceded it if (existing_merged_gap_) { existing_merged_gap_->setParent(track_->parent()); track_->AppendBlock(existing_merged_gap_); existing_merged_gap_ = nullptr; } // Restore block track_->AppendBlock(block_); } for (auto it=transition_remove_commands_.crbegin(); it!=transition_remove_commands_.crend(); it++) { (*it)->undo_now(); } } void TrackReplaceBlockWithGapCommand::CreateRemoveTransitionCommandIfNecessary(bool next) { Block* relevant_block; if (next) { relevant_block = block_->next(); } else { relevant_block = block_->previous(); } TransitionBlock* transition_cast_test = dynamic_cast<TransitionBlock*>(relevant_block); if (transition_cast_test) { if ((next && transition_cast_test->connected_out_block() == block_ && !transition_cast_test->connected_in_block()) || (!next && transition_cast_test->connected_in_block() == block_ && !transition_cast_test->connected_out_block())) { TransitionRemoveCommand* command = new TransitionRemoveCommand(transition_cast_test, true); transition_remove_commands_.append(command); } } } void TimelineRemoveTrackCommand::prepare() { list_ = track_->sequence()->track_list(track_->type()); index_ = list_->GetArrayIndexFromCacheIndex(track_->Index()); remove_command_ = new NodeRemoveWithExclusiveDependenciesAndDisconnect(track_); } void TimelineRemoveTrackCommand::redo() { remove_command_->redo_now(); list_->parent()->InputArrayRemove(list_->track_input(), index_); } void TimelineRemoveTrackCommand::undo() { list_->parent()->InputArrayInsert(list_->track_input(), index_); remove_command_->undo_now(); } void TimelineAddDefaultTransitionCommand::prepare() { for (auto it=clips_.cbegin(); it!=clips_.cend(); it++) { ClipBlock *c = *it; // Handle in transition if (clips_.contains(static_cast<ClipBlock*>(c->previous()))) { // Do nothing, assume this will be handled by a dual transition from that clip } else if 
(dynamic_cast<GapBlock*>(c->previous()) || !c->previous()) { // Create in transition AddTransition(c, kIn); } // Handle out transition if (clips_.contains(static_cast<ClipBlock*>(c->next()))) { AddTransition(c, kOutDual); } else if (dynamic_cast<GapBlock*>(c->next()) || !c->next()) { // Create out transition AddTransition(c, kOut); } } } void TimelineAddDefaultTransitionCommand::AddTransition(ClipBlock *c, CreateTransitionMode mode) { if (Track *t = c->track()) { Node *p = nullptr; if (t->type() == Track::kVideo) { p = NodeFactory::CreateFromID(OLIVE_CONFIG("DefaultVideoTransition").toString()); } else if (t->type() == Track::kAudio) { p = NodeFactory::CreateFromID(OLIVE_CONFIG("DefaultAudioTransition").toString()); } rational transition_length = OLIVE_CONFIG("DefaultTransitionLength").value<rational>(); // Resize original clip switch (mode) { case kIn: ValidateTransitionLength(c, transition_length); if (transition_length > 0) { AdjustClipLength(c, transition_length, false); } break; case kOut: ValidateTransitionLength(c, transition_length); if (transition_length > 0) { AdjustClipLength(c, transition_length, true); } break; case kOutDual: { rational half_length = transition_length / 2; ValidateTransitionLength(static_cast<ClipBlock*>(c->next()), half_length); ValidateTransitionLength(c, half_length); transition_length = half_length * 2; if (transition_length > 0) { AdjustClipLength(static_cast<ClipBlock*>(c->next()), half_length, false); AdjustClipLength(c, half_length, true); } break; } } if (transition_length > 0) { if (TransitionBlock *transition = dynamic_cast<TransitionBlock*>(p)) { transition->set_length_and_media_out(transition_length); // Add transition commands_.append(new NodeAddCommand(c->parent(), transition)); // Insert block Block *insert_after = (mode == kIn) ? 
c->previous() : c; commands_.append(new TrackInsertBlockAfterCommand(c->track(), transition, insert_after)); // Connect switch (mode) { case kIn: commands_.append(new NodeEdgeAddCommand(c, NodeInput(transition, TransitionBlock::kInBlockInput))); break; case kOutDual: commands_.append(new NodeEdgeAddCommand(c->next(), NodeInput(transition, TransitionBlock::kInBlockInput))); /* fall through */ case kOut: commands_.append(new NodeEdgeAddCommand(c, NodeInput(transition, TransitionBlock::kOutBlockInput))); break; } } } } } void TimelineAddDefaultTransitionCommand::AdjustClipLength(ClipBlock *c, const rational &transition_length, bool out) { rational cur_len = lengths_.value(c, c->length()); rational new_len = cur_len - transition_length; if (out) { commands_.append(new BlockResizeCommand(c, new_len)); } else { commands_.append(new BlockResizeWithMediaInCommand(c, new_len)); } lengths_.insert(c, new_len); } void TimelineAddDefaultTransitionCommand::ValidateTransitionLength(ClipBlock *c, rational &transition_length) { rational cur_len = lengths_.value(c, c->length()); rational half_cur_len = cur_len/2; if (transition_length >= half_cur_len) { transition_length = half_cur_len - timebase_; } } }
20,332
C++
.cpp
546
32.587912
213
0.675617
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,808
timelinemarker.cpp
olive-editor_olive/app/timeline/timelinemarker.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "timelinemarker.h" #include <QApplication> #include "common/qtutils.h" #include "common/xmlutils.h" #include "config/config.h" #include "core.h" #include "ui/colorcoding.h" namespace olive { TimelineMarker::TimelineMarker(QObject *parent) : color_(OLIVE_CONFIG("MarkerColor").toInt()) { setParent(parent); } TimelineMarker::TimelineMarker(int color, const TimeRange &time, const QString &name, QObject *parent) : time_(time), name_(name), color_(color) { setParent(parent); } void TimelineMarker::set_time(const TimeRange &time) { time_ = time; emit TimeChanged(time_); } void TimelineMarker::set_time(const rational &time) { set_time(TimeRange(time, time + time_.length())); } bool TimelineMarker::has_sibling_at_time(const rational &t) const { TimelineMarker *m = static_cast<TimelineMarkerList*>(parent())->GetMarkerAtTime(t); return m && m != this; } void TimelineMarker::set_name(const QString &name) { name_ = name; emit NameChanged(name_); } void TimelineMarker::set_color(int c) { color_ = c; emit ColorChanged(color_); } int TimelineMarker::GetMarkerHeight(const QFontMetrics &fm) { return fm.height(); } QRect TimelineMarker::Draw(QPainter *p, const QPoint &pt, int max_right, double scale, bool selected) { QFontMetrics fm = p->fontMetrics(); int marker_height = GetMarkerHeight(fm); int 
marker_width = QtUtils::QFontMetricsWidth(fm, QStringLiteral("H")); int half_width = marker_width / 2; QColor c = QtUtils::toQColor(ColorCoding::GetColor(color())); if (selected) { p->setPen(Qt::white); p->setBrush(c.lighter()); } else { p->setPen(Qt::black); p->setBrush(c); } int top = pt.y() - marker_height; QTextOption op(Qt::AlignLeft | Qt::AlignVCenter); op.setWrapMode(QTextOption::NoWrap); if (time_.out() != time_.in()) { QRect marker_rect(pt.x(), top, time_.length().toDouble() * scale, marker_height); p->drawRect(marker_rect); if (!name_.isEmpty()) { p->setPen(ColorCoding::GetUISelectorColor(ColorCoding::GetColor(color_))); p->drawText(marker_rect.adjusted(marker_width/4, 0, 0, 0), name_, op); } return marker_rect; } else { int half_marker_height = marker_height / 3; int left = pt.x() - half_width; int right = pt.x() + half_width; int center_y = pt.y() - half_marker_height; QPoint points[] = { pt, QPoint(left, center_y), QPoint(left, top), QPoint(right, top), QPoint(right, center_y), pt, }; p->setRenderHint(QPainter::Antialiasing); p->drawPolygon(points, 6); if (!name_.isEmpty() && max_right != -1) { QRect text_rect(right, top, max_right - right, marker_height); int padding = QtUtils::QFontMetricsWidth(p->fontMetrics(), QStringLiteral(" ")); text_rect.adjust(padding, 0, - padding - half_width, 0); p->setPen(qApp->palette().text().color()); p->drawText(text_rect, name_, op); } return QRect(left, top, marker_width, marker_height); } } bool TimelineMarker::load(QXmlStreamReader *reader) { rational in, out; XMLAttributeLoop(reader, attr) { if (attr.name() == QStringLiteral("name")) { this->set_name(attr.value().toString()); } else if (attr.name() == QStringLiteral("in")) { in = rational::fromString(attr.value().toString().toStdString()); } else if (attr.name() == QStringLiteral("out")) { out = rational::fromString(attr.value().toString().toStdString()); } else if (attr.name() == QStringLiteral("color")) { this->set_color(attr.value().toInt()); } } 
this->set_time(TimeRange(in, out)); // This element has no inner text, so just skip it reader->skipCurrentElement(); return true; } void TimelineMarker::save(QXmlStreamWriter *writer) const { writer->writeAttribute(QStringLiteral("name"), this->name()); writer->writeAttribute(QStringLiteral("in"), QString::fromStdString(this->time().in().toString())); writer->writeAttribute(QStringLiteral("out"), QString::fromStdString(this->time().out().toString())); writer->writeAttribute(QStringLiteral("color"), QString::number(this->color())); } bool TimelineMarkerList::load(QXmlStreamReader *reader) { while (XMLReadNextStartElement(reader)) { if (reader->name() == QStringLiteral("marker")) { TimelineMarker *marker = new TimelineMarker(this); if (!marker->load(reader)) { return false; } } else { reader->skipCurrentElement(); } } return true; } void TimelineMarkerList::save(QXmlStreamWriter *writer) const { for (auto it=this->cbegin(); it!=this->cend(); it++) { TimelineMarker* marker = *it; writer->writeStartElement(QStringLiteral("marker")); marker->save(writer); writer->writeEndElement(); // marker } } void TimelineMarkerList::childEvent(QChildEvent *e) { QObject::childEvent(e); if (TimelineMarker *marker = dynamic_cast<TimelineMarker *>(e->child())) { if (e->type() == QChildEvent::ChildAdded) { connect(marker, &TimelineMarker::TimeChanged, this, &TimelineMarkerList::HandleMarkerTimeChange); connect(marker, &TimelineMarker::TimeChanged, this, &TimelineMarkerList::HandleMarkerModification); connect(marker, &TimelineMarker::NameChanged, this, &TimelineMarkerList::HandleMarkerModification); connect(marker, &TimelineMarker::ColorChanged, this, &TimelineMarkerList::HandleMarkerModification); InsertIntoList(marker); emit MarkerAdded(marker); } else if (e->type() == QChildEvent::ChildRemoved) { RemoveFromList(marker); disconnect(marker, &TimelineMarker::TimeChanged, this, &TimelineMarkerList::HandleMarkerTimeChange); disconnect(marker, &TimelineMarker::TimeChanged, this, 
&TimelineMarkerList::HandleMarkerModification); disconnect(marker, &TimelineMarker::NameChanged, this, &TimelineMarkerList::HandleMarkerModification); disconnect(marker, &TimelineMarker::ColorChanged, this, &TimelineMarkerList::HandleMarkerModification); emit MarkerRemoved(marker); } } } void TimelineMarkerList::InsertIntoList(TimelineMarker *marker) { // Insertion sort by time to allow some loop optimizations bool found = false; for (auto it=markers_.begin(); it!=markers_.end(); it++) { TimelineMarker *m = *it; Q_ASSERT(m->time().in() != marker->time().in()); if (m->time().in() > marker->time().in()) { markers_.insert(it, marker); found = true; break; } } if (!found) { markers_.push_back(marker); } } bool TimelineMarkerList::RemoveFromList(TimelineMarker *marker) { auto it = std::find(markers_.begin(), markers_.end(), marker); if (it != markers_.end()) { markers_.erase(it); return true; } return false; } void TimelineMarkerList::HandleMarkerModification() { emit MarkerModified(static_cast<TimelineMarker*>(sender())); } void TimelineMarkerList::HandleMarkerTimeChange() { TimelineMarker *m = static_cast<TimelineMarker*>(sender()); auto it = std::find(markers_.begin(), markers_.end(), m); if (it != markers_.end()) { markers_.erase(it); InsertIntoList(m); } } MarkerAddCommand::MarkerAddCommand(TimelineMarkerList *marker_list, const TimeRange &range, const QString &name, int color) : MarkerAddCommand(marker_list, new TimelineMarker(color, range, name, &memory_manager_)) { } MarkerAddCommand::MarkerAddCommand(TimelineMarkerList *marker_list, TimelineMarker *marker) : marker_list_(marker_list), added_marker_(marker) { added_marker_->setParent(&memory_manager_); } Project* MarkerAddCommand::GetRelevantProject() const { return Project::GetProjectFromObject(marker_list_); } void MarkerAddCommand::redo() { added_marker_->setParent(marker_list_); } void MarkerAddCommand::undo() { added_marker_->setParent(&memory_manager_); } 
MarkerRemoveCommand::MarkerRemoveCommand(TimelineMarker *marker) : marker_(marker) { } Project* MarkerRemoveCommand::GetRelevantProject() const { return Project::GetProjectFromObject(marker_); } void MarkerRemoveCommand::redo() { marker_list_ = marker_->parent(); marker_->setParent(&memory_manager_); } void MarkerRemoveCommand::undo() { marker_->setParent(marker_list_); } MarkerChangeColorCommand::MarkerChangeColorCommand(TimelineMarker *marker, int new_color) : marker_(marker), new_color_(new_color) { } Project* MarkerChangeColorCommand::GetRelevantProject() const { return Project::GetProjectFromObject(marker_); } void MarkerChangeColorCommand::redo() { old_color_ = marker_->color(); marker_->set_color(new_color_); } void MarkerChangeColorCommand::undo() { marker_->set_color(old_color_); } MarkerChangeNameCommand::MarkerChangeNameCommand(TimelineMarker *marker, QString new_name) : marker_(marker), new_name_(new_name) { } Project* MarkerChangeNameCommand::GetRelevantProject() const { return Project::GetProjectFromObject(marker_); } void MarkerChangeNameCommand::redo() { old_name_ = marker_->name(); marker_->set_name(new_name_); } void MarkerChangeNameCommand::undo() { marker_->set_name(old_name_); } MarkerChangeTimeCommand::MarkerChangeTimeCommand(TimelineMarker* marker, const TimeRange &time, const TimeRange &old_time) : marker_(marker), old_time_(old_time), new_time_(time) { } Project* MarkerChangeTimeCommand::GetRelevantProject() const { return Project::GetProjectFromObject(marker_); } void MarkerChangeTimeCommand::redo() { marker_->set_time(new_time_); } void MarkerChangeTimeCommand::undo() { marker_->set_time(old_time_); } }
10,280
C++
.cpp
315
29.374603
125
0.718336
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,809
timelineundoworkarea.cpp
olive-editor_olive/app/timeline/timelineundoworkarea.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "timelineundoworkarea.h" namespace olive { }
768
C++
.cpp
17
42.294118
71
0.775236
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,810
taskmanager.cpp
olive-editor_olive/app/task/taskmanager.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "taskmanager.h" #include <QDebug> #include <QThread> namespace olive { TaskManager* TaskManager::instance_ = nullptr; TaskManager::TaskManager() { thread_pool_.setMaxThreadCount(1); } TaskManager::~TaskManager() { thread_pool_.clear(); foreach (Task* t, tasks_) { t->Cancel(); } thread_pool_.waitForDone(); foreach (Task* t, tasks_) { t->deleteLater(); } } void TaskManager::CreateInstance() { instance_ = new TaskManager(); } void TaskManager::DestroyInstance() { delete instance_; instance_ = nullptr; } TaskManager *TaskManager::instance() { return instance_; } int TaskManager::GetTaskCount() const { return tasks_.size(); } Task *TaskManager::GetFirstTask() const { return tasks_.begin().value(); } void TaskManager::CancelTaskAndWait(Task* t) { t->Cancel(); QFutureWatcher<bool>* w = tasks_.key(t); if (w) { w->waitForFinished(); } } void TaskManager::AddTask(Task* t) { // Create a watcher for signalling QFutureWatcher<bool>* watcher = new QFutureWatcher<bool>(); connect(watcher, &QFutureWatcher<bool>::finished, this, &TaskManager::TaskFinished); // Add the Task to the queue tasks_.insert(watcher, t); // Run task concurrently watcher->setFuture( #if QT_VERSION_MAJOR >= 6 QtConcurrent::run(&thread_pool_, &Task::Start, t) #else QtConcurrent::run(&thread_pool_, t, &Task::Start) #endif ); // Emit 
signal that a Task was added emit TaskAdded(t); emit TaskListChanged(); } void TaskManager::CancelTask(Task *t) { if (std::find(failed_tasks_.begin(), failed_tasks_.end(), t) != failed_tasks_.end()) { failed_tasks_.remove(t); emit TaskRemoved(t); t->deleteLater(); } else { t->Cancel(); } } void TaskManager::TaskFinished() { QFutureWatcher<bool>* watcher = static_cast<QFutureWatcher<bool>*>(sender()); Task* t = tasks_.value(watcher); tasks_.remove(watcher); if (watcher->result()) { // Task completed successfully emit TaskRemoved(t); t->deleteLater(); } else { // Task failed, keep it so the user can see the error message emit TaskFailed(t); failed_tasks_.push_back(t); } watcher->deleteLater(); emit TaskListChanged(); } }
2,937
C++
.cpp
110
23.8
88
0.712241
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,811
conform.cpp
olive-editor_olive/app/task/conform/conform.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "conform.h" namespace olive { ConformTask::ConformTask(const QString &decoder_id, const Decoder::CodecStream &stream, const AudioParams& params, const QVector<QString> &output_filenames) : decoder_id_(decoder_id), stream_(stream), params_(params), output_filenames_(output_filenames) { SetTitle(tr("Conforming Audio %1:%2").arg(stream.filename(), QString::number(stream.stream()))); } bool ConformTask::Run() { DecoderPtr decoder = Decoder::CreateFromID(decoder_id_); if (!decoder->Open(stream_)) { SetError(tr("Failed to open decoder for audio conform")); return false; } connect(decoder.get(), &Decoder::IndexProgress, this, &ConformTask::ProgressChanged); qDebug() << "Starting conform of" << stream_.filename() << stream_.stream(); bool ret = decoder->ConformAudio(output_filenames_, params_, GetCancelAtom()); decoder->Close(); return ret; } }
1,614
C++
.cpp
38
39.526316
158
0.747436
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,812
customcachetask.cpp
olive-editor_olive/app/task/customcache/customcachetask.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "customcachetask.h" namespace olive { CustomCacheTask::CustomCacheTask(const QString &sequence_name) : cancelled_through_finish_(false) { SetTitle(tr("Caching custom range for \"%1\"").arg(sequence_name)); } void CustomCacheTask::Finish() { mutex_.lock(); cancelled_through_finish_ = true; Cancel(); mutex_.unlock(); } bool CustomCacheTask::Run() { mutex_.lock(); while (!IsCancelled()) { wait_cond_.wait(&mutex_); } mutex_.unlock(); return true; } void CustomCacheTask::CancelEvent() { if (!cancelled_through_finish_) { emit Cancelled(); } wait_cond_.wakeOne(); } }
1,335
C++
.cpp
45
26.955556
71
0.74077
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,813
saveotio.cpp
olive-editor_olive/app/task/project/saveotio/saveotio.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "saveotio.h" #ifdef USE_OTIO #include <opentimelineio/clip.h> #include <opentimelineio/externalReference.h> #include <opentimelineio/gap.h> #include <opentimelineio/serializableCollection.h> #include <opentimelineio/serializableObject.h> #include <opentimelineio/transition.h> #include "node/block/clip/clip.h" #include "node/block/gap/gap.h" #include "node/block/transition/transition.h" #include "node/project/footage/footage.h" namespace olive { SaveOTIOTask::SaveOTIOTask(Project *project) : project_(project) { SetTitle(tr("Exporting project to OpenTimelineIO")); } bool SaveOTIOTask::Run() { QVector<Sequence*> sequences = project_->root()->ListChildrenOfType<Sequence>(); if (sequences.isEmpty()) { SetError(tr("Project contains no sequences to export.")); return false; } std::vector<OTIO::SerializableObject*> serialized; foreach (Sequence* seq, sequences) { auto otio_timeline = SerializeTimeline(seq); if (otio_timeline) { // Append to list serialized.push_back(otio_timeline); } else { // Delete all existing timelines foreach (auto s, serialized) { s->possibly_delete(); } // Error out of function SetError(tr("Failed to serialize sequence \"%1\"").arg(seq->GetLabel())); return false; } } OTIO::ErrorStatus es; if (serialized.size() == 1) { // Serialize timeline on its own auto t = 
serialized.front(); t->to_json_file(project_->filename().toStdString(), &es); t->possibly_delete(); } else { // Serialize all into a SerializableCollection auto collection = new OTIO::SerializableCollection("Sequences", serialized); collection->to_json_file(project_->filename().toStdString(), &es); collection->possibly_delete(); // Delete all existing timelines foreach (auto s, serialized) { s->possibly_delete(); } } return (es.outcome == OTIO::ErrorStatus::Outcome::OK); } OTIO::Timeline *SaveOTIOTask::SerializeTimeline(Sequence *sequence) { auto otio_timeline = new OTIO::Timeline(sequence->GetLabel().toStdString()); // Retainers clean themselves up when the final user is removed OTIO::Timeline::Retainer<OTIO::Timeline>* timeline_retainer = new OTIO::Timeline::Retainer<OTIO::Timeline>(otio_timeline); // Suppress unused variable warning Q_UNUSED(timeline_retainer); double rate = sequence->GetVideoParams().frame_rate().toDouble(); if (qIsNaN(rate)) { return nullptr; } if (!SerializeTrackList(sequence->track_list(Track::kVideo), otio_timeline, rate) || !SerializeTrackList(sequence->track_list(Track::kAudio), otio_timeline, rate)) { otio_timeline->possibly_delete(); return nullptr; } return otio_timeline; } OTIO::Track *SaveOTIOTask::SerializeTrack(Track *track, double sequence_rate, rational max_track_length) { auto otio_track = new OTIO::Track(); OTIO::ErrorStatus es; switch (track->type()) { case Track::kVideo: otio_track->set_kind("Video"); break; case Track::kAudio: otio_track->set_kind("Audio"); break; default: qWarning() << "Don't know OTIO track kind for native type" << track->type(); goto fail; } foreach (Block* block, track->Blocks()) { OTIO::Composable* otio_block = nullptr; if (dynamic_cast<ClipBlock*>(block)) { auto otio_clip = new OTIO::Clip(block->GetLabel().toStdString()); otio_clip->set_source_range(OTIO::TimeRange(block->in().toRationalTime(sequence_rate), block->length().toRationalTime(sequence_rate))); QVector<Footage*> media_nodes = 
block->FindInputNodes<Footage>(); if (!media_nodes.isEmpty()) { OTIO::TimeRange available_range; if (otio_track->kind().compare("Video") == 0) { // OTIO ExternalReference uses the source clips frame rate (or sample rate) as opposed to // the sequences rate double source_frame_rate = static_cast<ClipBlock*>(block)->connected_viewer()->GetVideoParams().frame_rate().toDouble(); available_range = OTIO::TimeRange(OTIO::RationalTime(0, source_frame_rate), OTIO::RationalTime(media_nodes.first()->GetVideoParams().duration(), source_frame_rate)); } else if (otio_track->kind().compare("Audio") == 0) { available_range = OTIO::TimeRange(OTIO::RationalTime(0, media_nodes.first()->GetAudioParams().sample_rate()), OTIO::RationalTime(media_nodes.first()->GetAudioParams().duration(), media_nodes.first()->GetAudioParams().sample_rate())); } auto media_ref = new OTIO::ExternalReference(media_nodes.first()->filename().toStdString(), available_range); otio_clip->set_media_reference(media_ref); } otio_block = otio_clip; } else if (dynamic_cast<GapBlock*>(block)) { otio_block = new OTIO::Gap(OTIO::TimeRange(block->in().toRationalTime(), block->length().toRationalTime()), block->GetLabel().toStdString() ); } else if (dynamic_cast<TransitionBlock*>(block)) { auto otio_transition = new OTIO::Transition(block->GetLabel().toStdString()); TransitionBlock* our_transition = static_cast<TransitionBlock*>(block); otio_transition->set_in_offset(our_transition->in_offset().toRationalTime()); otio_transition->set_out_offset(our_transition->out_offset().toRationalTime()); otio_block = new OTIO::Transition(); } if (!otio_block) { // We shouldn't ever get here, but catch without crashing if we ever do goto fail; } otio_track->append_child(otio_block, &es); if (es.outcome != OTIO::ErrorStatus::Outcome::OK) { goto fail; } } // All OTIO tracks must have the same duration so we add a Gap to fill the remaining time if (otio_track->duration(&es).to_seconds() < max_track_length.toDouble()) { double time_left = 
max_track_length.toDouble() - otio_track->duration(&es).to_seconds(); OTIO::Gap* gap = new OTIO::Gap(OTIO::TimeRange(otio_track->duration(&es), OTIO::RationalTime(time_left, 1.0))); otio_track->append_child(gap, &es); if (es.outcome != OTIO::ErrorStatus::Outcome::OK) { goto fail; } } return otio_track; fail: otio_track->possibly_delete(); return nullptr; } bool SaveOTIOTask::SerializeTrackList(TrackList *list, OTIO::Timeline* otio_timeline, double sequence_rate) { OTIO::ErrorStatus es; rational max_track_length = RATIONAL_MIN; foreach (Track* track, list->GetTracks()) { if (track->track_length() > max_track_length) { max_track_length = track->track_length(); } } foreach (Track* track, list->GetTracks()) { auto otio_track = SerializeTrack(track, sequence_rate, max_track_length); if (!otio_track) { return false; } otio_timeline->tracks()->append_child(otio_track, &es); if (es.outcome != OTIO::ErrorStatus::Outcome::OK) { otio_track->possibly_delete(); return false; } } return true; } } #endif // USE_OTIO
8,022
C++
.cpp
191
35.591623
130
0.665166
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,814
loadotio.cpp
olive-editor_olive/app/task/project/loadotio/loadotio.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "loadotio.h" #ifdef USE_OTIO #include <opentimelineio/clip.h> #include <opentimelineio/externalReference.h> #include <opentimelineio/gap.h> #include <opentimelineio/serializableCollection.h> #include <opentimelineio/timeline.h> #include <opentimelineio/transition.h> #include <QApplication> #include <QFileInfo> #include <QThread> #include "core.h" #include "node/audio/volume/volume.h" #include "node/block/clip/clip.h" #include "node/block/gap/gap.h" #include "node/block/transition/crossdissolve/crossdissolvetransition.h" #include "node/distort/transform/transformdistortnode.h" #include "node/generator/matrix/matrix.h" #include "node/math/math/math.h" #include "node/nodeundo.h" #include "node/project/folder/folder.h" #include "node/project/footage/footage.h" #include "node/project/sequence/sequence.h" #include "timeline/timelineundogeneral.h" #include "window/mainwindow/mainwindowundo.h" namespace olive { LoadOTIOTask::LoadOTIOTask(const QString& s) : ProjectLoadBaseTask(s) { } bool LoadOTIOTask::Run() { OTIO::ErrorStatus es; auto root = OTIO::SerializableObjectWithMetadata::from_json_file(GetFilename().toStdString(), &es); if (es.outcome != OTIO::ErrorStatus::Outcome::OK) { SetError(tr("Failed to load OpenTimelineIO from file \"%1\" \n\nOpenTimelineIO Error:\n\n%2") .arg(GetFilename(), 
QString::fromStdString(es.full_description))); return false; } project_ = new Project(); project_->Initialize(); project_->set_modified(true); std::vector<OTIO::Timeline*> timelines; if (root->schema_name() == "SerializableCollection") { // This is a number of timelines std::vector<OTIO::SerializableObject::Retainer<OTIO::SerializableObject>>& root_children = static_cast<OTIO::SerializableCollection*>(root)->children(); timelines.resize(root_children.size()); for (size_t j=0; j<root_children.size(); j++) { timelines[j] = static_cast<OTIO::Timeline*>(root_children[j].value); } } else if (root->schema_name() == "Timeline") { // This is a single timeline timelines.push_back(static_cast<OTIO::Timeline*>(root)); } else { // Unknown root, we don't know what to do with this SetError(tr("Unknown OpenTimelineIO root element")); return false; } // Keep track of imported footage QMap<QString, Footage*> imported_footage; QMap<OTIO::Timeline*, Sequence*> timeline_sequnce_map; // Variables used for loading bar float number_of_clips = 0; float clips_done = 0; // Generate a list of sequences with the same names as the timelines. // Assumes each timeline has a unique name. int unnamed_sequence_count = 0; foreach (auto timeline, timelines) { Sequence* sequence = new Sequence(); if (!timeline->name().empty()) { sequence->SetLabel(QString::fromStdString(timeline->name())); } else { // If the otio timeline does not provide a name, create a default one here unnamed_sequence_count++; QString label = tr("Sequence %1").arg(unnamed_sequence_count); sequence->SetLabel(QString::fromStdString(label.toStdString())); } // Set default params incase they aren't edited. 
sequence->set_default_parameters(); timeline_sequnce_map.insert(timeline, sequence); // Get number of clips for loading bar foreach (auto track, timeline->tracks()->children()) { auto otio_track = static_cast<OTIO::Track*>(track.value); number_of_clips += otio_track->children().size(); } } // Dialog has to be called from the main thread so we pass the list of sequences here. bool accepted = false; QMetaObject::invokeMethod(Core::instance(), "DialogImportOTIOShow", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, accepted), Q_ARG(QList<Sequence*>,timeline_sequnce_map.values())); if (!accepted) { // Cancel to indicate to caller that this task did not complete and to simply dispose of it Cancel(); qDeleteAll(timeline_sequnce_map); // Clear sequences return true; } foreach (auto timeline, timeline_sequnce_map.keys()) { Sequence* sequence = timeline_sequnce_map.value(timeline); sequence->setParent(project_); FolderAddChild(project_->root(), sequence).redo_now(); // Create a folder for this sequence's footage Folder* sequence_footage = new Folder(); sequence_footage->SetLabel(QString::fromStdString(timeline->name())); sequence_footage->setParent(project_); FolderAddChild(project_->root(), sequence_footage).redo_now(); // Iterate through tracks for (auto c : timeline->tracks()->children()) { auto otio_track = static_cast<OTIO::Track*>(c.value); // Create a new track Track* track = nullptr; // Determine what kind of track it is if (otio_track->kind() == "Video" || otio_track->kind() == "Audio") { Track::Type type; if (otio_track->kind() == "Video") { type = Track::kVideo; } else { type = Track::kAudio; } // Create track TimelineAddTrackCommand t(sequence->track_list(type)); t.redo_now(); track = t.track(); } else { qWarning() << "Found unknown track type:" << otio_track->kind().c_str(); continue; } // Get clips from track auto clip_map = otio_track->children(); if (es.outcome != OTIO::ErrorStatus::Outcome::OK) { SetError(tr("Failed to load clip")); return false; } Block* 
previous_block = nullptr; bool prev_block_transition = false; for (auto otio_block_retainer : clip_map) { auto otio_block = otio_block_retainer.value; Block* block = nullptr; if (otio_block->schema_name() == "Clip") { block = new ClipBlock(); } else if (otio_block->schema_name() == "Gap") { block = new GapBlock(); } else if (otio_block->schema_name() == "Transition") { // Todo: Look into OTIO supported transitions and add them to Olive block = new CrossDissolveTransition(); } else { // We don't know what this is yet, just create a gap for now so that *something* is there qWarning() << "Found unknown block type:" << otio_block->schema_name().c_str(); block = new GapBlock(); } block->setParent(project_); block->SetLabel(QString::fromStdString(otio_block->name())); track->AppendBlock(block); rational start_time; rational duration; if (otio_block->schema_name() == "Clip" || otio_block->schema_name() == "Gap") { start_time = rational::fromDouble(static_cast<OTIO::Item*>(otio_block)->source_range()->start_time().to_seconds()); duration = rational::fromDouble(static_cast<OTIO::Item*>(otio_block)->source_range()->duration().to_seconds()); if (otio_block->schema_name() == "Clip") { static_cast<ClipBlock*>(block)->set_media_in(start_time); } block->set_length_and_media_out(duration); } // If the previous block was a transition, connect the current block to it if (prev_block_transition) { TransitionBlock* previous_transition_block = static_cast<TransitionBlock*>(previous_block); Node::ConnectEdge(block, NodeInput(previous_transition_block, TransitionBlock::kInBlockInput)); prev_block_transition = false; } if (otio_block->schema_name() == "Transition") { TransitionBlock* transition_block = static_cast<TransitionBlock*>(block); OTIO::Transition* otio_block_transition = static_cast<OTIO::Transition*>(otio_block); // Set how far the transition eats into the previous clip transition_block->set_offsets_and_length(rational::fromRationalTime(otio_block_transition->in_offset()), 
rational::fromRationalTime(otio_block_transition->out_offset())); if (previous_block) { Node::ConnectEdge(previous_block, NodeInput(transition_block, TransitionBlock::kOutBlockInput)); } prev_block_transition = true; // Add nodes to the graph and set up contexts block->setParent(sequence->parent()); // Position transition in its own context block->SetNodePositionInContext(block, QPointF(0, 0)); } if (otio_block->schema_name() == "Gap") { // Add nodes to the graph and set up contexts block->setParent(sequence->parent()); // Position transition in its own context block->SetNodePositionInContext(block, QPointF(0, 0)); } // Update this after it's used but before any continue statements previous_block = block; if (otio_block->schema_name() == "Clip") { auto otio_clip = static_cast<OTIO::Clip*>(otio_block); if (!otio_clip->media_reference()) { continue; } if (otio_clip->media_reference()->schema_name() == "ExternalReference") { // Link footage QString footage_url = QString::fromStdString(static_cast<OTIO::ExternalReference*>(otio_clip->media_reference())->target_url()); Footage* probed_item; if (imported_footage.contains(footage_url)) { probed_item = imported_footage.value(footage_url); } else { probed_item = new Footage(footage_url); imported_footage.insert(footage_url, probed_item); probed_item->setParent(project_); QFileInfo info(probed_item->filename()); probed_item->SetLabel(info.fileName()); FolderAddChild add(sequence_footage, probed_item); add.redo_now(); } // Add nodes to the graph and set up contexts block->setParent(sequence->parent()); // Position clip in its own context block->SetNodePositionInContext(block, QPointF(0, 0)); // Position footage in its context block->SetNodePositionInContext(probed_item, QPointF(-2, 0)); if (track->type() == Track::kVideo) { TransformDistortNode* transform = new TransformDistortNode(); transform->setParent(sequence->parent()); Node::ConnectEdge(probed_item, NodeInput(transform, TransformDistortNode::kTextureInput)); 
Node::ConnectEdge(transform, NodeInput(block, ClipBlock::kBufferIn)); block->SetNodePositionInContext(transform, QPointF(-1, 0)); } else { VolumeNode* volume_node = new VolumeNode(); volume_node->setParent(sequence->parent()); Node::ConnectEdge(probed_item, NodeInput(volume_node, VolumeNode::kSamplesInput)); Node::ConnectEdge(volume_node, NodeInput(block, ClipBlock::kBufferIn)); block->SetNodePositionInContext(volume_node, QPointF(-1, 0)); } } } clips_done++; emit ProgressChanged(clips_done / number_of_clips); } } } project_->moveToThread(qApp->thread()); return true; } } #endif // USE_OTIO
11,990
C++
.cpp
260
38.388462
180
0.652853
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,815
import.cpp
olive-editor_olive/app/task/project/import/import.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "import.h" #include <QDir> #include <QFileInfo> #include "config/config.h" #include "core.h" #include "node/nodeundo.h" #include "node/project/footage/footage.h" namespace olive { ProjectImportTask::ProjectImportTask(Folder *folder, const QStringList &filenames) : command_(nullptr), folder_(folder) { foreach (const QString& f, filenames) { filenames_.append(QFileInfo(f)); } file_count_ = Core::CountFilesInFileList(filenames_); SetTitle(tr("Importing %n file(s)", nullptr, file_count_)); } const int &ProjectImportTask::GetFileCount() const { return file_count_; } bool ProjectImportTask::Run() { command_ = new MultiUndoCommand(); int imported = 0; Import(folder_, filenames_, imported, command_); if (IsCancelled()) { delete command_; command_ = nullptr; return false; } else { return true; } } void ProjectImportTask::Import(Folder *folder, QFileInfoList import, int &counter, MultiUndoCommand* parent_command) { for (int i=0; i<import.size(); i++) { if (IsCancelled()) { break; } const QFileInfo& file_info = import.at(i); // Check if this file is a directory if (file_info.isDir()) { // QDir::entryList only returns filenames, we can use entryInfoList() to get full paths QFileInfoList entry_list = QDir(file_info.absoluteFilePath()).entryInfoList(); // Strip out "." and ".." 
(for some reason QDir::NoDotAndDotDot doesn't work with entryInfoList, so we have to // check manually) for (int j=0;j<entry_list.size();j++) { if (entry_list.at(j).fileName() == QStringLiteral(".") || entry_list.at(j).fileName() == QStringLiteral("..")) { entry_list.removeAt(j); j--; } } // Only proceed if the empty actually has files in it if (!entry_list.isEmpty()) { // Create a folder corresponding to the directory Folder* f = new Folder(); f->SetLabel(file_info.fileName()); // Create undoable command that adds the items to the model AddItemToFolder(folder, f, parent_command); // Recursively follow this path Import(f, entry_list, counter, parent_command); } } else { Footage* footage = new Footage(); footage->SetCancelPointer(this->GetCancelAtom()); footage->set_filename(file_info.absoluteFilePath()); footage->SetLabel(file_info.fileName()); footage->SetCancelPointer(nullptr); if (footage->IsValid()) { // See if this footage is an image sequence ValidateImageSequence(footage, import, i); // Create undoable command that adds the items to the model AddItemToFolder(folder, footage, parent_command); // Add to vector imported_footage_.push_back(footage); } else { // Add to list so we can tell the user about it later invalid_files_.append(file_info.absoluteFilePath()); delete footage; } counter++; emit ProgressChanged(static_cast<double>(counter) / static_cast<double>(file_count_)); } } } void ProjectImportTask::ValidateImageSequence(Footage *footage, QFileInfoList& info_list, int index) { // Heuristically determine whether this file is part of an image sequence or not // // By this point we've established that video contains a single still image stream. Now we'll // see if it ends with numbers. 
if (Decoder::GetImageSequenceDigitCount(footage->filename()) > 0 && !image_sequence_ignore_files_.contains(footage->filename()) && footage->InputArraySize(Footage::kVideoParamsInput)) { VideoParams video_stream = footage->GetVideoParams(0); QSize dim(video_stream.width(), video_stream.height()); int64_t ind = Decoder::GetImageSequenceIndex(footage->filename()); // Check if files around exist around it with that follow a sequence QString previous_img_fn = Decoder::TransformImageSequenceFileName(footage->filename(), ind - 1); QString next_img_fn = Decoder::TransformImageSequenceFileName(footage->filename(), ind + 1); Footage* previous_file = new Footage(previous_img_fn); Footage* next_file = new Footage(next_img_fn); // Finally see if these files have the same dimensions if ((previous_file->IsValid() && CompareStillImageSize(previous_file, dim)) || (next_file->IsValid() && CompareStillImageSize(next_file, dim))) { // By this point, we've established this file is a still image with a number at the end of // the filename surrounded by adjacent numbers. It could be a still image! But let's ask the // user just in case... 
bool is_sequence; QMetaObject::invokeMethod(Core::instance(), "ConfirmImageSequence", Qt::BlockingQueuedConnection, Q_RETURN_ARG(bool, is_sequence), Q_ARG(QString, footage->filename())); int64_t seq_index = Decoder::GetImageSequenceIndex(footage->filename()); // Heuristic to find the first and last images (users can always override this later in // FootagePropertiesDialog) int64_t start_index = GetImageSequenceLimit(footage->filename(), seq_index, false); int64_t end_index = GetImageSequenceLimit(footage->filename(), seq_index, true); // Depending on the user's choice, either remove them from the list or don't ask for the // remainders for (int64_t j=start_index; j<=end_index; j++) { QString entry_fn = Decoder::TransformImageSequenceFileName(footage->filename(), j); if (is_sequence) { // If this is part of the sequence we're importing here, remove it for (int i=index+1; i<info_list.size(); i++) { if (info_list.at(i).absoluteFilePath() == entry_fn) { if (is_sequence) { info_list.removeAt(i); } break; } } } else { image_sequence_ignore_files_.append(entry_fn); } } if (is_sequence) { // User has confirmed it is a still image, let's set it accordingly. 
video_stream.set_video_type(VideoParams::kVideoTypeImageSequence); rational default_timebase = OLIVE_CONFIG("DefaultSequenceFrameRate").value<rational>(); video_stream.set_time_base(default_timebase); video_stream.set_frame_rate(default_timebase.flipped()); video_stream.set_start_time(start_index); video_stream.set_duration(end_index - start_index + 1); footage->SetVideoParams(video_stream, 0); } } delete previous_file; delete next_file; } } void ProjectImportTask::AddItemToFolder(Folder *folder, Node *item, MultiUndoCommand *command) { // Create undoable command that adds the items to the model Project* project = folder_->project(); NodeAddCommand* nac = new NodeAddCommand(project, item); nac->PushToThread(project->thread()); command->add_child(nac); command->add_child(new FolderAddChild(folder, item)); } bool ProjectImportTask::ItemIsStillImageFootageOnly(Footage* footage) { if (footage->GetTotalStreamCount() != 1) { // Footage with more than one stream (usually video+audio) most likely isn't an image sequence return false; } VideoParams vp = footage->GetVideoParams(0); // Footage must be valid and video stream must be a still image to be an image sequence return vp.is_valid() && vp.video_type() == VideoParams::kVideoTypeStill; } bool ProjectImportTask::CompareStillImageSize(Footage* footage, const QSize &sz) { if (!ItemIsStillImageFootageOnly(footage)) { return false; } VideoParams stream = footage->GetVideoParams(0); return stream.width() == sz.width() && stream.height() == sz.height(); } int64_t ProjectImportTask::GetImageSequenceLimit(const QString& start_fn, int64_t start, bool up) { QString test_filename; int test_index; forever { if (up) { test_index = start + 1; } else { test_index = start - 1; } test_filename = Decoder::TransformImageSequenceFileName(start_fn, test_index); if (!QFileInfo::exists(test_filename)) { // Reached end of index break; } start = test_index; } return start; } }
9,025
C++
.cpp
216
35.703704
116
0.678719
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,816
importerrordialog.cpp
olive-editor_olive/app/task/project/import/importerrordialog.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "importerrordialog.h" #include <QDialogButtonBox> #include <QLabel> #include <QListWidget> #include <QVBoxLayout> namespace olive { ProjectImportErrorDialog::ProjectImportErrorDialog(const QStringList& filenames, QWidget* parent) : QDialog(parent) { QVBoxLayout* layout = new QVBoxLayout(this); setWindowTitle(tr("Import Error")); layout->addWidget(new QLabel(tr("The following files failed to import. Olive likely does not " "support their formats."))); QListWidget* list_widget = new QListWidget(); foreach (const QString& s, filenames) { list_widget->addItem(s); } layout->addWidget(list_widget); QDialogButtonBox* buttons = new QDialogButtonBox(); buttons->setStandardButtons(QDialogButtonBox::Ok); buttons->setCenterButtons(true); connect(buttons, &QDialogButtonBox::accepted, this, &ProjectImportErrorDialog::accept); layout->addWidget(buttons); } }
1,649
C++
.cpp
39
38.666667
99
0.760025
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,817
save.cpp
olive-editor_olive/app/task/project/save/save.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "save.h" #include <QDir> #include <QFile> #include <QXmlStreamWriter> #include "common/filefunctions.h" #include "core.h" #include "node/project/serializer/serializer.h" namespace olive { ProjectSaveTask::ProjectSaveTask(Project *project, bool use_compression) : project_(project), use_compression_(use_compression) { SetTitle(tr("Saving '%1'").arg(project->filename())); } bool ProjectSaveTask::Run() { QString using_filename = override_filename_.isEmpty() ? project_->filename() : override_filename_; ProjectSerializer::SaveData data(ProjectSerializer::kProject); data.SetFilename(using_filename); data.SetProject(project_); data.SetLayout(layout_); ProjectSerializer::Result result = ProjectSerializer::Save(data, use_compression_); bool success = false; switch (result.code()) { case ProjectSerializer::kSuccess: success = true; break; case ProjectSerializer::kXmlError: SetError(tr("Failed to write XML data.")); break; case ProjectSerializer::kFileError: SetError(tr("Failed to open file \"%1\" for writing.").arg(result.GetDetails())); break; case ProjectSerializer::kOverwriteError: SetError(tr("Failed to overwrite \"%1\". 
Project has been saved as \"%2\" instead.") .arg(using_filename, result.GetDetails())); success = true; break; // Errors that should never be thrown by a save case ProjectSerializer::kProjectTooNew: case ProjectSerializer::kProjectTooOld: case ProjectSerializer::kUnknownVersion: case ProjectSerializer::kNoData: SetError(tr("Unknown error.")); break; } return success; } }
2,343
C++
.cpp
63
33.873016
100
0.749226
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,818
loadbasetask.cpp
olive-editor_olive/app/task/project/load/loadbasetask.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "loadbasetask.h" namespace olive { ProjectLoadBaseTask::ProjectLoadBaseTask(const QString &filename) : project_(nullptr), filename_(filename) { SetTitle(tr("Loading '%1'").arg(filename)); } }
922
C++
.cpp
23
37.391304
71
0.769663
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,819
load.cpp
olive-editor_olive/app/task/project/load/load.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "load.h" #include <QApplication> #include "node/project/serializer/serializer.h" namespace olive { ProjectLoadTask::ProjectLoadTask(const QString &filename) : ProjectLoadBaseTask(filename) { } bool ProjectLoadTask::Run() { project_ = new Project(); project_->set_filename(GetFilename()); ProjectSerializer::Result result = ProjectSerializer::Load(project_, GetFilename(), ProjectSerializer::kProject); layout_ = result.GetLoadData().layout; switch (result.code()) { case ProjectSerializer::kSuccess: break; case ProjectSerializer::kProjectTooOld: SetError(tr("This project is from a version of Olive that is no longer supported in this version.")); break; case ProjectSerializer::kProjectTooNew: SetError(tr("This project is from a newer version of Olive and cannot be opened in this version.")); break; case ProjectSerializer::kUnknownVersion: SetError(tr("Failed to determine project version.")); break; case ProjectSerializer::kFileError: SetError(tr("Failed to read file \"%1\" for reading.").arg(GetFilename())); break; case ProjectSerializer::kXmlError: SetError(tr("Failed to read XML document. File may be corrupt. 
Error was: %1").arg(result.GetDetails())); break; case ProjectSerializer::kNoData: SetError(tr("Failed to find any data to parse.")); break; // Errors that should never be thrown by a load case ProjectSerializer::kOverwriteError: SetError(tr("Unknown error.")); break; } if (result == ProjectSerializer::kSuccess) { project_->moveToThread(qApp->thread()); return true; } else { delete project_; return false; } } }
2,387
C++
.cpp
63
34.380952
115
0.745447
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,820
render.cpp
olive-editor_olive/app/task/render/render.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "render.h" #include "node/project/sequence/sequence.h" #include "render/rendermanager.h" namespace olive { RenderTask::RenderTask() : running_tickets_(0), native_progress_signalling_(true) { } RenderTask::~RenderTask() { } bool RenderTask::Render(ColorManager* manager, const TimeRangeList& video_range, const TimeRangeList &audio_range, const TimeRange &subtitle_range, RenderMode::Mode mode, FrameHashCache* cache, const QSize &force_size, const QMatrix4x4 &force_matrix, PixelFormat force_format, int force_channel_count, ColorProcessorPtr force_color_output) { QMetaObject::invokeMethod(RenderManager::instance(), "SetAggressiveGarbageCollection", Q_ARG(bool, true)); // Run watchers in another thread so they can accept signals even while this thread is blocked QThread watcher_thread; watcher_thread.start(); double progress_counter = 0; double total_length = 0; // Store real time before any rendering takes place // Queue audio jobs foreach (const TimeRange& range, audio_range) { // Don't count audio progress, since it's generally a lot faster than video and is weighted at // 50%, which makes the progress bar look weird to the uninitiated //total_length += r.length().toDouble(); RenderManager::RenderAudioParams rap(viewer_->GetConnectedSampleOutput(), range, audio_params_, RenderMode::kOnline); 
RenderTicketWatcher* watcher = new RenderTicketWatcher(); watcher->setProperty("range", QVariant::fromValue(range)); PrepareWatcher(watcher, &watcher_thread); IncrementRunningTickets(); watcher->SetTicket(RenderManager::instance()->RenderAudio(rap)); } // Look up hashes TimeRangeListFrameIterator iterator(video_range, video_params().frame_rate_as_time_base()); total_number_of_frames_ = iterator.size(); total_length += total_number_of_frames_; // Start a render of a limited amount, and then render one frame for each frame that gets // finished. This prevents rendered frames from stacking up in memory indefinitely while the // encoder is processing them. The amount is kind of arbitrary, but we use the thread count so // each of the system's threads are utilized as memory allows. const int maximum_rendered_frames = QThread::idealThreadCount(); rational next_frame; for (int i=0; i<maximum_rendered_frames && iterator.GetNext(&next_frame); i++) { StartTicket(&watcher_thread, manager, next_frame, mode, cache, force_size, force_matrix, force_format, force_channel_count, force_color_output); } bool result = true; // Subtitle loop, loops over all blocks in sequence on all tracks if (!subtitle_range.length().isNull()) { if (Sequence *sequence = dynamic_cast<Sequence*>(viewer_)) { TrackList *list = sequence->track_list(Track::kSubtitle); QVector<int> block_indexes(list->GetTrackCount(), 0); QVector<int> tracks_to_push; do { tracks_to_push.clear(); for (int i=0; i<block_indexes.size(); i++) { Track *this_track = list->GetTrackAt(i); if (this_track->IsMuted()) { continue; } int &this_block_index = block_indexes[i]; if (this_block_index >= this_track->Blocks().size()) { continue; } Block *this_block = this_track->Blocks().at(this_block_index); Track *compare_track = tracks_to_push.isEmpty() ? nullptr : list->GetTrackAt(tracks_to_push.first()); const int &compare_block_index = tracks_to_push.isEmpty() ? 
-1 : block_indexes.at(tracks_to_push.first()); Block *compare_block = compare_track ? compare_track->Blocks().at(compare_block_index) : nullptr; if (!compare_track || compare_block->in() >= this_block->in()) { if (compare_track && compare_block->in() != this_block->in()) { tracks_to_push.clear(); } tracks_to_push.append(i); } } for (int i=0; i<tracks_to_push.size(); i++) { Track *this_track = list->GetTrackAt(tracks_to_push.at(i)); Block *this_block = this_track->Blocks().at(block_indexes.at(tracks_to_push.at(i))); if (const SubtitleBlock *sub = dynamic_cast<const SubtitleBlock*>(this_block)) { if (sub->is_enabled()) { if (!EncodeSubtitle(sub)) { result = false; break; } } } block_indexes[tracks_to_push.at(i)]++; } } while (!tracks_to_push.isEmpty()); } } finished_watcher_mutex_.lock(); while (result && !IsCancelled()) { while (!finished_watchers_.empty() && !IsCancelled() && result) { RenderTicketWatcher* watcher = finished_watchers_.front(); finished_watchers_.pop_front(); finished_watcher_mutex_.unlock(); // Analyze watcher here RenderManager::TicketType ticket_type = watcher->GetTicket()->property("type").value<RenderManager::TicketType>(); if (ticket_type == RenderManager::kTypeAudio) { TimeRange range = watcher->property("range").value<TimeRange>(); if (!AudioDownloaded(range, watcher->Get().value<SampleBuffer>())) { result = false; } // Don't count audio progress, since it's generally a lot faster than video and is weighted at // 50%, which makes the progress bar look weird to the uninitiated //progress_counter += range.length().toDouble(); //emit ProgressChanged(progress_counter / total_length); } else if (ticket_type == RenderManager::kTypeVideo && TwoStepFrameRendering()) { if (!DownloadFrame(&watcher_thread, watcher->Get().value<FramePtr>(), watcher->property("time").value<rational>())) { result = false; } if (native_progress_signalling_) { progress_counter += 0.5; emit ProgressChanged(progress_counter / total_length); } } else { // Assume single-step 
video or video download ticket if (!FrameDownloaded(watcher->Get().value<FramePtr>(), watcher->property("time").value<rational>())) { result = false; } if (native_progress_signalling_) { double progress_to_add = 1.0; if (TwoStepFrameRendering()) { progress_to_add *= 0.5; } progress_counter += progress_to_add; emit ProgressChanged(progress_counter / total_length); } if (iterator.GetNext(&next_frame)) { StartTicket(&watcher_thread, manager, next_frame, mode, cache, force_size, force_matrix, force_format, force_channel_count, force_color_output); } } delete watcher; running_watchers_.removeOne(watcher); finished_watcher_mutex_.lock(); } if (IsCancelled() || !result) { break; } // Run out of finished watchers. If we still have running tickets, wait for the next one to finish. if (running_tickets_ > 0) { finished_watcher_wait_cond_.wait(&finished_watcher_mutex_); } else { // No more running tickets or finished tickets, wem ust be break; } } finished_watcher_mutex_.unlock(); if (IsCancelled() || !result) { // Cancel every watcher we created foreach (RenderTicketWatcher* watcher, running_watchers_) { watcher->Cancel(); disconnect(watcher, &RenderTicketWatcher::Finished, this, &RenderTask::TicketDone); RenderManager::instance()->RemoveTicket(watcher->GetTicket()); } foreach (RenderTicketWatcher* watcher, running_watchers_) { watcher->WaitForFinished(); } } watcher_thread.quit(); watcher_thread.wait(); QMetaObject::invokeMethod(RenderManager::instance(), "SetAggressiveGarbageCollection", Q_ARG(bool, false)); return result; } bool RenderTask::DownloadFrame(QThread *thread, FramePtr frame, const rational &time) { //RenderTicketWatcher* watcher = new RenderTicketWatcher(); //PrepareWatcher(watcher, thread); //IncrementRunningTickets(); //watcher->SetTicket(RenderManager::instance()->SaveFrameToCache(viewer_->video_frame_cache(), frame, time)); // NOTE: Doesn't reflect the actual return result of SaveFrameToCache return true; } bool RenderTask::EncodeSubtitle(const 
SubtitleBlock *subtitle) { Q_UNUSED(subtitle) return true; } void RenderTask::PrepareWatcher(RenderTicketWatcher *watcher, QThread *thread) { watcher->moveToThread(thread); connect(watcher, &RenderTicketWatcher::Finished, this, &RenderTask::TicketDone, Qt::DirectConnection); running_watchers_.append(watcher); } void RenderTask::IncrementRunningTickets() { finished_watcher_mutex_.lock(); running_tickets_++; finished_watcher_mutex_.unlock(); } void RenderTask::StartTicket(QThread* watcher_thread, ColorManager* manager, const rational& time, RenderMode::Mode mode, FrameHashCache* cache, const QSize &force_size, const QMatrix4x4 &force_matrix, PixelFormat force_format, int force_channel_count, ColorProcessorPtr force_color_output) { RenderManager::RenderVideoParams rvp(viewer_->GetConnectedTextureOutput(), video_params_, audio_params_, time, manager, mode); rvp.force_size = force_size; rvp.force_matrix = force_matrix; rvp.force_format = force_format; rvp.force_color_output = force_color_output; rvp.force_channel_count = force_channel_count; if (cache) { rvp.AddCache(cache); } RenderTicketWatcher* watcher = new RenderTicketWatcher(); watcher->setProperty("time", QVariant::fromValue(time)); PrepareWatcher(watcher, watcher_thread); IncrementRunningTickets(); watcher->SetTicket(RenderManager::instance()->RenderFrame(rvp)); } void RenderTask::TicketDone(RenderTicketWatcher* watcher) { finished_watcher_mutex_.lock(); finished_watchers_.push_back(watcher); finished_watcher_wait_cond_.wakeAll(); running_tickets_--; finished_watcher_mutex_.unlock(); } }
11,015
C++
.cpp
245
37.665306
154
0.665763
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,821
precachetask.cpp
olive-editor_olive/app/task/precache/precachetask.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "precachetask.h" #include "node/project.h" namespace olive { PreCacheTask::PreCacheTask(Footage *footage, int index, Sequence* sequence) { // Set video and audio params set_video_params(sequence->GetVideoParams()); set_audio_params(sequence->GetAudioParams()); // Create new project project_ = new Project(); // Create viewer with same parameters as the sequence set_viewer(new ViewerOutput()); viewer()->setParent(project_); viewer()->SetVideoParams(sequence->GetVideoParams()); viewer()->SetAudioParams(sequence->GetAudioParams()); // Copy project config nodes Project::CopySettings(footage->project(), project_); // Copy footage node so it can precache without any modifications from the user screwing it up footage_ = static_cast<Footage*>(footage->copy()); footage_->setParent(project_); Node::CopyInputs(footage, footage_, false); Node::ConnectEdge(footage_, NodeInput(viewer(), ViewerOutput::kTextureInput)); viewer()->SetValueHintForInput(ViewerOutput::kTextureInput, Node::ValueHint({NodeValue::kTexture}, Track::Reference(Track::kVideo, index).ToString())); SetTitle(tr("Pre-caching %1:%2").arg(footage_->filename(), QString::number(index))); } PreCacheTask::~PreCacheTask() { // This should delete the footage we copied and the viewer we created delete project_; } bool PreCacheTask::Run() { // Get 
list of invalidated ranges TimeRange intersection; if (footage_->GetWorkArea()->enabled()) { // If we're caching only in-out, limit the range to that intersection = footage_->GetWorkArea()->range(); } else { // Otherwise use full length intersection = TimeRange(0, footage_->GetVideoLength()); } TimeRangeList video_range = viewer()->video_frame_cache()->GetInvalidatedRanges(intersection); Render(project_->color_manager(), video_range, TimeRangeList(), TimeRange(), RenderMode::kOnline, viewer()->video_frame_cache()); return true; } bool PreCacheTask::FrameDownloaded(FramePtr frame, const rational &time) { // Do nothing. Pre-cache essentially just creates more frames in the cache, it doesn't need to do // anything else. Q_UNUSED(frame) Q_UNUSED(time) return true; } bool PreCacheTask::AudioDownloaded(const TimeRange &range, const SampleBuffer &samples) { // Pre-cache doesn't cache any audio Q_UNUSED(range) Q_UNUSED(samples) return true; } }
3,141
C++
.cpp
80
35.9
153
0.740521
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,822
export.cpp
olive-editor_olive/app/task/export/export.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "export.h" #include "node/color/colormanager/colormanager.h" namespace olive { ExportTask::ExportTask(ViewerOutput *viewer_node, ColorManager* color_manager, const EncodingParams& params) : params_(params) { // Create a copy of the project copier_ = new ProjectCopier(this); copier_->SetProject(viewer_node->project()); set_viewer(copier_->GetCopy(viewer_node)); color_manager_ = copier_->GetCopiedProject()->color_manager(); // Adjust video params to have no divider VideoParams vp = viewer_node->GetVideoParams(); vp.set_divider(1); vp.set_time_base(params.video_params().time_base()); vp.set_frame_rate(params.video_params().frame_rate()); set_video_params(vp); set_audio_params(viewer_node->GetAudioParams()); SetTitle(tr("Exporting \"%1\"").arg(viewer_node->GetLabel())); SetNativeProgressSignallingEnabled(false); } bool ExportTask::Run() { // For safety, if we're overwriting, we save to a temporary filename and then only overwrite it // at the end QString real_filename = params_.filename(); if (QFileInfo::exists(params_.filename())) { // Generate a filename that definitely doesn't exist params_.SetFilename(FileFunctions::GetSafeTemporaryFilename(real_filename)); } // If we're exporting to a sidecar subtitle file, disable the subtitles in the main encoder bool subtitles_enabled = 
params_.subtitles_enabled(); EncodingParams sidecar_params = params_; if (subtitles_enabled && params_.subtitles_are_sidecar()) { params_.DisableSubtitles(); } encoder_ = std::shared_ptr<Encoder>(Encoder::CreateFromParams(params_)); if (!encoder_) { SetError(tr("Failed to create encoder")); return false; } if (!encoder_->Open()) { SetError(tr("Failed to open file: %1").arg(encoder_->GetError())); return false; } if (subtitles_enabled && params_.subtitles_are_sidecar()) { // Construct sidecar params sidecar_params.DisableVideo(); sidecar_params.DisableAudio(); QString sidecar_filename; { QFileInfo fi(real_filename); sidecar_filename = fi.completeBaseName(); sidecar_filename.append('.'); sidecar_filename.append(ExportFormat::GetExtension(sidecar_params.subtitle_sidecar_fmt())); sidecar_filename = fi.dir().filePath(sidecar_filename); } sidecar_params.SetFilename(sidecar_filename); subtitle_encoder_ = std::shared_ptr<Encoder>(Encoder::CreateFromFormat(sidecar_params.subtitle_sidecar_fmt(), sidecar_params)); if (!subtitle_encoder_) { SetError(tr("Failed to create subtitle encoder")); return false; } if (!subtitle_encoder_->Open()) { SetError(tr("Failed to open subtitle sidecar file: %1").arg(sidecar_filename)); return false; } } else { subtitle_encoder_ = encoder_; } if (params_.has_custom_range()) { // Render custom range only export_range_ = params_.custom_range(); } else { // Render entire sequence export_range_ = TimeRange(0, viewer()->GetLength()); } frame_time_ = 0; QSize video_force_size; QMatrix4x4 video_force_matrix; if (params_.video_enabled()) { // If a transformation matrix is applied to this video, create it here if (video_params().width() != params_.video_params().width() || video_params().height() != params_.video_params().height()) { video_force_size = QSize(params_.video_params().width(), params_.video_params().height()); if (params_.video_scaling_method() != EncodingParams::kStretch) { video_force_matrix = 
EncodingParams::GenerateMatrix(params_.video_scaling_method(), video_params().width(), video_params().height(), params_.video_params().width(), params_.video_params().height()); } } else { // Disables forcing size in the renderer video_force_size = QSize(0, 0); } // Create color processor color_processor_ = ColorProcessor::Create(color_manager_, color_manager_->GetReferenceColorSpace(), params_.color_transform()); } // Start render process TimeRangeList video_range, audio_range; TimeRange subtitle_range; if (params_.video_enabled()) { if (export_range_.in() > 0) { export_range_.set_in(Timecode::snap_time_to_timebase(export_range_.in(), video_params().frame_rate_as_time_base())); } video_range = {export_range_}; } if (params_.audio_enabled()) { audio_range = {export_range_}; } if (subtitles_enabled) { subtitle_range = export_range_; } Render(color_manager_, video_range, audio_range, subtitle_range, RenderMode::kOnline, nullptr, video_force_size, video_force_matrix, encoder_->GetDesiredPixelFormat(), VideoParams::kRGBAChannelCount, color_processor_); bool success = true; encoder_->Close(); if (!encoder_->GetError().isEmpty()) { SetError(encoder_->GetError()); success = false; } if (subtitle_encoder_ != encoder_) { subtitle_encoder_->Close(); if (!subtitle_encoder_->GetError().isEmpty()) { SetError(subtitle_encoder_->GetError()); success = false; } } // If cancelled, delete the file we made, which is always a file we created since we write to a // temp file during the actual encoding process if (IsCancelled()) { QFile::remove(params_.filename()); } else if (params_.filename() != real_filename) { // If we were writing to a temp file, overwrite now if (!FileFunctions::RenameFileAllowOverwrite(params_.filename(), real_filename)) { SetError(tr("Failed to overwrite \"%1\". 
Export has been saved as \"%2\" instead.") .arg(real_filename, params_.filename())); success = false; } } return success; } bool ExportTask::FrameDownloaded(FramePtr f, const rational &time) { rational actual_time = time - export_range_.in(); time_map_.insert(actual_time, f); while (!IsCancelled()) { rational real_time = Timecode::timestamp_to_time(frame_time_, video_params().frame_rate_as_time_base()); if (!time_map_.contains(real_time)) { break; } // Unfortunately this can't be done in another thread since the frames need to be sent // one after the other chronologically. if (!encoder_->WriteFrame(time_map_.take(real_time), real_time)) { SetError(encoder_->GetError()); return false; } frame_time_++; emit ProgressChanged(double(frame_time_) / double(GetTotalNumberOfFrames())); } return true; } bool ExportTask::AudioDownloaded(const TimeRange &range, const SampleBuffer &samples) { TimeRange adjusted_range = range - export_range_.in(); if (adjusted_range.in() == audio_time_) { if (!WriteAudioLoop(adjusted_range, samples)) { return false; } } else { audio_map_.insert(adjusted_range, samples); } return true; } bool ExportTask::EncodeSubtitle(const SubtitleBlock *sub) { if (!subtitle_encoder_->WriteSubtitle(sub)) { SetError(subtitle_encoder_->GetError()); return false; } else { return true; } } bool ExportTask::WriteAudioLoop(const TimeRange& time, const SampleBuffer &samples) { if (!encoder_->WriteAudio(samples)) { SetError(encoder_->GetError()); return false; } audio_time_ = time.out(); for (auto it=audio_map_.begin(); it!=audio_map_.end(); it++) { TimeRange t = it.key(); SampleBuffer s = it.value(); if (t.in() == audio_time_) { // Erase from audio map since we're just about to write it audio_map_.erase(it); // Call recursively to write the next sample buffer if (!WriteAudioLoop(t, s)) { return false; } // Break out of loop break; } } return true; } }
8,792
C++
.cpp
228
32.407895
131
0.65632
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,823
viewerwindow.cpp
olive-editor_olive/app/widget/viewer/viewerwindow.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "viewerwindow.h" #include <QKeyEvent> #include <QVBoxLayout> namespace olive { ViewerWindow::ViewerWindow(QWidget *parent) : QWidget(parent, Qt::Window | Qt::WindowStaysOnTopHint), pixel_aspect_(1) { QVBoxLayout* layout = new QVBoxLayout(this); layout->setContentsMargins(0, 0, 0, 0); layout->setSpacing(0); display_widget_ = new ViewerDisplayWidget(); layout->addWidget(display_widget_); } ViewerDisplayWidget *ViewerWindow::display_widget() const { return display_widget_; } void ViewerWindow::SetVideoParams(const VideoParams &params) { width_ = params.width(); height_ = params.height(); pixel_aspect_ = params.pixel_aspect_ratio(); UpdateMatrix(); } void ViewerWindow::SetResolution(int width, int height) { width_ = width; height_ = height; UpdateMatrix(); } void ViewerWindow::SetPixelAspectRatio(const rational &pixel_aspect) { pixel_aspect_ = pixel_aspect; UpdateMatrix(); } void ViewerWindow::keyPressEvent(QKeyEvent *e) { QWidget::keyPressEvent(e); if (e->key() == Qt::Key_Escape) { close(); } } void ViewerWindow::closeEvent(QCloseEvent *e) { QWidget::closeEvent(e); deleteLater(); } void ViewerWindow::UpdateMatrix() { // Set GL widget matrix to maintain this texture's aspect ratio double window_ar = static_cast<double>(this->width()) / static_cast<double>(this->height()); double image_ar = 
static_cast<double>(width_) / static_cast<double>(height_) * pixel_aspect_.toDouble(); QMatrix4x4 mat; if (window_ar > image_ar) { // Window is wider than image, adjust X scale mat.scale(image_ar / window_ar, 1.0f, 1.0f); } else if (window_ar < image_ar) { // Window is taller than image, adjust Y scale mat.scale(1.0f, window_ar / image_ar, 1.0f); } display_widget_->SetMatrixZoom(mat); } }
2,520
C++
.cpp
78
29.641026
106
0.737997
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,824
viewersizer.cpp
olive-editor_olive/app/widget/viewer/viewersizer.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "viewersizer.h" #include <QApplication> #include <QEvent> #include <QMatrix4x4> #include <QWheelEvent> #include "widget/handmovableview/handmovableview.h" namespace olive { ViewerSizer::ViewerSizer(QWidget *parent) : QWidget(parent), widget_(nullptr), width_(0), height_(0), pixel_aspect_(1), zoom_(-1), current_widget_scale_(0) { horiz_scrollbar_ = new QScrollBar(Qt::Horizontal, this); horiz_scrollbar_->setVisible(false); connect(horiz_scrollbar_, &QScrollBar::valueChanged, this, &ViewerSizer::ScrollBarMoved); vert_scrollbar_ = new QScrollBar(Qt::Vertical, this); vert_scrollbar_->setVisible(false); connect(vert_scrollbar_, &QScrollBar::valueChanged, this, &ViewerSizer::ScrollBarMoved); setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding); } void ViewerSizer::SetWidget(QWidget *widget) { // Delete any previous widgets occupying this space delete widget_; widget_ = widget; if (widget_ != nullptr) { widget_->setParent(this); widget_->installEventFilter(this); UpdateSize(); } } QSize ViewerSizer::GetContainerSize() const { double s = GetRealCurrentZoom(); return QSize(std::min(this->width(), int(width_ * s)) - vert_scrollbar_->width(), std::min(int(height_ * s), this->height()) - horiz_scrollbar_->height()); } void ViewerSizer::SetChildSize(int width, int height) { width_ = width; 
height_ = height; UpdateSize(); } void ViewerSizer::SetPixelAspectRatio(const rational &pixel_aspect) { pixel_aspect_ = pixel_aspect; UpdateSize(); } void ViewerSizer::SetZoom(double percent) { zoom_ = percent; UpdateSize(); } void ViewerSizer::SetZoomAnchored(double next_scale, double cursor_x, double cursor_y) { if (next_scale > 0) { double cur_scale = GetRealCurrentZoom(); // Clamp scale within safe values next_scale = std::clamp(next_scale, kZoomLevels[0], kZoomLevels[kZoomLevelCount-1]); int anchor_x = qRound(double(cursor_x + horiz_scrollbar_->value()) / cur_scale * next_scale - cursor_x); int anchor_y = qRound(double(cursor_y + vert_scrollbar_->value()) / cur_scale * next_scale - cursor_y); SetZoom(next_scale); horiz_scrollbar_->setValue(anchor_x); vert_scrollbar_->setValue(anchor_y); } else { SetZoom(-1); horiz_scrollbar_->setValue(0); vert_scrollbar_->setValue(0); } } void ViewerSizer::HandDragMove(int x, int y) { if (horiz_scrollbar_->isVisible()) { horiz_scrollbar_->setValue(horiz_scrollbar_->value() - x); } if (vert_scrollbar_->isVisible()) { vert_scrollbar_->setValue(vert_scrollbar_->value() - y); } } bool ViewerSizer::eventFilter(QObject *watched, QEvent *event) { if (watched == widget_) { if (event->type() == QEvent::Wheel) { QWheelEvent *w = static_cast<QWheelEvent*>(event); if (HandMovableView::WheelEventIsAZoomEvent(w)) { double next_scale = GetRealCurrentZoom() * HandMovableView::GetScrollZoomMultiplier(w); QPointF cursor_pos = w->position(); SetZoomAnchored(next_scale, cursor_pos.x(), cursor_pos.y()); } else { // Pass scroll values to scrollbars QPoint p = w->pixelDelta(); horiz_scrollbar_->setValue(horiz_scrollbar_->value() - p.x()); vert_scrollbar_->setValue(vert_scrollbar_->value() - p.y()); } return true; } } return QWidget::eventFilter(watched, event); } void ViewerSizer::resizeEvent(QResizeEvent *event) { QWidget::resizeEvent(event); UpdateSize(); } void ViewerSizer::UpdateSize() { if (widget_ == nullptr) { return; } // If the aspect ratio 
is 0, default to taking all space if (!width_ || !height_) { widget_->move(0, 0); widget_->resize(width(), height()); return; } // Calculate how much UI space is available (it will be less if we have to show scrollbars) int available_width = width(); int available_height = height(); // Determine if we need scrollbars for the zoom we want horiz_scrollbar_->setVisible(zoom_ > 0 && GetZoomedValue(width_) > available_width); vert_scrollbar_->setVisible(zoom_ > 0 && GetZoomedValue(height_) > available_height); // Horizontal scrollbar will reduce the available height if (horiz_scrollbar_->isVisible()) { available_height -= horiz_scrollbar_->sizeHint().height(); } // Vertical scrollbar will reduce the available width if (vert_scrollbar_->isVisible()) { available_width -= vert_scrollbar_->sizeHint().width(); } // Set correct values on horizontal scrollbar if (horiz_scrollbar_->isVisible()) { horiz_scrollbar_->resize(available_width, horiz_scrollbar_->sizeHint().height()); horiz_scrollbar_->move(0, this->height() - horiz_scrollbar_->height() - 1); horiz_scrollbar_->setMaximum(GetZoomedValue(width_) - available_width); horiz_scrollbar_->setPageStep(available_width); } // Set correct values on vertical scrollbar if (vert_scrollbar_->isVisible()) { vert_scrollbar_->resize(vert_scrollbar_->sizeHint().width(), available_height); vert_scrollbar_->move(this->width() - vert_scrollbar_->width() - 1, 0); vert_scrollbar_->setMaximum(GetZoomedValue(height_) - available_height); vert_scrollbar_->setPageStep(available_height); } // Size widget to the UI space we've calculated widget_->resize(available_width, available_height); // Adjust to aspect ratio double sequence_aspect_ratio = double(width_) / double(height_) * pixel_aspect_.toDouble(); double our_aspect_ratio = double(available_width) / double(available_height); QMatrix4x4 child_matrix; double current_scale; if (our_aspect_ratio > sequence_aspect_ratio) { // This container is wider than the image, scale by height 
child_matrix.scale(sequence_aspect_ratio / our_aspect_ratio, 1.0); current_scale = double(available_height) / double(height_); } else { // This container is taller than the image, scale by width child_matrix.scale(1.0, our_aspect_ratio / sequence_aspect_ratio); current_scale = double(available_width) / double(width_); } current_widget_scale_ = current_scale; if (zoom_ > 0) { // Scale to get to the requested zoom double zoom_diff = zoom_ / current_scale; child_matrix.scale(zoom_diff, zoom_diff, 1.0); } emit RequestScale(child_matrix); ScrollBarMoved(); } int ViewerSizer::GetZoomedValue(int value) { return qRound(value * zoom_); } double ViewerSizer::GetRealCurrentZoom() const { if (zoom_ < 0) { // Currently set to "fit" return current_widget_scale_; } else { // Explicit zoom set return zoom_; } } void ViewerSizer::ScrollBarMoved() { QMatrix4x4 mat; float x_scroll, y_scroll; if (horiz_scrollbar_->isVisible()) { int zoomed_width = GetZoomedValue(width_); x_scroll = (zoomed_width/2 - horiz_scrollbar_->value() - widget_->width() / 2) * (2.0 / zoomed_width); } else { x_scroll = 0; } if (vert_scrollbar_->isVisible()) { int zoomed_height = GetZoomedValue(height_); y_scroll = (zoomed_height/2 - vert_scrollbar_->value() - widget_->height() / 2) * (2.0 / zoomed_height); } else { y_scroll = 0; } // Zero translate is centered, so we need to determine how much "off center" we are mat.translate(x_scroll, y_scroll); emit RequestTranslate(mat); } }
8,008
C++
.cpp
221
32.642534
157
0.704534
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,825
audiowaveformview.cpp
olive-editor_olive/app/widget/viewer/audiowaveformview.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "audiowaveformview.h" #include <QFile> #include <QPainter> #include <QtMath> #include "config/config.h" #include "timeline/timelinecommon.h" namespace olive { #define super SeekableWidget AudioWaveformView::AudioWaveformView(QWidget *parent) : super(parent), playback_(nullptr) { setAutoFillBackground(true); setBackgroundRole(QPalette::Base); setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOff); setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOff); // NOTE: At some point it might make sense for this to be AlignCenter since the waveform // originates from the center. But we're leaving it top/left for now since it was just // ported from a QWidget's paintEvent. 
setAlignment(Qt::AlignLeft | Qt::AlignTop); } void AudioWaveformView::SetViewer(ViewerOutput *playback) { if (playback_) { pool_.clear(); pool_.waitForDone(); disconnect(playback_, &ViewerOutput::ConnectedWaveformChanged, viewport(), static_cast<void(QWidget::*)()>(&QWidget::update)); SetTimebase(0); } playback_ = playback; if (playback_) { connect(playback_, &ViewerOutput::ConnectedWaveformChanged, viewport(), static_cast<void(QWidget::*)()>(&QWidget::update)); SetTimebase(playback_->GetAudioParams().sample_rate_as_time_base()); } } void AudioWaveformView::drawForeground(QPainter *p, const QRectF &rect) { super::drawForeground(p, rect); if (!playback_) { return; } const AudioWaveformCache *wave = playback_->GetConnectedWaveform(); if (!wave) { return; } const AudioParams& params = wave->GetParameters(); if (!params.is_valid()) { return; } // Draw in/out points DrawWorkArea(p); DrawMarkers(p); // Draw waveform p->setPen(QColor(64, 255, 160)); // FIXME: Hardcoded color wave->Draw(p, rect.toRect(), GetScale(), SceneToTime(GetScroll())); // Draw playhead p->setPen(PLAYHEAD_COLOR); int playhead_x = TimeToScene(GetViewerNode()->GetPlayhead()); p->drawLine(playhead_x, 0, playhead_x, height()); } }
2,734
C++
.cpp
75
33.386667
130
0.740122
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,826
viewerdisplay.cpp
olive-editor_olive/app/widget/viewer/viewerdisplay.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "viewerdisplay.h" #include <OpenImageIO/imagebuf.h> #include <QAbstractTextDocumentLayout> #include <QApplication> #include <QFileInfo> #include <QMessageBox> #include <QMouseEvent> #include <QOpenGLContext> #include <QOpenGLFunctions> #include <QOpenGLTexture> #include <QPainter> #include <QPushButton> #include <QScreen> #include <QTextEdit> #include "common/define.h" #include "common/html.h" #include "common/qtutils.h" #include "config/config.h" #include "core.h" #include "node/block/subtitle/subtitle.h" #include "node/gizmo/path.h" #include "node/gizmo/point.h" #include "node/gizmo/polygon.h" #include "node/gizmo/screen.h" #include "window/mainwindow/mainwindow.h" namespace olive { #define super ManagedDisplayWidget ViewerDisplayWidget::ViewerDisplayWidget(QWidget *parent) : super(parent), deinterlace_texture_(nullptr), signal_cursor_color_(false), gizmos_(nullptr), current_gizmo_(nullptr), gizmo_drag_started_(false), show_subtitles_(true), subtitle_tracks_(nullptr), hand_dragging_(false), deinterlace_(false), show_fps_(false), frames_skipped_(0), show_widget_background_(false), playback_speed_(0), push_mode_(kPushNull), add_band_(false), queue_starved_(false), text_edit_(nullptr) { connect(Core::instance(), &Core::ToolChanged, this, &ViewerDisplayWidget::ToolChanged); // Initializes 
cursor based on tool UpdateCursor(); const int kFrameRateAverageCount = 8; frame_rate_averages_.resize(kFrameRateAverageCount); inner_widget()->setAcceptDrops(true); } ViewerDisplayWidget::~ViewerDisplayWidget() { delete text_edit_; MANAGEDDISPLAYWIDGET_DEFAULT_DESTRUCTOR_INNER; } void ViewerDisplayWidget::SetMatrixTranslate(const QMatrix4x4 &mat) { translate_matrix_ = mat; UpdateMatrix(); } void ViewerDisplayWidget::SetMatrixZoom(const QMatrix4x4 &mat) { scale_matrix_ = mat; UpdateMatrix(); } void ViewerDisplayWidget::SetMatrixCrop(const QMatrix4x4 &mat) { crop_matrix_ = mat; update(); } void ViewerDisplayWidget::UpdateCursor() { if (Core::instance()->tool() == Tool::kHand) { this->inner_widget()->setCursor(Qt::OpenHandCursor); } else if (Core::instance()->tool() == Tool::kAdd) { this->inner_widget()->setCursor(Qt::CrossCursor); } else { this->inner_widget()->unsetCursor(); } } void ViewerDisplayWidget::SetSignalCursorColorEnabled(bool e) { signal_cursor_color_ = e; SetInnerMouseTracking(e); } void ViewerDisplayWidget::SetImage(const QVariant &buffer) { load_frame_ = buffer; if (load_frame_.isNull()) { push_mode_ = kPushNull; } else { push_mode_ = kPushFrame; } update(); } void ViewerDisplayWidget::SetBlank() { push_mode_ = kPushBlank; update(); } void ViewerDisplayWidget::ToolChanged() { UpdateCursor(); } void ViewerDisplayWidget::SetDeinterlacing(bool e) { deinterlace_ = e; if (!deinterlace_) { if (!deinterlace_shader_.isNull()) { renderer()->DestroyNativeShader(deinterlace_shader_); deinterlace_shader_.clear(); } deinterlace_texture_ = nullptr; } update(); } const ViewerSafeMarginInfo &ViewerDisplayWidget::GetSafeMargin() const { return safe_margin_; } void ViewerDisplayWidget::SetSafeMargins(const ViewerSafeMarginInfo &safe_margin) { if (safe_margin_ != safe_margin) { safe_margin_ = safe_margin; update(); } } void ViewerDisplayWidget::SetGizmos(Node *node) { if (gizmos_ != node) { gizmos_ = node; update(); } } void ViewerDisplayWidget::SetVideoParams(const 
VideoParams &params) { gizmo_params_ = params; if (gizmos_) { update(); } } void ViewerDisplayWidget::SetAudioParams(const AudioParams &params) { gizmo_audio_params_ = params; if (gizmos_) { update(); } } void ViewerDisplayWidget::SetTime(const rational &time) { time_ = time; if (gizmos_) { update(); } } void ViewerDisplayWidget::SetSubtitleTracks(Sequence *list) { if (subtitle_tracks_) { disconnect(subtitle_tracks_, &Sequence::SubtitlesChanged, this, &ViewerDisplayWidget::SubtitlesChanged); } subtitle_tracks_ = list; if (subtitle_tracks_) { connect(subtitle_tracks_, &Sequence::SubtitlesChanged, this, &ViewerDisplayWidget::SubtitlesChanged); } update(); } QPointF ViewerDisplayWidget::TransformViewerSpaceToBufferSpace(const QPointF &pos) { /* * Inversion will only fail if the viewer has been scaled by 0 in any direction * which I think should never happen. */ return pos * GenerateDisplayTransform().inverted(); } void ViewerDisplayWidget::ResetFPSTimer() { fps_timer_start_ = QDateTime::currentMSecsSinceEpoch(); fps_timer_update_count_ = 0; frames_skipped_ = 0; frame_rate_average_count_ = 0; Core::instance()->ClearStatusBarMessage(); } void ViewerDisplayWidget::IncrementSkippedFrames() { frames_skipped_++; Core::instance()->ShowStatusBarMessage(tr("%n skipped frame(s) detected during playback", nullptr, frames_skipped_), 10000); } bool ViewerDisplayWidget::eventFilter(QObject *o, QEvent *e) { if (o == this->inner_widget()) { switch (e->type()) { case QEvent::MouseButtonPress: { QMouseEvent *mouse = static_cast<QMouseEvent*>(e); if (!(mouse->flags() & Qt::MouseEventCreatedDoubleClick)) { if (OnMousePress(mouse)) { return true; } } break; } case QEvent::MouseMove: EmitColorAtCursor(static_cast<QMouseEvent*>(e)); if (OnMouseMove(static_cast<QMouseEvent*>(e))) { return true; } break; case QEvent::MouseButtonRelease: if (OnMouseRelease(static_cast<QMouseEvent*>(e))) { return true; } break; case QEvent::MouseButtonDblClick: if 
(OnMouseDoubleClick(static_cast<QMouseEvent*>(e))) { return true; } break; case QEvent::ShortcutOverride: case QEvent::KeyPress: if (OnKeyPress(static_cast<QKeyEvent*>(e))) { return true; } break; case QEvent::KeyRelease: if (OnKeyRelease(static_cast<QKeyEvent*>(e))) { return true; } break; case QEvent::DragEnter: { auto drag_enter = static_cast<QDragEnterEvent*>(e); if (text_edit_) { ForwardDragEventToTextEdit(drag_enter); } else { emit DragEntered(drag_enter); } if (drag_enter->isAccepted()) { return true; } break; } case QEvent::DragMove: { auto drag_move = static_cast<QDragMoveEvent*>(e); if (text_edit_) { ForwardDragEventToTextEdit(drag_move); } if (drag_move->isAccepted()) { return true; } break; } case QEvent::DragLeave: { auto drag_leave = static_cast<QDragLeaveEvent*>(e); if (text_edit_) { ForwardDragEventToTextEdit(drag_leave); } else { emit DragLeft(drag_leave); } if (drag_leave->isAccepted()) { return true; } break; } case QEvent::Drop: { auto drop = static_cast<QDropEvent*>(e); if (text_edit_) { ForwardDragEventToTextEdit(drop); } else { emit Dropped(drop); } if (drop->isAccepted()) { return true; } break; } default: break; } } else if (o == text_edit_) { switch (e->type()) { case QEvent::Paint: update(); return true; default: break; } } return super::eventFilter(o, e); } void ViewerDisplayWidget::OnPaint() { // Clear background to empty QColor bg_color = show_widget_background_ ? 
palette().window().color() : Qt::black; renderer()->ClearDestination(nullptr, bg_color.redF(), bg_color.greenF(), bg_color.blueF()); // We only draw if we have a pipeline if (push_mode_ != kPushNull) { // Draw texture through color transform VideoParams device_params = GetViewportParams(); if (push_mode_ == kPushBlank) { DrawBlank(device_params); } else if (color_service()) { if (FramePtr frame = load_frame_.value<FramePtr>()) { // This is a CPU frame, upload it now if (!texture_ || texture_->renderer() != renderer() // Some implementations don't like it if we upload to a texture created in another (albeit shared) context || texture_->width() != frame->width() || texture_->height() != frame->height() || texture_->format() != frame->format() || texture_->channel_count() != frame->channel_count()) { texture_ = renderer()->CreateTexture(frame->video_params(), frame->data(), frame->linesize_pixels()); } else { texture_->Upload(frame->data(), frame->linesize_pixels()); } } else if (TexturePtr texture = load_frame_.value<TexturePtr>()) { // This is a GPU texture, switch to it directly texture_ = texture; } else { texture_ = LoadCustomTextureFromFrame(load_frame_); } emit TextureChanged(texture_); push_mode_ = kPushUnnecessary; TexturePtr texture_to_draw = texture_; if (!texture_to_draw || texture_to_draw->IsDummy()) { DrawBlank(device_params); } else { if (deinterlace_) { if (deinterlace_shader_.isNull()) { deinterlace_shader_ = renderer()->CreateNativeShader(ShaderCode(FileFunctions::ReadFileAsString(QStringLiteral(":/shaders/deinterlace.frag")))); } if (!deinterlace_texture_ || deinterlace_texture_->params() != texture_to_draw->params()) { // (Re)create texture deinterlace_texture_ = renderer()->CreateTexture(texture_to_draw->params()); } ShaderJob job; job.Insert(QStringLiteral("resolution_in"), NodeValue(NodeValue::kVec2, QVector2D(texture_to_draw->width(), texture_to_draw->height()))); job.Insert(QStringLiteral("ove_maintex"), NodeValue(NodeValue::kTexture, 
QVariant::fromValue(texture_to_draw))); renderer()->BlitToTexture(deinterlace_shader_, job, deinterlace_texture_.get()); texture_to_draw = deinterlace_texture_; } ColorTransformJob ctj; ctj.SetColorProcessor(color_service()); ctj.SetInputTexture(texture_to_draw); ctj.SetInputAlphaAssociation(OLIVE_CONFIG("ReassocLinToNonLin").toBool() ? kAlphaAssociated : kAlphaNone); ctj.SetClearDestinationEnabled(false); ctj.SetTransformMatrix(combined_matrix_flipped_); ctj.SetCropMatrix(crop_matrix_); ctj.SetForceOpaque(true); renderer()->BlitColorManaged(ctj, device_params); } } } // Draw gizmos if we have any if (gizmos_) { QPainter p(paint_device()); GenerateGizmoTransforms(); p.setWorldTransform(gizmo_last_draw_transform_); gizmos_->UpdateGizmoPositions(gizmo_db_, NodeGlobals(gizmo_params_, gizmo_audio_params_, gizmo_draw_time_, LoopMode::kLoopModeOff)); foreach (NodeGizmo *gizmo, gizmos_->GetGizmos()) { if (gizmo->IsVisible()) { gizmo->Draw(&p); } } if (text_edit_) { QPixmap pm(text_edit_->width(), text_edit_->height()); pm.fill(Qt::transparent); QPainter pixp(&pm); text_edit_->Paint(&pixp, active_text_gizmo_->GetVerticalAlignment()); p.drawPixmap(text_edit_pos_, pm); } } // Draw action/title safe areas if (safe_margin_.is_enabled()) { QPainter p(paint_device()); p.setWorldTransform(GenerateWorldTransform()); p.setPen(QPen(Qt::lightGray, 0)); p.setBrush(Qt::NoBrush); int x = 0, y = 0, w = width(), h = height(); if (safe_margin_.custom_ratio()) { double widget_ar = static_cast<double>(width()) / static_cast<double>(height()); if (widget_ar > safe_margin_.ratio()) { // Widget is wider than margins w = h * safe_margin_.ratio(); x = width() / 2 - w / 2; } else { h = w / safe_margin_.ratio(); y = height() / 2 - h / 2; } } p.drawRect(w / 20 + x, h / 20 + y, w / 10 * 9, h / 10 * 9); p.drawRect(w / 10 + x, h / 10 + y, w / 10 * 8, h / 10 * 8); int cross = qMin(w, h) / 32; QLine lines[] = {QLine(rect().center().x() - cross, rect().center().y(),rect().center().x() + cross, 
rect().center().y()), QLine(rect().center().x(), rect().center().y() - cross, rect().center().x(), rect().center().y() + cross)}; p.drawLines(lines, 2); } if (show_fps_) { { qint64 now = QDateTime::currentMSecsSinceEpoch(); double frame_rate; if (now == fps_timer_start_) { // This will cause a divide by zero, so we do nothing here frame_rate = 0; } else { frame_rate = double(fps_timer_update_count_) / double((now - fps_timer_start_)/1000.0); } if (frame_rate > 0) { frame_rate_averages_[frame_rate_average_count_%frame_rate_averages_.size()] = frame_rate; frame_rate_average_count_++; } } if (frame_rate_average_count_ >= frame_rate_averages_.size()) { QPainter p(paint_device()); double average = 0.0; for (int i=0; i<frame_rate_averages_.size(); i++) { average += frame_rate_averages_[i]; } average /= double(frame_rate_averages_.size()); DrawTextWithCrudeShadow(&p, GetInnerRect(), tr("%1 FPS").arg(QString::number(average, 'f', 1))); if (frames_skipped_ > 0) { DrawTextWithCrudeShadow(&p, GetInnerRect().adjusted(0, p.fontMetrics().height(), 0, 0), tr("%1 frames skipped").arg(frames_skipped_)); } } } // Extraordinarily basic subtitle renderer. Hoping to swap this out with libass at some point. 
DrawSubtitleTracks(); if (add_band_) { QPainter p(paint_device()); QColor highlight = palette().highlight().color(); p.setPen(highlight); highlight.setAlpha(128); p.setBrush(highlight); p.drawRect(QRect(add_band_start_, add_band_end_).normalized()); } } void ViewerDisplayWidget::OnDestroy() { if (!deinterlace_shader_.isNull()) { renderer()->DestroyNativeShader(deinterlace_shader_); deinterlace_shader_.clear(); } if (!blank_shader_.isNull()) { renderer()->DestroyNativeShader(blank_shader_); blank_shader_.clear(); } super::OnDestroy(); texture_ = nullptr; deinterlace_texture_ = nullptr; if (load_frame_.isNull()) { push_mode_ = kPushNull; } else { push_mode_ = kPushFrame; } } QPointF ViewerDisplayWidget::GetTexturePosition(const QPoint &screen_pos) { return GetTexturePosition(screen_pos.x(), screen_pos.y()); } QPointF ViewerDisplayWidget::GetTexturePosition(const QSize &size) { return GetTexturePosition(size.width(), size.height()); } QPointF ViewerDisplayWidget::GetTexturePosition(const double &x, const double &y) { return QPointF(x / gizmo_params_.width(), y / gizmo_params_.height()); } void ViewerDisplayWidget::DrawTextWithCrudeShadow(QPainter *painter, const QRect &rect, const QString &text, const QTextOption &opt) { painter->setPen(Qt::black); painter->drawText(rect.adjusted(1, 1, 0, 0), text, opt); painter->setPen(Qt::white); painter->drawText(rect, text, opt); } rational ViewerDisplayWidget::GetGizmoTime() { return GetAdjustedTime(GetTimeTarget(), gizmos_, time_, Node::kTransformTowardsInput); } bool ViewerDisplayWidget::IsHandDrag(QMouseEvent *event) const { return event->button() == Qt::MiddleButton || Core::instance()->tool() == Tool::kHand; } void ViewerDisplayWidget::UpdateMatrix() { combined_matrix_ = scale_matrix_ * translate_matrix_; combined_matrix_flipped_.setToIdentity(); combined_matrix_flipped_.scale(1.0, -1.0, 1.0); combined_matrix_flipped_ *= combined_matrix_; update(); } QTransform ViewerDisplayWidget::GenerateWorldTransform() { /* * Get matrix 
elements (roughly) as below in column major order * * | Sx 0 0 Tx | * | 0 Sy 0 Ty | * | 0 0 Sz Tz | * | 0 0 0 1 | */ float *d = combined_matrix_.data(); QTransform world; // Move corner of canvas to correct point world.translate(width() * 0.5 - width() * *(d)*0.5, height() * 0.5 - height() * *(d + 5) * 0.5); // Scale world.scale(*(d), *(d + 5)); // Translate for mouse movement world.translate(*(d + 12) * width() * 0.5 / *(d), *(d + 13) * height() * 0.5 / *(d + 5)); return world; } QTransform ViewerDisplayWidget::GenerateDisplayTransform() { QVector2D viewer_scale(GetTexturePosition(size())); QTransform gizmo_transform = GenerateWorldTransform(); gizmo_transform.scale(viewer_scale.x(), viewer_scale.y()); gizmo_transform.scale(gizmo_params_.pixel_aspect_ratio().flipped().toDouble(), 1); return gizmo_transform; } QTransform ViewerDisplayWidget::GenerateGizmoTransform(NodeTraverser &gt, const TimeRange &range) { QTransform t = GenerateDisplayTransform(); if (GetTimeTarget()) { Node *target = GetTimeTarget(); if (ViewerOutput *v = dynamic_cast<ViewerOutput *>(target)) { if (Node *n = v->GetConnectedTextureOutput()) { target = n; } } QTransform nt; gt.Transform(&nt, gizmos_, target, range); t.translate(gizmo_params_.width()*0.5, gizmo_params_.height()*0.5); t.scale(gizmo_params_.width(), gizmo_params_.height()); t = nt * t; t.scale(1.0 / gizmo_params_.width(), 1.0 / gizmo_params_.height()); t.translate(-gizmo_params_.width()*0.5, -gizmo_params_.height()*0.5); } return t; } NodeGizmo *ViewerDisplayWidget::TryGizmoPress(const NodeValueRow &row, const QPointF &p) { if (!gizmos_) { return nullptr; } for (auto it=gizmos_->GetGizmos().crbegin(); it!=gizmos_->GetGizmos().crend(); it++) { NodeGizmo *gizmo = *it; if (gizmo->IsVisible()) { if (PointGizmo *point = dynamic_cast<PointGizmo*>(gizmo)) { if (point->GetClickingRect(gizmo_last_draw_transform_).contains(p)) { return point; } } else if (PolygonGizmo *poly = dynamic_cast<PolygonGizmo*>(gizmo)) { if 
(poly->GetPolygon().containsPoint(p, Qt::OddEvenFill)) { return poly; } } else if (PathGizmo *path = dynamic_cast<PathGizmo*>(gizmo)) { if (path->GetPath().contains(p)) { return path; } } else if (ScreenGizmo *screen = dynamic_cast<ScreenGizmo*>(gizmo)) { // NOTE: Perhaps this should limit to the actual visible screen space? We'll see. return screen; } } } return nullptr; } void ViewerDisplayWidget::OpenTextGizmo(TextGizmo *text, QMouseEvent *event) { GenerateGizmoTransforms(); gizmos_->UpdateGizmoPositions(gizmo_db_, NodeGlobals(gizmo_params_, gizmo_audio_params_, gizmo_draw_time_, LoopMode::kLoopModeOff)); active_text_gizmo_ = text; connect(active_text_gizmo_, &TextGizmo::RectChanged, this, &ViewerDisplayWidget::UpdateActiveTextGizmoSize); text_transform_ = GenerateGizmoTransform(); text_transform_inverted_ = text_transform_.inverted(); // Create text editor text_edit_ = new ViewerTextEditor(text_transform_.m11(), this); // Set text editor's gizmo property for later use text_edit_->setProperty("gizmo", reinterpret_cast<quintptr>(text)); // Install ourselves as event filter so we can receive the text editor's paint events text_edit_->installEventFilter(this); // Disable focus on text editor text_edit_->setFocusPolicy(Qt::NoFocus); // Disable mouse events on text editor text_edit_->setAttribute(Qt::WA_TransparentForMouseEvents); // "Show" text editor so that it throws paint events, even though its paint event is disabled text_edit_->show(); // Convert HTML to Qt document Html::HtmlToDoc(text_edit_->document(), text->GetHtml()); // Connect text change event to propagate back to node connect(text_edit_, &ViewerTextEditor::textChanged, this, &ViewerDisplayWidget::TextEditChanged); // Connect destroyed signal to cleanup after destruction connect(text_edit_, &ViewerTextEditor::destroyed, this, &ViewerDisplayWidget::TextEditDestroyed); // Set text editor's size to logical size QRectF text_rect = UpdateActiveTextGizmoSize(); // Emit text gizmo activation signal emit 
text->Activated(); // Create toolbar text_toolbar_ = new ViewerTextEditorToolBar(text_edit_); text_toolbar_->setWindowFlags(Qt::Tool | Qt::FramelessWindowHint); connect(text_toolbar_, &ViewerTextEditorToolBar::VerticalAlignmentChanged, text, &TextGizmo::SetVerticalAlignment); connect(text, &TextGizmo::VerticalAlignmentChanged, text_toolbar_, &ViewerTextEditorToolBar::SetVerticalAlignment); text_toolbar_->SetVerticalAlignment(text->GetVerticalAlignment()); text_edit_->ConnectToolBar(text_toolbar_); QPoint toolbar_pos = mapToGlobal(text_transform_.map(text_edit_pos_).toPoint()); if (QScreen *screen = qApp->screenAt(toolbar_pos)) { // Determine whether to anchor to the top of the rect of the bottom if (toolbar_pos.y() - text_toolbar_->height() >= screen->geometry().top()) { toolbar_pos.setY(toolbar_pos.y() - text_toolbar_->height()); } else { toolbar_pos.setY(toolbar_pos.y() + text_transform_.map(text_rect).boundingRect().height()); } // Clamp X if (toolbar_pos.x() + text_toolbar_->width() > screen->geometry().right()) { toolbar_pos.setX(screen->geometry().right() - text_toolbar_->width()); } // Clamp Y if (toolbar_pos.y() + text_toolbar_->height() > screen->geometry().bottom()) { toolbar_pos.setY(screen->geometry().bottom() - text_toolbar_->height()); } } else { // Fallback toolbar_pos.setY(toolbar_pos.y() - text_toolbar_->height()); } text_toolbar_->move(toolbar_pos); text_toolbar_->show(); // Allow widget to take keyboard focus inner_widget()->setFocusPolicy(Qt::StrongFocus); inner_widget()->setMouseTracking(true); connect(qApp, &QApplication::focusChanged, this, &ViewerDisplayWidget::FocusChanged); // Start text cursor where the user clicked if (event) { QPoint click_pos = text_transform_inverted_.map(event->pos()) - text_edit_pos_.toPoint(); text_edit_->setTextCursor(text_edit_->cursorForPosition(click_pos)); } // Grab focus back from the toolbar connect(text_toolbar_, &ViewerTextEditorToolBar::FirstPaint, this, [this]{ 
Core::instance()->main_window()->activateWindow(); inner_widget()->setFocus(); }); } bool ViewerDisplayWidget::OnMousePress(QMouseEvent *event) { if (IsHandDrag(event)) { // Handle hand drag hand_last_drag_pos_ = event->pos(); hand_dragging_ = true; emit HandDragStarted(); inner_widget()->setCursor(Qt::ClosedHandCursor); return true; } else if (text_edit_ && ForwardMouseEventToTextEdit(event, true)) { return true; } else if (event->button() == Qt::LeftButton) { if (Core::instance()->tool() == Tool::kAdd && (Core::instance()->GetSelectedAddableObject() == Tool::kAddableShape || Core::instance()->GetSelectedAddableObject() == Tool::kAddableTitle)) { add_band_start_ = event->pos(); add_band_end_ = add_band_start_; add_band_ = true; } else if ((current_gizmo_ = TryGizmoPress(gizmo_db_, gizmo_last_draw_transform_inverted_.map(event->pos())))) { // Handle gizmo click gizmo_start_drag_ = event->pos(); gizmo_last_drag_ = gizmo_start_drag_; current_gizmo_->SetGlobals(NodeGlobals(gizmo_params_, gizmo_audio_params_, GenerateGizmoTime(), LoopMode::kLoopModeOff)); } else { // Handle standard drag emit DragStarted(event->pos()); } return true; } return false; } bool ViewerDisplayWidget::OnMouseMove(QMouseEvent *event) { // Handle hand dragging if (hand_dragging_) { // Emit movement emit HandDragMoved(event->x() - hand_last_drag_pos_.x(), event->y() - hand_last_drag_pos_.y()); hand_last_drag_pos_ = event->pos(); return true; } else if (text_edit_ && ForwardMouseEventToTextEdit(event)) { return true; } else if (add_band_) { add_band_end_ = event->pos(); update(); return true; } else if (current_gizmo_) { // Signal movement if (DraggableGizmo *draggable = dynamic_cast<DraggableGizmo*>(current_gizmo_)) { if (!gizmo_drag_started_) { QPointF start = ScreenToScenePoint(gizmo_start_drag_); rational gizmo_time = GetGizmoTime(); NodeTraverser t; t.SetCacheVideoParams(gizmo_params_); t.SetCacheAudioParams(gizmo_audio_params_); NodeValueRow row = t.GenerateRow(gizmos_, TimeRange(gizmo_time, 
gizmo_time + gizmo_params_.frame_rate_as_time_base())); draggable->DragStart(row, start.x(), start.y(), gizmo_time); gizmo_drag_started_ = true; } QPointF v = ScreenToScenePoint(event->pos()); switch (draggable->GetDragValueBehavior()) { case DraggableGizmo::kAbsolute: // Above value is correct break; case DraggableGizmo::kDeltaFromPrevious: v -= ScreenToScenePoint(gizmo_last_drag_); gizmo_last_drag_ = event->pos(); break; case DraggableGizmo::kDeltaFromStart: v -= ScreenToScenePoint(gizmo_start_drag_); break; } draggable->DragMove(v.x(), v.y(), event->modifiers()); return true; } } return false; } bool ViewerDisplayWidget::OnMouseRelease(QMouseEvent *e) { if (hand_dragging_) { // Handle hand drag emit HandDragEnded(); hand_dragging_ = false; UpdateCursor(); return true; } else if (text_edit_ && ForwardMouseEventToTextEdit(e)) { return true; } else if (add_band_) { QRect band_rect = QRect(add_band_start_, add_band_end_).normalized(); if (band_rect.width() > 1 && band_rect.height() > 1) { QRectF r = GenerateDisplayTransform().inverted().mapRect(band_rect); emit CreateAddableAt(r); } add_band_ = false; return true; } else if (current_gizmo_) { // Handle gizmo if (gizmo_drag_started_) { MultiUndoCommand *command = new MultiUndoCommand(); if (DraggableGizmo *draggable = dynamic_cast<DraggableGizmo*>(current_gizmo_)) { draggable->DragEnd(command); } Core::instance()->undo_stack()->push(command, tr("Dragged Gizmo")); gizmo_drag_started_ = false; } current_gizmo_ = nullptr; return true; } return false; } bool ViewerDisplayWidget::OnMouseDoubleClick(QMouseEvent *event) { if (text_edit_ && ForwardMouseEventToTextEdit(event)) { return true; } else if (event->button() == Qt::LeftButton && gizmos_) { QPointF ptr = TransformViewerSpaceToBufferSpace(event->pos()); foreach (NodeGizmo *g, gizmos_->GetGizmos()) { if (TextGizmo *text = dynamic_cast<TextGizmo*>(g)) { if (text->GetRect().contains(ptr)) { OpenTextGizmo(text, event); return true; } } } } return false; } bool 
ViewerDisplayWidget::OnKeyPress(QKeyEvent *e) { if (text_edit_) { if (e->key() == Qt::Key_Escape) { CloseTextEditor(); return true; } else { return ForwardEventToTextEdit(e); } } return false; } bool ViewerDisplayWidget::OnKeyRelease(QKeyEvent *e) { if (text_edit_) { return ForwardEventToTextEdit(e); } return false; } void ViewerDisplayWidget::EmitColorAtCursor(QMouseEvent *e) { // Do this no matter what, emits signal to any pixel samplers if (signal_cursor_color_) { Color reference, display; if (texture_) { QPointF pixel_pos = GenerateDisplayTransform().inverted().map(e->pos()); pixel_pos /= texture_->params().divider(); makeCurrent(); reference = renderer()->GetPixelFromTexture(texture_.get(), pixel_pos); display = color_service()->ConvertColor(reference); } emit CursorColor(reference, display); } } void ViewerDisplayWidget::DrawSubtitleTracks() { if (!show_subtitles_ || !subtitle_tracks_) { return; } const QVector<Track*> &subtitle_tracklist = subtitle_tracks_->track_list(Track::kSubtitle)->GetTracks(); if (subtitle_tracklist.empty()) { return; } // Scale font size by transform QTransform display_transform = GenerateDisplayTransform(); qreal font_sz = OLIVE_CONFIG("DefaultSubtitleSize").toInt(); font_sz *= display_transform.m11(); if (qIsNaN(font_sz)) { return; } QPainterPath path; QTransform transform = GenerateWorldTransform(); QRect bounding_box = transform.mapRect(rect()); QFont f; f.setPointSizeF(font_sz); QString family = OLIVE_CONFIG("DefaultSubtitleFamily").toString(); if (!family.isEmpty()) { f.setFamily(family); } f.setWeight(static_cast<QFont::Weight>(OLIVE_CONFIG("DefaultSubtitleWeight").toInt())); bounding_box.adjust(bounding_box.width()/10, bounding_box.height()/10, -bounding_box.width()/10, -bounding_box.height()/10); QFontMetrics fm(f); for (int j=subtitle_tracklist.size()-1; j>=0; j--) { Track *sub_track = subtitle_tracklist.at(j); if (!sub_track->IsMuted()) { if (SubtitleBlock *sub = 
dynamic_cast<SubtitleBlock*>(sub_track->VisibleBlockAtTime(time_))) { // Split into lines QStringList list = QtUtils::WordWrapString(sub->GetText(), fm, bounding_box.width()); for (int i=list.size()-1; i>=0; i--) { int w = QtUtils::QFontMetricsWidth(fm, list.at(i)); path.addText(bounding_box.width()/2 - w/2, bounding_box.height() - fm.height() * (list.size() - i) + fm.ascent(), f, list.at(i)); } } } } bool antialias = OLIVE_CONFIG("AntialiasSubtitles").toBool(); QPixmap *aa_pixmap; QPainter *text_painter; if (antialias) { // QPainter only supports anti-aliasing in software, so to achieve it, we draw to a // software buffer first and then draw that onto the hardware aa_pixmap = new QPixmap(bounding_box.width(), bounding_box.height()); aa_pixmap->fill(Qt::transparent); text_painter = new QPainter(aa_pixmap); } else { // Just draw straight to the hardware text_painter = new QPainter(paint_device()); // Offset path by however much is necessary path.translate(bounding_box.x(), bounding_box.y()); } text_painter->setPen(QPen(Qt::black, f.pointSizeF() / 16)); text_painter->setBrush(Qt::white); text_painter->setRenderHint(QPainter::Antialiasing); text_painter->drawPath(path); delete text_painter; if (antialias) { // We just drew to a software buffer, now draw this image onto the hardware device QPainter p(paint_device()); p.drawPixmap(bounding_box.x(), bounding_box.y(), *aa_pixmap); delete aa_pixmap; } } template <typename T> void ViewerDisplayWidget::ForwardDragEventToTextEdit(T *e) { // HACK: Absolutely filthy hack. We need to be able to transform the mouse coordinates for our // proxied QTextEdit, however unlike QMouseEvents, Qt's drag events don't allow modifying // the position after construction. Unhelpfully, Qt also explicitly forbids users creating // their own drag events because they "rely on Qt's internal state". 
So in order to forward // drag events, we defy this by creating our own events, but DON'T process them through Qt's // event queue and instead just send them directly to the widget (requiring its protected // drag events to be made public). That way Qt stays happy, because as far as it's // concerned it's only interfacing with this widget, and the QTextEdit gets to receive // transformed events. It's a terrible hack, but seems to work. if constexpr (std::is_same_v<T, QDragLeaveEvent>) { text_edit_->dragLeaveEvent(e); } else { T relay(AdjustPosByVAlign(GetVirtualPosForTextEdit(e->pos())).toPoint(), e->possibleActions(), e->mimeData(), e->mouseButtons(), e->keyboardModifiers()); if (e->type() == QEvent::DragEnter) { text_edit_->dragEnterEvent(static_cast<QDragEnterEvent*>(&relay)); } else if (e->type() == QEvent::DragMove) { text_edit_->dragMoveEvent(static_cast<QDragMoveEvent*>(&relay)); } else if (e->type() == QEvent::Drop) { text_edit_->dropEvent(&relay); } if (relay.isAccepted()) { e->accept(); } } } bool ViewerDisplayWidget::ForwardMouseEventToTextEdit(QMouseEvent *event, bool check_if_outside) { if (current_gizmo_) { return false; } // Transform screen mouse coords to world mouse coords QPointF local_pos = GetVirtualPosForTextEdit(event->pos()); if (event->type() == QEvent::MouseMove && event->buttons() == Qt::NoButton) { QPointF mapped = text_transform_inverted_.map(event->pos()) - text_edit_pos_; if (mapped.x() >= 0 && mapped.y() >= 0 && mapped.x() < text_edit_->width() && mapped.y() < text_edit_->height()) { inner_widget()->setCursor(Qt::IBeamCursor); } else { inner_widget()->unsetCursor(); } } if (check_if_outside) { if (local_pos.x() < 0 || local_pos.x() >= text_edit_->width() || local_pos.y() < 0 || local_pos.y() >= text_edit_->height()) { // Allow clicking other gizmos so the user can resize while the text editor is active if ((current_gizmo_ = TryGizmoPress(gizmo_db_, gizmo_last_draw_transform_inverted_.map(event->pos())))) { return false; } else { 
CloseTextEditor(); return true; } } } local_pos = AdjustPosByVAlign(local_pos); QMouseEvent derived(event->type(), local_pos, event->windowPos(), event->screenPos(), event->button(), event->buttons(), event->modifiers(), event->source()); return ForwardEventToTextEdit(&derived); } bool ViewerDisplayWidget::ForwardEventToTextEdit(QEvent *event) { qApp->sendEvent(text_edit_->viewport(), event); bool e = event->isAccepted(); if (e) { update(); } return e; } QPointF ViewerDisplayWidget::AdjustPosByVAlign(QPointF p) { switch (active_text_gizmo_->GetVerticalAlignment()) { case Qt::AlignTop: // Do nothing break; case Qt::AlignVCenter: p.setY(p.y() - text_edit_->height()/2 + text_edit_->document()->size().height()/2); break; case Qt::AlignBottom: p.setY(p.y() - text_edit_->height() + text_edit_->document()->size().height()); break; } return p; } void ViewerDisplayWidget::CloseTextEditor() { text_edit_->deleteLater(); text_edit_ = nullptr; disconnect(active_text_gizmo_, &TextGizmo::RectChanged, this, &ViewerDisplayWidget::UpdateActiveTextGizmoSize); active_text_gizmo_ = nullptr; } void ViewerDisplayWidget::GenerateGizmoTransforms() { NodeTraverser gt; gt.SetCacheVideoParams(gizmo_params_); gt.SetCacheAudioParams(gizmo_audio_params_); gizmo_draw_time_ = GenerateGizmoTime(); if (gizmos_) { gizmo_db_ = gt.GenerateRow(gizmos_, gizmo_draw_time_); } gizmo_last_draw_transform_ = GenerateGizmoTransform(gt, gizmo_draw_time_); gizmo_last_draw_transform_inverted_ = gizmo_last_draw_transform_.inverted(); } void ViewerDisplayWidget::DrawBlank(const VideoParams &device_params) { if (blank_shader_.isNull()) { blank_shader_ = renderer()->CreateNativeShader(ShaderCode()); } ShaderJob job; job.Insert(QStringLiteral("ove_mvpmat"), NodeValue(NodeValue::kMatrix, combined_matrix_flipped_)); job.Insert(QStringLiteral("ove_cropmatrix"), NodeValue(NodeValue::kMatrix, crop_matrix_)); renderer()->Blit(blank_shader_, job, device_params, false); } void ViewerDisplayWidget::SetShowFPS(bool e) { 
show_fps_ = e; update(); } void ViewerDisplayWidget::RequestStartEditingText() { if (gizmos_) { foreach (NodeGizmo *gizmo, gizmos_->GetGizmos()) { if (TextGizmo *text = dynamic_cast<TextGizmo*>(gizmo)) { OpenTextGizmo(text); break; } } } } void ViewerDisplayWidget::Play(const int64_t &start_timestamp, const int &playback_speed, const rational &timebase, bool start_updating) { playback_timebase_ = timebase; playback_speed_ = playback_speed; timer_.Start(start_timestamp, playback_speed, timebase.toDouble()); if (start_updating) { connect(this, &ViewerDisplayWidget::frameSwapped, this, &ViewerDisplayWidget::UpdateFromQueue); update(); } } void ViewerDisplayWidget::Pause() { disconnect(this, &ViewerDisplayWidget::frameSwapped, this, &ViewerDisplayWidget::UpdateFromQueue); queue_.clear(); queue_starved_ = false; } QPointF ViewerDisplayWidget::ScreenToScenePoint(const QPoint &p) { if (gizmo_last_draw_transform_.isIdentity()) { GenerateGizmoTransforms(); } return p * gizmo_last_draw_transform_inverted_; } void ViewerDisplayWidget::UpdateFromQueue() { int64_t t = timer_.GetTimestampNow(); rational time = Timecode::timestamp_to_time(t, playback_timebase_); bool popped = false; if (queue_.empty()) { queue_starved_ = true; emit QueueStarved(); } else { while (!queue_.empty()) { const ViewerPlaybackFrame& pf = queue_.front(); if (pf.timestamp == time) { // Frame was in queue, no need to decode anything SetImage(pf.frame); if (queue_starved_) { queue_starved_ = false; emit QueueNoLongerStarved(); } return; } else if ((pf.timestamp > time) == (playback_speed_ > 0)) { // The next frame in the queue is too new, so just do a regular update. Either the // frame we want will arrive in time, or we'll just have to skip it. 
break; } else { queue_.pop_front(); if (popped) { // We've already popped a frame in this loop, meaning a frame has been skipped IncrementSkippedFrames(); } else { // Shown a frame and progressed to the next one IncrementFrameCount(); popped = true; } if (queue_.empty()) { queue_starved_ = true; emit QueueStarved(); break; } } } } update(); } void ViewerDisplayWidget::TextEditChanged() { ViewerTextEditor *editor = static_cast<ViewerTextEditor *>(sender()); TextGizmo *gizmo = reinterpret_cast<TextGizmo*>(editor->property("gizmo").value<quintptr>()); QString html = Html::DocToHtml(editor->document()); gizmo->UpdateInputHtml(html, GetGizmoTime()); } void ViewerDisplayWidget::TextEditDestroyed() { TextGizmo *gizmo = reinterpret_cast<TextGizmo*>(sender()->property("gizmo").value<quintptr>()); emit gizmo->Deactivated(); text_edit_ = nullptr; text_toolbar_ = nullptr; inner_widget()->setMouseTracking(false); inner_widget()->setFocusPolicy(Qt::NoFocus); UpdateCursor(); disconnect(qApp, &QApplication::focusChanged, this, &ViewerDisplayWidget::FocusChanged); } void ViewerDisplayWidget::SubtitlesChanged(const TimeRange &r) { if (time_ >= r.in() && time_ < r.out()) { update(); } } void ViewerDisplayWidget::FocusChanged(QWidget *old, QWidget *now) { if (!now) { // Ignore this return; } bool unfocused = true; while (now) { if (now == text_toolbar_ || now == this) { unfocused = false; break; } else { now = now->parentWidget(); } } if (unfocused) { CloseTextEditor(); } } QRectF ViewerDisplayWidget::UpdateActiveTextGizmoSize() { QRectF text_rect = active_text_gizmo_->GetRect(); text_edit_pos_ = text_rect.topLeft(); text_edit_->setGeometry(text_rect.toRect()); return text_rect; } }
39,983
C++
.cpp
1,139
30.352941
160
0.671515
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,827
viewer.cpp
olive-editor_olive/app/widget/viewer/viewer.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "viewer.h" #include <QDateTime> #include <QFontDialog> #include <QGuiApplication> #include <QInputDialog> #include <QLabel> #include <QMessageBox> #include <QResizeEvent> #include <QScreen> #include <QtMath> #include <QVBoxLayout> #include "audio/audiomanager.h" #include "common/ratiodialog.h" #include "config/config.h" #include "core.h" #include "node/block/gap/gap.h" #include "node/generator/shape/shapenodebase.h" #include "node/nodeundo.h" #include "node/project.h" #include "panel/multicam/multicampanel.h" #include "panel/panelmanager.h" #include "render/rendermanager.h" #include "viewerpreventsleep.h" #include "widget/audiomonitor/audiomonitor.h" #include "widget/menu/menu.h" #include "widget/multicam/multicamdisplay.h" #include "widget/timelinewidget/tool/add.h" #include "widget/timeruler/timeruler.h" namespace olive { #define super TimeBasedWidget QVector<ViewerWidget*> ViewerWidget::instances_; // NOTE: Hardcoded interval of size of audio chunk to render and send to the output at a time. // We want this to be as long as possible so the code has plenty of time to send the audio // while also being as short as possible so users get relatively immediate feedback when // changing values. 1/4 second seems to be a good middleground. 
const rational ViewerWidget::kAudioPlaybackInterval = rational(1, 4);
const rational kVideoPlaybackInterval = rational(1, 2);

// Constructs the viewer: display surface + sizer, optional waveform view, time ruler,
// scrollbar, and transport controls, and wires all of their signals together.
ViewerWidget::ViewerWidget(ViewerDisplayWidget *display, QWidget *parent) :
  super(false, true, parent),
  playback_speed_(0),
  color_menu_enabled_(true),
  time_changed_from_timer_(false),
  prequeuing_video_(false),
  prequeuing_audio_(0),
  record_armed_(false),
  recording_(false),
  first_requeue_watcher_(nullptr),
  enable_audio_scrubbing_(true),
  waveform_mode_(kWFAutomatic),
  ignore_scrub_(0),
  multicam_panel_(nullptr)
{
  // Set up main layout
  QVBoxLayout* layout = new QVBoxLayout(this);
  layout->setContentsMargins(0, 0, 0, 0);

  // Create main OpenGL-based view and sizer
  sizer_ = new ViewerSizer();
  layout->addWidget(sizer_);

  display_widget_ = display;
  display_widget_->SetShowWidgetBackground(true);
  playback_devices_.append(display_widget_);
  connect(display_widget_, &ViewerDisplayWidget::customContextMenuRequested, this, &ViewerWidget::ShowContextMenu);
  connect(display_widget_, &ViewerDisplayWidget::CursorColor, this, &ViewerWidget::CursorColor);
  connect(display_widget_, &ViewerDisplayWidget::ColorProcessorChanged, this, &ViewerWidget::ColorProcessorChanged);
  connect(display_widget_, &ViewerDisplayWidget::ColorManagerChanged, this, &ViewerWidget::ColorManagerChanged);
  connect(display_widget_, &ViewerDisplayWidget::DragEntered, this, &ViewerWidget::DragEntered);
  connect(display_widget_, &ViewerDisplayWidget::Dropped, this, &ViewerWidget::Dropped);
  connect(display_widget_, &ViewerDisplayWidget::TextureChanged, this, &ViewerWidget::TextureChanged);
  connect(display_widget_, &ViewerDisplayWidget::QueueStarved, this, &ViewerWidget::QueueStarved);
  connect(display_widget_, &ViewerDisplayWidget::QueueNoLongerStarved, this, &ViewerWidget::QueueNoLongerStarved);
  connect(display_widget_, &ViewerDisplayWidget::CreateAddableAt, this, &ViewerWidget::CreateAddableAt);
  connect(sizer_, &ViewerSizer::RequestScale, display_widget_, &ViewerDisplayWidget::SetMatrixZoom);
  connect(sizer_, &ViewerSizer::RequestTranslate, display_widget_, &ViewerDisplayWidget::SetMatrixTranslate);
  connect(display_widget_, &ViewerDisplayWidget::HandDragMoved, sizer_, &ViewerSizer::HandDragMove);
  sizer_->SetWidget(display_widget_);

  // Make the display widget the first tabbable widget. While the viewer display cannot actually
  // be interacted with by tabbing, it prevents the actual first tabbable widget (the playhead
  // slider in `controls_`) from getting auto-focused any time the panel is maximized (with `)
  display_widget_->setFocusPolicy(Qt::TabFocus);

  // Create waveform view when audio is connected and video isn't
  waveform_view_ = new AudioWaveformView();
  ConnectTimelineView(waveform_view_);
  layout->addWidget(waveform_view_);

  // Create time ruler
  layout->addWidget(ruler());

  // Create scrollbar
  layout->addWidget(scrollbar());

  // Create lower controls
  controls_ = new PlaybackControls();
  controls_->SetTimecodeEnabled(true);
  controls_->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Maximum);
  // Play is overloaded, so pick the parameterless slot explicitly
  connect(controls_, &PlaybackControls::PlayClicked, this, static_cast<void(ViewerWidget::*)()>(&ViewerWidget::Play));
  connect(controls_, &PlaybackControls::PauseClicked, this, &ViewerWidget::Pause);
  connect(controls_, &PlaybackControls::PrevFrameClicked, this, &ViewerWidget::PrevFrame);
  connect(controls_, &PlaybackControls::NextFrameClicked, this, &ViewerWidget::NextFrame);
  connect(controls_, &PlaybackControls::BeginClicked, this, &ViewerWidget::GoToStart);
  connect(controls_, &PlaybackControls::EndClicked, this, &ViewerWidget::GoToEnd);
  layout->addWidget(controls_);

  // FIXME: Magic number
  SetScale(48.0);

  // Show the viewer context menu from the waveform view too
  connect(waveform_view_, &AudioWaveformView::customContextMenuRequested, this, &ViewerWidget::ShowContextMenu);

  connect(&playback_backup_timer_, &QTimer::timeout, this, &ViewerWidget::PlaybackTimerUpdate);

  SetAutoMaxScrollBar(true);

  instances_.append(this);

  UpdateWaveformViewFromMode();

  connect(Core::instance(), &Core::ColorPickerEnabled, this, &ViewerWidget::SetSignalCursorColorEnabled);
  connect(this, &ViewerWidget::CursorColor, Core::instance(), &Core::ColorPickerColorEmitted);

  connect(AudioManager::instance(), &AudioManager::OutputParamsChanged, this, &ViewerWidget::UpdateAudioProcessor);
}

ViewerWidget::~ViewerWidget()
{
  instances_.removeOne(this);

  // Close any fullscreen windows this viewer spawned; iterate a copy because deleting
  // a window mutates windows_ (via WindowAboutToClose)
  auto windows = windows_;
  foreach (ViewerWindow* window, windows) {
    delete window;
  }

  delete display_widget_;
  display_widget_ = nullptr;
}

// Reacts to playhead movement: pauses on manual seeks, updates the transport controls,
// refreshes the displayed frame/scrub audio while paused, and forwards the time to the cacher.
void ViewerWidget::TimeChangedEvent(const rational &time)
{
  if (!time_changed_from_timer_) {
    PauseInternal();
  }

  if (record_armed_) {
    DisarmRecording();
  }

  controls_->SetTime(time);

  if (GetConnectedNode() && last_time_ != time) {
    if (!IsPlaying()) {
      UpdateTextureFromNode();

      PushScrubbedAudio();

      // We don't clear the FPS timer on pause in case users want to see it immediately after, but by
      // the time a new texture is drawn, assume that the FPS no longer needs to be shown.
      display_widget_->ResetFPSTimer();
    }

    display_widget_->SetTime(time);
  }

  // Send time to auto-cacher
  RenderManager::instance()->GetCacher()->SetPlayhead(time);

  last_time_ = time;
}

// Wires up a newly connected viewer node: parameter/length/interlacing signals, resolution,
// color management and the waveform view. Mirrored by DisconnectNodeEvent().
void ViewerWidget::ConnectNodeEvent(ViewerOutput *n)
{
  connect(n, &ViewerOutput::SizeChanged, this, &ViewerWidget::SetViewerResolution);
  connect(n, &ViewerOutput::PixelAspectChanged, this, &ViewerWidget::SetViewerPixelAspect);
  connect(n, &ViewerOutput::LengthChanged, this, &ViewerWidget::LengthChangedSlot);
  connect(n, &ViewerOutput::InterlacingChanged, this, &ViewerWidget::InterlacingChangedSlot);
  connect(n, &ViewerOutput::VideoParamsChanged, this, &ViewerWidget::UpdateRendererVideoParameters);
  connect(n, &ViewerOutput::VideoParamsChanged, this, &ViewerWidget::UpdateTextureFromNode, Qt::QueuedConnection);
  connect(n, &ViewerOutput::AudioParamsChanged, this, &ViewerWidget::UpdateRendererAudioParameters);
  connect(n->video_frame_cache(), &FrameHashCache::Invalidated, this, &ViewerWidget::ViewerInvalidatedVideoRange);
  connect(n,
&ViewerOutput::TextureInputChanged, this, &ViewerWidget::UpdateWaveformViewFromMode);
  connect(controls_, &PlaybackControls::TimeChanged, n, &ViewerOutput::SetPlayhead);

  VideoParams vp = n->GetVideoParams();

  InterlacingChangedSlot(vp.interlacing());

  ruler()->SetPlaybackCache(n->video_frame_cache());

  SetViewerResolution(vp.width(), vp.height());
  SetViewerPixelAspect(vp.pixel_aspect_ratio());
  last_length_ = 0;
  LengthChangedSlot(n->GetLength());

  UpdateAudioProcessor();

  // Share the project's color manager with every display surface (incl. fullscreen windows)
  ColorManager* color_manager = n->project()->color_manager();
  foreach (ViewerDisplayWidget *dw, playback_devices_) {
    dw->ConnectColorManager(color_manager);
  }

  UpdateWaveformViewFromMode();
  waveform_view_->SetViewer(GetConnectedNode());

  UpdateRendererVideoParameters();
  UpdateRendererAudioParameters();

  // Set texture to new texture (or null if no viewer node is available)
  UpdateTextureFromNode();
}

// Tears down everything ConnectNodeEvent() set up, leaving the viewer blank and disabled.
void ViewerWidget::DisconnectNodeEvent(ViewerOutput *n)
{
  PauseInternal();

  disconnect(n, &ViewerOutput::SizeChanged, this, &ViewerWidget::SetViewerResolution);
  disconnect(n, &ViewerOutput::PixelAspectChanged, this, &ViewerWidget::SetViewerPixelAspect);
  disconnect(n, &ViewerOutput::LengthChanged, this, &ViewerWidget::LengthChangedSlot);
  disconnect(n, &ViewerOutput::InterlacingChanged, this, &ViewerWidget::InterlacingChangedSlot);
  disconnect(n, &ViewerOutput::VideoParamsChanged, this, &ViewerWidget::UpdateRendererVideoParameters);
  disconnect(n, &ViewerOutput::VideoParamsChanged, this, &ViewerWidget::UpdateTextureFromNode);
  disconnect(n, &ViewerOutput::AudioParamsChanged, this, &ViewerWidget::UpdateRendererAudioParameters);
  disconnect(n->video_frame_cache(), &FrameHashCache::Invalidated, this, &ViewerWidget::ViewerInvalidatedVideoRange);
  disconnect(n, &ViewerOutput::TextureInputChanged, this, &ViewerWidget::UpdateWaveformViewFromMode);
  disconnect(controls_, &PlaybackControls::TimeChanged, n, &ViewerOutput::SetPlayhead);

  timeline_selected_blocks_.clear();
  node_view_selected_.clear();

  if (multicam_panel_) {
    multicam_panel_->SetMulticamNode(nullptr, nullptr, nullptr, rational::NaN);
  }

  CloseAudioProcessor();

  audio_scrub_watchers_.clear();

  SetDisplayImage(nullptr);

  ruler()->SetPlaybackCache(nullptr);

  // Effectively disables the viewer and clears the state
  SetViewerResolution(0, 0);

  foreach (ViewerDisplayWidget *dw, playback_devices_) {
    dw->DisconnectColorManager();
  }

  waveform_view_->SetViewer(nullptr);

  // Queue an UpdateStack so that when it runs, the viewer node will be fully disconnected
  QMetaObject::invokeMethod(this, &ViewerWidget::UpdateWaveformViewFromMode, Qt::QueuedConnection);

  SetGizmos(nullptr);
}

void ViewerWidget::ConnectedNodeChangeEvent(ViewerOutput *n)
{
  // Subtitle tracks only exist when the connected node is a Sequence
  display_widget_->SetSubtitleTracks(dynamic_cast<Sequence*>(n));
}

void ViewerWidget::ConnectedWorkAreaChangeEvent(TimelineWorkArea *workarea)
{
  waveform_view_->SetWorkArea(workarea);
}

void ViewerWidget::ConnectedMarkersChangeEvent(TimelineMarkerList *markers)
{
  waveform_view_->SetMarkers(markers);
}

void ViewerWidget::ScaleChangedEvent(const double &s)
{
  super::ScaleChangedEvent(s);

  waveform_view_->SetScale(s);
}

void ViewerWidget::resizeEvent(QResizeEvent *event)
{
  super::resizeEvent(event);

  UpdateMinimumScale();
}

// Requests a single-frame render at `t`; `dry` runs the render pass without producing output.
RenderTicketPtr ViewerWidget::GetSingleFrame(const rational &t, bool dry)
{
  return RenderManager::instance()->GetCacher()->GetSingleFrame(this->GetConnectedNode(), t, dry);
}

void ViewerWidget::TogglePlayPause()
{
  if (IsPlaying()) {
    Pause();
  } else {
    Play();
  }
}

// Playing is encoded as a non-zero playback speed.
bool ViewerWidget::IsPlaying() const
{
  return playback_speed_ != 0;
}

void ViewerWidget::SetColorMenuEnabled(bool enabled)
{
  color_menu_enabled_ = enabled;
}

void ViewerWidget::SetMatrix(const QMatrix4x4 &mat)
{
  foreach (ViewerDisplayWidget *dw, playback_devices_) {
    dw->SetMatrixCrop(mat);
  }
}

// Toggles a fullscreen playback window on `screen` (defaults to the screen under the cursor).
// Calling again for a screen that already has a window closes that window.
void ViewerWidget::SetFullScreen(QScreen *screen)
{
  if (!screen) {
    // Try to find the screen that contains the mouse cursor currently
    foreach (QScreen* test, QGuiApplication::screens()) {
      if (test->geometry().contains(QCursor::pos())) {
        screen = test;
        break;
      }
    }

    // Fallback, just use the first screen
    if (!screen) {
      screen = QGuiApplication::screens().first();
    }
  }

  if (windows_.contains(screen)) {
    // Window already exists on this screen; treat as a toggle and close it
    ViewerWindow* vw = windows_.take(screen);
    vw->deleteLater();
    return;
  }

  ViewerWindow* vw = new ViewerWindow(this);
  vw->setGeometry(screen->geometry());
  vw->showFullScreen();
  vw->display_widget()->ConnectColorManager(color_manager());
  connect(vw, &ViewerWindow::destroyed, this, &ViewerWidget::WindowAboutToClose);
  connect(vw->display_widget(), &ViewerDisplayWidget::customContextMenuRequested, this, &ViewerWidget::ShowContextMenu);

  if (GetConnectedNode()) {
    vw->SetVideoParams(GetConnectedNode()->GetVideoParams());
    vw->display_widget()->SetDeinterlacing(vw->display_widget()->IsDeinterlacing());
  }

  // Mirror the current image and frame queue so the new window is immediately in sync
  vw->display_widget()->SetImage(QVariant::fromValue(display_widget()->GetCurrentTexture()));

  playback_devices_.append(vw->display_widget());
  (*vw->display_widget()->queue()) = *playback_devices_.first()->queue();

  if (IsPlaying()) {
    vw->display_widget()->Play(GetTimestamp(), playback_speed_, timebase(), true);
  }

  windows_.insert(screen, vw);
}

void ViewerWidget::CacheEntireSequence()
{
  RenderManager::instance()->GetCacher()->ForceCacheRange(GetConnectedNode(), TimeRange(0, GetConnectedNode()->GetVideoLength()));
}

// Caches only the work area; warns if no in/out points are enabled.
void ViewerWidget::CacheSequenceInOut()
{
  if (GetConnectedNode() && GetConnectedNode()->GetWorkArea()->enabled()) {
    RenderManager::instance()->GetCacher()->ForceCacheRange(GetConnectedNode(), GetConnectedNode()->GetWorkArea()->range());
  } else {
    QMessageBox::warning(this, tr("Error"), tr("No in or out points are set to cache."), QMessageBox::Ok);
  }
}

void ViewerWidget::SetGizmos(Node *node)
{
  display_widget_->SetTimeTarget(GetConnectedNode());
  display_widget_->SetGizmos(node);
}

// Seeks to the capture in point and arms recording; the actual capture starts on playback.
void ViewerWidget::StartCapture(TimelineWidget *source, const TimeRange &time, const Track::Reference &track)
{
  GetConnectedNode()->SetPlayhead(time.in());

  ArmForRecording();

  recording_callback_ = source;
  recording_range_ = time;
  recording_track_
= track;
}

// Swaps which multicam panel this viewer reports to, moving the Switched connection over.
void ViewerWidget::ConnectMulticamWidget(MulticamWidget *p)
{
  if (multicam_panel_) {
    disconnect(multicam_panel_, &MulticamWidget::Switched, this, &ViewerWidget::DetectMulticamNodeNow);
  }

  multicam_panel_ = p;

  if (multicam_panel_) {
    connect(multicam_panel_, &MulticamWidget::Switched, this, &ViewerWidget::DetectMulticamNodeNow);
  }
}

// Loads a frame from the disk cache; returns null on failure (logged as a warning).
FramePtr ViewerWidget::DecodeCachedImage(const QString &cache_path, const QUuid &cache_id, const int64_t& time)
{
  FramePtr frame = FrameHashCache::LoadCacheFrame(cache_path, cache_id, time);

  if (frame) {
    frame->set_timestamp(time);
  } else {
    qWarning() << "Tried to load cached frame from file but it was null";
  }

  return frame;
}

// Ticket-based variant: resolves `ticket` with the decoded frame, or with nothing on failure.
void ViewerWidget::DecodeCachedImage(RenderTicketPtr ticket, const QString &cache_path, const QUuid &cache_id, const int64_t& time)
{
  ticket->Start();

  FramePtr f = DecodeCachedImage(cache_path, cache_id, time);

  if (f) {
    ticket->Finish(QVariant::fromValue(f));
  } else {
    ticket->Finish();
  }
}

// True when the connected node has audio output but no texture output, i.e. only a waveform
// makes sense to display.
bool ViewerWidget::ShouldForceWaveform() const
{
  return GetConnectedNode()
      && !GetConnectedNode()->GetConnectedTextureOutput()
      && GetConnectedNode()->GetConnectedSampleOutput();
}

void ViewerWidget::SetEmptyImage()
{
  foreach (ViewerDisplayWidget *dw, playback_devices_) {
    dw->SetBlank();
  }
}

void ViewerWidget::UpdateAutoCacher()
{
  RenderManager::instance()->GetCacher()->SetPlayhead(GetConnectedNode()->GetPlayhead());
}

// Counts down the outstanding prequeued audio buffers; when all have arrived, playback
// preprocessing is finished and playback can actually begin.
void ViewerWidget::DecrementPrequeuedAudio()
{
  prequeuing_audio_--;

  if (!prequeuing_audio_) {
    FinishPlayPreprocess();
  }
}

void ViewerWidget::ArmForRecording()
{
  // Blink the play button to indicate armed state
  controls_->StartPlayBlink();

  record_armed_ = true;
}

void ViewerWidget::DisarmRecording()
{
  controls_->StopPlayBlink();

  record_armed_ = false;
}

// (Re)opens the audio processor that converts the project's audio params into the
// user-configured output params, at the current playback speed.
void ViewerWidget::UpdateAudioProcessor()
{
  if (GetConnectedNode()) {
    CloseAudioProcessor();

    AudioParams ap = GetConnectedNode()->GetAudioParams();
    ap.set_format(ViewerOutput::kDefaultSampleFormat);

    AudioParams packed(OLIVE_CONFIG("AudioOutputSampleRate").toInt(),
                       OLIVE_CONFIG("AudioOutputChannelLayout").toULongLong(),
                       SampleFormat::from_string(OLIVE_CONFIG("AudioOutputSampleFormat").toString().toStdString()));

    audio_processor_.Open(ap, packed, (playback_speed_ == 0) ? 1 : std::abs(playback_speed_));
  }
}

// Creates an addable (e.g. shape) clip at the playhead on the first unlocked track where
// it won't overwrite existing content, then pushes an undoable command.
void ViewerWidget::CreateAddableAt(const QRectF &f)
{
  if (Sequence *s = dynamic_cast<Sequence*>(GetConnectedNode())) {
    Track::Type type = Track::kVideo;
    int track_index = -1;
    TrackList *list = s->track_list(type);

    const rational &in = GetConnectedNode()->GetPlayhead();
    rational length = OLIVE_CONFIG("DefaultStillLength").value<rational>();
    rational out = in + length;

    // Find a free track where we won't overwrite anything
    while (true) {
      track_index++;

      if (track_index >= list->GetTrackCount()) {
        // Just create a new track
        break;
      }

      Track *track = list->GetTrackAt(track_index);

      if (track->IsLocked()) {
        continue;
      }

      Block *b = track->NearestBlockBeforeOrAt(in);
      if (!b || (dynamic_cast<GapBlock*>(b) && b->out() >= out)) {
        break;
      }
    }

    MultiUndoCommand *command = new MultiUndoCommand();

    Node *clip = AddTool::CreateAddableClip(command, s, Track::Reference(type, track_index), in, length);

    if (ShapeNodeBase *shape = dynamic_cast<ShapeNodeBase*>(clip)) {
      shape->SetRect(f, s->GetVideoParams(), command);
    }

    Core::instance()->undo_stack()->push(command, tr("Created Shape"));

    SetGizmos(clip);
  }
}

void ViewerWidget::HandleFirstRequeueDestroy()
{
  // Extra protection to ensure we don't reference a destroyed object
  if (first_requeue_watcher_ == sender()) {
    first_requeue_watcher_ = nullptr;
  }
}

// Shows a font dialog for configuring the default subtitle appearance, persisting the result.
void ViewerWidget::ShowSubtitleProperties()
{
  QFont f(OLIVE_CONFIG("DefaultSubtitleFamily").toString(),
          OLIVE_CONFIG("DefaultSubtitleSize").toInt(),
          OLIVE_CONFIG("DefaultSubtitleWeight").toInt());

  QFontDialog fd(f, this);

  if (fd.exec() == QDialog::Accepted) {
    f = fd.selectedFont();
    OLIVE_CONFIG("DefaultSubtitleSize") = f.pointSize();
    OLIVE_CONFIG("DefaultSubtitleFamily") = f.family();
    OLIVE_CONFIG("DefaultSubtitleWeight") = f.weight();
display_widget_->update();
  }
}

// Cleans up a finished dry-run ticket and requests the next one if this watcher is still current.
void ViewerWidget::DryRunFinished()
{
  RenderTicketWatcher *w = static_cast<RenderTicketWatcher*>(sender());

  if (dry_run_watchers_.contains(w)) {
    RequestNextDryRun();
  }

  delete w;
}

// During playback, issues "dry" render requests slightly ahead of the playhead so caches stay warm.
void ViewerWidget::RequestNextDryRun()
{
  if (IsPlaying()) {
    rational next_time = Timecode::timestamp_to_time(dry_run_next_frame_, timebase());
    if (FrameExistsAtTime(next_time)) {
      if (next_time > GetConnectedNode()->GetPlayhead() + RenderManager::kDryRunInterval) {
        // Too far ahead of the playhead; try again later
        QTimer::singleShot(timebase().toDouble() / playback_speed_, this, &ViewerWidget::RequestNextDryRun);
      } else {
        RenderTicketWatcher *watcher = new RenderTicketWatcher(this);
        connect(watcher, &RenderTicketWatcher::Finished, this, &ViewerWidget::DryRunFinished);
        watcher->SetTicket(GetSingleFrame(next_time, true));
        dry_run_next_frame_ += playback_speed_;
        dry_run_watchers_.append(watcher);
      }
    }
  }
}

void ViewerWidget::SaveFrameAsImage()
{
  Core::instance()->OpenExportDialogForViewer(GetConnectedNode(), true);
}

void ViewerWidget::DetectMulticamNodeNow()
{
  if (GetConnectedNode()) {
    DetectMulticamNode(GetConnectedNode()->GetPlayhead());
  }
}

void ViewerWidget::CloseAudioProcessor()
{
  audio_processor_.Close();
}

void ViewerWidget::SetWaveformMode(WaveformMode wf)
{
  waveform_mode_ = wf;
  UpdateWaveformViewFromMode();
}

// Finds the most relevant multicam node (and its clip) at `time` and reports it to the
// multicam panel and the cacher. Preference order: selected nodes, selected blocks, then
// a scan of unlocked tracks.
void ViewerWidget::DetectMulticamNode(const rational &time)
{
  // Look for multicam node
  MultiCamNode *multicam = nullptr;
  ClipBlock *clip = nullptr;

  // Faster way to do this
  if (multicam_panel_ && multicam_panel_->isVisible()) {
    if (Sequence *s = dynamic_cast<Sequence*>(GetConnectedNode())) {
      // Prefer selected nodes
      for (Node *n : qAsConst(node_view_selected_)) {
        if ((multicam = dynamic_cast<MultiCamNode*>(n))) {
          // Found multicam, now try to find corresponding clip from selected timeline blocks
          for (Block *b : qAsConst(timeline_selected_blocks_)) {
            if (ClipBlock *c = dynamic_cast<ClipBlock*>(b)) {
              if (c->range().Contains(time) && c->ContextContainsNode(multicam)) {
                clip = c;
                break;
              }
            }
          }
          break;
        }
      }

      // Next, prefer multicam from selected block
      if (!multicam) {
        for (Block *b : qAsConst(timeline_selected_blocks_)) {
          if (b->range().Contains(time)) {
            if ((clip = dynamic_cast<ClipBlock*>(b))) {
              if ((multicam = clip->FindMulticam())) {
                break;
              }
            }
          }
        }
      }

      // Finally, scan unlocked tracks for a clip under the playhead
      if (!multicam) {
        const QVector<Track*> &tracks = s->GetTracks();
        for (Track *t : tracks) {
          if (t->IsLocked()) {
            continue;
          }

          Block *b = t->NearestBlockBeforeOrAt(time);
          if ((clip = dynamic_cast<ClipBlock*>(b))) {
            if ((multicam = clip->FindMulticam())) {
              break;
            }
          }
        }
      }
    }
  }

  if (multicam) {
    if (multicam_panel_) {
      multicam_panel_->SetMulticamNode(GetConnectedNode(), multicam, clip, time);
    }

    // FIXME: Really dirty
    RenderManager::instance()->GetCacher()->SetMulticamNode(multicam);
  } else {
    RenderManager::instance()->GetCacher()->SetMulticamNode(nullptr);
    if (multicam_panel_) {
      multicam_panel_->SetMulticamNode(nullptr, nullptr, nullptr, time);
    }
  }
}

// Video is "visible" when the node isn't a still image and at least one display surface
// (embedded widget or a fullscreen window) is showing.
bool ViewerWidget::IsVideoVisible() const
{
  return GetConnectedNode()->GetVideoParams().video_type() != VideoParams::kVideoTypeStill
      && (display_widget_->isVisible() || !windows_.isEmpty());
}

// Shows/hides the video view and waveform view according to the current waveform mode
// (kWFAutomatic defers to ShouldForceWaveform()).
void ViewerWidget::UpdateWaveformViewFromMode()
{
  bool prefer_waveform = ShouldForceWaveform();

  sizer_->setVisible(waveform_mode_ == kWFViewerAndWaveform || waveform_mode_ == kWFViewerOnly || (waveform_mode_ == kWFAutomatic && !prefer_waveform));
  waveform_view_->setVisible(waveform_mode_ == kWFViewerAndWaveform || waveform_mode_ == kWFWaveformOnly || (waveform_mode_ == kWFAutomatic && prefer_waveform));
  waveform_view_->setSizePolicy(QSizePolicy::Expanding, waveform_mode_ == kWFViewerAndWaveform ?
QSizePolicy::Maximum : QSizePolicy::Expanding);

  if (GetConnectedNode()) {
    GetConnectedNode()->SetWaveformEnabled(waveform_view_->isVisible());
  }
}

// Requests the next chunk of audio (kAudioPlaybackInterval, scaled/signed by playback speed)
// from the renderer for playback.
void ViewerWidget::QueueNextAudioBuffer()
{
  rational queue_end = audio_playback_queue_time_ + (kAudioPlaybackInterval * playback_speed_);

  // Clamp queue end by zero and the audio length
  queue_end = std::clamp(queue_end, rational(0), GetConnectedNode()->GetAudioLength());

  if ((playback_speed_ > 0 && queue_end <= audio_playback_queue_time_)
      || (playback_speed_ < 0 && queue_end >= audio_playback_queue_time_)) {
    // This will queue nothing, so stop the loop here
    if (prequeuing_audio_) {
      DecrementPrequeuedAudio();
    }
    return;
  }

  RenderTicketWatcher *watcher = new RenderTicketWatcher(this);
  connect(watcher, &RenderTicketWatcher::Finished, this, &ViewerWidget::ReceivedAudioBufferForPlayback);
  audio_playback_queue_.push_back(watcher);
  watcher->SetTicket(RenderManager::instance()->GetCacher()->GetRangeOfAudio(GetConnectedNode(), TimeRange(audio_playback_queue_time_, queue_end)));

  audio_playback_queue_time_ = queue_end;
}

// Drains finished audio tickets in queue order, converting samples via the audio processor
// and either prequeuing them or pushing them straight to the audio output.
void ViewerWidget::ReceivedAudioBufferForPlayback()
{
  while (!audio_playback_queue_.empty() && audio_playback_queue_.front()->HasResult()) {
    RenderTicketWatcher *watcher = audio_playback_queue_.front();
    audio_playback_queue_.pop_front();

    if (watcher->HasResult()) {
      SampleBuffer samples = watcher->Get().value<SampleBuffer>();
      if (samples.is_allocated()) {
        // If the samples must be reversed, reverse them now
        if (playback_speed_ < 0) {
          samples.reverse();
        }

        // Convert to packed data for audio output
        AudioProcessor::Buffer buf;
        int r = audio_processor_.Convert(samples.to_raw_ptrs().data(), samples.sample_count(), &buf);

        // TempoProcessor may have emptied the array
        if (r >= 0) {
          if (!buf.empty()) {
            const QByteArray &pack = buf.at(0);

            if (prequeuing_audio_) {
              // Add to prequeued audio buffer
              prequeued_audio_.append(pack);
            } else {
              // Push directly to audio manager
              AudioManager::instance()->PushToOutput(audio_processor_.to(), pack);
            }
          }
        } else {
          qCritical() << "Failed to process audio for playback:" << r;
        }
      }
    }

    if (prequeuing_audio_) {
      DecrementPrequeuedAudio();
    }

    delete watcher;
  }
}

// Handles audio rendered for scrubbing; stale scrub requests older than the sender are dropped
// so only the most recent scrub position is heard.
void ViewerWidget::ReceivedAudioBufferForScrubbing()
{
  RenderTicketWatcher *watcher = static_cast<RenderTicketWatcher *>(sender());

  while (!audio_scrub_watchers_.empty() && audio_scrub_watchers_.front() != watcher) {
    audio_scrub_watchers_.pop_front();
  }

  if (!audio_scrub_watchers_.empty()) {
    if (watcher->HasResult()) {
      SampleBuffer samples = watcher->Get().value<SampleBuffer>();
      if (samples.is_allocated()) {
        if (samples.audio_params().channel_count() > 0) {
          AudioProcessor::Buffer buf;

          int r = audio_processor_.Convert(samples.to_raw_ptrs().data(), samples.sample_count(), &buf);

          if (r >= 0) {
            if (!buf.empty()) {
              QString error;
              const QByteArray &packed = buf.at(0);

              AudioManager::instance()->ClearBufferedOutput();

              if (!AudioManager::instance()->PushToOutput(audio_processor_.to(), packed, &error)) {
                Core::instance()->ShowStatusBarMessage(tr("Audio scrubbing failed: %1").arg(error));
              }

              AudioMonitor::PushSampleBufferOnAll(samples);
            }
          } else {
            qCritical() << "Failed to process audio for scrubbing:" << r;
          }
        }
      }
    }
  }

  delete watcher;
}

// Called when the display's frame queue runs dry; after a grace period (and if no requeue is
// already in flight with usable frames), forces a requeue from the current time.
void ViewerWidget::QueueStarved()
{
  static const int kMaximumWaitTimeMs = 250;
  static const rational kMaximumWaitTime(kMaximumWaitTimeMs, 1000);

  qint64 now = QDateTime::currentMSecsSinceEpoch();
  if (!queue_starved_start_) {
    queue_starved_start_ = now;
  } else if (now > queue_starved_start_ + kMaximumWaitTimeMs) {
    if (first_requeue_watcher_) {
      if (GetConnectedNode()->GetPlayhead() + kMaximumWaitTime < first_requeue_watcher_->property("time").value<rational>()) {
        // We still have time
        return;
      }
    }

    ForceRequeueFromCurrentTime();
    queue_starved_start_ = 0;
  }
}

void ViewerWidget::QueueNoLongerStarved()
{
  queue_starved_start_ = 0;
}

// Discards all pending single-frame renders and refills the playback queue starting just
// ahead of the current time.
void ViewerWidget::ForceRequeueFromCurrentTime()
{
  // Allow time for the requeue to complete before frames are needed
  static
const rational kRequeueWaitTime(1); RenderManager::instance()->GetCacher()->ClearSingleFrameRenders(); queue_watchers_.clear(); int queue = DeterminePlaybackQueueSize(); playback_queue_next_frame_ = GetTimestamp() + playback_speed_ * Timecode::time_to_timestamp(kRequeueWaitTime, timebase(), Timecode::kFloor);; first_requeue_watcher_ = nullptr; for (int i=0; i<queue; i++) { RenderTicketWatcher *watcher = RequestNextFrameForQueue(); if (!first_requeue_watcher_) { first_requeue_watcher_ = watcher; connect(first_requeue_watcher_, &RenderTicketWatcher::destroyed, this, &ViewerWidget::HandleFirstRequeueDestroy); } } } void ViewerWidget::UpdateTextureFromNode() { if (!GetConnectedNode()) { return; } if (IsPlaying()) { qWarning() << "UpdateTextureFromNode called while playing"; return; } rational time = GetConnectedNode()->GetPlayhead(); bool frame_exists_at_time = FrameExistsAtTime(time); bool frame_might_be_still = ViewerMightBeAStill(); if (frame_exists_at_time || frame_might_be_still) { // Frame was not in queue, will require rendering or decoding from cache // Not playing, run a task to get the frame either from the cache or the renderer RenderTicketWatcher* watcher = new RenderTicketWatcher(); watcher->setProperty("start", QDateTime::currentMSecsSinceEpoch()); watcher->setProperty("time", QVariant::fromValue(time)); connect(watcher, &RenderTicketWatcher::Finished, this, &ViewerWidget::RendererGeneratedFrame); nonqueue_watchers_.append(watcher); // Clear queue because we want this frame more than any others RenderManager::instance()->GetCacher()->ClearSingleFrameRendersThatArentRunning(); DetectMulticamNode(time); watcher->SetTicket(GetFrame(time)); } else { // There is definitely no frame here, we can immediately flip to showing nothing nonqueue_watchers_.clear(); SetEmptyImage(); return; } } void ViewerWidget::PlayInternal(int speed, bool in_to_out_only) { Q_ASSERT(speed != 0); if (!GetConnectedNode()) { // Do nothing if no viewer node is attached return; } if 
(timebase().isNull()) { qCritical() << "ViewerWidget can't play with an invalid timebase"; return; } // Kindly tell all viewers to stop playing and caching so all resources can be used for playback foreach (ViewerWidget* viewer, instances_) { if (viewer != this) { viewer->PauseInternal(); } } RenderManager::instance()->GetCacher()->SetThumbnailsPaused(true); RenderManager::instance()->SetAggressiveGarbageCollection(true); // Disarm recording if armed if (record_armed_) { DisarmRecording(); } // If the playhead is beyond the end, restart at 0 if (!recording_) { rational last_frame = GetConnectedNode()->GetLength() - timebase(); if (!in_to_out_only && GetConnectedNode()->GetPlayhead() >= last_frame) { if (speed > 0) { GetConnectedNode()->SetPlayhead(0); } else { GetConnectedNode()->SetPlayhead(last_frame); } } } playback_speed_ = speed; play_in_to_out_only_ = in_to_out_only; playback_queue_next_frame_ = GetTimestamp() + playback_speed_; controls_->ShowPauseButton(); queue_starved_start_ = 0; // Attempt to fill playback queue if (IsVideoVisible()) { prequeue_length_ = DeterminePlaybackQueueSize(); if (prequeue_length_ > 0) { prequeuing_video_ = true; prequeue_count_ = 0; for (int i=0; i<prequeue_length_; i++) { RequestNextFrameForQueue(); } dry_run_next_frame_ = playback_queue_next_frame_; RequestNextDryRun(); } } AudioParams ap = GetConnectedNode()->GetAudioParams(); if (ap.is_valid()) { UpdateAudioProcessor(); AudioManager::instance()->SetOutputNotifyInterval(audio_processor_.to().time_to_bytes(kAudioPlaybackInterval)); connect(AudioManager::instance(), &AudioManager::OutputNotify, this, &ViewerWidget::QueueNextAudioBuffer); static const int prequeue_count = 2; prequeuing_audio_ = prequeue_count; // Queue two buffers ahead of time audio_playback_queue_time_ = GetConnectedNode()->GetPlayhead(); for (int i=0; i<prequeue_count; i++) { QueueNextAudioBuffer(); } } // Force screen to stay awake PreventSleep(true); } void ViewerWidget::PauseInternal() { if (recording_) { 
AudioManager::instance()->StopRecording(); recording_ = false; controls_->SetPauseButtonRecordingState(false); recording_callback_->DisableRecordingOverlay(); recording_callback_->RecordingCallback(recording_filename_, recording_range_, recording_track_); } if (IsPlaying()) { playback_speed_ = 0; controls_->ShowPlayButton(); foreach (ViewerDisplayWidget *dw, playback_devices_){ dw->Pause(); } qDeleteAll(queue_watchers_); queue_watchers_.clear(); RenderManager::instance()->GetCacher()->ClearSingleFrameRenders(); playback_backup_timer_.stop(); // Handle audio AudioManager::instance()->StopOutput(); AudioMonitor::StopOnAll(); prequeued_audio_.clear(); disconnect(AudioManager::instance(), &AudioManager::OutputNotify, this, &ViewerWidget::QueueNextAudioBuffer); qDeleteAll(audio_playback_queue_); audio_playback_queue_.clear(); UpdateAudioProcessor(); RenderManager::instance()->GetCacher()->SetThumbnailsPaused(false); UpdateTextureFromNode(); RenderManager::instance()->SetAggressiveGarbageCollection(false); } prequeuing_video_ = false; prequeuing_audio_ = 0; dry_run_watchers_.clear(); // Reset screen timeout timer PreventSleep(false); } void ViewerWidget::PushScrubbedAudio() { if (!IsPlaying() && GetConnectedNode() && OLIVE_CONFIG("AudioScrubbing").toBool() && enable_audio_scrubbing_) { if (ignore_scrub_ > 0) { ignore_scrub_--; } if (ignore_scrub_ == 0) { // Get audio src device from renderer const AudioParams& params = GetConnectedNode()->GetAudioParams(); if (params.is_valid()) { // NOTE: Hardcoded scrubbing interval (20ms) rational interval = rational(20, 1000); RenderTicketWatcher *watcher = new RenderTicketWatcher(); connect(watcher, &RenderTicketWatcher::Finished, this, &ViewerWidget::ReceivedAudioBufferForScrubbing); audio_scrub_watchers_.push_back(watcher); watcher->SetTicket(RenderManager::instance()->GetCacher()->GetRangeOfAudio(GetConnectedNode(), TimeRange(GetConnectedNode()->GetPlayhead(), GetConnectedNode()->GetPlayhead() + interval))); } } } } void 
ViewerWidget::UpdateMinimumScale() { if (!GetConnectedNode()) { return; } if (GetConnectedNode()->GetLength().isNull()) { // Avoids divide by zero SetMinimumScale(0); } else { SetMinimumScale(static_cast<double>(ruler()->width()) / GetConnectedNode()->GetLength().toDouble()); } } void ViewerWidget::SetColorTransform(const ColorTransform &transform, ViewerDisplayWidget *sender) { sender->SetColorTransform(transform); } QString ViewerWidget::GetCachedFilenameFromTime(const rational &time) { if (FrameExistsAtTime(time)) { return GetConnectedNode()->video_frame_cache()->GetValidCacheFilename(time); } return QString(); } bool ViewerWidget::FrameExistsAtTime(const rational &time) { return GetConnectedNode() && time >= 0 && time < GetConnectedNode()->GetVideoLength(); } bool ViewerWidget::ViewerMightBeAStill() { return GetConnectedNode() && GetConnectedNode()->GetConnectedTextureOutput() && GetConnectedNode()->GetVideoLength().isNull(); } void ViewerWidget::SetDisplayImage(RenderTicketPtr ticket) { foreach (ViewerDisplayWidget *dw, playback_devices_) { QVariant push; if (ticket) { if (dynamic_cast<MulticamDisplay*>(dw)) { push = ticket->property("multicam_output"); } else { push = ticket->Get(); } } dw->SetImage(push); } } RenderTicketWatcher *ViewerWidget::RequestNextFrameForQueue(bool increment) { RenderTicketWatcher *watcher = nullptr; rational next_time = Timecode::timestamp_to_time(playback_queue_next_frame_, timebase()); if (FrameExistsAtTime(next_time) || ViewerMightBeAStill()) { if (increment) { playback_queue_next_frame_ += playback_speed_; } watcher = new RenderTicketWatcher(); watcher->setProperty("time", QVariant::fromValue(next_time)); DetectMulticamNode(next_time); connect(watcher, &RenderTicketWatcher::Finished, this, &ViewerWidget::RendererGeneratedFrameForQueue); queue_watchers_.append(watcher); watcher->SetTicket(GetFrame(next_time)); } return watcher; } RenderTicketPtr ViewerWidget::GetFrame(const rational &t) { QString cache_fn = 
GetConnectedNode()->video_frame_cache()->GetValidCacheFilename(t); if (!QFileInfo::exists(cache_fn)) { // Frame hasn't been cached, start render job return GetSingleFrame(t); } else { // Frame has been cached, grab the frame RenderTicketPtr ticket = std::make_shared<RenderTicket>(); ticket->setProperty("time", QVariant::fromValue(t)); QtConcurrent::run(static_cast<void(*)(RenderTicketPtr, const QString &, const QUuid &, const int64_t &)>(ViewerWidget::DecodeCachedImage), ticket, GetConnectedNode()->video_frame_cache()->GetCacheDirectory(), GetConnectedNode()->video_frame_cache()->GetUuid(), Timecode::time_to_timestamp(t, timebase(), Timecode::kFloor)); return ticket; } } void ViewerWidget::FinishPlayPreprocess() { // Check if we're still waiting for video or audio respectively if (prequeuing_video_ || prequeuing_audio_) { return; } int64_t playback_start_time = GetTimestamp(); // Start audio waveform playback if (!prequeued_audio_.isEmpty()) { QString error; if (!AudioManager::instance()->PushToOutput(audio_processor_.to(), prequeued_audio_, &error)) { QMessageBox::critical(this, tr("Audio Error"), tr("Failed to start audio: %1\n\n" "Please check your audio preferences and try again.").arg(error)); } prequeued_audio_.clear(); AudioMonitor::StartWaveformOnAll(GetConnectedNode()->GetConnectedWaveform(), GetConnectedNode()->GetPlayhead(), playback_speed_); } display_widget_->ResetFPSTimer(); foreach (ViewerDisplayWidget *dw, playback_devices_) { dw->Play(playback_start_time, playback_speed_, timebase(), IsVideoVisible()); } // This is our timer for loading the queue and setting the time playback_backup_timer_.setInterval(qFloor(timebase_dbl())); playback_backup_timer_.start(); PlaybackTimerUpdate(); } int ViewerWidget::DeterminePlaybackQueueSize() { if (playback_speed_ == 0) { return 0; } int64_t end_ts; if (playback_speed_ > 0) { end_ts = Timecode::time_to_timestamp(GetConnectedNode()->GetVideoLength(), timebase()); } else { end_ts = 0; } int remaining_frames = 
(end_ts - GetTimestamp() - 1) / playback_speed_; // Generate maximum queue int max_frames = qCeil(kVideoPlaybackInterval.toDouble() / timebase().toDouble()); return qMin(max_frames, remaining_frames); } void ViewerWidget::ContextMenuSetFullScreen(QAction *action) { SetFullScreen(QGuiApplication::screens().at(action->data().toInt())); } void ViewerWidget::ContextMenuSetPlaybackRes(QAction *action) { int div = action->data().toInt(); auto vp = GetConnectedNode()->GetVideoParams(); vp.set_divider(div); auto c = new NodeParamSetStandardValueCommand(NodeKeyframeTrackReference(NodeInput(GetConnectedNode(), ViewerOutput::kVideoParamsInput, 0)), QVariant::fromValue(vp)); Core::instance()->undo_stack()->push(c, tr("Changed Playback Resolution")); } void ViewerWidget::ContextMenuDisableSafeMargins() { context_menu_widget_->SetSafeMargins(ViewerSafeMarginInfo(false)); } void ViewerWidget::ContextMenuSetSafeMargins() { context_menu_widget_->SetSafeMargins(ViewerSafeMarginInfo(true)); } void ViewerWidget::ContextMenuSetCustomSafeMargins() { bool ok; double new_ratio = GetFloatRatioFromUser(this, tr("Safe Margins"), &ok); if (ok) { context_menu_widget_->SetSafeMargins(ViewerSafeMarginInfo(true, new_ratio)); } } void ViewerWidget::WindowAboutToClose() { ViewerWindow *vw = static_cast<ViewerWindow*>(sender()); windows_.remove(windows_.key(vw)); playback_devices_.removeOne(vw->display_widget()); } void ViewerWidget::RendererGeneratedFrame() { RenderTicketWatcher* ticket = static_cast<RenderTicketWatcher*>(sender()); if (ticket->HasResult()) { if (nonqueue_watchers_.contains(ticket)) { while (!nonqueue_watchers_.isEmpty()) { // Pop frames that are "old" if (nonqueue_watchers_.takeFirst() == ticket) { break; } } SetDisplayImage(ticket->GetTicket()); } } delete ticket; } void ViewerWidget::RendererGeneratedFrameForQueue() { RenderTicketWatcher* watcher = static_cast<RenderTicketWatcher*>(sender()); if (queue_watchers_.contains(watcher)) { queue_watchers_.removeOne(watcher); if 
(watcher->HasResult()) { QVariant frame = watcher->Get(); // Ignore this signal if we've paused now if (IsPlaying() || prequeuing_video_) { rational ts = watcher->property("time").value<rational>(); foreach (ViewerDisplayWidget *dw, playback_devices_) { QVariant push; if (dynamic_cast<MulticamDisplay*>(dw)) { push = watcher->GetTicket()->property("multicam_output"); } else { push = frame; } dw->queue()->AppendTimewise({ts, push}, playback_speed_); } if (prequeuing_video_) { prequeue_count_++; if (prequeue_count_ == prequeue_length_) { prequeuing_video_ = false; FinishPlayPreprocess(); } else { // This call was mostly necessary to keep the threads busy between prequeue and playback. // If we only have a single render thread, it's no longer necessary. //RequestNextFrameForQueue(); } } } } } if (first_requeue_watcher_ == watcher) { first_requeue_watcher_ = nullptr; } delete watcher; } void ViewerWidget::ShowContextMenu(const QPoint &pos) { if (!GetConnectedNode()) { return; } Menu menu(static_cast<QWidget*>(sender())); context_menu_widget_ = dynamic_cast<ViewerDisplayWidget*>(sender()); // ViewerDisplayWidget options if (context_menu_widget_) { // Color options if (context_menu_widget_->color_manager() && color_menu_enabled_) { { Menu* ocio_colorspace_menu = context_menu_widget_->GetColorSpaceMenu(&menu); menu.addMenu(ocio_colorspace_menu); } { Menu* ocio_display_menu = context_menu_widget_->GetDisplayMenu(&menu); menu.addMenu(ocio_display_menu); } { Menu* ocio_view_menu = context_menu_widget_->GetViewMenu(&menu); menu.addMenu(ocio_view_menu); } { Menu* ocio_look_menu = context_menu_widget_->GetLookMenu(&menu); menu.addMenu(ocio_look_menu); } menu.addSeparator(); } { // Viewer Zoom Level Menu* zoom_menu = new Menu(tr("Zoom"), &menu); menu.addMenu(zoom_menu); zoom_menu->addAction(tr("Fit"))->setData(-1); for (int i=0;i<ViewerSizer::kZoomLevelCount;i++) { double z = ViewerSizer::kZoomLevels[i]; zoom_menu->addAction(tr("%1%").arg(z * 100.0))->setData(z); } 
connect(zoom_menu, &QMenu::triggered, this, &ViewerWidget::SetZoomFromMenu); } { // Full Screen Menu Menu* full_screen_menu = new Menu(tr("Full Screen"), &menu); menu.addMenu(full_screen_menu); for (int i=0;i<QGuiApplication::screens().size();i++) { QScreen* s = QGuiApplication::screens().at(i); QAction* a = full_screen_menu->addAction(tr("Screen %1: %2x%3").arg(QString::number(i), QString::number(s->size().width()), QString::number(s->size().height()))); a->setData(i); a->setCheckable(true); a->setChecked(windows_.contains(QGuiApplication::screens().at(i))); } connect(full_screen_menu, &QMenu::triggered, this, &ViewerWidget::ContextMenuSetFullScreen); } { // Playback Resolution Menu Menu *playback_res_menu = new Menu(tr("Playback Resolution"), &menu); menu.addMenu(playback_res_menu); for (int d : VideoParams::kSupportedDividers) { playback_res_menu->AddActionWithData(VideoParams::GetNameForDivider(d), d, GetConnectedNode()->GetVideoParams().divider()); } connect(playback_res_menu, &QMenu::triggered, this, &ViewerWidget::ContextMenuSetPlaybackRes); } { // Deinterlace Option if (GetConnectedNode()->GetVideoParams().interlacing() != VideoParams::kInterlaceNone) { QAction* deinterlace_action = menu.addAction(tr("Deinterlace")); deinterlace_action->setCheckable(true); deinterlace_action->setChecked(display_widget_->IsDeinterlacing()); connect(deinterlace_action, &QAction::triggered, display_widget_, &ViewerDisplayWidget::SetDeinterlacing); } } menu.addSeparator(); /* TEMP: Hide sequence cache options. Want to see if clip caching supersedes it. 
{ Menu* cache_menu = new Menu(tr("Cache"), &menu); menu.addMenu(cache_menu); // Cache Entire Sequence QAction* cache_entire_sequence = cache_menu->addAction(tr("Cache Entire Sequence")); connect(cache_entire_sequence, &QAction::triggered, this, &ViewerWidget::CacheEntireSequence); // Cache In/Out Sequence QAction* cache_inout_sequence = cache_menu->addAction(tr("Cache Sequence In/Out")); connect(cache_inout_sequence, &QAction::triggered, this, &ViewerWidget::CacheSequenceInOut); }*/ menu.addSeparator(); { // Safe Margins Menu* safe_margin_menu = new Menu(tr("Safe Margins"), &menu); menu.addMenu(safe_margin_menu); QAction* safe_margin_off = safe_margin_menu->addAction(tr("Off")); safe_margin_off->setCheckable(true); safe_margin_off->setChecked(!context_menu_widget_->GetSafeMargin().is_enabled()); connect(safe_margin_off, &QAction::triggered, this, &ViewerWidget::ContextMenuDisableSafeMargins); QAction* safe_margin_on = safe_margin_menu->addAction(tr("On")); safe_margin_on->setCheckable(true); safe_margin_on->setChecked(context_menu_widget_->GetSafeMargin().is_enabled() && !context_menu_widget_->GetSafeMargin().custom_ratio()); connect(safe_margin_on, &QAction::triggered, this, &ViewerWidget::ContextMenuSetSafeMargins); QAction* safe_margin_custom = safe_margin_menu->addAction(tr("Custom Aspect")); safe_margin_custom->setCheckable(true); safe_margin_custom->setChecked(context_menu_widget_->GetSafeMargin().is_enabled() && context_menu_widget_->GetSafeMargin().custom_ratio()); connect(safe_margin_custom, &QAction::triggered, this, &ViewerWidget::ContextMenuSetCustomSafeMargins); } menu.addSeparator(); } { QAction *stop_playback_on_last_frame = menu.addAction(tr("Stop Playback On Last Frame")); stop_playback_on_last_frame->setCheckable(true); stop_playback_on_last_frame->setChecked(OLIVE_CONFIG("StopPlaybackOnLastFrame").toBool()); connect(stop_playback_on_last_frame, &QAction::triggered, this, [](bool e){ OLIVE_CONFIG("StopPlaybackOnLastFrame") = e; }); 
menu.addSeparator(); } { auto waveform_menu = new Menu(tr("Audio Waveform"), &menu); menu.addMenu(waveform_menu); waveform_menu->AddActionWithData(tr("Automatically Show/Hide"), kWFAutomatic, waveform_mode_); waveform_menu->AddActionWithData(tr("Show Waveform Only"), kWFWaveformOnly, waveform_mode_); waveform_menu->AddActionWithData(tr("Show Both Viewer And Waveform"), kWFViewerAndWaveform, waveform_mode_); connect(waveform_menu, &Menu::triggered, this, &ViewerWidget::UpdateWaveformModeFromMenu); } { QAction* show_fps_action = menu.addAction(tr("Show FPS")); show_fps_action->setCheckable(true); show_fps_action->setChecked(display_widget_->GetShowFPS()); connect(show_fps_action, &QAction::triggered, display_widget_, &ViewerDisplayWidget::SetShowFPS); } if (context_menu_widget_ == display_widget_) { auto subtitle_menu = new Menu(tr("Subtitles"), &menu); menu.addMenu(subtitle_menu); QAction* show_subtitles_action = subtitle_menu->addAction(tr("Show Subtitles")); show_subtitles_action->setCheckable(true); show_subtitles_action->setChecked(display_widget_->GetShowSubtitles()); connect(show_subtitles_action, &QAction::triggered, display_widget_, &ViewerDisplayWidget::SetShowSubtitles); subtitle_menu->addSeparator(); auto subtitle_font_properties = subtitle_menu->addAction(tr("Subtitle Properties")); connect(subtitle_font_properties, &QAction::triggered, this, &ViewerWidget::ShowSubtitleProperties); auto subtitle_antialias = subtitle_menu->addAction(tr("Use Anti-aliasing")); subtitle_antialias->setCheckable(true); subtitle_antialias->setChecked(OLIVE_CONFIG("AntialiasSubtitles").toBool()); connect(subtitle_antialias, &QAction::triggered, this, [this](bool e){ OLIVE_CONFIG("AntialiasSubtitles") = e; display_widget_->update(); }); } menu.addSeparator(); auto save_frame_as_image = menu.addAction(tr("Save Frame As Image")); connect(save_frame_as_image, &QAction::triggered, this, &ViewerWidget::SaveFrameAsImage); menu.exec(static_cast<QWidget*>(sender())->mapToGlobal(pos)); } 
void ViewerWidget::Play(bool in_to_out_only) { if (in_to_out_only) { if (GetConnectedNode() && GetConnectedNode()->GetWorkArea()->enabled()) { // Jump to in point GetConnectedNode()->SetPlayhead(GetConnectedNode()->GetWorkArea()->in()); } else { in_to_out_only = false; } } else if (record_armed_) { DisarmRecording(); if (GetConnectedNode()->project()->filename().isEmpty()) { QMessageBox::critical(this, tr("Audio Recording"), tr("Project must be saved before you can record audio.")); return; } QDir audio_path(QFileInfo(GetConnectedNode()->project()->filename()).dir().filePath(tr("audio"))); if (!audio_path.exists()) { audio_path.mkpath(QStringLiteral(".")); } recording_filename_ = audio_path.filePath(QStringLiteral("%1.%2").arg( QDateTime::currentDateTime().toString("yyyy-MM-dd hh-mm-ss"), ExportFormat::GetExtension(static_cast<ExportFormat::Format>(OLIVE_CONFIG("AudioRecordingFormat").toInt()))) ); AudioParams ap(OLIVE_CONFIG("AudioRecordingSampleRate").toInt(), OLIVE_CONFIG("AudioRecordingChannelLayout").toULongLong(), SampleFormat::from_string(OLIVE_CONFIG("AudioRecordingSampleFormat").toString().toStdString())); EncodingParams encode_param; encode_param.EnableAudio(ap, static_cast<ExportCodec::Codec>(OLIVE_CONFIG("AudioRecordingCodec").toInt())); encode_param.SetFilename(recording_filename_); encode_param.set_audio_bit_rate(OLIVE_CONFIG("AudioRecordingBitRate").toInt() * 1000); QString error; if (AudioManager::instance()->StartRecording(encode_param, &error)) { recording_ = true; controls_->SetPauseButtonRecordingState(true); recording_callback_->EnableRecordingOverlay(TimelineCoordinate(recording_range_.in(), recording_track_)); } else { QMessageBox::critical(this, tr("Audio Recording"), tr("Failed to start audio recording: %1").arg(error)); return; } } PlayInternal(1, in_to_out_only); } void ViewerWidget::Play() { Play(false); } void ViewerWidget::Pause() { PauseInternal(); } void ViewerWidget::ShuttleLeft() { int current_speed = playback_speed_; if 
(current_speed != 0) { PauseInternal(); } current_speed--; if (current_speed == 0) { current_speed--; } PlayInternal(current_speed, false); } void ViewerWidget::ShuttleStop() { Pause(); } void ViewerWidget::ShuttleRight() { int current_speed = playback_speed_; if (current_speed != 0) { PauseInternal(); } current_speed++; if (current_speed == 0) { current_speed++; } PlayInternal(current_speed, false); } void ViewerWidget::SetColorTransform(const ColorTransform &transform) { SetColorTransform(transform, display_widget_); } void ViewerWidget::SetSignalCursorColorEnabled(bool e) { foreach (ViewerDisplayWidget *dw, playback_devices_) { dw->SetSignalCursorColorEnabled(e); } } void ViewerWidget::TimebaseChangedEvent(const rational &timebase) { super::TimebaseChangedEvent(timebase); controls_->SetTimebase(timebase); controls_->SetTime(GetConnectedNode() ? GetConnectedNode()->GetPlayhead() : 0); LengthChangedSlot(GetConnectedNode() ? GetConnectedNode()->GetLength() : 0); } void ViewerWidget::PlaybackTimerUpdate() { Q_ASSERT(playback_speed_ != 0); rational current_time = Timecode::timestamp_to_time(display_widget_->timer()->GetTimestampNow(), timebase()); rational min_time, max_time; if (recording_ && recording_range_.out() != recording_range_.in()) { // Limit recording range if applicable min_time = recording_range_.in(); max_time = recording_range_.out(); } else if (play_in_to_out_only_ && GetConnectedNode()->GetWorkArea()->enabled()) { // If "play in to out" is enabled or we're looping AND we have a workarea, only play the workarea min_time = GetConnectedNode()->GetWorkArea()->in(); max_time = GetConnectedNode()->GetWorkArea()->out(); } else { // Otherwise set the bounds to the range of the sequence min_time = 0; max_time = GetConnectedNode()->GetLength(); } // If we're stopping playback on the last frame rather than after it, subtract our max time // by one timebase unit if (OLIVE_CONFIG("StopPlaybackOnLastFrame").toBool()) { max_time = qMax(min_time, max_time - 
timebase()); } rational time_to_set; bool end_of_line = false; bool play_after_pause = false; if ((!recording_ || recording_range_.out() != recording_range_.in()) && ((playback_speed_ < 0 && current_time <= min_time) || (playback_speed_ > 0 && current_time >= max_time))) { // Determine which timestamp we tripped rational tripped_time; if (current_time <= min_time) { tripped_time = min_time; } else { tripped_time = max_time; } // Signal that we've reached the end of whatever range we're playing and should either pause // or restart playback end_of_line = true; if (OLIVE_CONFIG("Loop").toBool() && !recording_) { // If we're looping, jump to the other side of the workarea and continue time_to_set = (tripped_time == min_time) ? max_time : min_time; // Signal to restart playback after the pause signalled by `end_of_line` play_after_pause = true; } else { // Pause at the boundary we tripped time_to_set = tripped_time; } } else { // Sets time normally to whatever we calculated as the "current time" time_to_set = current_time; } // Set the time. By wrapping in this bool, we prevent TimeChangedEvent's default behavior of // pausing. Even if we pause it later with `end_of_line`, we prefer pausing after setting the time // so that an audio scrub event, etc. isn't sent. 
time_changed_from_timer_ = true; GetConnectedNode()->SetPlayhead(time_to_set); time_changed_from_timer_ = false; if (end_of_line) { // Cache the current speed int current_speed = playback_speed_; PauseInternal(); if (play_after_pause) { PlayInternal(current_speed, play_in_to_out_only_); } } if (IsPlaying() && IsVideoVisible()) { while ((int(display_widget_->queue()->size()) + queue_watchers_.size()) < DeterminePlaybackQueueSize()) { if (!RequestNextFrameForQueue()) { // Prevent infinite loop break; } } } foreach (ViewerDisplayWidget *dw, playback_devices_) { dw->queue()->PurgeBefore(current_time, playback_speed_); } } void ViewerWidget::SetViewerResolution(int width, int height) { sizer_->SetChildSize(width, height); foreach (ViewerWindow* vw, windows_) { vw->SetResolution(width, height); } } void ViewerWidget::SetViewerPixelAspect(const rational &ratio) { sizer_->SetPixelAspectRatio(ratio); foreach (ViewerWindow* vw, windows_) { vw->SetPixelAspectRatio(ratio); } } void ViewerWidget::LengthChangedSlot(const rational &length) { if (last_length_ != length) { controls_->SetEndTime(length); UpdateMinimumScale(); if (GetConnectedNode() && length < last_length_ && GetConnectedNode()->GetPlayhead() >= length) { UpdateTextureFromNode(); } last_length_ = length; } } void ViewerWidget::InterlacingChangedSlot(VideoParams::Interlacing interlacing) { // Automatically set a "sane" deinterlacing option bool deint = interlacing != VideoParams::kInterlaceNone; foreach (ViewerDisplayWidget *dw, playback_devices_) { dw->SetDeinterlacing(deint); } } void ViewerWidget::UpdateRendererVideoParameters() { VideoParams vp = GetConnectedNode()->GetVideoParams(); foreach (ViewerDisplayWidget *dw, playback_devices_) { dw->SetVideoParams(vp); } } void ViewerWidget::UpdateRendererAudioParameters() { AudioParams ap = GetConnectedNode()->GetAudioParams(); UpdateAudioProcessor(); foreach (ViewerDisplayWidget *dw, playback_devices_) { dw->SetAudioParams(ap); } } void 
ViewerWidget::SetZoomFromMenu(QAction *action) { auto s = sizer_->GetContainerSize(); sizer_->SetZoomAnchored(action->data().toDouble(), s.width()/2, s.height()/2); } void ViewerWidget::ViewerInvalidatedVideoRange(const TimeRange &range) { // If our current frame is within this range, we need to update if (!IsPlaying() && GetConnectedNode()->GetPlayhead() >= range.in() && (GetConnectedNode()->GetPlayhead() < range.out() || range.in() == range.out())) { QMetaObject::invokeMethod(this, &ViewerWidget::UpdateTextureFromNode, Qt::QueuedConnection); } } void ViewerWidget::UpdateWaveformModeFromMenu(QAction *a) { SetWaveformMode(static_cast<WaveformMode>(a->data().toInt())); } void ViewerWidget::DragEntered(QDragEnterEvent* event) { if (event->mimeData()->formats().contains(Project::kItemMimeType)) { event->accept(); } } void ViewerWidget::Dropped(QDropEvent *event) { QByteArray mimedata = event->mimeData()->data(Project::kItemMimeType); QDataStream stream(&mimedata, QIODevice::ReadOnly); // Variables to deserialize into quintptr item_ptr = 0; QVector<Track::Reference> enabled_streams; while (!stream.atEnd()) { stream >> enabled_streams >> item_ptr; // We only need the one item break; } if (item_ptr) { Node* item = reinterpret_cast<Node*>(item_ptr); ViewerOutput* viewer = dynamic_cast<ViewerOutput*>(item); if (viewer) { ConnectViewerNode(viewer); } } } }
59,342
C++
.cpp
1,495
34.772575
327
0.700774
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,828
viewertexteditor.cpp
olive-editor_olive/app/widget/viewer/viewertexteditor.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "viewertexteditor.h" #include <QAbstractTextDocumentLayout> #include <QColorDialog> #include <QKeyEvent> #include <QHBoxLayout> #include <QPainter> #include <QScrollBar> #include <QTextBlock> #include <QtMath> #include "common/qtutils.h" #include "ui/icons/icons.h" namespace olive { #define super QTextEdit ViewerTextEditor::ViewerTextEditor(double scale, QWidget *parent) : super(parent), transparent_clone_(nullptr), block_update_toolbar_signal_(false), forced_default_(false) { // Ensure default text color is white QPalette p = palette(); p.setColor(QPalette::Text, Qt::white); setPalette(p); document()->setDefaultStyleSheet(QStringLiteral("body { color: white; }")); // Ensure cursor is visible at this scale setCursorWidth(std::ceil(1.0 / scale)); viewport()->setAutoFillBackground(false); setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOff); setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOff); connect(horizontalScrollBar(), &QScrollBar::rangeChanged, this, &ViewerTextEditor::LockScrollBarMaximumToZero); connect(verticalScrollBar(), &QScrollBar::rangeChanged, this, &ViewerTextEditor::LockScrollBarMaximumToZero); // Force DPI to the same one that we're using in the actual render dpi_force_ = QImage(1, 1, QImage::Format_RGBA8888_Premultiplied); const int dpm = 3780; 
dpi_force_.setDotsPerMeterX(dpm); dpi_force_.setDotsPerMeterY(dpm); document()->documentLayout()->setPaintDevice(&dpi_force_); connect(this, &QTextEdit::currentCharFormatChanged, this, &ViewerTextEditor::FormatChanged); connect(document(), &QTextDocument::contentsChanged, this, &ViewerTextEditor::DocumentChanged, Qt::QueuedConnection); setAcceptRichText(false); } void ViewerTextEditor::ConnectToolBar(ViewerTextEditorToolBar *toolbar) { connect(toolbar, &ViewerTextEditorToolBar::FamilyChanged, this, &ViewerTextEditor::SetFamily); connect(toolbar, &ViewerTextEditorToolBar::SizeChanged, this, &ViewerTextEditor::setFontPointSize); connect(toolbar, &ViewerTextEditorToolBar::StyleChanged, this, &ViewerTextEditor::SetStyle); connect(toolbar, &ViewerTextEditorToolBar::UnderlineChanged, this, &ViewerTextEditor::setFontUnderline); connect(toolbar, &ViewerTextEditorToolBar::StrikethroughChanged, this, &ViewerTextEditor::SetFontStrikethrough); connect(toolbar, &ViewerTextEditorToolBar::ColorChanged, this, &ViewerTextEditor::setTextColor); connect(toolbar, &ViewerTextEditorToolBar::SmallCapsChanged, this, &ViewerTextEditor::SetSmallCaps); connect(toolbar, &ViewerTextEditorToolBar::StretchChanged, this, &ViewerTextEditor::SetFontStretch); connect(toolbar, &ViewerTextEditorToolBar::KerningChanged, this, &ViewerTextEditor::SetFontKerning); connect(toolbar, &ViewerTextEditorToolBar::LineHeightChanged, this, &ViewerTextEditor::SetLineHeight); connect(toolbar, &ViewerTextEditorToolBar::AlignmentChanged, this, [this](Qt::Alignment a){ this->setAlignment(a); // Ensure no buttons are checked that shouldn't be static_cast<ViewerTextEditorToolBar*>(sender())->SetAlignment(a); }); UpdateToolBar(toolbar, this->currentCharFormat(), this->textCursor().blockFormat(), this->alignment()); toolbars_.append(toolbar); } void ViewerTextEditor::Paint(QPainter *p, Qt::Alignment valign) { QAbstractTextDocumentLayout::PaintContext ctx; QRect clip = this->rect(); p->setClipRect(clip, Qt::IntersectClip); 
ctx.clip = clip; ctx.cursorPosition = this->textCursor().position(); if (this->textCursor().hasSelection()) { QAbstractTextDocumentLayout::Selection selection; selection.cursor = this->textCursor(); QPalette::ColorGroup cg = this->hasFocus() ? QPalette::Active : QPalette::Inactive; QBrush b = ctx.palette.brush(cg, QPalette::Highlight); QColor bc = b.color(); bc.setAlpha(128); b.setColor(bc); selection.format.setBackground(b); QStyleOption opt; opt.initFrom(this); if (this->style()->styleHint(QStyle::SH_RichText_FullWidthSelection, &opt, this)) { selection.format.setProperty(QTextFormat::FullWidthSelection, true); } ctx.selections.append(selection); } switch (valign) { case Qt::AlignTop: // Do nothing break; case Qt::AlignVCenter: p->translate(0, clip.height()/2-document()->size().height()/2); break; case Qt::AlignBottom: p->translate(0, clip.height()-document()->size().height()); break; } const bool use_transparent_clone = true; if (transparent_clone_ && use_transparent_clone) { transparent_clone_->setPageSize(this->document()->pageSize()); transparent_clone_->documentLayout()->draw(p, ctx); } else { document()->documentLayout()->draw(p, ctx); } } void ViewerTextEditor::paintEvent(QPaintEvent *e) { // Disable painting } void ViewerTextEditor::UpdateToolBar(ViewerTextEditorToolBar *toolbar, const QTextCharFormat &f, const QTextBlockFormat &b, Qt::Alignment alignment) { QStringList families = f.fontFamilies().toStringList(); QString family; if (families.isEmpty()) { family = qApp->font().family(); } else { family = families.first(); } QString style = f.fontStyleName().toString(); QStringList styles = QFontDatabase().styles(family); if (!styles.isEmpty() && (style.isEmpty() || !styles.contains(style))) { // There seems to be no better way to find the "regular" style outside of this heuristic. // Feel free to add more if a font isn't working right. 
style = QStringLiteral("Regular"); if (!styles.contains(style)) { style = QStringLiteral("Normal"); } if (!styles.contains(style)) { style = QStringLiteral("R"); } if (!styles.contains(style)) { style = styles.first(); } } toolbar->SetFontFamily(family); toolbar->SetFontSize(f.fontPointSize()); toolbar->SetStyle(style); toolbar->SetUnderline(f.fontUnderline()); toolbar->SetStrikethrough(f.fontStrikeOut()); toolbar->SetAlignment(alignment); toolbar->SetColor(f.foreground().color()); toolbar->SetSmallCaps(f.fontCapitalization() == QFont::SmallCaps); toolbar->SetStretch(f.fontStretch() == 0 ? 100 : f.fontStretch()); toolbar->SetKerning(f.fontLetterSpacing() == 0.0 ? 100 : f.fontLetterSpacing()); toolbar->SetLineHeight(b.lineHeight() == 0.0 ? 100 : b.lineHeight()); } void ViewerTextEditor::FormatChanged(const QTextCharFormat &f) { if (!block_update_toolbar_signal_) { foreach (ViewerTextEditorToolBar *toolbar, toolbars_) { UpdateToolBar(toolbar, f, textCursor().blockFormat(), this->alignment()); } } if (!(document()->blockCount() == 1 && document()->firstBlock().text().isEmpty())) { default_fmt_ = f; } } void ViewerTextEditor::SetFamily(const QString &s) { ViewerTextEditorToolBar *toolbar = static_cast<ViewerTextEditorToolBar *>(sender()); QTextCharFormat f; f.setFontFamilies({s}); ApplyStyle(&f, s, toolbar->GetFontStyleName()); MergeCharFormat(f); } void ViewerTextEditor::SetStyle(const QString &s) { ViewerTextEditorToolBar *toolbar = static_cast<ViewerTextEditorToolBar *>(sender()); QTextCharFormat f; ApplyStyle(&f, toolbar->GetFontFamily(), s); MergeCharFormat(f); } void ViewerTextEditor::SetFontStrikethrough(bool e) { QTextCharFormat f; f.setFontStrikeOut(e); MergeCharFormat(f); } void ViewerTextEditor::SetSmallCaps(bool e) { QTextCharFormat f; f.setFontCapitalization(e ? 
QFont::SmallCaps : QFont::MixedCase); MergeCharFormat(f); } void ViewerTextEditor::SetFontStretch(int i) { QTextCharFormat f; f.setFontStretch(i); MergeCharFormat(f); } void ViewerTextEditor::SetFontKerning(qreal i) { QTextCharFormat f; f.setFontLetterSpacing(i); MergeCharFormat(f); } void ViewerTextEditor::MergeCharFormat(const QTextCharFormat &fmt) { // mergeCurrentCharFormat throws a currentCharFormatChanged signal that updates the toolbar, // this can be undesirable if the user is currently typing a font block_update_toolbar_signal_ = true; mergeCurrentCharFormat(fmt); //default_fmt_ = this->currentCharFormat(); block_update_toolbar_signal_ = false; } void ViewerTextEditor::ApplyStyle(QTextCharFormat *format, const QString &family, const QString &style) { // NOTE: Windows appears to require setting weight and italic manually, while macOS and Linux are // perfectly fine with just the style name format->setFontWeight(QFontDatabase().weight(family, style)); format->setFontItalic(QFontDatabase().italic(family, style)); format->setFontStyleName(style); } void ViewerTextEditor::SetLineHeight(qreal i) { QTextBlockFormat f = this->textCursor().blockFormat(); f.setLineHeight(i, QTextBlockFormat::ProportionalHeight); this->textCursor().setBlockFormat(f); } void ViewerTextEditor::LockScrollBarMaximumToZero() { static_cast<QScrollBar*>(sender())->setMaximum(0); } void ViewerTextEditor::DocumentChanged() { if (document()->blockCount() == 1 && document()->firstBlock().text().isEmpty()) { if (!forced_default_) { QTextCursor c(document()->firstBlock()); c.setBlockCharFormat(default_fmt_); forced_default_ = true; } } else { if (default_fmt_.isEmpty()) { default_fmt_ = document()->firstBlock().charFormat(); } forced_default_ = false; } // HACK: We want to show the text cursor and selections without necessarily rendering the text, // because the text is already being rendered underneath the gizmo (and rendering twice will // alter the overall look of the text while editing). 
This is something that Qt does not // support by default, and while it could be solved by subclassing QAbstractTextDocumentLayout, // this seems like overkill since 99% of QTextDocumentLayout's (the subclass used by default) // functionality and would need to be fully implemented ourselves. So instead, we clone the // document, set all of its colors to rgba(0,0,0,0) to make them transparent, and draw that // document instead. The result is cursor and selections being rendered without the text. // While this isn't the fastest code, nor is it the cleanest solution, it definitely works. delete transparent_clone_; transparent_clone_ = document()->clone(this); transparent_clone_->documentLayout()->setPaintDevice(&dpi_force_); transparent_clone_->documentLayout()->setProperty("cursorWidth", document()->documentLayout()->property("cursorWidth")); QTextCursor cursor(transparent_clone_); cursor.select(QTextCursor::Document); QTextCharFormat fmt; fmt.setForeground(QColor(0, 0, 0, 0)); cursor.mergeCharFormat(fmt); } ViewerTextEditorToolBar::ViewerTextEditorToolBar(QWidget *parent) : QWidget(parent), painted_(false), drag_enabled_(true) { QVBoxLayout *outer_layout = new QVBoxLayout(this); const int advanced_slider_width = QtUtils::QFontMetricsWidth(fontMetrics(), QStringLiteral("9999.9%")); { QHBoxLayout *row_layout = new QHBoxLayout(); row_layout->setSpacing(0); outer_layout->addLayout(row_layout); font_combo_ = new QFontComboBox(); connect(font_combo_, &QFontComboBox::currentTextChanged, this, &ViewerTextEditorToolBar::UpdateFontStyleListAndEmitFamilyChanged); row_layout->addWidget(font_combo_); font_sz_slider_ = new FloatSlider(); font_sz_slider_->SetMinimum(0.1); font_sz_slider_->SetMaximum(9999.9); font_sz_slider_->SetDecimalPlaces(1); font_sz_slider_->SetAlignment(Qt::AlignCenter); font_sz_slider_->setFixedWidth(advanced_slider_width); connect(font_sz_slider_, &FloatSlider::ValueChanged, this, &ViewerTextEditorToolBar::SizeChanged); 
font_sz_slider_->SetLadderElementCount(2); row_layout->addWidget(font_sz_slider_); style_combo_ = new QComboBox(); connect(style_combo_, &QComboBox::currentTextChanged, this, &ViewerTextEditorToolBar::StyleChanged); row_layout->addWidget(style_combo_); underline_btn_ = new QPushButton(); connect(underline_btn_, &QPushButton::clicked, this, &ViewerTextEditorToolBar::UnderlineChanged); underline_btn_->setCheckable(true); underline_btn_->setIcon(icon::TextUnderline); row_layout->addWidget(underline_btn_); strikethrough_btn_ = new QPushButton(); connect(strikethrough_btn_, &QPushButton::clicked, this, &ViewerTextEditorToolBar::StrikethroughChanged); strikethrough_btn_->setCheckable(true); strikethrough_btn_->setIcon(icon::TextStrikethrough); row_layout->addWidget(strikethrough_btn_); AddSpacer(row_layout); color_btn_ = new QPushButton(); color_btn_->setAutoFillBackground(true); connect(color_btn_, &QPushButton::clicked, this, [this]{ QColor c = color_btn_->property("color").value<QColor>(); QColorDialog cd(c, this); if (cd.exec() == QDialog::Accepted) { c = cd.selectedColor(); SetColor(c); emit ColorChanged(c); } }); row_layout->addWidget(color_btn_); row_layout->addStretch(); } { QHBoxLayout *row_layout = new QHBoxLayout(); row_layout->setSpacing(0); outer_layout->addLayout(row_layout); align_left_btn_ = new QPushButton(); align_left_btn_->setCheckable(true); align_left_btn_->setIcon(icon::TextAlignLeft); connect(align_left_btn_, &QPushButton::clicked, this, [this]{emit AlignmentChanged(Qt::AlignLeft);}); row_layout->addWidget(align_left_btn_); align_center_btn_ = new QPushButton(); align_center_btn_->setCheckable(true); align_center_btn_->setIcon(icon::TextAlignCenter); connect(align_center_btn_, &QPushButton::clicked, this, [this]{emit AlignmentChanged(Qt::AlignHCenter);}); row_layout->addWidget(align_center_btn_); align_right_btn_ = new QPushButton(); align_right_btn_->setCheckable(true); align_right_btn_->setIcon(icon::TextAlignRight); connect(align_right_btn_, 
&QPushButton::clicked, this, [this]{emit AlignmentChanged(Qt::AlignRight);}); row_layout->addWidget(align_right_btn_); align_justify_btn_ = new QPushButton(); align_justify_btn_->setCheckable(true); align_justify_btn_->setIcon(icon::TextAlignJustify); connect(align_justify_btn_, &QPushButton::clicked, this, [this]{emit AlignmentChanged(Qt::AlignJustify);}); row_layout->addWidget(align_justify_btn_); AddSpacer(row_layout); align_top_btn_ = new QPushButton(); align_top_btn_->setCheckable(true); align_top_btn_->setIcon(icon::TextAlignTop); connect(align_top_btn_, &QPushButton::clicked, this, [this]{emit VerticalAlignmentChanged(Qt::AlignTop);}); row_layout->addWidget(align_top_btn_); align_middle_btn_ = new QPushButton(); align_middle_btn_->setCheckable(true); align_middle_btn_->setIcon(icon::TextAlignMiddle); connect(align_middle_btn_, &QPushButton::clicked, this, [this]{emit VerticalAlignmentChanged(Qt::AlignVCenter);}); row_layout->addWidget(align_middle_btn_); align_bottom_btn_ = new QPushButton(); align_bottom_btn_->setCheckable(true); align_bottom_btn_->setIcon(icon::TextAlignBottom); connect(align_bottom_btn_, &QPushButton::clicked, this, [this]{emit VerticalAlignmentChanged(Qt::AlignBottom);}); row_layout->addWidget(align_bottom_btn_); AddSpacer(row_layout); small_caps_btn_ = new QPushButton(); small_caps_btn_->setIcon(icon::TextSmallCaps); small_caps_btn_->setCheckable(true); connect(small_caps_btn_, &QPushButton::clicked, this, &ViewerTextEditorToolBar::SmallCapsChanged); row_layout->addWidget(small_caps_btn_); AddSpacer(row_layout); row_layout->addWidget(new QLabel(tr("Stretch: "))); // FIXME: Procure icon stretch_slider_ = new IntegerSlider(); stretch_slider_->SetMinimum(0); stretch_slider_->SetDefaultValue(100); stretch_slider_->setFixedWidth(advanced_slider_width); stretch_slider_->SetFormat(tr("%1%")); connect(stretch_slider_, &IntegerSlider::ValueChanged, this, &ViewerTextEditorToolBar::StretchChanged); row_layout->addWidget(stretch_slider_); 
row_layout->addWidget(new QLabel(tr("Kerning: "))); // FIXME: Procure icon kerning_slider_ = new FloatSlider(); kerning_slider_->SetMinimum(0); kerning_slider_->SetDefaultValue(100); kerning_slider_->SetDecimalPlaces(1); kerning_slider_->setFixedWidth(advanced_slider_width); kerning_slider_->SetFormat(tr("%1%")); connect(kerning_slider_, &FloatSlider::ValueChanged, this, &ViewerTextEditorToolBar::KerningChanged); row_layout->addWidget(kerning_slider_); row_layout->addWidget(new QLabel(tr("Line Height: "))); // FIXME: Procure icon line_height_slider_ = new FloatSlider(); line_height_slider_->SetMinimum(0); line_height_slider_->SetDefaultValue(100); line_height_slider_->SetDecimalPlaces(1); line_height_slider_->setFixedWidth(advanced_slider_width); line_height_slider_->SetFormat(tr("%1%")); connect(line_height_slider_, &FloatSlider::ValueChanged, this, &ViewerTextEditorToolBar::LineHeightChanged); row_layout->addWidget(line_height_slider_); row_layout->addStretch(); } setAutoFillBackground(true); resize(sizeHint()); } void ViewerTextEditorToolBar::SetAlignment(Qt::Alignment a) { align_left_btn_->setChecked(a == Qt::AlignLeft); align_center_btn_->setChecked(a == Qt::AlignHCenter); align_right_btn_->setChecked(a == Qt::AlignRight); align_justify_btn_->setChecked(a == Qt::AlignJustify); } void ViewerTextEditorToolBar::SetVerticalAlignment(Qt::Alignment a) { align_top_btn_->setChecked(a == Qt::AlignTop); align_middle_btn_->setChecked(a == Qt::AlignVCenter); align_bottom_btn_->setChecked(a == Qt::AlignBottom); } void ViewerTextEditorToolBar::SetColor(const QColor &c) { color_btn_->setProperty("color", c); color_btn_->setStyleSheet(QStringLiteral("QPushButton { background: %1; }").arg(c.name())); } void ViewerTextEditorToolBar::closeEvent(QCloseEvent *event) { event->ignore(); } void ViewerTextEditorToolBar::paintEvent(QPaintEvent *event) { if (!painted_) { emit FirstPaint(); painted_ = true; } QWidget::paintEvent(event); } void ViewerTextEditorToolBar::AddSpacer(QLayout 
*l) { const int spacing = this->fontMetrics().height()/4; QWidget *a = new QWidget(); a->setFixedSize(spacing, 1); l->addWidget(a); l->addWidget(QtUtils::CreateVerticalLine()); QWidget *b = new QWidget(); b->setFixedSize(spacing, 1); l->addWidget(b); } void ViewerTextEditorToolBar::UpdateFontStyleList(const QString &family) { QString temp = style_combo_->currentText(); style_combo_->blockSignals(true); style_combo_->clear(); QStringList l = QFontDatabase().styles(family); foreach (const QString &style, l) { style_combo_->addItem(style); } style_combo_->setCurrentText(temp); style_combo_->blockSignals(false); } void ViewerTextEditorToolBar::UpdateFontStyleListAndEmitFamilyChanged(const QString &family) { // Ensures correct ordering of commands UpdateFontStyleList(family); emit FamilyChanged(family); } void ViewerTextEditorToolBar::mousePressEvent(QMouseEvent *event) { QWidget::mousePressEvent(event); if (event->button() == Qt::LeftButton && drag_enabled_) { drag_anchor_ = event->pos(); } } void ViewerTextEditorToolBar::mouseMoveEvent(QMouseEvent *event) { QWidget::mouseMoveEvent(event); if ((event->buttons() & Qt::LeftButton) && drag_enabled_) { this->move(mapToParent(QPoint(event->pos() - drag_anchor_))); } } void ViewerTextEditorToolBar::mouseReleaseEvent(QMouseEvent *event) { QWidget::mouseReleaseEvent(event); } }
20,217
C++
.cpp
475
38.964211
148
0.738592
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,829
viewerpreventsleep.cpp
olive-editor_olive/app/widget/viewer/viewerpreventsleep.cpp
#include "viewerpreventsleep.h" #include <QtGlobal> #if defined(Q_OS_WINDOWS) #include <windows.h> #elif defined(Q_OS_MAC) #include <IOKit/pwr_mgt/IOPMLib.h> #elif defined(Q_OS_LINUX) #include <QtDBus/QtDBus> #endif namespace olive { #if defined(Q_OS_MAC) IOPMAssertionID assertionID = 0; #elif defined(Q_OS_LINUX) #endif void PreventSleep(bool on) { #if defined(Q_OS_WINDOWS) SetThreadExecutionState(on ? ES_DISPLAY_REQUIRED | ES_CONTINUOUS : ES_CONTINUOUS); #elif defined(Q_OS_MAC) if (on) { static const CFStringRef reasonForActivity = CFSTR("Video Playback"); IOPMAssertionCreateWithName(kIOPMAssertionTypeNoDisplaySleep, kIOPMAssertionLevelOn, reasonForActivity, &assertionID); } else if (assertionID) { IOPMAssertionRelease(assertionID); assertionID = 0; } #elif defined(Q_OS_LINUX) QDBusConnection bus = QDBusConnection::sessionBus(); if(bus.isConnected()) { static const QStringList sleep_services = { QStringLiteral("org.freedesktop.ScreenSaver"), //QStringLiteral("org.gnome.SessionManager") }; static const QStringList sleep_paths = { QStringLiteral("/org/freedesktop/ScreenSaver"), //QStringLiteral("/org/gnome/SessionManager") }; static QVector<uint> sleep_cookies; // Initialize vector to 0 if (sleep_cookies.isEmpty()) { sleep_cookies.resize(sleep_services.size()); sleep_cookies.fill(0); } for (int i=0; i<sleep_cookies.size(); i++) { QDBusInterface interface(sleep_services.at(i), sleep_paths.at(i), sleep_services.at(i), bus); if (interface.isValid()) { QDBusReply<uint> reply; if (on) { reply = interface.call(QStringLiteral("Inhibit"), QStringLiteral("Olive Video Editor"), QStringLiteral("Video Playback")); } else { reply = interface.call(QStringLiteral("UnInhibit"), sleep_cookies.at(i)); } if (reply.isValid()) { sleep_cookies[i] = reply.value(); } } } } #endif } }
2,036
C++
.cpp
62
27.870968
132
0.686035
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false
18,830
footageviewer.cpp
olive-editor_olive/app/widget/viewer/footageviewer.cpp
/*** Olive - Non-Linear Video Editor Copyright (C) 2022 Olive Team This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ***/ #include "footageviewer.h" #include <QDrag> #include <QMimeData> #include "config/config.h" #include "node/project.h" namespace olive { #define super ViewerWidget FootageViewerWidget::FootageViewerWidget(QWidget *parent) : super(parent) { connect(display_widget(), &ViewerDisplayWidget::DragStarted, this, &FootageViewerWidget::StartFootageDrag); controls_->SetAudioVideoDragButtonsVisible(true); connect(controls_, &PlaybackControls::VideoClicked, this, &FootageViewerWidget::VideoButtonClicked); connect(controls_, &PlaybackControls::AudioClicked, this, &FootageViewerWidget::AudioButtonClicked); connect(controls_, &PlaybackControls::VideoDragged, this, &FootageViewerWidget::StartVideoDrag); connect(controls_, &PlaybackControls::AudioDragged, this, &FootageViewerWidget::StartAudioDrag); override_workarea_ = new TimelineWorkArea(this); } void FootageViewerWidget::OverrideWorkArea(const TimeRange &r) { override_workarea_->set_enabled(true); override_workarea_->set_range(r); this->ConnectWorkArea(override_workarea_); } void FootageViewerWidget::ResetWorkArea() { if (GetConnectedWorkArea() == override_workarea_) { this->ConnectWorkArea(GetConnectedNode() ? 
GetConnectedNode()->GetWorkArea() : nullptr); } } void FootageViewerWidget::StartFootageDragInternal(bool enable_video, bool enable_audio) { if (!GetConnectedNode()) { return; } QDrag* drag = new QDrag(this); QMimeData* mimedata = new QMimeData(); QByteArray encoded_data; QDataStream data_stream(&encoded_data, QIODevice::WriteOnly); QVector<Track::Reference> streams = GetConnectedNode()->GetEnabledStreamsAsReferences(); // Disable streams that have been disabled if (!enable_video || !enable_audio) { for (int i=0; i<streams.size(); i++) { const Track::Reference& ref = streams.at(i); if ((ref.type() == Track::kVideo && !enable_video) || (ref.type() == Track::kAudio && !enable_audio)) { streams.removeAt(i); i--; } } } if (!streams.isEmpty()) { data_stream << streams << reinterpret_cast<quintptr>(GetConnectedNode()); mimedata->setData(Project::kItemMimeType, encoded_data); drag->setMimeData(mimedata); drag->exec(); } } void FootageViewerWidget::StartFootageDrag() { StartFootageDragInternal(true, true); } void FootageViewerWidget::StartVideoDrag() { StartFootageDragInternal(true, false); } void FootageViewerWidget::StartAudioDrag() { StartFootageDragInternal(false, true); } void FootageViewerWidget::VideoButtonClicked() { this->SetWaveformMode(kWFAutomatic); } void FootageViewerWidget::AudioButtonClicked() { this->SetWaveformMode(kWFWaveformOnly); } }
3,407
C++
.cpp
93
33.591398
109
0.760353
olive-editor/olive
8,144
552
131
GPL-3.0
9/20/2024, 9:26:25 PM (Europe/Amsterdam)
false
false
false
false
false
false
false
false