diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 77248fac..35be1009 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -7,7 +7,7 @@ Use your best judgement, and feel free to propose changes to this document in a ### raylib philosophy - - raylib is a tool to LEARN videogames programming, every single function in raylib should be a tutorial on itself. + - raylib is a tool to enjoy videogames programming, every single function in raylib should be a tutorial on itself. - raylib is SIMPLE and EASY-TO-USE, I tried to keep it compact with a small set of functions, if a function is too complex or is not clearly useful, better not to include it. - raylib is open source and free; educators and institutions can use this tool to TEACH videogames programming completely by free. - raylib is collaborative; contribution of tutorials / code-examples / bugs-solving / code-comments are highly appreciated. diff --git a/HELPME.md b/HELPME.md index 8ec0edb5..5d902649 100644 --- a/HELPME.md +++ b/HELPME.md @@ -18,7 +18,7 @@ with a small [donation](http://www.raylib.com/helpme.html) or contributing with raylib philosophy ------------------ - * raylib is a tool to LEARN videogames programming, every single function in raylib should be a tutorial on itself. + * raylib is a tool to enjoy videogames programming, every single function in raylib should be a tutorial on itself. * raylib is SIMPLE and EASY-TO-USE, I tried to keep it compact with a small set of functions, if a function is too complex or has not a clear usefulness, better not to include it. * raylib is open source and free; educators and institutions can use this tool to TEACH videogames programming completely by free. * raylib is collaborative; contribution of tutorials / code-examples / bugs-solving / code-comments are highly appreciated. diff --git a/README.md b/README.md index 4c4b8c91..2c4c7601 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@ -**raylib is a simple and easy-to-use library to learn videogames programming.** +**raylib is a simple and easy-to-use library to enjoy videogames programming.** raylib is highly inspired by Borland BGI graphics lib and by XNA framework. raylib could be useful for prototyping, tools development, graphic applications, embedded systems and education. -NOTE for ADVENTURERS: raylib is a programming library to learn videogames programming; +NOTE for ADVENTURERS: raylib is a programming library to enjoy videogames programming; no fancy interface, no visual helpers, no auto-debugging... just coding in the most pure spartan-programmers way. Are you ready to learn? 
Jump to [code examples!](http://www.raylib.com/examples.html) diff --git a/examples/text/text_font_sdf.c b/examples/text/text_font_sdf.c index c23a1e2e..755b642d 100644 --- a/examples/text/text_font_sdf.c +++ b/examples/text/text_font_sdf.c @@ -29,19 +29,18 @@ int main() fontDefault.baseSize = 16; fontDefault.charsCount = 95; // Parameters > font size: 16, no chars array provided (0), chars count: 95 (autogenerate chars array) - fontDefault.chars = LoadFontData("resources/AnonymousPro-Bold.ttf", 16, 0, 95, false); + fontDefault.chars = LoadFontData("resources/AnonymousPro-Bold.ttf", 16, 0, 95, FONT_DEFAULT); // Parameters > chars count: 95, font size: 16, chars padding in image: 4 px, pack method: 0 (default) Image atlas = GenImageFontAtlas(fontDefault.chars, 95, 16, 4, 0); fontDefault.texture = LoadTextureFromImage(atlas); UnloadImage(atlas); // SDF font generation from TTF font - // NOTE: SDF chars data is generated with LoadFontData(), it's just a bool option Font fontSDF = { 0 }; fontSDF.baseSize = 16; fontSDF.charsCount = 95; // Parameters > font size: 16, no chars array provided (0), chars count: 0 (defaults to 95) - fontSDF.chars = LoadFontData("resources/AnonymousPro-Bold.ttf", 16, 0, 0, true); + fontSDF.chars = LoadFontData("resources/AnonymousPro-Bold.ttf", 16, 0, 0, FONT_SDF); // Parameters > chars count: 95, font size: 16, chars padding in image: 0 px, pack method: 1 (Skyline algorythm) atlas = GenImageFontAtlas(fontSDF.chars, 95, 16, 0, 1); fontSDF.texture = LoadTextureFromImage(atlas); diff --git a/examples/textures/textures_image_text.c b/examples/textures/textures_image_text.c index c69f0f55..fb99e827 100644 --- a/examples/textures/textures_image_text.c +++ b/examples/textures/textures_image_text.c @@ -26,7 +26,7 @@ int main() Image parrots = LoadImage("resources/parrots.png"); // Load image in CPU memory (RAM) // Draw over image using custom font - ImageDrawTextEx(&parrots, (Vector2){ 20.0f, 20.0f }, font, "[Parrots font drawing]", (float)font.baseSize, 0.0f, WHITE); + ImageDrawTextEx(&parrots, (Vector2){ 20.0f, 20.0f }, font, "[Parrots font drawing]", (float)font.baseSize, 0.0f, RED); Texture2D texture = LoadTextureFromImage(parrots); // Image converted to texture, uploaded to GPU memory (VRAM) UnloadImage(parrots); // Once image has been converted to texture and uploaded to VRAM, it can be unloaded from RAM diff --git a/games/just_do/screens/screen_logo.c b/games/just_do/screens/screen_logo.c index ab078289..1c9a9b49 100644 --- a/games/just_do/screens/screen_logo.c +++ b/games/just_do/screens/screen_logo.c @@ -37,7 +37,7 @@ static int framesCounter; static int finishScreen; const char msgLogoA[64] = "A simple and easy-to-use library"; -const char msgLogoB[64] = "to learn videogames programming"; +const char msgLogoB[64] = "to enjoy videogames programming"; int logoPositionX; int logoPositionY; diff --git a/games/koala_seasons/screens/screen_logo.c b/games/koala_seasons/screens/screen_logo.c index a3035b30..55785c3a 100644 --- a/games/koala_seasons/screens/screen_logo.c +++ b/games/koala_seasons/screens/screen_logo.c @@ -37,7 +37,7 @@ static int framesCounter; static int finishScreen; const char msgLogoA[64] = "A simple and easy-to-use library"; -const char msgLogoB[64] = "to learn videogames programming"; +const char msgLogoB[64] = "to enjoy videogames programming"; int logoPositionX; int logoPositionY; diff --git a/raylib.pc.in b/raylib.pc.in index c6d54389..837b2d9a 100644 --- a/raylib.pc.in +++ b/raylib.pc.in @@ -4,7 +4,7 @@ libdir=${exec_prefix}/lib 
includedir=${prefix}/include Name: raylib -Description: Simple and easy-to-use library to learn videogames programming +Description: Simple and easy-to-use library to enjoy videogames programming URL: http://github.com/raysan5/raylib Version: @PROJECT_VERSION@ Libs: -L${libdir} -lraylib @PKG_CONFIG_LIBS_EXTRA@ diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 5581cc7a..c86cc0e7 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -259,7 +259,7 @@ message(STATUS " GRAPHICS=" ${GRAPHICS}) # Packaging SET(CPACK_PACKAGE_NAME "raylib") -SET(CPACK_PACKAGE_DESCRIPTION_SUMMARY "Simple and easy-to-use library to learn videogames programming") +SET(CPACK_PACKAGE_DESCRIPTION_SUMMARY "Simple and easy-to-use library to enjoy videogames programming") SET(CPACK_PACKAGE_VERSION "${PROJECT_VERSION}") SET(CPACK_PACKAGE_VERSION_MAJOR "${PROJECT_VERSION_MAJOR}") SET(CPACK_PACKAGE_VERSION_MINOR "${PROJECT_VERSION_MINOR}") diff --git a/src/CMakeOptions.txt b/src/CMakeOptions.txt index c8af55a0..84643b28 100644 --- a/src/CMakeOptions.txt +++ b/src/CMakeOptions.txt @@ -45,6 +45,7 @@ option(SUPPORT_FONT_TEXTURE "Draw rectangle shapes using font texture white char option(SUPPORT_QUADS_DRAW_MODE "Use QUADS instead of TRIANGLES for drawing when possible. Some lines-based shapes could still use lines" ON) # textures.c +option(SUPPORT_IMAGE_EXPORT "Support image exporting to file" ON) option(SUPPORT_IMAGE_GENERATION "Support procedural image generation functionality (gradient, spot, perlin-noise, cellular)" ON) option(SUPPORT_FILEFORMAT_PNG "Support loading PNG as textures" ON) option(SUPPORT_FILEFORMAT_DDS "Support loading DDS as textures" ON) @@ -72,8 +73,6 @@ option(SUPPORT_FILEFORMAT_MOD "Support loading MOD for sound" ON) option(SUPPORT_FILEFORMAT_FLAC "Support loading FLAC for sound" ${OFF}) # utils.c -option(SUPPORT_SAVE_PNG "Support saving image data in PNG file format" ON) -option(SUPPORT_SAVE_BMP "Support saving image data in BMP file format" ${OFF}) option(SUPPORT_TRACELOG "Show TraceLog() output messages. 
NOTE: By default LOG_DEBUG traces not shown" ON) option(SUPPORT_FILEFORMAT_FNT "Support loading fonts in FNT format" ON) diff --git a/src/audio.c b/src/audio.c index 274134b2..9e44d709 100644 --- a/src/audio.c +++ b/src/audio.c @@ -221,6 +221,9 @@ static Wave LoadOGG(const char *fileName); // Load OGG file #if defined(SUPPORT_FILEFORMAT_FLAC) static Wave LoadFLAC(const char *fileName); // Load FLAC file #endif +#if defined(SUPPORT_FILEFORMAT_MP3) +static Wave LoadMP3(const char *fileName); // Load MP3 file +#endif #if defined(AUDIO_STANDALONE) bool IsFileExtension(const char *fileName, const char *ext); // Check file extension @@ -857,6 +860,9 @@ Wave LoadWave(const char *fileName) #endif #if defined(SUPPORT_FILEFORMAT_FLAC) else if (IsFileExtension(fileName, ".flac")) wave = LoadFLAC(fileName); +#endif +#if defined(SUPPORT_FILEFORMAT_MP3) + else if (IsFileExtension(fileName, ".mp3")) wave = LoadMP3(fileName); #endif else TraceLog(LOG_WARNING, "[%s] Audio fileformat not supported, it can't be loaded", fileName); @@ -1049,6 +1055,89 @@ void UpdateSound(Sound sound, const void *data, int samplesCount) #endif } +// Export wave data to file +void ExportWave(Wave wave, const char *fileName) +{ + bool success = false; + + if (IsFileExtension(fileName, ".wav")) + { + // Basic WAV headers structs + typedef struct { + char chunkID[4]; + int chunkSize; + char format[4]; + } RiffHeader; + + typedef struct { + char subChunkID[4]; + int subChunkSize; + short audioFormat; + short numChannels; + int sampleRate; + int byteRate; + short blockAlign; + short bitsPerSample; + } WaveFormat; + + typedef struct { + char subChunkID[4]; + int subChunkSize; + } WaveData; + + RiffHeader riffHeader; + WaveFormat waveFormat; + WaveData waveData; + + // Fill structs with data + riffHeader.chunkID[0] = 'R'; + riffHeader.chunkID[1] = 'I'; + riffHeader.chunkID[2] = 'F'; + riffHeader.chunkID[3] = 'F'; + riffHeader.chunkSize = 44 - 4 + wave.sampleCount*wave.sampleSize/8; + riffHeader.format[0] = 'W'; + riffHeader.format[1] = 'A'; + riffHeader.format[2] = 'V'; + riffHeader.format[3] = 'E'; + + waveFormat.subChunkID[0] = 'f'; + waveFormat.subChunkID[1] = 'm'; + waveFormat.subChunkID[2] = 't'; + waveFormat.subChunkID[3] = ' '; + waveFormat.subChunkSize = 16; + waveFormat.audioFormat = 1; + waveFormat.numChannels = wave.channels; + waveFormat.sampleRate = wave.sampleRate; + waveFormat.byteRate = wave.sampleRate*wave.sampleSize/8; + waveFormat.blockAlign = wave.sampleSize/8; + waveFormat.bitsPerSample = wave.sampleSize; + + waveData.subChunkID[0] = 'd'; + waveData.subChunkID[1] = 'a'; + waveData.subChunkID[2] = 't'; + waveData.subChunkID[3] = 'a'; + waveData.subChunkSize = wave.sampleCount*wave.channels*wave.sampleSize/8; + + FILE *wavFile = fopen(fileName, "wb"); + + if (wavFile == NULL) return; + + fwrite(&riffHeader, 1, sizeof(RiffHeader), wavFile); + fwrite(&waveFormat, 1, sizeof(WaveFormat), wavFile); + fwrite(&waveData, 1, sizeof(WaveData), wavFile); + + fwrite(wave.data, 1, wave.sampleCount*wave.channels*wave.sampleSize/8, wavFile); + + fclose(wavFile); + + success = true; + } + else if (IsFileExtension(fileName, ".raw")) { } // TODO: Support additional file formats to export wave sample data + + if (success) TraceLog(LOG_INFO, "Wave exported successfully: %s", fileName); + else TraceLog(LOG_WARNING, "Wave could not be exported."); +} + // Play a sound void PlaySound(Sound sound) { @@ -1565,6 +1654,9 @@ void StopMusicStream(Music music) #if defined(SUPPORT_FILEFORMAT_FLAC) case MUSIC_AUDIO_FLAC: /* TODO: Restart FLAC 
context */ break; #endif +#if defined(SUPPORT_FILEFORMAT_MP3) + case MUSIC_AUDIO_MP3: /* TODO: Restart MP3 context */ break; +#endif #if defined(SUPPORT_FILEFORMAT_XM) case MUSIC_MODULE_XM: /* TODO: Restart XM context */ break; #endif @@ -1705,6 +1797,13 @@ void UpdateMusicStream(Music music) } break; #endif + #if defined(SUPPORT_FILEFORMAT_MP3) + case MUSIC_AUDIO_MP3: + { + // NOTE: Returns the number of samples to process + unsigned int numSamplesMp3 = (unsigned int)drmp3_read_f32(&music->ctxMp3, samplesCount*music->stream.channels, (float *)pcm); + } break; + #endif #if defined(SUPPORT_FILEFORMAT_XM) case MUSIC_MODULE_XM: jar_xm_generate_samples_16bit(music->ctxXm, pcm, samplesCount); break; #endif @@ -2294,6 +2393,33 @@ static Wave LoadFLAC(const char *fileName) } #endif +#if defined(SUPPORT_FILEFORMAT_MP3) +// Load MP3 file into Wave structure +// NOTE: Using dr_mp3 library +static Wave LoadMP3(const char *fileName) +{ + Wave wave; + + // Decode an entire MP3 file in one go + uint64_t totalSampleCount; + drmp3_config *config; + wave.data = drmp3_open_and_decode_file_f32(fileName, config, &totalSampleCount); + + wave.channels = config->outputChannels; + wave.sampleRate = config->outputSampleRate; + wave.sampleCount = (int)totalSampleCount/wave.channels; + wave.sampleSize = 16; + + // NOTE: Only support up to 2 channels (mono, stereo) + if (wave.channels > 2) TraceLog(LOG_WARNING, "[%s] MP3 channels number (%i) not supported", fileName, wave.channels); + + if (wave.data == NULL) TraceLog(LOG_WARNING, "[%s] MP3 data could not be loaded", fileName); + else TraceLog(LOG_INFO, "[%s] MP3 file loaded successfully (%i Hz, %i bit, %s)", fileName, wave.sampleRate, wave.sampleSize, (wave.channels == 1) ? "Mono" : "Stereo"); + + return wave; +} +#endif + // Some required functions for audio standalone module version #if defined(AUDIO_STANDALONE) // Check file extension diff --git a/src/config.h b/src/config.h index 317df46b..c2238a79 100644 --- a/src/config.h +++ b/src/config.h @@ -86,6 +86,8 @@ //#define SUPPORT_FILEFORMAT_PKM 1 //#define SUPPORT_FILEFORMAT_PVR 1 +// Support image export functionality (.png, .bmp, .tga, .jpg) +#define SUPPORT_IMAGE_EXPORT 1 // Support multiple image editing functions to scale, adjust colors, flip, draw on images, crop... // If not defined only three image editing functions supported: ImageFormat(), ImageAlphaMask(), ImageToPOT() #define SUPPORT_IMAGE_MANIPULATION 1 @@ -133,10 +135,6 @@ // Show TraceLog() output messages // NOTE: By default LOG_DEBUG traces not shown #define SUPPORT_TRACELOG 1 -// Support saving image data fileformats -// NOTE: Requires stb_image_write library -#define SUPPORT_SAVE_PNG 1 -//#define SUPPORT_SAVE_BMP 1 #endif //defined(RAYLIB_CMAKE) diff --git a/src/config.h.in b/src/config.h.in index f9d474cd..742067ce 100644 --- a/src/config.h.in +++ b/src/config.h.in @@ -41,6 +41,9 @@ #cmakedefine SUPPORT_FILEFORMAT_PKM 1 #cmakedefine SUPPORT_FILEFORMAT_PVR 1 +// Support image export functionality (.png, .bmp, .tga, .jpg) +#define SUPPORT_IMAGE_EXPORT 1 + /* Support multiple image editing functions to scale, adjust colors, flip, draw on images, crop... If not defined only three image editing functions supported: ImageFormat(), ImageAlphaMask(), ImageToPOT() */ #cmakedefine SUPPORT_IMAGE_MANIPULATION 1 diff --git a/src/core.c b/src/core.c index 3cd75f64..fc1a5a09 100644 --- a/src/core.c +++ b/src/core.c @@ -886,7 +886,7 @@ void EndDrawing(void) // Wait for some milliseconds... 
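A minimal usage sketch for the new audio paths added above (editor's illustration, not part of this patch): LoadWave() dispatches to LoadMP3() for .mp3 files when SUPPORT_FILEFORMAT_MP3 is defined, and ExportWave() writes the decoded samples back out using the WAV header assembled earlier. The file names are assumptions for illustration only.

    #include "raylib.h"

    int main(void)
    {
        // Decode an MP3 into a Wave (handled internally by LoadMP3() through dr_mp3)
        Wave wave = LoadWave("resources/target.mp3");

        // Write the decoded sample data back out as .wav using the new exporter
        if (wave.data != NULL) ExportWave(wave, "target_copy.wav");

        UnloadWave(wave);    // Free sample data once exported

        return 0;
    }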
if (frameTime < targetTime) { - Wait( (float)(targetTime - frameTime)*1000.0f); + Wait((float)(targetTime - frameTime)*1000.0f); currentTime = GetTime(); double extraTime = currentTime - previousTime; @@ -1297,7 +1297,9 @@ void TakeScreenshot(const char *fileName) { #if defined(PLATFORM_DESKTOP) || defined(PLATFORM_RPI) unsigned char *imgData = rlReadScreenPixels(renderWidth, renderHeight); - SavePNG(fileName, imgData, renderWidth, renderHeight, 4); // Save image as PNG + + Image image = { imgData, renderWidth, renderHeight, 1, UNCOMPRESSED_R8G8B8A8 }; + ExportImage(image, fileName); free(imgData); TraceLog(LOG_INFO, "Screenshot taken: %s", fileName); diff --git a/src/external/cgltf.h b/src/external/cgltf.h new file mode 100644 index 00000000..ed04d545 --- /dev/null +++ b/src/external/cgltf.h @@ -0,0 +1,2023 @@ +#ifndef CGLTF_H_INCLUDED__ +#define CGLTF_H_INCLUDED__ + +#ifdef __cplusplus +extern "C" { +#endif + + +typedef unsigned long cgltf_size; +typedef float cgltf_float; +typedef int cgltf_bool; + +typedef enum cgltf_file_type +{ + cgltf_file_type_invalid, + cgltf_file_type_gltf, + cgltf_file_type_glb, +} cgltf_file_type; + +typedef struct cgltf_options +{ + cgltf_file_type type; + cgltf_size json_token_count; /* 0 == auto */ + void* (*memory_alloc)(void* user, cgltf_size size); + void (*memory_free) (void* user, void* ptr); + void* memory_user_data; +} cgltf_options; + +typedef enum cgltf_result +{ + cgltf_result_success, + cgltf_result_data_too_short, + cgltf_result_unknown_format, + cgltf_result_invalid_json, + cgltf_result_invalid_options, +} cgltf_result; + +typedef enum cgltf_buffer_view_type +{ + cgltf_buffer_view_type_invalid, + cgltf_buffer_view_type_indices, + cgltf_buffer_view_type_vertices, +} cgltf_buffer_view_type; + +typedef enum cgltf_attribute_type +{ + cgltf_attribute_type_invalid, + cgltf_attribute_type_position, + cgltf_attribute_type_normal, + cgltf_attribute_type_tangent, + cgltf_attribute_type_texcoord_0, + cgltf_attribute_type_texcoord_1, + cgltf_attribute_type_color_0, + cgltf_attribute_type_joints_0, + cgltf_attribute_type_weights_0, +} cgltf_attribute_type; + +typedef enum cgltf_component_type +{ + cgltf_component_type_invalid, + cgltf_component_type_rgb_32f, + cgltf_component_type_rgba_32f, + cgltf_component_type_rg_32f, + cgltf_component_type_rg_8, + cgltf_component_type_rg_16, + cgltf_component_type_rgba_8, + cgltf_component_type_rgba_16, + cgltf_component_type_r_8, + cgltf_component_type_r_8u, + cgltf_component_type_r_16, + cgltf_component_type_r_16u, + cgltf_component_type_r_32u, + cgltf_component_type_r_32f, +} cgltf_component_type; + +typedef enum cgltf_type +{ + cgltf_type_invalid, + cgltf_type_scalar, + cgltf_type_vec2, + cgltf_type_vec3, + cgltf_type_vec4, + cgltf_type_mat2, + cgltf_type_mat3, + cgltf_type_mat4, +} cgltf_type; + +typedef enum cgltf_primitive_type +{ + cgltf_type_points, + cgltf_type_lines, + cgltf_type_line_loop, + cgltf_type_line_strip, + cgltf_type_triangles, + cgltf_type_triangle_strip, + cgltf_type_triangle_fan, +} cgltf_primitive_type; + +typedef struct cgltf_buffer +{ + cgltf_size size; + char* uri; +} cgltf_buffer; + +typedef struct cgltf_buffer_view +{ + cgltf_buffer* buffer; + cgltf_size offset; + cgltf_size size; + cgltf_size stride; /* 0 == automatically determined by accessor */ + cgltf_buffer_view_type type; +} cgltf_buffer_view; + +typedef struct cgltf_accessor +{ + cgltf_component_type component_type; + cgltf_type type; + cgltf_size offset; + cgltf_size count; + cgltf_size stride; + cgltf_buffer_view* buffer_view; +} 
cgltf_accessor; + +typedef struct cgltf_attribute +{ + cgltf_attribute_type name; + cgltf_accessor* data; +} cgltf_attribute; + + +typedef struct cgltf_rgba +{ + cgltf_float r; + cgltf_float g; + cgltf_float b; + cgltf_float a; +} cgltf_rgba; + +typedef struct cgltf_image +{ + char* uri; + cgltf_buffer_view* buffer_view; + char* mime_type; +} cgltf_image; + +typedef struct cgltf_sampler +{ + cgltf_float mag_filter; + cgltf_float min_filter; + cgltf_float wrap_s; + cgltf_float wrap_t; +} cgltf_sampler; + +typedef struct cgltf_texture +{ + cgltf_image* image; + cgltf_sampler* sampler; +} cgltf_texture; + +typedef struct cgltf_texture_view +{ + cgltf_texture* texture; + cgltf_size texcoord; + cgltf_float scale; +} cgltf_texture_view; + +typedef struct cgltf_pbr +{ + cgltf_texture_view base_color_texture; + cgltf_texture_view metallic_roughness_texture; + + cgltf_rgba base_color; + cgltf_float metallic_factor; + cgltf_float roughness_factor; +} cgltf_pbr; + +typedef struct cgltf_material +{ + char* name; + cgltf_pbr pbr; + cgltf_rgba emissive_color; + cgltf_texture_view normal_texture; + cgltf_texture_view emissive_texture; + cgltf_texture_view occlusion_texture; + cgltf_bool double_sided; +} cgltf_material; + +typedef struct cgltf_primitive { + cgltf_primitive_type type; + cgltf_accessor* indices; + cgltf_material* material; + cgltf_attribute* attributes; + cgltf_size attributes_count; +} cgltf_primitive; + +typedef struct cgltf_mesh { + char* name; + cgltf_primitive* primitives; + cgltf_size primitives_count; +} cgltf_mesh; + +typedef struct cgltf_data +{ + unsigned version; + cgltf_file_type file_type; + + cgltf_mesh* meshes; + cgltf_size meshes_count; + + cgltf_material* materials; + cgltf_size materials_count; + + cgltf_accessor* accessors; + cgltf_size accessors_count; + + cgltf_buffer_view* buffer_views; + cgltf_size buffer_views_count; + + cgltf_buffer* buffers; + cgltf_size buffers_count; + + cgltf_image* images; + cgltf_size images_count; + + cgltf_texture* textures; + cgltf_size textures_count; + + cgltf_sampler* samplers; + cgltf_size samplers_count; + + const void* bin; + cgltf_size bin_size; + + void (*memory_free) (void* user, void* ptr); + void* memory_user_data; +} cgltf_data; + +cgltf_result cgltf_parse( + const cgltf_options* options, + const void* data, + cgltf_size size, + cgltf_data* out_data); + +void cgltf_free(cgltf_data* data); + +#endif /* #ifndef CGLTF_H_INCLUDED__ */ + +/* + * + * Stop now, if you are only interested in the API. + * Below, you find the implementation. + * + */ + +#ifdef __INTELLISENSE__ +/* This makes MSVC intellisense work. 
*/ +#define CGLTF_IMPLEMENTATION +#endif + +#ifdef CGLTF_IMPLEMENTATION + +#include /* For uint8_t, uint32_t */ +#include /* For strncpy */ +#include /* For malloc, free */ + + +/* + * -- jsmn.h start -- + * Source: https://github.com/zserge/jsmn + * License: MIT + */ +typedef enum { + JSMN_UNDEFINED = 0, + JSMN_OBJECT = 1, + JSMN_ARRAY = 2, + JSMN_STRING = 3, + JSMN_PRIMITIVE = 4 +} jsmntype_t; +enum jsmnerr { + /* Not enough tokens were provided */ + JSMN_ERROR_NOMEM = -1, + /* Invalid character inside JSON string */ + JSMN_ERROR_INVAL = -2, + /* The string is not a full JSON packet, more bytes expected */ + JSMN_ERROR_PART = -3 +}; +typedef struct { + jsmntype_t type; + int start; + int end; + int size; +#ifdef JSMN_PARENT_LINKS + int parent; +#endif +} jsmntok_t; +typedef struct { + unsigned int pos; /* offset in the JSON string */ + unsigned int toknext; /* next token to allocate */ + int toksuper; /* superior token node, e.g parent object or array */ +} jsmn_parser; +void jsmn_init(jsmn_parser *parser); +int jsmn_parse(jsmn_parser *parser, const char *js, size_t len, jsmntok_t *tokens, unsigned int num_tokens); +/* + * -- jsmn.h end -- + */ + + +static const cgltf_size GltfHeaderSize = 12; +static const cgltf_size GltfChunkHeaderSize = 8; +static const uint32_t GltfMagic = 0x46546C67; +static const uint32_t GltfMagicJsonChunk = 0x4E4F534A; +static const uint32_t GltfMagicBinChunk = 0x004E4942; + +static void* cgltf_mem_alloc(void* user, cgltf_size size) +{ + return malloc(size); +} + +static void cgltf_mem_free(void* user, void* ptr) +{ + free(ptr); +} + +static cgltf_result cgltf_parse_json(cgltf_options* options, const uint8_t* json_chunk, cgltf_size size, cgltf_data* out_data); + +cgltf_result cgltf_parse(const cgltf_options* options, const void* data, cgltf_size size, cgltf_data* out_data) +{ + if (size < GltfHeaderSize) + { + return cgltf_result_data_too_short; + } + + if (options == NULL) + { + return cgltf_result_invalid_options; + } + + cgltf_options fixed_options = *options; + if (fixed_options.memory_alloc == NULL) + { + fixed_options.memory_alloc = &cgltf_mem_alloc; + } + if (fixed_options.memory_free == NULL) + { + fixed_options.memory_free = &cgltf_mem_free; + } + + uint32_t tmp; + // Magic + memcpy(&tmp, data, 4); + if (tmp != GltfMagic) + { + if (fixed_options.type == cgltf_file_type_invalid) + { + fixed_options.type = cgltf_file_type_gltf; + } + else + { + return cgltf_result_unknown_format; + } + } + + memset(out_data, 0, sizeof(cgltf_data)); + out_data->memory_free = fixed_options.memory_free; + out_data->memory_user_data = fixed_options.memory_user_data; + + if (fixed_options.type == cgltf_file_type_gltf) + { + out_data->file_type = cgltf_file_type_gltf; + return cgltf_parse_json(&fixed_options, data, size, out_data); + } + + const uint8_t* ptr = (const uint8_t*)data; + // Version + memcpy(&tmp, ptr + 4, 4); + out_data->version = tmp; + + // Total length + memcpy(&tmp, ptr + 8, 4); + if (tmp > size) + { + return cgltf_result_data_too_short; + } + + const uint8_t* json_chunk = ptr + GltfHeaderSize; + + // JSON chunk: length + uint32_t json_length; + memcpy(&json_length, json_chunk, 4); + if (GltfHeaderSize + GltfChunkHeaderSize + json_length > size) + { + return cgltf_result_data_too_short; + } + + // JSON chunk: magic + memcpy(&tmp, json_chunk + 4, 4); + if (tmp != GltfMagicJsonChunk) + { + return cgltf_result_unknown_format; + } + + json_chunk += GltfChunkHeaderSize; + cgltf_result json_result = cgltf_parse_json(&fixed_options, json_chunk, json_length, out_data); 
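+    /*
+       Note added for clarity (not in the original header): the constants used above describe
+       the GLB container layout being parsed here.
+
+         12-byte file header : uint32 magic (0x46546C67 == "glTF"), uint32 version, uint32 length
+          8-byte chunk header: uint32 chunkLength, uint32 chunkType
+                               (0x4E4F534A == "JSON", 0x004E4942 == "BIN\0")
+
+       The JSON chunk validated above may be followed by a binary chunk, which the code below
+       exposes through out_data->bin / out_data->bin_size without copying it.
+    */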
+ if (json_result != cgltf_result_success) + { + return json_result; + } + + out_data->file_type = cgltf_file_type_invalid; + if (GltfHeaderSize + GltfChunkHeaderSize + json_length + GltfChunkHeaderSize <= size) + { + // We can read another chunk + const uint8_t* bin_chunk = json_chunk + json_length; + + // Bin chunk: length + uint32_t bin_length; + memcpy(&bin_length, bin_chunk, 4); + if (GltfHeaderSize + GltfChunkHeaderSize + json_length + GltfChunkHeaderSize + bin_length > size) + { + return cgltf_result_data_too_short; + } + + // Bin chunk: magic + memcpy(&tmp, bin_chunk + 4, 4); + if (tmp != GltfMagicBinChunk) + { + return cgltf_result_unknown_format; + } + + bin_chunk += GltfChunkHeaderSize; + + out_data->file_type = cgltf_file_type_glb; + out_data->bin = bin_chunk; + out_data->bin_size = bin_length; + } + + return cgltf_result_success; +} + +void cgltf_free(cgltf_data* data) +{ + data->memory_free(data->memory_user_data, data->accessors); + data->memory_free(data->memory_user_data, data->buffer_views); + + + for (cgltf_size i = 0; i < data->buffers_count; ++i) + { + data->memory_free(data->memory_user_data, data->buffers[i].uri); + } + data->memory_free(data->memory_user_data, data->buffers); + + for (cgltf_size i = 0; i < data->meshes_count; ++i) + { + data->memory_free(data->memory_user_data, data->meshes[i].name); + for (cgltf_size j = 0; j < data->meshes[i].primitives_count; ++j) + { + data->memory_free(data->memory_user_data, data->meshes[i].primitives[j].attributes); + } + data->memory_free(data->memory_user_data, data->meshes[i].primitives); + } + data->memory_free(data->memory_user_data, data->meshes); + + for (cgltf_size i = 0; i < data->materials_count; ++i) + { + data->memory_free(data->memory_user_data, data->materials[i].name); + } + + data->memory_free(data->memory_user_data, data->materials); + + for (cgltf_size i = 0; i < data->images_count; ++i) + { + data->memory_free(data->memory_user_data, data->images[i].uri); + data->memory_free(data->memory_user_data, data->images[i].mime_type); + } + + data->memory_free(data->memory_user_data, data->images); + data->memory_free(data->memory_user_data, data->textures); + data->memory_free(data->memory_user_data, data->samplers); +} + +#define CGLTF_CHECK_TOKTYPE(tok_, type_) if ((tok_).type != (type_)) { return -128; } + +static char cgltf_to_lower(char c) +{ + if (c >= 'A' && c <= 'Z') + { + c = 'a' + (c - 'A'); + } + return c; +} + +static int cgltf_json_strcmp(jsmntok_t const* tok, const uint8_t* json_chunk, const char* str) +{ + CGLTF_CHECK_TOKTYPE(*tok, JSMN_STRING); + int const str_len = strlen(str); + int const name_length = tok->end - tok->start; + if (name_length == str_len) + { + for (int i = 0; i < str_len; ++i) + { + char const a = cgltf_to_lower(*((const char*)json_chunk + tok->start + i)); + char const b = cgltf_to_lower(*(str + i)); + if (a < b) + { + return -1; + } + else if (a > b) + { + return 1; + } + } + return 0; + } + return 128; +} + +static int cgltf_json_to_int(jsmntok_t const* tok, const uint8_t* json_chunk) +{ + char tmp[128]; + CGLTF_CHECK_TOKTYPE(*tok, JSMN_PRIMITIVE); + int size = tok->end - tok->start; + strncpy(tmp, + (const char*)json_chunk + tok->start, + size); + tmp[size] = 0; + return atoi(tmp); +} + +static cgltf_float cgltf_json_to_float(jsmntok_t const* tok, const uint8_t* json_chunk) { + char tmp[128]; + CGLTF_CHECK_TOKTYPE(*tok, JSMN_PRIMITIVE); + int size = tok->end - tok->start; + strncpy(tmp, + (const char*)json_chunk + tok->start, + size); + tmp[size] = 0; + return atof(tmp); +} + 
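+/*
+   Worked example for the helpers above (editor's note, illustrative only): jsmn tokens store
+   [start, end) offsets into the JSON text rather than copies of it. For the input
+
+       {"count": 42}
+
+   jsmn produces   OBJECT    (start = 0,  end = 13, size = 1)
+                   STRING    (start = 2,  end = 7)    -> "count"
+                   PRIMITIVE (start = 10, end = 12)   -> "42"
+
+   so cgltf_json_to_int(&tokens[2], json_chunk) copies the two-byte slice "42" into its scratch
+   buffer and returns atoi("42") == 42. cgltf_json_to_float() and cgltf_json_strcmp() follow the
+   same offset-based pattern.
+*/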
+static cgltf_bool cgltf_json_to_bool(jsmntok_t const* tok, const uint8_t* json_chunk) { + //TODO: error handling? + if (memcmp(json_chunk + tok->start, "true", 4) == 0) + return 1; + + return 0; +} + +static int cgltf_skip_json(jsmntok_t const* tokens, int i) +{ + if (tokens[i].type == JSMN_ARRAY) + { + int size = tokens[i].size; + ++i; + for (int j = 0; j < size; ++j) + { + i = cgltf_skip_json(tokens, i); + } + } + else if (tokens[i].type == JSMN_OBJECT) + { + int size = tokens[i].size; + ++i; + for (int j = 0; j < size; ++j) + { + i = cgltf_skip_json(tokens, i); + i = cgltf_skip_json(tokens, i); + } + } + else if (tokens[i].type == JSMN_PRIMITIVE + || tokens[i].type == JSMN_STRING) + { + return i + 1; + } + return i; +} + + +static int cgltf_parse_json_primitive(cgltf_options* options, jsmntok_t const* tokens, int i, + const uint8_t* json_chunk, + cgltf_primitive* out_prim) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + + int size = tokens[i].size; + ++i; + + out_prim->indices = (void* )-1; + out_prim->material = NULL; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens+i, json_chunk, "mode") == 0) + { + ++i; + out_prim->type + = (cgltf_primitive_type) + cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "indices") == 0) + { + ++i; + out_prim->indices = + (void*)(size_t)cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "material") == 0) + { + ++i; + out_prim->material = + (void*)(size_t)cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "attributes") == 0) + { + ++i; + if (tokens[i].type != JSMN_OBJECT) + { + return -1; + } + out_prim->attributes_count = tokens[i].size; + out_prim->attributes + = options->memory_alloc(options->memory_user_data, sizeof(cgltf_attribute) * tokens[i].size); + ++i; + for (cgltf_size iattr = 0; iattr < out_prim->attributes_count; ++iattr) + { + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_STRING); + out_prim->attributes[iattr].name = cgltf_attribute_type_invalid; + if (cgltf_json_strcmp(tokens+i, json_chunk, "POSITION") == 0) + { + out_prim->attributes[iattr].name = cgltf_attribute_type_position; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "NORMAL") == 0) + { + out_prim->attributes[iattr].name = cgltf_attribute_type_normal; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "TANGENT") == 0) + { + out_prim->attributes[iattr].name = cgltf_attribute_type_tangent; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "TEXCOORD_0") == 0) + { + out_prim->attributes[iattr].name = cgltf_attribute_type_texcoord_0; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "TEXCOORD_1") == 0) + { + out_prim->attributes[iattr].name = cgltf_attribute_type_texcoord_1; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "COLOR_0") == 0) + { + out_prim->attributes[iattr].name = cgltf_attribute_type_color_0; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "JOINTS_0") == 0) + { + out_prim->attributes[iattr].name = cgltf_attribute_type_joints_0; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "WEIGHTS_0") == 0) + { + out_prim->attributes[iattr].name = cgltf_attribute_type_weights_0; + } + ++i; + out_prim->attributes[iattr].data = + (void*)(size_t)cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + } + else + { + i = cgltf_skip_json(tokens, i+1); + } + } + + return i; +} + +static int cgltf_parse_json_mesh(cgltf_options* options, jsmntok_t const* tokens, int i, + const uint8_t* 
json_chunk, cgltf_size mesh_index, + cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + + out_data->meshes[mesh_index].name = NULL; + + int size = tokens[i].size; + ++i; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens+i, json_chunk, "name") == 0) + { + ++i; + int strsize = tokens[i].end - tokens[i].start; + out_data->meshes[mesh_index].name = options->memory_alloc(options->memory_user_data, strsize + 1); + strncpy(out_data->meshes[mesh_index].name, + (const char*)json_chunk + tokens[i].start, + strsize); + out_data->meshes[mesh_index].name[strsize] = 0; + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "primitives") == 0) + { + ++i; + if (tokens[i].type != JSMN_ARRAY) + { + return -1; + } + out_data->meshes[mesh_index].primitives_count = tokens[i].size; + out_data->meshes[mesh_index].primitives = options->memory_alloc(options->memory_user_data, sizeof(cgltf_primitive) * tokens[i].size); + ++i; + + for (cgltf_size prim_index = 0; + prim_index < out_data->meshes[mesh_index].primitives_count; + ++prim_index) + { + i = cgltf_parse_json_primitive(options, tokens, i, json_chunk, + &out_data->meshes[mesh_index].primitives[prim_index]); + if (i < 0) + { + return i; + } + } + } + else + { + i = cgltf_skip_json(tokens, i+1); + } + } + + return i; +} + +static int cgltf_parse_json_meshes(cgltf_options* options, jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_ARRAY); + out_data->meshes_count = tokens[i].size; + out_data->meshes = options->memory_alloc(options->memory_user_data, sizeof(cgltf_mesh) * out_data->meshes_count); + ++i; + for (cgltf_size j = 0 ; j < out_data->meshes_count; ++j) + { + i = cgltf_parse_json_mesh(options, tokens, i, json_chunk, j, out_data); + if (i < 0) + { + return i; + } + } + return i; +} + +static int cgltf_parse_json_accessor(jsmntok_t const* tokens, int i, + const uint8_t* json_chunk, cgltf_size accessor_index, + cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + + memset(&out_data->accessors[accessor_index], 0, sizeof(cgltf_accessor)); + out_data->accessors[accessor_index].buffer_view = (void*)-1; + + int size = tokens[i].size; + ++i; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens+i, json_chunk, "bufferView") == 0) + { + ++i; + out_data->accessors[accessor_index].buffer_view = + (void*)(size_t)cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "byteOffset") == 0) + { + ++i; + out_data->accessors[accessor_index].offset = + cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "componentType") == 0) + { + ++i; + int type = cgltf_json_to_int(tokens+i, json_chunk); + switch (type) + { + case 5120: + type = cgltf_component_type_r_8; + break; + case 5121: + type = cgltf_component_type_r_8u; + break; + case 5122: + type = cgltf_component_type_r_16; + break; + case 5123: + type = cgltf_component_type_r_16u; + break; + case 5125: + type = cgltf_component_type_r_32u; + break; + case 5126: + type = cgltf_component_type_r_32f; + break; + default: + type = cgltf_component_type_invalid; + break; + } + out_data->accessors[accessor_index].component_type = type; + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "count") == 0) + { + ++i; + out_data->accessors[accessor_index].count = + cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "type") == 0) + { 
+ ++i; + if (cgltf_json_strcmp(tokens+i, json_chunk, "SCALAR") == 0) + { + out_data->accessors[accessor_index].type = cgltf_type_scalar; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "VEC2") == 0) + { + out_data->accessors[accessor_index].type = cgltf_type_vec2; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "VEC3") == 0) + { + out_data->accessors[accessor_index].type = cgltf_type_vec3; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "VEC4") == 0) + { + out_data->accessors[accessor_index].type = cgltf_type_vec4; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "MAT2") == 0) + { + out_data->accessors[accessor_index].type = cgltf_type_mat2; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "MAT3") == 0) + { + out_data->accessors[accessor_index].type = cgltf_type_mat3; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "MAT4") == 0) + { + out_data->accessors[accessor_index].type = cgltf_type_mat4; + } + ++i; + } + else + { + i = cgltf_skip_json(tokens, i+1); + } + } + + return i; +} + +static int cgltf_parse_json_rgba(jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_rgba* out) +{ + int components = tokens[i].size; + if (components >= 2) { + out->r = cgltf_json_to_float(tokens + ++i, json_chunk); + out->g = cgltf_json_to_float(tokens + ++i, json_chunk); + + if (components > 2) + out->b = cgltf_json_to_float(tokens + ++i, json_chunk); + + if (components > 3) + out->a = cgltf_json_to_float(tokens + ++i, json_chunk); + } + else { + out->r = cgltf_json_to_float(tokens + ++i, json_chunk); + out->g = out->r; + out->b = out->r; + out->a = out->r; + } + + return ++i; +} + +static int cgltf_parse_json_texture_view(jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_texture_view* out) { + + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + int size = tokens[i].size; + ++i; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens + i, json_chunk, "index") == 0) + { + ++i; + out->texture = (void*)(size_t)cgltf_json_to_int(tokens + i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "texCoord") == 0) + { + ++i; + out->texcoord = cgltf_json_to_int(tokens + i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "scale") == 0) + { + ++i; + out->scale = cgltf_json_to_float(tokens + i, json_chunk); + ++i; + } + else + { + i = cgltf_skip_json(tokens, i + 1); + } + } + + return i; +} + +static int cgltf_parse_json_pbr(jsmntok_t const* tokens, int i, + const uint8_t* json_chunk, cgltf_size mat_index, cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + int size = tokens[i].size; + ++i; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens+i, json_chunk, "metallicFactor") == 0) + { + ++i; + out_data->materials[mat_index].pbr.metallic_factor = + cgltf_json_to_float(tokens + i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "roughnessFactor") == 0) + { + ++i; + out_data->materials[mat_index].pbr.roughness_factor = + cgltf_json_to_float(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "baseColorFactor") == 0) + { + i = cgltf_parse_json_rgba(tokens, i + 1, json_chunk, + &(out_data->materials[mat_index].pbr.base_color)); + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "baseColorTexture") == 0) + { + i = cgltf_parse_json_texture_view(tokens, i + 1, json_chunk, + &(out_data->materials[mat_index].pbr.base_color_texture)); + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, 
"metallicRoughnessTexture") == 0) + { + i = cgltf_parse_json_texture_view(tokens, i + 1, json_chunk, + &(out_data->materials[mat_index].pbr.metallic_roughness_texture)); + } + else + { + i = cgltf_skip_json(tokens, i+1); + } + } + + return i; +} + +static int cgltf_parse_json_image(cgltf_options* options, jsmntok_t const* tokens, int i, + const uint8_t* json_chunk, cgltf_size img_index, cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + + memset(&out_data->images[img_index], 0, sizeof(cgltf_image)); + int size = tokens[i].size; + ++i; + + out_data->images[img_index].buffer_view = (void*)-1; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens + i, json_chunk, "uri") == 0) + { + ++i; + int strsize = tokens[i].end - tokens[i].start; + out_data->images[img_index].uri = options->memory_alloc(options->memory_user_data, strsize + 1); + strncpy(out_data->images[img_index].uri, + (const char*)json_chunk + tokens[i].start, + strsize); + out_data->images[img_index].uri[strsize] = 0; + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "bufferView") == 0) + { + ++i; + out_data->images[img_index].buffer_view = + (void*)(size_t)cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "mimeType") == 0) + { + ++i; + int strsize = tokens[i].end - tokens[i].start; + out_data->images[img_index].mime_type = options->memory_alloc(options->memory_user_data, strsize + 1); + strncpy(out_data->images[img_index].mime_type, + (const char*)json_chunk + tokens[i].start, + strsize); + out_data->images[img_index].mime_type[strsize] = 0; + ++i; + } + else + { + i = cgltf_skip_json(tokens, i + 1); + } + } + + return i; +} + +static int cgltf_parse_json_sampler(cgltf_options* options, jsmntok_t const* tokens, int i, + const uint8_t* json_chunk, cgltf_size smp_index, cgltf_data* out_data) { + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + + memset(&out_data->samplers[smp_index], 0, sizeof(cgltf_sampler)); + int size = tokens[i].size; + ++i; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens + i, json_chunk, "magFilter") == 0) + { + ++i; + out_data->samplers[smp_index].mag_filter + = cgltf_json_to_int(tokens + i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "minFilter") == 0) + { + ++i; + out_data->samplers[smp_index].min_filter + = cgltf_json_to_int(tokens + i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "wrapS") == 0) + { + ++i; + out_data->samplers[smp_index].wrap_s + = cgltf_json_to_int(tokens + i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "wrapT") == 0) + { + ++i; + out_data->samplers[smp_index].wrap_t + = cgltf_json_to_int(tokens + i, json_chunk); + ++i; + } + else + { + i = cgltf_skip_json(tokens, i + 1); + } + } + + return i; +} + + +static int cgltf_parse_json_texture(cgltf_options* options, jsmntok_t const* tokens, int i, + const uint8_t* json_chunk, cgltf_size tex_index, cgltf_data* out_data) { + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + + memset(&out_data->textures[tex_index], 0, sizeof(cgltf_texture)); + out_data->textures[tex_index].image = (void*)-1; + out_data->textures[tex_index].sampler = (void*)-1; + + int size = tokens[i].size; + ++i; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens + i, json_chunk, "sampler") == 0) + { + ++i; + out_data->textures[tex_index].sampler + = (void*)(size_t)cgltf_json_to_int(tokens + i, json_chunk); + ++i; + } + else if 
(cgltf_json_strcmp(tokens + i, json_chunk, "source") == 0) + { + ++i; + out_data->textures[tex_index].image + = (void*)(size_t)cgltf_json_to_int(tokens + i, json_chunk); + ++i; + } + else + { + i = cgltf_skip_json(tokens, i + 1); + } + } + + return i; +} + +static int cgltf_parse_json_material(cgltf_options* options, jsmntok_t const* tokens, int i, + const uint8_t* json_chunk, cgltf_size mat_index, cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + cgltf_material* material = &out_data->materials[mat_index]; + + memset(material, 0, sizeof(cgltf_material)); + material->emissive_texture.texture = (void*)-1; + material->emissive_texture.scale = 1.0f; + + material->normal_texture.texture = (void*)-1; + material->normal_texture.scale = 1.0f; + + material->occlusion_texture.texture = (void*)-1; + material->occlusion_texture.scale = 1.0f; + + material->pbr.base_color_texture.texture = (void*)-1; + material->pbr.base_color_texture.scale = 1.0f; + + material->pbr.metallic_roughness_texture.texture = (void*)-1; + material->pbr.metallic_roughness_texture.scale = 1.0f; + + int size = tokens[i].size; + ++i; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens+i, json_chunk, "name") == 0) + { + ++i; + int strsize = tokens[i].end - tokens[i].start; + material->name = options->memory_alloc(options->memory_user_data, strsize + 1); + strncpy(material->name, + (const char*)json_chunk + tokens[i].start, + strsize); + material->name[strsize] = 0; + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "pbrMetallicRoughness") == 0) + { + i = cgltf_parse_json_pbr(tokens, i+1, json_chunk, mat_index, out_data); + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "emissiveFactor") == 0) + { + i = cgltf_parse_json_rgba(tokens, i + 1, json_chunk, + &(material->emissive_color)); + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "normalTexture") == 0) + { + i = cgltf_parse_json_texture_view(tokens, i + 1, json_chunk, + &(material->normal_texture)); + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "emissiveTexture") == 0) + { + i = cgltf_parse_json_texture_view(tokens, i + 1, json_chunk, + &(material->emissive_texture)); + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "occlusionTexture") == 0) + { + i = cgltf_parse_json_texture_view(tokens, i + 1, json_chunk, + &(material->occlusion_texture)); + } + else if (cgltf_json_strcmp(tokens + i, json_chunk, "doubleSided") == 0) + { + ++i; + material->double_sided = + cgltf_json_to_bool(tokens + i, json_chunk); + ++i; + } + else + { + i = cgltf_skip_json(tokens, i+1); + } + } + + return i; +} + +static int cgltf_parse_json_accessors(cgltf_options* options, jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_ARRAY); + out_data->accessors_count = tokens[i].size; + out_data->accessors = options->memory_alloc(options->memory_user_data, sizeof(cgltf_accessor) * out_data->accessors_count); + ++i; + for (cgltf_size j = 0 ; j < out_data->accessors_count; ++j) + { + i = cgltf_parse_json_accessor(tokens, i, json_chunk, j, out_data); + if (i < 0) + { + return i; + } + } + return i; +} + +static int cgltf_parse_json_materials(cgltf_options* options, jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_ARRAY); + out_data->materials_count = tokens[i].size; + out_data->materials = options->memory_alloc(options->memory_user_data, sizeof(cgltf_material) * out_data->materials_count); + 
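+    /*
+       Note added for clarity (not in the original header): like the texture, sampler and image
+       parsers above, the per-object parsers store glTF cross-references as plain array indices
+       cast into the destination pointer fields, e.g. "normalTexture": { "index": 3 } becomes
+       material->normal_texture.texture = (void*)(size_t)3, with (void*)-1 meaning "not present".
+       Once every array has been allocated, the fix-up pass at the end of cgltf_parse_json()
+       rewrites these indices into real pointers into out_data->textures, ->images, ->samplers,
+       ->accessors and ->buffer_views.
+    */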
++i; + for (cgltf_size j = 0; j < out_data->materials_count; ++j) + { + i = cgltf_parse_json_material(options, tokens, i, json_chunk, j, out_data); + if (i < 0) + { + return i; + } + } + return i; +} + +static int cgltf_parse_json_images(cgltf_options* options, jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_data* out_data) { + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_ARRAY); + out_data->images_count = tokens[i].size; + out_data->images = options->memory_alloc(options->memory_user_data, sizeof(cgltf_image) * out_data->images_count); + ++i; + + for (cgltf_size j = 0; j < out_data->images_count; ++j) { + i = cgltf_parse_json_image(options, tokens, i, json_chunk, j, out_data); + if (i < 0) { + return i; + } + } + return i; +} + +static int cgltf_parse_json_textures(cgltf_options* options, jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_data* out_data) { + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_ARRAY); + out_data->textures_count = tokens[i].size; + out_data->textures = options->memory_alloc(options->memory_user_data, sizeof(cgltf_texture) * out_data->textures_count); + ++i; + + for (cgltf_size j = 0; j < out_data->textures_count; ++j) { + i = cgltf_parse_json_texture(options, tokens, i, json_chunk, j, out_data); + if (i < 0) { + return i; + } + } + return i; +} + +static int cgltf_parse_json_samplers(cgltf_options* options, jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_data* out_data) { + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_ARRAY); + out_data->samplers_count = tokens[i].size; + out_data->samplers = options->memory_alloc(options->memory_user_data, sizeof(cgltf_sampler) * out_data->samplers_count); + ++i; + + for (cgltf_size j = 0; j < out_data->samplers_count; ++j) { + i = cgltf_parse_json_sampler(options, tokens, i, json_chunk, j, out_data); + if (i < 0) { + return i; + } + } + return i; +} + +static int cgltf_parse_json_buffer_view(jsmntok_t const* tokens, int i, + const uint8_t* json_chunk, cgltf_size buffer_view_index, + cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + + int size = tokens[i].size; + ++i; + + memset(&out_data->buffer_views[buffer_view_index], 0, sizeof(cgltf_buffer_view)); + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens+i, json_chunk, "buffer") == 0) + { + ++i; + out_data->buffer_views[buffer_view_index].buffer = + (void*)(size_t)cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "byteOffset") == 0) + { + ++i; + out_data->buffer_views[buffer_view_index].offset = + cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "byteLength") == 0) + { + ++i; + out_data->buffer_views[buffer_view_index].size = + cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "byteStride") == 0) + { + ++i; + out_data->buffer_views[buffer_view_index].stride = + cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "target") == 0) + { + ++i; + int type = cgltf_json_to_int(tokens+i, json_chunk); + switch (type) + { + case 34962: + type = cgltf_buffer_view_type_vertices; + break; + case 34963: + type = cgltf_buffer_view_type_indices; + break; + default: + type = cgltf_buffer_view_type_invalid; + break; + } + out_data->buffer_views[buffer_view_index].type = type; + ++i; + } + else + { + i = cgltf_skip_json(tokens, i+1); + } + } + + return i; +} + +static int cgltf_parse_json_buffer_views(cgltf_options* options, 
jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_ARRAY); + out_data->buffer_views_count = tokens[i].size; + out_data->buffer_views = options->memory_alloc(options->memory_user_data, sizeof(cgltf_buffer_view) * out_data->buffer_views_count); + ++i; + for (cgltf_size j = 0 ; j < out_data->buffer_views_count; ++j) + { + i = cgltf_parse_json_buffer_view(tokens, i, json_chunk, j, out_data); + if (i < 0) + { + return i; + } + } + return i; +} + +static int cgltf_parse_json_buffer(cgltf_options* options, jsmntok_t const* tokens, int i, + const uint8_t* json_chunk, cgltf_size buffer_index, + cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_OBJECT); + + out_data->buffers[buffer_index].uri = NULL; + + int size = tokens[i].size; + ++i; + + for (int j = 0; j < size; ++j) + { + if (cgltf_json_strcmp(tokens+i, json_chunk, "byteLength") == 0) + { + ++i; + out_data->buffers[buffer_index].size = + cgltf_json_to_int(tokens+i, json_chunk); + ++i; + } + else if (cgltf_json_strcmp(tokens+i, json_chunk, "uri") == 0) + { + ++i; + int strsize = tokens[i].end - tokens[i].start; + out_data->buffers[buffer_index].uri = options->memory_alloc(options->memory_user_data, strsize + 1); + strncpy(out_data->buffers[buffer_index].uri, + (const char*)json_chunk + tokens[i].start, + strsize); + out_data->buffers[buffer_index].uri[strsize] = 0; + ++i; + } + else + { + i = cgltf_skip_json(tokens, i+1); + } + } + + return i; +} + +static int cgltf_parse_json_buffers(cgltf_options* options, jsmntok_t const* tokens, int i, const uint8_t* json_chunk, cgltf_data* out_data) +{ + CGLTF_CHECK_TOKTYPE(tokens[i], JSMN_ARRAY); + out_data->buffers_count = tokens[i].size; + out_data->buffers = options->memory_alloc(options->memory_user_data, sizeof(cgltf_buffer) * out_data->buffers_count); + ++i; + for (cgltf_size j = 0 ; j < out_data->buffers_count; ++j) + { + i = cgltf_parse_json_buffer(options, tokens, i, json_chunk, j, out_data); + if (i < 0) + { + return i; + } + } + return i; +} + +static cgltf_size cgltf_calc_size(cgltf_type type, cgltf_component_type component_type) +{ + cgltf_type size = 0; + + switch (component_type) + { + case cgltf_component_type_rgb_32f: + size = 12; + break; + case cgltf_component_type_rgba_32f: + size = 16; + break; + case cgltf_component_type_rg_32f: + size = 8; + break; + case cgltf_component_type_rg_8: + size = 2; + break; + case cgltf_component_type_rg_16: + size = 4; + break; + case cgltf_component_type_rgba_8: + size = 4; + break; + case cgltf_component_type_rgba_16: + size = 8; + break; + case cgltf_component_type_r_8: + case cgltf_component_type_r_8u: + size = 1; + break; + case cgltf_component_type_r_16: + case cgltf_component_type_r_16u: + size = 2; + break; + case cgltf_component_type_r_32u: + case cgltf_component_type_r_32f: + size = 4; + break; + case cgltf_component_type_invalid: + default: + size = 0; + break; + } + + switch (type) + { + case cgltf_type_vec2: + size *= 2; + break; + case cgltf_type_vec3: + size *= 3; + break; + case cgltf_type_vec4: + size *= 4; + break; + case cgltf_type_mat2: + size *= 4; + break; + case cgltf_type_mat3: + size *= 9; + break; + case cgltf_type_mat4: + size *= 16; + break; + case cgltf_type_invalid: + case cgltf_type_scalar: + default: + size *= 1; + break; + } + + return size; +} + +cgltf_result cgltf_parse_json(cgltf_options* options, const uint8_t* json_chunk, cgltf_size size, cgltf_data* out_data) +{ + jsmn_parser parser = {0}; + + if (options->json_token_count == 0) + { 
+ options->json_token_count = jsmn_parse(&parser, (const char*)json_chunk, size, NULL, 0); + } + + jsmntok_t* tokens = options->memory_alloc(options->memory_user_data, sizeof(jsmntok_t) * options->json_token_count); + + jsmn_init(&parser); + + int token_count = jsmn_parse(&parser, (const char*)json_chunk, size, tokens, options->json_token_count); + + if (token_count < 0 + || tokens[0].type != JSMN_OBJECT) + { + return cgltf_result_invalid_json; + } + + // The root is an object. + + for (int i = 1; i < token_count; ) + { + jsmntok_t const* tok = &tokens[i]; + if (tok->type == JSMN_STRING + && i + 1 < token_count) + { + int const name_length = tok->end - tok->start; + if (name_length == 6 + && strncmp((const char*)json_chunk + tok->start, "meshes", 6) == 0) + { + i = cgltf_parse_json_meshes(options, tokens, i+1, json_chunk, out_data); + } + else if (name_length == 9 + && strncmp((const char*)json_chunk + tok->start, "accessors", 9) == 0) + { + i = cgltf_parse_json_accessors(options, tokens, i+1, json_chunk, out_data); + } + else if (name_length == 11 + && strncmp((const char*)json_chunk + tok->start, "bufferViews", 11) == 0) + { + i = cgltf_parse_json_buffer_views(options, tokens, i+1, json_chunk, out_data); + } + else if (name_length == 7 + && strncmp((const char*)json_chunk + tok->start, "buffers", 7) == 0) + { + i = cgltf_parse_json_buffers(options, tokens, i+1, json_chunk, out_data); + } + else if (name_length == 9 + && strncmp((const char*)json_chunk + tok->start, "materials", 9) == 0) + { + i = cgltf_parse_json_materials(options, tokens, i+1, json_chunk, out_data); + } + else if (name_length == 6 + && strncmp((const char*)json_chunk + tok->start, "images", 6) == 0) + { + i = cgltf_parse_json_images(options, tokens, i + 1, json_chunk, out_data); + } + else if (name_length == 8 + && strncmp((const char*)json_chunk + tok->start, "textures", 8) == 0) + { + i = cgltf_parse_json_textures(options, tokens, i + 1, json_chunk, out_data); + } + else if (name_length == 8 + && strncmp((const char*)json_chunk + tok->start, "samplers", 8) == 0) + { + i = cgltf_parse_json_samplers(options, tokens, i + 1, json_chunk, out_data); + } + else + { + i = cgltf_skip_json(tokens, i+1); + } + + if (i < 0) + { + return cgltf_result_invalid_json; + } + } + } + + options->memory_free(options->memory_user_data, tokens); + + /* Fix up pointers */ + for (cgltf_size i = 0; i < out_data->meshes_count; ++i) + { + for (cgltf_size j = 0; j < out_data->meshes[i].primitives_count; ++j) + { + if (out_data->meshes[i].primitives[j].indices ==(void*)-1) + { + out_data->meshes[i].primitives[j].indices = NULL; + } + else + { + out_data->meshes[i].primitives[j].indices + = &out_data->accessors[(cgltf_size)out_data->meshes[i].primitives[j].indices]; + } + + for (cgltf_size k = 0; k < out_data->meshes[i].primitives[j].attributes_count; ++k) + { + out_data->meshes[i].primitives[j].attributes[k].data + = &out_data->accessors[(cgltf_size)out_data->meshes[i].primitives[j].attributes[k].data]; + } + } + } + + for (cgltf_size i = 0; i < out_data->accessors_count; ++i) + { + if (out_data->accessors[i].buffer_view == (void*)-1) + { + out_data->accessors[i].buffer_view = NULL; + } + else + { + out_data->accessors[i].buffer_view + = &out_data->buffer_views[(cgltf_size)out_data->accessors[i].buffer_view]; + out_data->accessors[i].stride = 0; + if (out_data->accessors[i].buffer_view) + { + out_data->accessors[i].stride = out_data->accessors[i].buffer_view->stride; + } + } + if (out_data->accessors[i].stride == 0) + { + 
out_data->accessors[i].stride = cgltf_calc_size(out_data->accessors[i].type, out_data->accessors[i].component_type); + } + } + + for (cgltf_size i = 0; i < out_data->textures_count; ++i) + { + if (out_data->textures[i].image == (void*)-1) + { + out_data->textures[i].image = NULL; + } + else + { + out_data->textures[i].image = + &out_data->images[(cgltf_size)out_data->textures[i].image]; + } + + if (out_data->textures[i].sampler == (void*)-1) + { + out_data->textures[i].sampler = NULL; + } + else + { + out_data->textures[i].sampler = + &out_data->samplers[(cgltf_size)out_data->textures[i].sampler]; + } + } + + for (cgltf_size i = 0; i < out_data->images_count; ++i) + { + if (out_data->images[i].buffer_view == (void*)-1) + { + out_data->images[i].buffer_view = NULL; + } + else + { + out_data->images[i].buffer_view + = &out_data->buffer_views[(cgltf_size)out_data->images[i].buffer_view]; + } + } + + for (cgltf_size i = 0; i < out_data->materials_count; ++i) + { + if (out_data->materials[i].emissive_texture.texture == (void*)-1) + { + out_data->materials[i].emissive_texture.texture = NULL; + } + else + { + out_data->materials[i].emissive_texture.texture = + &out_data->textures[(cgltf_size)out_data->materials[i].emissive_texture.texture]; + } + + if (out_data->materials[i].normal_texture.texture == (void*)-1) + { + out_data->materials[i].normal_texture.texture = NULL; + } + else + { + out_data->materials[i].normal_texture.texture = + &out_data->textures[(cgltf_size)out_data->materials[i].normal_texture.texture]; + } + + if (out_data->materials[i].occlusion_texture.texture == (void*)-1) + { + out_data->materials[i].occlusion_texture.texture = NULL; + } + else + { + out_data->materials[i].occlusion_texture.texture = + &out_data->textures[(cgltf_size)out_data->materials[i].occlusion_texture.texture]; + } + + if (out_data->materials[i].pbr.base_color_texture.texture == (void*)-1) + { + out_data->materials[i].pbr.base_color_texture.texture = NULL; + } + else + { + out_data->materials[i].pbr.base_color_texture.texture = + &out_data->textures[(cgltf_size)out_data->materials[i].pbr.base_color_texture.texture]; + } + + if (out_data->materials[i].pbr.metallic_roughness_texture.texture == (void*)-1) + { + out_data->materials[i].pbr.metallic_roughness_texture.texture = NULL; + } + else + { + out_data->materials[i].pbr.metallic_roughness_texture.texture = + &out_data->textures[(cgltf_size)out_data->materials[i].pbr.metallic_roughness_texture.texture]; + } + } + + for (cgltf_size i = 0; i < out_data->buffer_views_count; ++i) + { + out_data->buffer_views[i].buffer + = &out_data->buffers[(cgltf_size)out_data->buffer_views[i].buffer]; + } + + return cgltf_result_success; +} + +/* + * -- jsmn.c start -- + * Source: https://github.com/zserge/jsmn + * License: MIT + */ +/** + * Allocates a fresh unused token from the token pull. + */ +static jsmntok_t *jsmn_alloc_token(jsmn_parser *parser, + jsmntok_t *tokens, size_t num_tokens) { + jsmntok_t *tok; + if (parser->toknext >= num_tokens) { + return NULL; + } + tok = &tokens[parser->toknext++]; + tok->start = tok->end = -1; + tok->size = 0; +#ifdef JSMN_PARENT_LINKS + tok->parent = -1; +#endif + return tok; +} + +/** + * Fills token type and boundaries. + */ +static void jsmn_fill_token(jsmntok_t *token, jsmntype_t type, + int start, int end) { + token->type = type; + token->start = start; + token->end = end; + token->size = 0; +} + +/** + * Fills next available token with JSON primitive. 
+ */ +static int jsmn_parse_primitive(jsmn_parser *parser, const char *js, + size_t len, jsmntok_t *tokens, size_t num_tokens) { + jsmntok_t *token; + int start; + + start = parser->pos; + + for (; parser->pos < len && js[parser->pos] != '\0'; parser->pos++) { + switch (js[parser->pos]) { +#ifndef JSMN_STRICT + /* In strict mode primitive must be followed by "," or "}" or "]" */ + case ':': +#endif + case '\t' : case '\r' : case '\n' : case ' ' : + case ',' : case ']' : case '}' : + goto found; + } + if (js[parser->pos] < 32 || js[parser->pos] >= 127) { + parser->pos = start; + return JSMN_ERROR_INVAL; + } + } +#ifdef JSMN_STRICT + /* In strict mode primitive must be followed by a comma/object/array */ + parser->pos = start; + return JSMN_ERROR_PART; +#endif + +found: + if (tokens == NULL) { + parser->pos--; + return 0; + } + token = jsmn_alloc_token(parser, tokens, num_tokens); + if (token == NULL) { + parser->pos = start; + return JSMN_ERROR_NOMEM; + } + jsmn_fill_token(token, JSMN_PRIMITIVE, start, parser->pos); +#ifdef JSMN_PARENT_LINKS + token->parent = parser->toksuper; +#endif + parser->pos--; + return 0; +} + +/** + * Fills next token with JSON string. + */ +static int jsmn_parse_string(jsmn_parser *parser, const char *js, + size_t len, jsmntok_t *tokens, size_t num_tokens) { + jsmntok_t *token; + + int start = parser->pos; + + parser->pos++; + + /* Skip starting quote */ + for (; parser->pos < len && js[parser->pos] != '\0'; parser->pos++) { + char c = js[parser->pos]; + + /* Quote: end of string */ + if (c == '\"') { + if (tokens == NULL) { + return 0; + } + token = jsmn_alloc_token(parser, tokens, num_tokens); + if (token == NULL) { + parser->pos = start; + return JSMN_ERROR_NOMEM; + } + jsmn_fill_token(token, JSMN_STRING, start+1, parser->pos); +#ifdef JSMN_PARENT_LINKS + token->parent = parser->toksuper; +#endif + return 0; + } + + /* Backslash: Quoted symbol expected */ + if (c == '\\' && parser->pos + 1 < len) { + int i; + parser->pos++; + switch (js[parser->pos]) { + /* Allowed escaped symbols */ + case '\"': case '/' : case '\\' : case 'b' : + case 'f' : case 'r' : case 'n' : case 't' : + break; + /* Allows escaped symbol \uXXXX */ + case 'u': + parser->pos++; + for(i = 0; i < 4 && parser->pos < len && js[parser->pos] != '\0'; i++) { + /* If it isn't a hex character we have an error */ + if(!((js[parser->pos] >= 48 && js[parser->pos] <= 57) || /* 0-9 */ + (js[parser->pos] >= 65 && js[parser->pos] <= 70) || /* A-F */ + (js[parser->pos] >= 97 && js[parser->pos] <= 102))) { /* a-f */ + parser->pos = start; + return JSMN_ERROR_INVAL; + } + parser->pos++; + } + parser->pos--; + break; + /* Unexpected symbol */ + default: + parser->pos = start; + return JSMN_ERROR_INVAL; + } + } + } + parser->pos = start; + return JSMN_ERROR_PART; +} + +/** + * Parse JSON string and fill tokens. + */ +int jsmn_parse(jsmn_parser *parser, const char *js, size_t len, + jsmntok_t *tokens, unsigned int num_tokens) { + int r; + int i; + jsmntok_t *token; + int count = parser->toknext; + + for (; parser->pos < len && js[parser->pos] != '\0'; parser->pos++) { + char c; + jsmntype_t type; + + c = js[parser->pos]; + switch (c) { + case '{': case '[': + count++; + if (tokens == NULL) { + break; + } + token = jsmn_alloc_token(parser, tokens, num_tokens); + if (token == NULL) + return JSMN_ERROR_NOMEM; + if (parser->toksuper != -1) { + tokens[parser->toksuper].size++; +#ifdef JSMN_PARENT_LINKS + token->parent = parser->toksuper; +#endif + } + token->type = (c == '{' ? 
JSMN_OBJECT : JSMN_ARRAY); + token->start = parser->pos; + parser->toksuper = parser->toknext - 1; + break; + case '}': case ']': + if (tokens == NULL) + break; + type = (c == '}' ? JSMN_OBJECT : JSMN_ARRAY); +#ifdef JSMN_PARENT_LINKS + if (parser->toknext < 1) { + return JSMN_ERROR_INVAL; + } + token = &tokens[parser->toknext - 1]; + for (;;) { + if (token->start != -1 && token->end == -1) { + if (token->type != type) { + return JSMN_ERROR_INVAL; + } + token->end = parser->pos + 1; + parser->toksuper = token->parent; + break; + } + if (token->parent == -1) { + if(token->type != type || parser->toksuper == -1) { + return JSMN_ERROR_INVAL; + } + break; + } + token = &tokens[token->parent]; + } +#else + for (i = parser->toknext - 1; i >= 0; i--) { + token = &tokens[i]; + if (token->start != -1 && token->end == -1) { + if (token->type != type) { + return JSMN_ERROR_INVAL; + } + parser->toksuper = -1; + token->end = parser->pos + 1; + break; + } + } + /* Error if unmatched closing bracket */ + if (i == -1) return JSMN_ERROR_INVAL; + for (; i >= 0; i--) { + token = &tokens[i]; + if (token->start != -1 && token->end == -1) { + parser->toksuper = i; + break; + } + } +#endif + break; + case '\"': + r = jsmn_parse_string(parser, js, len, tokens, num_tokens); + if (r < 0) return r; + count++; + if (parser->toksuper != -1 && tokens != NULL) + tokens[parser->toksuper].size++; + break; + case '\t' : case '\r' : case '\n' : case ' ': + break; + case ':': + parser->toksuper = parser->toknext - 1; + break; + case ',': + if (tokens != NULL && parser->toksuper != -1 && + tokens[parser->toksuper].type != JSMN_ARRAY && + tokens[parser->toksuper].type != JSMN_OBJECT) { +#ifdef JSMN_PARENT_LINKS + parser->toksuper = tokens[parser->toksuper].parent; +#else + for (i = parser->toknext - 1; i >= 0; i--) { + if (tokens[i].type == JSMN_ARRAY || tokens[i].type == JSMN_OBJECT) { + if (tokens[i].start != -1 && tokens[i].end == -1) { + parser->toksuper = i; + break; + } + } + } +#endif + } + break; +#ifdef JSMN_STRICT + /* In strict mode primitives are: numbers and booleans */ + case '-': case '0': case '1' : case '2': case '3' : case '4': + case '5': case '6': case '7' : case '8': case '9': + case 't': case 'f': case 'n' : + /* And they must not be keys of the object */ + if (tokens != NULL && parser->toksuper != -1) { + jsmntok_t *t = &tokens[parser->toksuper]; + if (t->type == JSMN_OBJECT || + (t->type == JSMN_STRING && t->size != 0)) { + return JSMN_ERROR_INVAL; + } + } +#else + /* In non-strict mode every unquoted value is a primitive */ + default: +#endif + r = jsmn_parse_primitive(parser, js, len, tokens, num_tokens); + if (r < 0) return r; + count++; + if (parser->toksuper != -1 && tokens != NULL) + tokens[parser->toksuper].size++; + break; + +#ifdef JSMN_STRICT + /* Unexpected char in strict mode */ + default: + return JSMN_ERROR_INVAL; +#endif + } + } + + if (tokens != NULL) { + for (i = parser->toknext - 1; i >= 0; i--) { + /* Unmatched opened object or array */ + if (tokens[i].start != -1 && tokens[i].end == -1) { + return JSMN_ERROR_PART; + } + } + } + + return count; +} + +/** + * Creates a new parser based over a given buffer with an array of tokens + * available. 
+ */ +void jsmn_init(jsmn_parser *parser) { + parser->pos = 0; + parser->toknext = 0; + parser->toksuper = -1; +} +/* + * -- jsmn.c end -- + */ + +#endif /* #ifdef CGLTF_IMPLEMENTATION */ + +#ifdef __cplusplus +} +#endif diff --git a/src/external/mini_al.h b/src/external/mini_al.h index 0b9a74e1..759662c7 100644 --- a/src/external/mini_al.h +++ b/src/external/mini_al.h @@ -1,5 +1,5 @@ // Audio playback and capture library. Public domain. See "unlicense" statement at the end of this file. -// mini_al - v0.8.5 - 2018-08-12 +// mini_al - v0.8.8 - 2018-09-14 // // David Reid - davidreidsoftware@gmail.com @@ -122,16 +122,15 @@ // integer samples, interleaved. Let me know if you need non-interleaved and I'll look into it. // - The sndio backend is currently only enabled on OpenBSD builds. // - The audio(4) backend is supported on OpenBSD, but you may need to disable sndiod before you can use it. +// - If you are using the platform's default device, mini_al will try automatically switching the internal +// device when the device is unplugged. This feature is disabled when the device is opened in exclusive +// mode. // // // // BACKEND NUANCES // =============== // -// DirectSound -// ----------- -// - DirectSound currently supports a maximum of 4 periods. -// // Android // ------- // - To capture audio on Android, remember to add the RECORD_AUDIO permission to your manifest: @@ -140,7 +139,6 @@ // // UWP // --- -// - UWP is only supported when compiling as C++. // - UWP only supports default playback and capture devices. // - UWP requires the Microphone capability to be enabled in the application's manifest (Package.appxmanifest): // @@ -150,14 +148,6 @@ // // // -// PulseAudio -// ---------- -// - Each device has it's own dedicated main loop. -// -// JACK -// ---- -// - It's possible for mal_device.bufferSizeInFrames to change during run time. -// // // OPTIONS // ======= @@ -210,9 +200,8 @@ // // #define MAL_BASE_BUFFER_SIZE_IN_MILLISECONDS_LOW_LATENCY // #define MAL_BASE_BUFFER_SIZE_IN_MILLISECONDS_CONSERVATIVE -// When a buffer size of 0 is specified when a device is initialized it will default to a buffer of this size (depending -// on the chosen performance profile) multiplied by a weight which is calculated at run-time. These can be increased or -// decreased depending on your specific requirements. +// When a buffer size of 0 is specified when a device is initialized it will default to a buffer of this size, depending +// on the chosen performance profile. These can be increased or decreased depending on your specific requirements. // // #define MAL_NO_DECODING // Disables the decoding APIs. @@ -265,7 +254,9 @@ extern "C" { // Platform/backend detection. 
#ifdef _WIN32 #define MAL_WIN32 - #if (!defined(WINAPI_FAMILY) || WINAPI_FAMILY == WINAPI_FAMILY_DESKTOP_APP) + #if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_PC_APP || WINAPI_FAMILY == WINAPI_FAMILY_PHONE_APP) + #define MAL_WIN32_UWP + #else #define MAL_WIN32_DESKTOP #endif #else @@ -362,9 +353,6 @@ typedef void* mal_handle; typedef void* mal_ptr; typedef void (* mal_proc)(void); -typedef struct mal_context mal_context; -typedef struct mal_device mal_device; - #if defined(_MSC_VER) && !defined(_WCHAR_T_DEFINED) typedef mal_uint16 wchar_t; #endif @@ -419,6 +407,8 @@ typedef mal_uint16 wchar_t; #define MAL_LOG_LEVEL MAL_LOG_LEVEL_ERROR #endif +typedef struct mal_context mal_context; +typedef struct mal_device mal_device; typedef mal_uint8 mal_channel; #define MAL_CHANNEL_NONE 0 @@ -513,6 +503,7 @@ typedef int mal_result; #define MAL_ACCESS_DENIED -32 #define MAL_TOO_LARGE -33 #define MAL_DEVICE_UNAVAILABLE -34 +#define MAL_TIMEOUT -35 // Standard sample rates. #define MAL_SAMPLE_RATE_8000 8000 @@ -1224,6 +1215,16 @@ void mal_pcm_convert(void* pOut, mal_format formatOut, const void* pIn, mal_form #define MAL_ENABLE_NULL #endif +#ifdef MAL_SUPPORT_WASAPI +// We need a IMMNotificationClient object for WASAPI. +typedef struct +{ + void* lpVtbl; + mal_uint32 counter; + mal_device* pDevice; +} mal_IMMNotificationClient; +#endif + typedef enum { @@ -1489,6 +1490,7 @@ struct mal_context mal_result (* onGetDeviceInfo )(mal_context* pContext, mal_device_type type, const mal_device_id* pDeviceID, mal_share_mode shareMode, mal_device_info* pDeviceInfo); mal_result (* onDeviceInit )(mal_context* pContext, mal_device_type type, const mal_device_id* pDeviceID, const mal_device_config* pConfig, mal_device* pDevice); void (* onDeviceUninit )(mal_device* pDevice); + mal_result (* onDeviceReinit )(mal_device* pDevice); mal_result (* onDeviceStart )(mal_device* pDevice); mal_result (* onDeviceStop )(mal_device* pDevice); mal_result (* onDeviceBreakMainLoop)(mal_device* pDevice); @@ -1672,6 +1674,7 @@ struct mal_context mal_proc AudioObjectGetPropertyData; mal_proc AudioObjectGetPropertyDataSize; mal_proc AudioObjectSetPropertyData; + mal_proc AudioObjectAddPropertyListener; mal_handle hAudioUnit; // Could possibly be set to AudioToolbox on later versions of macOS. mal_proc AudioComponentFindNext; @@ -1904,14 +1907,15 @@ MAL_ALIGNED_STRUCT(MAL_SIMD_ALIGNMENT) mal_device mal_recv_proc onRecv; mal_send_proc onSend; mal_stop_proc onStop; - void* pUserData; // Application defined data. + void* pUserData; // Application defined data. char name[256]; + mal_device_config initConfig; // The configuration passed in to mal_device_init(). Mainly used for reinitializing the backend device. mal_mutex lock; mal_event wakeupEvent; mal_event startEvent; mal_event stopEvent; mal_thread thread; - mal_result workResult; // This is set by the worker thread after it's finished doing a job. + mal_result workResult; // This is set by the worker thread after it's finished doing a job. mal_bool32 usingDefaultFormat : 1; mal_bool32 usingDefaultChannels : 1; mal_bool32 usingDefaultSampleRate : 1; @@ -1920,6 +1924,7 @@ MAL_ALIGNED_STRUCT(MAL_SIMD_ALIGNMENT) mal_device mal_bool32 usingDefaultPeriods : 1; mal_bool32 exclusiveMode : 1; mal_bool32 isOwnerOfContext : 1; // When set to true, uninitializing the device will also uninitialize the context. Set to true when NULL is passed into mal_device_init(). 
+ mal_bool32 isDefaultDevice : 1; // Used to determine if the backend should try reinitializing if the default device is unplugged. mal_format internalFormat; mal_uint32 internalChannels; mal_uint32 internalSampleRate; @@ -1936,9 +1941,12 @@ MAL_ALIGNED_STRUCT(MAL_SIMD_ALIGNMENT) mal_device /*IAudioClient**/ mal_ptr pAudioClient; /*IAudioRenderClient**/ mal_ptr pRenderClient; /*IAudioCaptureClient**/ mal_ptr pCaptureClient; + /*IMMDeviceEnumerator**/ mal_ptr pDeviceEnumerator; /* <-- Used for IMMNotificationClient notifications. Required for detecting default device changes. */ + mal_IMMNotificationClient notificationClient; /*HANDLE*/ mal_handle hEvent; - /*HANDLE*/ mal_handle hStopEvent; + /*HANDLE*/ mal_handle hBreakEvent; /* <-- Used to break from WaitForMultipleObjects() in the main loop. */ mal_bool32 breakFromMainLoop; + mal_bool32 hasDefaultDeviceChanged; /* <-- Make sure this is always a whole 32-bits because we use atomic assignments. */ } wasapi; #endif #ifdef MAL_SUPPORT_DSOUND @@ -2006,6 +2014,7 @@ MAL_ALIGNED_STRUCT(MAL_SIMD_ALIGNMENT) mal_device /*AudioComponent*/ mal_ptr component; // <-- Can this be per-context? /*AudioUnit*/ mal_ptr audioUnit; /*AudioBufferList**/ mal_ptr pAudioBufferList; // Only used for input devices. + mal_bool32 isSwitchingDevice; /* <-- Set to true when the default device has changed and mini_al is in the process of switching. */ } coreaudio; #endif #ifdef MAL_SUPPORT_SNDIO @@ -2279,6 +2288,8 @@ void mal_device_uninit(mal_device* pDevice); // // Thread Safety: SAFE // This API is implemented as a simple atomic assignment. +// +// DEPRECATED. Set this when the device is initialized with mal_device_init*(). void mal_device_set_recv_callback(mal_device* pDevice, mal_recv_proc proc); // Sets the callback to use when the application needs to send data to the device for playback. @@ -2289,6 +2300,8 @@ void mal_device_set_recv_callback(mal_device* pDevice, mal_recv_proc proc); // // Thread Safety: SAFE // This API is implemented as a simple atomic assignment. +// +// DEPRECATED. Set this when the device is initialized with mal_device_init*(). void mal_device_set_send_callback(mal_device* pDevice, mal_send_proc proc); // Sets the callback to use when the device has stopped, either explicitly or as a result of an error. @@ -2922,7 +2935,7 @@ static MAL_INLINE mal_bool32 mal_has_avx2() #if defined(_AVX2_) || defined(__AVX2__) return MAL_TRUE; // If the compiler is allowed to freely generate AVX2 code we can assume support. #else - // AVX requires both CPU and OS support. + // AVX2 requires both CPU and OS support. #if defined(MAL_NO_CPUID) || defined(MAL_NO_XGETBV) return MAL_FALSE; #else @@ -2943,7 +2956,7 @@ static MAL_INLINE mal_bool32 mal_has_avx2() #endif #endif #else - return MAL_FALSE; // AVX is only supported on x86 and x64 architectures. + return MAL_FALSE; // AVX2 is only supported on x86 and x64 architectures. #endif #else return MAL_FALSE; // No compiler support. @@ -4705,6 +4718,7 @@ mal_uint32 mal_get_format_priority_index(mal_format format) // Lower = better. 
return (mal_uint32)-1; } +void mal_device__post_init_setup(mal_device* pDevice); /////////////////////////////////////////////////////////////////////////////// // @@ -5305,19 +5319,27 @@ static MAL_INLINE void mal_PropVariantInit(PROPVARIANT* pProp) mal_zero_object(pProp); } -const PROPERTYKEY MAL_PKEY_Device_FriendlyName = {{0xA45C254E, 0xDF1C, 0x4EFD, {0x80, 0x20, 0x67, 0xD1, 0x46, 0xA8, 0x50, 0xE0}}, 14}; -const PROPERTYKEY MAL_PKEY_AudioEngine_DeviceFormat = {{0xF19F064D, 0x82C, 0x4E27, {0xBC, 0x73, 0x68, 0x82, 0xA1, 0xBB, 0x8E, 0x4C}}, 0}; -const IID MAL_IID_IAudioClient = {0x1CB9AD4C, 0xDBFA, 0x4C32, {0xB1, 0x78, 0xC2, 0xF5, 0x68, 0xA7, 0x03, 0xB2}}; // 1CB9AD4C-DBFA-4C32-B178-C2F568A703B2 = __uuidof(IAudioClient) -const IID MAL_IID_IAudioRenderClient = {0xF294ACFC, 0x3146, 0x4483, {0xA7, 0xBF, 0xAD, 0xDC, 0xA7, 0xC2, 0x60, 0xE2}}; // F294ACFC-3146-4483-A7BF-ADDCA7C260E2 = __uuidof(IAudioRenderClient) -const IID MAL_IID_IAudioCaptureClient = {0xC8ADBD64, 0xE71E, 0x48A0, {0xA4, 0xDE, 0x18, 0x5C, 0x39, 0x5C, 0xD3, 0x17}}; // C8ADBD64-E71E-48A0-A4DE-185C395CD317 = __uuidof(IAudioCaptureClient) +const PROPERTYKEY MAL_PKEY_Device_FriendlyName = {{0xA45C254E, 0xDF1C, 0x4EFD, {0x80, 0x20, 0x67, 0xD1, 0x46, 0xA8, 0x50, 0xE0}}, 14}; +const PROPERTYKEY MAL_PKEY_AudioEngine_DeviceFormat = {{0xF19F064D, 0x82C, 0x4E27, {0xBC, 0x73, 0x68, 0x82, 0xA1, 0xBB, 0x8E, 0x4C}}, 0}; + +const IID MAL_IID_IUnknown = {0x00000000, 0x0000, 0x0000, {0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x46}}; // 00000000-0000-0000-C000-000000000046 +const IID MAL_IID_IAgileObject = {0x94EA2B94, 0xE9CC, 0x49E0, {0xC0, 0xFF, 0xEE, 0x64, 0xCA, 0x8F, 0x5B, 0x90}}; // 94EA2B94-E9CC-49E0-C0FF-EE64CA8F5B90 + +const IID MAL_IID_IAudioClient = {0x1CB9AD4C, 0xDBFA, 0x4C32, {0xB1, 0x78, 0xC2, 0xF5, 0x68, 0xA7, 0x03, 0xB2}}; // 1CB9AD4C-DBFA-4C32-B178-C2F568A703B2 = __uuidof(IAudioClient) +const IID MAL_IID_IAudioClient2 = {0x726778CD, 0xF60A, 0x4EDA, {0x82, 0xDE, 0xE4, 0x76, 0x10, 0xCD, 0x78, 0xAA}}; // 726778CD-F60A-4EDA-82DE-E47610CD78AA = __uuidof(IAudioClient2) +const IID MAL_IID_IAudioClient3 = {0x7ED4EE07, 0x8E67, 0x4CD4, {0x8C, 0x1A, 0x2B, 0x7A, 0x59, 0x87, 0xAD, 0x42}}; // 7ED4EE07-8E67-4CD4-8C1A-2B7A5987AD42 = __uuidof(IAudioClient3) +const IID MAL_IID_IAudioRenderClient = {0xF294ACFC, 0x3146, 0x4483, {0xA7, 0xBF, 0xAD, 0xDC, 0xA7, 0xC2, 0x60, 0xE2}}; // F294ACFC-3146-4483-A7BF-ADDCA7C260E2 = __uuidof(IAudioRenderClient) +const IID MAL_IID_IAudioCaptureClient = {0xC8ADBD64, 0xE71E, 0x48A0, {0xA4, 0xDE, 0x18, 0x5C, 0x39, 0x5C, 0xD3, 0x17}}; // C8ADBD64-E71E-48A0-A4DE-185C395CD317 = __uuidof(IAudioCaptureClient) +const IID MAL_IID_IMMNotificationClient = {0x7991EEC9, 0x7E89, 0x4D85, {0x83, 0x90, 0x6C, 0x70, 0x3C, 0xEC, 0x60, 0xC0}}; // 7991EEC9-7E89-4D85-8390-6C703CEC60C0 = __uuidof(IMMNotificationClient) #ifndef MAL_WIN32_DESKTOP -const IID MAL_IID_DEVINTERFACE_AUDIO_RENDER = {0xE6327CAD, 0xDCEC, 0x4949, {0xAE, 0x8A, 0x99, 0x1E, 0x97, 0x6A, 0x79, 0xD2}}; // E6327CAD-DCEC-4949-AE8A-991E976A79D2 -const IID MAL_IID_DEVINTERFACE_AUDIO_CAPTURE = {0x2EEF81BE, 0x33FA, 0x4800, {0x96, 0x70, 0x1C, 0xD4, 0x74, 0x97, 0x2C, 0x3F}}; // 2EEF81BE-33FA-4800-9670-1CD474972C3F +const IID MAL_IID_DEVINTERFACE_AUDIO_RENDER = {0xE6327CAD, 0xDCEC, 0x4949, {0xAE, 0x8A, 0x99, 0x1E, 0x97, 0x6A, 0x79, 0xD2}}; // E6327CAD-DCEC-4949-AE8A-991E976A79D2 +const IID MAL_IID_DEVINTERFACE_AUDIO_CAPTURE = {0x2EEF81BE, 0x33FA, 0x4800, {0x96, 0x70, 0x1C, 0xD4, 0x74, 0x97, 0x2C, 0x3F}}; // 2EEF81BE-33FA-4800-9670-1CD474972C3F +const IID 
MAL_IID_IActivateAudioInterfaceCompletionHandler = {0x41D949AB, 0x9862, 0x444A, {0x80, 0xF6, 0xC2, 0x61, 0x33, 0x4D, 0xA5, 0xEB}}; // 41D949AB-9862-444A-80F6-C261334DA5EB #endif -const IID MAL_CLSID_MMDeviceEnumerator_Instance = {0xBCDE0395, 0xE52F, 0x467C, {0x8E, 0x3D, 0xC4, 0x57, 0x92, 0x91, 0x69, 0x2E}}; // BCDE0395-E52F-467C-8E3D-C4579291692E = __uuidof(MMDeviceEnumerator) -const IID MAL_IID_IMMDeviceEnumerator_Instance = {0xA95664D2, 0x9614, 0x4F35, {0xA7, 0x46, 0xDE, 0x8D, 0xB6, 0x36, 0x17, 0xE6}}; // A95664D2-9614-4F35-A746-DE8DB63617E6 = __uuidof(IMMDeviceEnumerator) +const IID MAL_CLSID_MMDeviceEnumerator_Instance = {0xBCDE0395, 0xE52F, 0x467C, {0x8E, 0x3D, 0xC4, 0x57, 0x92, 0x91, 0x69, 0x2E}}; // BCDE0395-E52F-467C-8E3D-C4579291692E = __uuidof(MMDeviceEnumerator) +const IID MAL_IID_IMMDeviceEnumerator_Instance = {0xA95664D2, 0x9614, 0x4F35, {0xA7, 0x46, 0xDE, 0x8D, 0xB6, 0x36, 0x17, 0xE6}}; // A95664D2-9614-4F35-A746-DE8DB63617E6 = __uuidof(IMMDeviceEnumerator) #ifdef __cplusplus #define MAL_CLSID_MMDeviceEnumerator MAL_CLSID_MMDeviceEnumerator_Instance #define MAL_IID_IMMDeviceEnumerator MAL_IID_IMMDeviceEnumerator_Instance @@ -5326,6 +5348,7 @@ const IID MAL_IID_IMMDeviceEnumerator_Instance = {0xA95664D2, 0x9614, 0x4F3 #define MAL_IID_IMMDeviceEnumerator &MAL_IID_IMMDeviceEnumerator_Instance #endif +typedef struct mal_IUnknown mal_IUnknown; #ifdef MAL_WIN32_DESKTOP #define MAL_MM_DEVICE_STATE_ACTIVE 1 #define MAL_MM_DEVICE_STATE_DISABLED 2 @@ -5335,12 +5358,14 @@ const IID MAL_IID_IMMDeviceEnumerator_Instance = {0xA95664D2, 0x9614, 0x4F3 typedef struct mal_IMMDeviceEnumerator mal_IMMDeviceEnumerator; typedef struct mal_IMMDeviceCollection mal_IMMDeviceCollection; typedef struct mal_IMMDevice mal_IMMDevice; -typedef struct mal_IMMNotificationClient mal_IMMNotificationClient; #else +typedef struct mal_IActivateAudioInterfaceCompletionHandler mal_IActivateAudioInterfaceCompletionHandler; typedef struct mal_IActivateAudioInterfaceAsyncOperation mal_IActivateAudioInterfaceAsyncOperation; #endif typedef struct mal_IPropertyStore mal_IPropertyStore; typedef struct mal_IAudioClient mal_IAudioClient; +typedef struct mal_IAudioClient2 mal_IAudioClient2; +typedef struct mal_IAudioClient3 mal_IAudioClient3; typedef struct mal_IAudioRenderClient mal_IAudioRenderClient; typedef struct mal_IAudioCaptureClient mal_IAudioCaptureClient; @@ -5361,6 +5386,7 @@ typedef mal_int64 MAL_REFERENCE_TIME; #define MAL_AUDCLNT_E_INVALID_DEVICE_PERIOD (-2004287456) #define MAL_AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED (-2004287463) #define MAL_AUDCLNT_S_BUFFER_EMPTY (143196161) +#define MAL_AUDCLNT_E_DEVICE_IN_USE (-2004287478) typedef enum { @@ -5382,7 +5408,51 @@ typedef enum MAL_AUDCLNT_SHAREMODE_EXCLUSIVE } MAL_AUDCLNT_SHAREMODE; +typedef enum +{ + MAL_AudioCategory_Other = 0, // <-- mini_al is only caring about Other. 
+} MAL_AUDIO_STREAM_CATEGORY; + +typedef struct +{ + UINT32 cbSize; + BOOL bIsOffload; + MAL_AUDIO_STREAM_CATEGORY eCategory; +} mal_AudioClientProperties; + +// IUnknown +typedef struct +{ + // IUnknown + HRESULT (STDMETHODCALLTYPE * QueryInterface)(mal_IUnknown* pThis, const IID* const riid, void** ppObject); + ULONG (STDMETHODCALLTYPE * AddRef) (mal_IUnknown* pThis); + ULONG (STDMETHODCALLTYPE * Release) (mal_IUnknown* pThis); +} mal_IUnknownVtbl; +struct mal_IUnknown +{ + mal_IUnknownVtbl* lpVtbl; +}; +HRESULT mal_IUnknown_QueryInterface(mal_IUnknown* pThis, const IID* const riid, void** ppObject) { return pThis->lpVtbl->QueryInterface(pThis, riid, ppObject); } +ULONG mal_IUnknown_AddRef(mal_IUnknown* pThis) { return pThis->lpVtbl->AddRef(pThis); } +ULONG mal_IUnknown_Release(mal_IUnknown* pThis) { return pThis->lpVtbl->Release(pThis); } + #ifdef MAL_WIN32_DESKTOP + // IMMNotificationClient + typedef struct + { + // IUnknown + HRESULT (STDMETHODCALLTYPE * QueryInterface)(mal_IMMNotificationClient* pThis, const IID* const riid, void** ppObject); + ULONG (STDMETHODCALLTYPE * AddRef) (mal_IMMNotificationClient* pThis); + ULONG (STDMETHODCALLTYPE * Release) (mal_IMMNotificationClient* pThis); + + // IMMNotificationClient + HRESULT (STDMETHODCALLTYPE * OnDeviceStateChanged) (mal_IMMNotificationClient* pThis, LPCWSTR pDeviceID, DWORD dwNewState); + HRESULT (STDMETHODCALLTYPE * OnDeviceAdded) (mal_IMMNotificationClient* pThis, LPCWSTR pDeviceID); + HRESULT (STDMETHODCALLTYPE * OnDeviceRemoved) (mal_IMMNotificationClient* pThis, LPCWSTR pDeviceID); + HRESULT (STDMETHODCALLTYPE * OnDefaultDeviceChanged)(mal_IMMNotificationClient* pThis, mal_EDataFlow dataFlow, mal_ERole role, LPCWSTR pDefaultDeviceID); + HRESULT (STDMETHODCALLTYPE * OnPropertyValueChanged)(mal_IMMNotificationClient* pThis, LPCWSTR pDeviceID, const PROPERTYKEY key); + } mal_IMMNotificationClientVtbl; + // IMMDeviceEnumerator typedef struct { @@ -5461,7 +5531,6 @@ typedef enum HRESULT mal_IMMDevice_GetId(mal_IMMDevice* pThis, LPWSTR *pID) { return pThis->lpVtbl->GetId(pThis, pID); } HRESULT mal_IMMDevice_GetState(mal_IMMDevice* pThis, DWORD *pState) { return pThis->lpVtbl->GetState(pThis, pState); } #else - // IActivateAudioInterfaceAsyncOperation typedef struct { @@ -5471,7 +5540,7 @@ typedef enum ULONG (STDMETHODCALLTYPE * Release) (mal_IActivateAudioInterfaceAsyncOperation* pThis); // IActivateAudioInterfaceAsyncOperation - HRESULT (STDMETHODCALLTYPE * GetActivateResult)(mal_IActivateAudioInterfaceAsyncOperation* pThis, HRESULT *pActivateResult, IUnknown** ppActivatedInterface); + HRESULT (STDMETHODCALLTYPE * GetActivateResult)(mal_IActivateAudioInterfaceAsyncOperation* pThis, HRESULT *pActivateResult, mal_IUnknown** ppActivatedInterface); } mal_IActivateAudioInterfaceAsyncOperationVtbl; struct mal_IActivateAudioInterfaceAsyncOperation { @@ -5480,7 +5549,7 @@ typedef enum HRESULT mal_IActivateAudioInterfaceAsyncOperation_QueryInterface(mal_IActivateAudioInterfaceAsyncOperation* pThis, const IID* const riid, void** ppObject) { return pThis->lpVtbl->QueryInterface(pThis, riid, ppObject); } ULONG mal_IActivateAudioInterfaceAsyncOperation_AddRef(mal_IActivateAudioInterfaceAsyncOperation* pThis) { return pThis->lpVtbl->AddRef(pThis); } ULONG mal_IActivateAudioInterfaceAsyncOperation_Release(mal_IActivateAudioInterfaceAsyncOperation* pThis) { return pThis->lpVtbl->Release(pThis); } - HRESULT mal_IActivateAudioInterfaceAsyncOperation_GetActivateResult(mal_IActivateAudioInterfaceAsyncOperation* pThis, HRESULT *pActivateResult, 
IUnknown** ppActivatedInterface) { return pThis->lpVtbl->GetActivateResult(pThis, pActivateResult, ppActivatedInterface); } + HRESULT mal_IActivateAudioInterfaceAsyncOperation_GetActivateResult(mal_IActivateAudioInterfaceAsyncOperation* pThis, HRESULT *pActivateResult, mal_IUnknown** ppActivatedInterface) { return pThis->lpVtbl->GetActivateResult(pThis, pActivateResult, ppActivatedInterface); } #endif // IPropertyStore @@ -5554,6 +5623,115 @@ HRESULT mal_IAudioClient_Reset(mal_IAudioClient* pThis) HRESULT mal_IAudioClient_SetEventHandle(mal_IAudioClient* pThis, HANDLE eventHandle) { return pThis->lpVtbl->SetEventHandle(pThis, eventHandle); } HRESULT mal_IAudioClient_GetService(mal_IAudioClient* pThis, const IID* const riid, void** pp) { return pThis->lpVtbl->GetService(pThis, riid, pp); } +// IAudioClient2 +typedef struct +{ + // IUnknown + HRESULT (STDMETHODCALLTYPE * QueryInterface)(mal_IAudioClient2* pThis, const IID* const riid, void** ppObject); + ULONG (STDMETHODCALLTYPE * AddRef) (mal_IAudioClient2* pThis); + ULONG (STDMETHODCALLTYPE * Release) (mal_IAudioClient2* pThis); + + // IAudioClient + HRESULT (STDMETHODCALLTYPE * Initialize) (mal_IAudioClient2* pThis, MAL_AUDCLNT_SHAREMODE shareMode, DWORD streamFlags, MAL_REFERENCE_TIME bufferDuration, MAL_REFERENCE_TIME periodicity, const WAVEFORMATEX* pFormat, const GUID* pAudioSessionGuid); + HRESULT (STDMETHODCALLTYPE * GetBufferSize) (mal_IAudioClient2* pThis, mal_uint32* pNumBufferFrames); + HRESULT (STDMETHODCALLTYPE * GetStreamLatency) (mal_IAudioClient2* pThis, MAL_REFERENCE_TIME* pLatency); + HRESULT (STDMETHODCALLTYPE * GetCurrentPadding)(mal_IAudioClient2* pThis, mal_uint32* pNumPaddingFrames); + HRESULT (STDMETHODCALLTYPE * IsFormatSupported)(mal_IAudioClient2* pThis, MAL_AUDCLNT_SHAREMODE shareMode, const WAVEFORMATEX* pFormat, WAVEFORMATEX** ppClosestMatch); + HRESULT (STDMETHODCALLTYPE * GetMixFormat) (mal_IAudioClient2* pThis, WAVEFORMATEX** ppDeviceFormat); + HRESULT (STDMETHODCALLTYPE * GetDevicePeriod) (mal_IAudioClient2* pThis, MAL_REFERENCE_TIME* pDefaultDevicePeriod, MAL_REFERENCE_TIME* pMinimumDevicePeriod); + HRESULT (STDMETHODCALLTYPE * Start) (mal_IAudioClient2* pThis); + HRESULT (STDMETHODCALLTYPE * Stop) (mal_IAudioClient2* pThis); + HRESULT (STDMETHODCALLTYPE * Reset) (mal_IAudioClient2* pThis); + HRESULT (STDMETHODCALLTYPE * SetEventHandle) (mal_IAudioClient2* pThis, HANDLE eventHandle); + HRESULT (STDMETHODCALLTYPE * GetService) (mal_IAudioClient2* pThis, const IID* const riid, void** pp); + + // IAudioClient2 + HRESULT (STDMETHODCALLTYPE * IsOffloadCapable) (mal_IAudioClient2* pThis, MAL_AUDIO_STREAM_CATEGORY category, BOOL* pOffloadCapable); + HRESULT (STDMETHODCALLTYPE * SetClientProperties)(mal_IAudioClient2* pThis, const mal_AudioClientProperties* pProperties); + HRESULT (STDMETHODCALLTYPE * GetBufferSizeLimits)(mal_IAudioClient2* pThis, const WAVEFORMATEX* pFormat, BOOL eventDriven, MAL_REFERENCE_TIME* pMinBufferDuration, MAL_REFERENCE_TIME* pMaxBufferDuration); +} mal_IAudioClient2Vtbl; +struct mal_IAudioClient2 +{ + mal_IAudioClient2Vtbl* lpVtbl; +}; +HRESULT mal_IAudioClient2_QueryInterface(mal_IAudioClient2* pThis, const IID* const riid, void** ppObject) { return pThis->lpVtbl->QueryInterface(pThis, riid, ppObject); } +ULONG mal_IAudioClient2_AddRef(mal_IAudioClient2* pThis) { return pThis->lpVtbl->AddRef(pThis); } +ULONG mal_IAudioClient2_Release(mal_IAudioClient2* pThis) { return pThis->lpVtbl->Release(pThis); } +HRESULT mal_IAudioClient2_Initialize(mal_IAudioClient2* pThis, MAL_AUDCLNT_SHAREMODE 
shareMode, DWORD streamFlags, MAL_REFERENCE_TIME bufferDuration, MAL_REFERENCE_TIME periodicity, const WAVEFORMATEX* pFormat, const GUID* pAudioSessionGuid) { return pThis->lpVtbl->Initialize(pThis, shareMode, streamFlags, bufferDuration, periodicity, pFormat, pAudioSessionGuid); } +HRESULT mal_IAudioClient2_GetBufferSize(mal_IAudioClient2* pThis, mal_uint32* pNumBufferFrames) { return pThis->lpVtbl->GetBufferSize(pThis, pNumBufferFrames); } +HRESULT mal_IAudioClient2_GetStreamLatency(mal_IAudioClient2* pThis, MAL_REFERENCE_TIME* pLatency) { return pThis->lpVtbl->GetStreamLatency(pThis, pLatency); } +HRESULT mal_IAudioClient2_GetCurrentPadding(mal_IAudioClient2* pThis, mal_uint32* pNumPaddingFrames) { return pThis->lpVtbl->GetCurrentPadding(pThis, pNumPaddingFrames); } +HRESULT mal_IAudioClient2_IsFormatSupported(mal_IAudioClient2* pThis, MAL_AUDCLNT_SHAREMODE shareMode, const WAVEFORMATEX* pFormat, WAVEFORMATEX** ppClosestMatch) { return pThis->lpVtbl->IsFormatSupported(pThis, shareMode, pFormat, ppClosestMatch); } +HRESULT mal_IAudioClient2_GetMixFormat(mal_IAudioClient2* pThis, WAVEFORMATEX** ppDeviceFormat) { return pThis->lpVtbl->GetMixFormat(pThis, ppDeviceFormat); } +HRESULT mal_IAudioClient2_GetDevicePeriod(mal_IAudioClient2* pThis, MAL_REFERENCE_TIME* pDefaultDevicePeriod, MAL_REFERENCE_TIME* pMinimumDevicePeriod) { return pThis->lpVtbl->GetDevicePeriod(pThis, pDefaultDevicePeriod, pMinimumDevicePeriod); } +HRESULT mal_IAudioClient2_Start(mal_IAudioClient2* pThis) { return pThis->lpVtbl->Start(pThis); } +HRESULT mal_IAudioClient2_Stop(mal_IAudioClient2* pThis) { return pThis->lpVtbl->Stop(pThis); } +HRESULT mal_IAudioClient2_Reset(mal_IAudioClient2* pThis) { return pThis->lpVtbl->Reset(pThis); } +HRESULT mal_IAudioClient2_SetEventHandle(mal_IAudioClient2* pThis, HANDLE eventHandle) { return pThis->lpVtbl->SetEventHandle(pThis, eventHandle); } +HRESULT mal_IAudioClient2_GetService(mal_IAudioClient2* pThis, const IID* const riid, void** pp) { return pThis->lpVtbl->GetService(pThis, riid, pp); } +HRESULT mal_IAudioClient2_IsOffloadCapable(mal_IAudioClient2* pThis, MAL_AUDIO_STREAM_CATEGORY category, BOOL* pOffloadCapable) { return pThis->lpVtbl->IsOffloadCapable(pThis, category, pOffloadCapable); } +HRESULT mal_IAudioClient2_SetClientProperties(mal_IAudioClient2* pThis, const mal_AudioClientProperties* pProperties) { return pThis->lpVtbl->SetClientProperties(pThis, pProperties); } +HRESULT mal_IAudioClient2_GetBufferSizeLimits(mal_IAudioClient2* pThis, const WAVEFORMATEX* pFormat, BOOL eventDriven, MAL_REFERENCE_TIME* pMinBufferDuration, MAL_REFERENCE_TIME* pMaxBufferDuration) { return pThis->lpVtbl->GetBufferSizeLimits(pThis, pFormat, eventDriven, pMinBufferDuration, pMaxBufferDuration); } + + +// IAudioClient3 +typedef struct +{ + // IUnknown + HRESULT (STDMETHODCALLTYPE * QueryInterface)(mal_IAudioClient3* pThis, const IID* const riid, void** ppObject); + ULONG (STDMETHODCALLTYPE * AddRef) (mal_IAudioClient3* pThis); + ULONG (STDMETHODCALLTYPE * Release) (mal_IAudioClient3* pThis); + + // IAudioClient + HRESULT (STDMETHODCALLTYPE * Initialize) (mal_IAudioClient3* pThis, MAL_AUDCLNT_SHAREMODE shareMode, DWORD streamFlags, MAL_REFERENCE_TIME bufferDuration, MAL_REFERENCE_TIME periodicity, const WAVEFORMATEX* pFormat, const GUID* pAudioSessionGuid); + HRESULT (STDMETHODCALLTYPE * GetBufferSize) (mal_IAudioClient3* pThis, mal_uint32* pNumBufferFrames); + HRESULT (STDMETHODCALLTYPE * GetStreamLatency) (mal_IAudioClient3* pThis, MAL_REFERENCE_TIME* pLatency); + HRESULT 
(STDMETHODCALLTYPE * GetCurrentPadding)(mal_IAudioClient3* pThis, mal_uint32* pNumPaddingFrames); + HRESULT (STDMETHODCALLTYPE * IsFormatSupported)(mal_IAudioClient3* pThis, MAL_AUDCLNT_SHAREMODE shareMode, const WAVEFORMATEX* pFormat, WAVEFORMATEX** ppClosestMatch); + HRESULT (STDMETHODCALLTYPE * GetMixFormat) (mal_IAudioClient3* pThis, WAVEFORMATEX** ppDeviceFormat); + HRESULT (STDMETHODCALLTYPE * GetDevicePeriod) (mal_IAudioClient3* pThis, MAL_REFERENCE_TIME* pDefaultDevicePeriod, MAL_REFERENCE_TIME* pMinimumDevicePeriod); + HRESULT (STDMETHODCALLTYPE * Start) (mal_IAudioClient3* pThis); + HRESULT (STDMETHODCALLTYPE * Stop) (mal_IAudioClient3* pThis); + HRESULT (STDMETHODCALLTYPE * Reset) (mal_IAudioClient3* pThis); + HRESULT (STDMETHODCALLTYPE * SetEventHandle) (mal_IAudioClient3* pThis, HANDLE eventHandle); + HRESULT (STDMETHODCALLTYPE * GetService) (mal_IAudioClient3* pThis, const IID* const riid, void** pp); + + // IAudioClient2 + HRESULT (STDMETHODCALLTYPE * IsOffloadCapable) (mal_IAudioClient3* pThis, MAL_AUDIO_STREAM_CATEGORY category, BOOL* pOffloadCapable); + HRESULT (STDMETHODCALLTYPE * SetClientProperties)(mal_IAudioClient3* pThis, const mal_AudioClientProperties* pProperties); + HRESULT (STDMETHODCALLTYPE * GetBufferSizeLimits)(mal_IAudioClient3* pThis, const WAVEFORMATEX* pFormat, BOOL eventDriven, MAL_REFERENCE_TIME* pMinBufferDuration, MAL_REFERENCE_TIME* pMaxBufferDuration); + + // IAudioClient3 + HRESULT (STDMETHODCALLTYPE * GetSharedModeEnginePeriod) (mal_IAudioClient3* pThis, const WAVEFORMATEX* pFormat, UINT32* pDefaultPeriodInFrames, UINT32* pFundamentalPeriodInFrames, UINT32* pMinPeriodInFrames, UINT32* pMaxPeriodInFrames); + HRESULT (STDMETHODCALLTYPE * GetCurrentSharedModeEnginePeriod)(mal_IAudioClient3* pThis, WAVEFORMATEX** ppFormat, UINT32* pCurrentPeriodInFrames); + HRESULT (STDMETHODCALLTYPE * InitializeSharedAudioStream) (mal_IAudioClient3* pThis, DWORD streamFlags, UINT32 periodInFrames, const WAVEFORMATEX* pFormat, const GUID* pAudioSessionGuid); +} mal_IAudioClient3Vtbl; +struct mal_IAudioClient3 +{ + mal_IAudioClient3Vtbl* lpVtbl; +}; +HRESULT mal_IAudioClient3_QueryInterface(mal_IAudioClient3* pThis, const IID* const riid, void** ppObject) { return pThis->lpVtbl->QueryInterface(pThis, riid, ppObject); } +ULONG mal_IAudioClient3_AddRef(mal_IAudioClient3* pThis) { return pThis->lpVtbl->AddRef(pThis); } +ULONG mal_IAudioClient3_Release(mal_IAudioClient3* pThis) { return pThis->lpVtbl->Release(pThis); } +HRESULT mal_IAudioClient3_Initialize(mal_IAudioClient3* pThis, MAL_AUDCLNT_SHAREMODE shareMode, DWORD streamFlags, MAL_REFERENCE_TIME bufferDuration, MAL_REFERENCE_TIME periodicity, const WAVEFORMATEX* pFormat, const GUID* pAudioSessionGuid) { return pThis->lpVtbl->Initialize(pThis, shareMode, streamFlags, bufferDuration, periodicity, pFormat, pAudioSessionGuid); } +HRESULT mal_IAudioClient3_GetBufferSize(mal_IAudioClient3* pThis, mal_uint32* pNumBufferFrames) { return pThis->lpVtbl->GetBufferSize(pThis, pNumBufferFrames); } +HRESULT mal_IAudioClient3_GetStreamLatency(mal_IAudioClient3* pThis, MAL_REFERENCE_TIME* pLatency) { return pThis->lpVtbl->GetStreamLatency(pThis, pLatency); } +HRESULT mal_IAudioClient3_GetCurrentPadding(mal_IAudioClient3* pThis, mal_uint32* pNumPaddingFrames) { return pThis->lpVtbl->GetCurrentPadding(pThis, pNumPaddingFrames); } +HRESULT mal_IAudioClient3_IsFormatSupported(mal_IAudioClient3* pThis, MAL_AUDCLNT_SHAREMODE shareMode, const WAVEFORMATEX* pFormat, WAVEFORMATEX** ppClosestMatch) { return 
pThis->lpVtbl->IsFormatSupported(pThis, shareMode, pFormat, ppClosestMatch); } +HRESULT mal_IAudioClient3_GetMixFormat(mal_IAudioClient3* pThis, WAVEFORMATEX** ppDeviceFormat) { return pThis->lpVtbl->GetMixFormat(pThis, ppDeviceFormat); } +HRESULT mal_IAudioClient3_GetDevicePeriod(mal_IAudioClient3* pThis, MAL_REFERENCE_TIME* pDefaultDevicePeriod, MAL_REFERENCE_TIME* pMinimumDevicePeriod) { return pThis->lpVtbl->GetDevicePeriod(pThis, pDefaultDevicePeriod, pMinimumDevicePeriod); } +HRESULT mal_IAudioClient3_Start(mal_IAudioClient3* pThis) { return pThis->lpVtbl->Start(pThis); } +HRESULT mal_IAudioClient3_Stop(mal_IAudioClient3* pThis) { return pThis->lpVtbl->Stop(pThis); } +HRESULT mal_IAudioClient3_Reset(mal_IAudioClient3* pThis) { return pThis->lpVtbl->Reset(pThis); } +HRESULT mal_IAudioClient3_SetEventHandle(mal_IAudioClient3* pThis, HANDLE eventHandle) { return pThis->lpVtbl->SetEventHandle(pThis, eventHandle); } +HRESULT mal_IAudioClient3_GetService(mal_IAudioClient3* pThis, const IID* const riid, void** pp) { return pThis->lpVtbl->GetService(pThis, riid, pp); } +HRESULT mal_IAudioClient3_IsOffloadCapable(mal_IAudioClient3* pThis, MAL_AUDIO_STREAM_CATEGORY category, BOOL* pOffloadCapable) { return pThis->lpVtbl->IsOffloadCapable(pThis, category, pOffloadCapable); } +HRESULT mal_IAudioClient3_SetClientProperties(mal_IAudioClient3* pThis, const mal_AudioClientProperties* pProperties) { return pThis->lpVtbl->SetClientProperties(pThis, pProperties); } +HRESULT mal_IAudioClient3_GetBufferSizeLimits(mal_IAudioClient3* pThis, const WAVEFORMATEX* pFormat, BOOL eventDriven, MAL_REFERENCE_TIME* pMinBufferDuration, MAL_REFERENCE_TIME* pMaxBufferDuration) { return pThis->lpVtbl->GetBufferSizeLimits(pThis, pFormat, eventDriven, pMinBufferDuration, pMaxBufferDuration); } +HRESULT mal_IAudioClient3_GetSharedModeEnginePeriod(mal_IAudioClient3* pThis, const WAVEFORMATEX* pFormat, UINT32* pDefaultPeriodInFrames, UINT32* pFundamentalPeriodInFrames, UINT32* pMinPeriodInFrames, UINT32* pMaxPeriodInFrames) { return pThis->lpVtbl->GetSharedModeEnginePeriod(pThis, pFormat, pDefaultPeriodInFrames, pFundamentalPeriodInFrames, pMinPeriodInFrames, pMaxPeriodInFrames); } +HRESULT mal_IAudioClient3_GetCurrentSharedModeEnginePeriod(mal_IAudioClient3* pThis, WAVEFORMATEX** ppFormat, UINT32* pCurrentPeriodInFrames) { return pThis->lpVtbl->GetCurrentSharedModeEnginePeriod(pThis, ppFormat, pCurrentPeriodInFrames); } +HRESULT mal_IAudioClient3_InitializeSharedAudioStream(mal_IAudioClient3* pThis, DWORD streamFlags, UINT32 periodInFrames, const WAVEFORMATEX* pFormat, const GUID* pAudioSessionGUID) { return pThis->lpVtbl->InitializeSharedAudioStream(pThis, streamFlags, periodInFrames, pFormat, pAudioSessionGUID); } + // IAudioRenderClient typedef struct @@ -5602,60 +5780,226 @@ HRESULT mal_IAudioCaptureClient_GetBuffer(mal_IAudioCaptureClient* pThis, BYTE** HRESULT mal_IAudioCaptureClient_ReleaseBuffer(mal_IAudioCaptureClient* pThis, mal_uint32 numFramesRead) { return pThis->lpVtbl->ReleaseBuffer(pThis, numFramesRead); } HRESULT mal_IAudioCaptureClient_GetNextPacketSize(mal_IAudioCaptureClient* pThis, mal_uint32* pNumFramesInNextPacket) { return pThis->lpVtbl->GetNextPacketSize(pThis, pNumFramesInNextPacket); } -// This is the part that's preventing mini_al from being compiled as C with UWP. We need to implement IActivateAudioInterfaceCompletionHandler -// in C which is quite annoying. 
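The change below replaces the C++/WRL-based completion handler with a COM object written in plain C, which is what lifts the old "UWP is only supported when compiling as C++" restriction. For orientation, this is the general shape of that pattern; the names in this sketch are illustrative, not mini_al's actual types, and <windows.h> is assumed for HRESULT, ULONG, LONG, IID and the Interlocked* atomics.

    /* Minimal sketch of a COM object in plain C (illustrative names, not part of the patch). */
    typedef struct my_handler my_handler;

    typedef struct
    {
        HRESULT (STDMETHODCALLTYPE * QueryInterface)(my_handler* pThis, const IID* const riid, void** ppObject);
        ULONG   (STDMETHODCALLTYPE * AddRef)        (my_handler* pThis);
        ULONG   (STDMETHODCALLTYPE * Release)       (my_handler* pThis);
        /* ...interface-specific methods follow, in declaration order... */
    } my_handler_vtbl;

    struct my_handler
    {
        my_handler_vtbl* lpVtbl;   /* must be the first member; plays the role of C++'s hidden vptr */
        volatile LONG    counter;
    };

    static HRESULT STDMETHODCALLTYPE my_handler_QueryInterface(my_handler* pThis, const IID* const riid, void** ppObject)
    {
        (void)riid;                /* a real implementation compares riid against the IIDs it supports */
        *ppObject = pThis;
        pThis->lpVtbl->AddRef(pThis);
        return S_OK;
    }

    static ULONG STDMETHODCALLTYPE my_handler_AddRef(my_handler* pThis)
    {
        return (ULONG)InterlockedIncrement(&pThis->counter);
    }

    static ULONG STDMETHODCALLTYPE my_handler_Release(my_handler* pThis)
    {
        return (ULONG)InterlockedDecrement(&pThis->counter);   /* object lives on the stack or as a global, so nothing to free at zero */
    }

    static my_handler_vtbl g_my_handler_vtbl = {
        my_handler_QueryInterface,
        my_handler_AddRef,
        my_handler_Release
    };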
#ifndef MAL_WIN32_DESKTOP - #ifdef __cplusplus - #include - #include +#include +typedef struct mal_completion_handler_uwp mal_completion_handler_uwp; - class malCompletionHandler : public Microsoft::WRL::RuntimeClass< Microsoft::WRL::RuntimeClassFlags< Microsoft::WRL::ClassicCom >, Microsoft::WRL::FtmBase, IActivateAudioInterfaceCompletionHandler > - { - public: +typedef struct +{ + // IUnknown + HRESULT (STDMETHODCALLTYPE * QueryInterface)(mal_completion_handler_uwp* pThis, const IID* const riid, void** ppObject); + ULONG (STDMETHODCALLTYPE * AddRef) (mal_completion_handler_uwp* pThis); + ULONG (STDMETHODCALLTYPE * Release) (mal_completion_handler_uwp* pThis); - malCompletionHandler() - : m_hEvent(NULL) - { - } + // IActivateAudioInterfaceCompletionHandler + HRESULT (STDMETHODCALLTYPE * ActivateCompleted)(mal_completion_handler_uwp* pThis, mal_IActivateAudioInterfaceAsyncOperation* pActivateOperation); +} mal_completion_handler_uwp_vtbl; +struct mal_completion_handler_uwp +{ + mal_completion_handler_uwp_vtbl* lpVtbl; + mal_uint32 counter; + HANDLE hEvent; +}; - mal_result Init() - { - m_hEvent = CreateEventA(NULL, FALSE, FALSE, NULL); - if (m_hEvent == NULL) { - return MAL_ERROR; - } +HRESULT STDMETHODCALLTYPE mal_completion_handler_uwp_QueryInterface(mal_completion_handler_uwp* pThis, const IID* const riid, void** ppObject) +{ + // We need to "implement" IAgileObject which is just an indicator that's used internally by WASAPI for some multithreading management. To + // "implement" this, we just make sure we return pThis when the IAgileObject is requested. + if (!mal_is_guid_equal(riid, &MAL_IID_IUnknown) && !mal_is_guid_equal(riid, &MAL_IID_IActivateAudioInterfaceCompletionHandler) && !mal_is_guid_equal(riid, &MAL_IID_IAgileObject)) { + *ppObject = NULL; + return E_NOINTERFACE; + } - return MAL_SUCCESS; - } + // Getting here means the IID is IUnknown or IMMNotificationClient. + *ppObject = (void*)pThis; + ((mal_completion_handler_uwp_vtbl*)pThis->lpVtbl)->AddRef(pThis); + return S_OK; +} - void Uninit() - { - if (m_hEvent != NULL) { - CloseHandle(m_hEvent); - } - } +ULONG STDMETHODCALLTYPE mal_completion_handler_uwp_AddRef(mal_completion_handler_uwp* pThis) +{ + return (ULONG)mal_atomic_increment_32(&pThis->counter); +} - void Wait() - { - WaitForSingleObject(m_hEvent, INFINITE); - } +ULONG STDMETHODCALLTYPE mal_completion_handler_uwp_Release(mal_completion_handler_uwp* pThis) +{ + mal_uint32 newRefCount = mal_atomic_decrement_32(&pThis->counter); + if (newRefCount == 0) { + return 0; // We don't free anything here because we never allocate the object on the heap. + } - HRESULT STDMETHODCALLTYPE ActivateCompleted(IActivateAudioInterfaceAsyncOperation *activateOperation) - { - (void)activateOperation; - SetEvent(m_hEvent); - return S_OK; - } + return (ULONG)newRefCount; +} - private: - HANDLE m_hEvent; // This is created in Init(), deleted in Uninit(), waited on in Wait() and signaled in ActivateCompleted(). - }; - #else - #error "The UWP build is currently only supported in C++." 
- #endif +HRESULT STDMETHODCALLTYPE mal_completion_handler_uwp_ActivateCompleted(mal_completion_handler_uwp* pThis, mal_IActivateAudioInterfaceAsyncOperation* pActivateOperation) +{ + (void)pActivateOperation; + SetEvent(pThis->hEvent); + return S_OK; +} + + +static mal_completion_handler_uwp_vtbl g_malCompletionHandlerVtblInstance = { + mal_completion_handler_uwp_QueryInterface, + mal_completion_handler_uwp_AddRef, + mal_completion_handler_uwp_Release, + mal_completion_handler_uwp_ActivateCompleted +}; + +mal_result mal_completion_handler_uwp_init(mal_completion_handler_uwp* pHandler) +{ + mal_assert(pHandler != NULL); + mal_zero_object(pHandler); + + pHandler->lpVtbl = &g_malCompletionHandlerVtblInstance; + pHandler->counter = 1; + pHandler->hEvent = CreateEventA(NULL, FALSE, FALSE, NULL); + if (pHandler->hEvent == NULL) { + return MAL_ERROR; + } + + return MAL_SUCCESS; +} + +void mal_completion_handler_uwp_uninit(mal_completion_handler_uwp* pHandler) +{ + if (pHandler->hEvent != NULL) { + CloseHandle(pHandler->hEvent); + } +} + +void mal_completion_handler_uwp_wait(mal_completion_handler_uwp* pHandler) +{ + WaitForSingleObject(pHandler->hEvent, INFINITE); +} #endif // !MAL_WIN32_DESKTOP +// We need a virtual table for our notification client object that's used for detecting changes to the default device. +#ifdef MAL_WIN32_DESKTOP +HRESULT STDMETHODCALLTYPE mal_IMMNotificationClient_QueryInterface(mal_IMMNotificationClient* pThis, const IID* const riid, void** ppObject) +{ + // We care about two interfaces - IUnknown and IMMNotificationClient. If the requested IID is something else + // we just return E_NOINTERFACE. Otherwise we need to increment the reference counter and return S_OK. + if (!mal_is_guid_equal(riid, &MAL_IID_IUnknown) && !mal_is_guid_equal(riid, &MAL_IID_IMMNotificationClient)) { + *ppObject = NULL; + return E_NOINTERFACE; + } + // Getting here means the IID is IUnknown or IMMNotificationClient. + *ppObject = (void*)pThis; + ((mal_IMMNotificationClientVtbl*)pThis->lpVtbl)->AddRef(pThis); + return S_OK; +} + +ULONG STDMETHODCALLTYPE mal_IMMNotificationClient_AddRef(mal_IMMNotificationClient* pThis) +{ + return (ULONG)mal_atomic_increment_32(&pThis->counter); +} + +ULONG STDMETHODCALLTYPE mal_IMMNotificationClient_Release(mal_IMMNotificationClient* pThis) +{ + mal_uint32 newRefCount = mal_atomic_decrement_32(&pThis->counter); + if (newRefCount == 0) { + return 0; // We don't free anything here because we never allocate the object on the heap. + } + + return (ULONG)newRefCount; +} + + +HRESULT STDMETHODCALLTYPE mal_IMMNotificationClient_OnDeviceStateChanged(mal_IMMNotificationClient* pThis, LPCWSTR pDeviceID, DWORD dwNewState) +{ +#ifdef MAL_DEBUG_OUTPUT + printf("IMMNotificationClient_OnDeviceStateChanged(pDeviceID=%S, dwNewState=%u)\n", pDeviceID, (unsigned int)dwNewState); +#endif + + (void)pThis; + (void)pDeviceID; + (void)dwNewState; + return S_OK; +} + +HRESULT STDMETHODCALLTYPE mal_IMMNotificationClient_OnDeviceAdded(mal_IMMNotificationClient* pThis, LPCWSTR pDeviceID) +{ +#ifdef MAL_DEBUG_OUTPUT + printf("IMMNotificationClient_OnDeviceAdded(pDeviceID=%S)\n", pDeviceID); +#endif + + // We don't need to worry about this event for our purposes. 
+ (void)pThis; + (void)pDeviceID; + return S_OK; +} + +HRESULT STDMETHODCALLTYPE mal_IMMNotificationClient_OnDeviceRemoved(mal_IMMNotificationClient* pThis, LPCWSTR pDeviceID) +{ +#ifdef MAL_DEBUG_OUTPUT + printf("IMMNotificationClient_OnDeviceRemoved(pDeviceID=%S)\n", pDeviceID); +#endif + + // We don't need to worry about this event for our purposes. + (void)pThis; + (void)pDeviceID; + return S_OK; +} + +HRESULT STDMETHODCALLTYPE mal_IMMNotificationClient_OnDefaultDeviceChanged(mal_IMMNotificationClient* pThis, mal_EDataFlow dataFlow, mal_ERole role, LPCWSTR pDefaultDeviceID) +{ +#ifdef MAL_DEBUG_OUTPUT + printf("IMMNotificationClient_OnDefaultDeviceChanged(dataFlow=%d, role=%d, pDefaultDeviceID=%S)\n", dataFlow, role, pDefaultDeviceID); +#endif + + // We only ever use the eConsole role in mini_al. + if (role != mal_eConsole) { + return S_OK; + } + + // We only care about devices with the same data flow and role as the current device. + if ((pThis->pDevice->type == mal_device_type_playback && dataFlow != mal_eRender ) || + (pThis->pDevice->type == mal_device_type_capture && dataFlow != mal_eCapture)) { + return S_OK; + } + + // Not currently supporting automatic stream routing in exclusive mode. This is not working correctly on my machine due to + // AUDCLNT_E_DEVICE_IN_USE errors when reinitializing the device. If this is a bug in mini_al, we can try re-enabling this once + // it's fixed. + if (pThis->pDevice->exclusiveMode) { + return S_OK; + } + + // We don't change the device here - we change it in the worker thread to keep synchronization simple. To this I'm just setting a flag to + // indicate that the default device has changed. + mal_atomic_exchange_32(&pThis->pDevice->wasapi.hasDefaultDeviceChanged, MAL_TRUE); + SetEvent(pThis->pDevice->wasapi.hBreakEvent); // <-- The main loop will be waiting on some events. We want to break from this wait ASAP so we can change the device as quickly as possible. 
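OnDefaultDeviceChanged deliberately does no device work itself: it records the change in an atomic flag and signals the break event so the thread that owns the device can react. A simplified sketch of the consuming side of that handshake follows; the function and variable names are illustrative, not mini_al's actual worker loop.

    /* Sketch only (illustrative names): reacting to the flag/event set by the notification callback. */
    static void wait_and_handle_route_change(HANDLE hFrameEvent, HANDLE hBreakEvent,
                                             volatile LONG* pDefaultDeviceChanged)
    {
        HANDLE events[2] = { hFrameEvent, hBreakEvent };
        WaitForMultipleObjects(2, events, FALSE, INFINITE);   /* returns early when the break event fires */

        /* Consume the flag atomically so a device switch is handled exactly once. */
        if (InterlockedExchange(pDefaultDeviceChanged, 0) != 0) {
            /* stop, reinitialize against the new default endpoint, then restart */
        }
    }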
+ + + (void)pDefaultDeviceID; + return S_OK; +} + +HRESULT STDMETHODCALLTYPE mal_IMMNotificationClient_OnPropertyValueChanged(mal_IMMNotificationClient* pThis, LPCWSTR pDeviceID, const PROPERTYKEY key) +{ +#ifdef MAL_DEBUG_OUTPUT + printf("IMMNotificationClient_OnPropertyValueChanged(pDeviceID=%S)\n", pDeviceID); +#endif + + (void)pThis; + (void)pDeviceID; + (void)key; + return S_OK; +} + +static mal_IMMNotificationClientVtbl g_malNotificationCientVtbl = { + mal_IMMNotificationClient_QueryInterface, + mal_IMMNotificationClient_AddRef, + mal_IMMNotificationClient_Release, + mal_IMMNotificationClient_OnDeviceStateChanged, + mal_IMMNotificationClient_OnDeviceAdded, + mal_IMMNotificationClient_OnDeviceRemoved, + mal_IMMNotificationClient_OnDefaultDeviceChanged, + mal_IMMNotificationClient_OnPropertyValueChanged +}; +#endif // MAL_WIN32_DESKTOP mal_bool32 mal_context_is_device_id_equal__wasapi(mal_context* pContext, const mal_device_id* pID0, const mal_device_id* pID1) { @@ -5681,13 +6025,13 @@ void mal_set_device_info_from_WAVEFORMATEX(const WAVEFORMATEX* pWF, mal_device_i } #ifndef MAL_WIN32_DESKTOP -mal_result mal_context_get_IAudioClient_UWP__wasapi(mal_context* pContext, mal_device_type deviceType, const mal_device_id* pDeviceID, mal_IAudioClient** ppAudioClient, IUnknown** ppActivatedInterface) +mal_result mal_context_get_IAudioClient_UWP__wasapi(mal_context* pContext, mal_device_type deviceType, const mal_device_id* pDeviceID, mal_IAudioClient** ppAudioClient, mal_IUnknown** ppActivatedInterface) { mal_assert(pContext != NULL); mal_assert(ppAudioClient != NULL); mal_IActivateAudioInterfaceAsyncOperation *pAsyncOp = NULL; - malCompletionHandler completionHandler; + mal_completion_handler_uwp completionHandler; IID iid; if (pDeviceID != NULL) { @@ -5701,20 +6045,28 @@ mal_result mal_context_get_IAudioClient_UWP__wasapi(mal_context* pContext, mal_d } LPOLESTR iidStr; +#if defined(__cplusplus) HRESULT hr = StringFromIID(iid, &iidStr); +#else + HRESULT hr = StringFromIID(&iid, &iidStr); +#endif if (FAILED(hr)) { return mal_context_post_error(pContext, NULL, MAL_LOG_LEVEL_ERROR, "[WASAPI] Failed to convert device IID to string for ActivateAudioInterfaceAsync(). Out of memory.", MAL_OUT_OF_MEMORY); } - mal_result result = completionHandler.Init(); + mal_result result = mal_completion_handler_uwp_init(&completionHandler); if (result != MAL_SUCCESS) { mal_CoTaskMemFree(pContext, iidStr); return mal_context_post_error(pContext, NULL, MAL_LOG_LEVEL_ERROR, "[WASAPI] Failed to create event for waiting for ActivateAudioInterfaceAsync().", MAL_FAILED_TO_OPEN_BACKEND_DEVICE); } +#if defined(__cplusplus) hr = ActivateAudioInterfaceAsync(iidStr, MAL_IID_IAudioClient, NULL, (IActivateAudioInterfaceCompletionHandler*)&completionHandler, (IActivateAudioInterfaceAsyncOperation**)&pAsyncOp); +#else + hr = ActivateAudioInterfaceAsync(iidStr, &MAL_IID_IAudioClient, NULL, (IActivateAudioInterfaceCompletionHandler*)&completionHandler, (IActivateAudioInterfaceAsyncOperation**)&pAsyncOp); +#endif if (FAILED(hr)) { - completionHandler.Uninit(); + mal_completion_handler_uwp_uninit(&completionHandler); mal_CoTaskMemFree(pContext, iidStr); return mal_context_post_error(pContext, NULL, MAL_LOG_LEVEL_ERROR, "[WASAPI] ActivateAudioInterfaceAsync() failed.", MAL_FAILED_TO_OPEN_BACKEND_DEVICE); } @@ -5722,11 +6074,11 @@ mal_result mal_context_get_IAudioClient_UWP__wasapi(mal_context* pContext, mal_d mal_CoTaskMemFree(pContext, iidStr); // Wait for the async operation for finish. 
- completionHandler.Wait(); - completionHandler.Uninit(); + mal_completion_handler_uwp_wait(&completionHandler); + mal_completion_handler_uwp_uninit(&completionHandler); HRESULT activateResult; - IUnknown* pActivatedInterface; + mal_IUnknown* pActivatedInterface; hr = mal_IActivateAudioInterfaceAsyncOperation_GetActivateResult(pAsyncOp, &activateResult, &pActivatedInterface); mal_IActivateAudioInterfaceAsyncOperation_Release(pAsyncOp); @@ -5735,7 +6087,7 @@ mal_result mal_context_get_IAudioClient_UWP__wasapi(mal_context* pContext, mal_d } // Here is where we grab the IAudioClient interface. - hr = pActivatedInterface->QueryInterface(MAL_IID_IAudioClient, (void**)ppAudioClient); + hr = mal_IUnknown_QueryInterface(pActivatedInterface, &MAL_IID_IAudioClient, (void**)ppAudioClient); if (FAILED(hr)) { return mal_context_post_error(pContext, NULL, MAL_LOG_LEVEL_ERROR, "[WASAPI] Failed to query IAudioClient interface.", MAL_FAILED_TO_OPEN_BACKEND_DEVICE); } @@ -5743,7 +6095,7 @@ mal_result mal_context_get_IAudioClient_UWP__wasapi(mal_context* pContext, mal_d if (ppActivatedInterface) { *ppActivatedInterface = pActivatedInterface; } else { - pActivatedInterface->Release(); + mal_IUnknown_Release(pActivatedInterface); } return MAL_SUCCESS; @@ -6085,6 +6437,13 @@ void mal_device_uninit__wasapi(mal_device* pDevice) { mal_assert(pDevice != NULL); +#ifdef MAL_WIN32_DESKTOP + if (pDevice->wasapi.pDeviceEnumerator) { + ((mal_IMMDeviceEnumerator*)pDevice->wasapi.pDeviceEnumerator)->lpVtbl->UnregisterEndpointNotificationCallback((mal_IMMDeviceEnumerator*)pDevice->wasapi.pDeviceEnumerator, &pDevice->wasapi.notificationClient); + mal_IMMDeviceEnumerator_Release((mal_IMMDeviceEnumerator*)pDevice->wasapi.pDeviceEnumerator); + } +#endif + if (pDevice->wasapi.pRenderClient) { mal_IAudioRenderClient_Release((mal_IAudioRenderClient*)pDevice->wasapi.pRenderClient); } @@ -6098,25 +6457,60 @@ void mal_device_uninit__wasapi(mal_device* pDevice) if (pDevice->wasapi.hEvent) { CloseHandle(pDevice->wasapi.hEvent); } - if (pDevice->wasapi.hStopEvent) { - CloseHandle(pDevice->wasapi.hStopEvent); + if (pDevice->wasapi.hBreakEvent) { + CloseHandle(pDevice->wasapi.hBreakEvent); } } -mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, const mal_device_id* pDeviceID, const mal_device_config* pConfig, mal_device* pDevice) +typedef struct +{ + // Input. + mal_format formatIn; + mal_uint32 channelsIn; + mal_uint32 sampleRateIn; + mal_channel channelMapIn[MAL_MAX_CHANNELS]; + mal_uint32 bufferSizeInFramesIn; + mal_uint32 bufferSizeInMillisecondsIn; + mal_uint32 periodsIn; + mal_bool32 usingDefaultFormat; + mal_bool32 usingDefaultChannels; + mal_bool32 usingDefaultSampleRate; + mal_bool32 usingDefaultChannelMap; + mal_share_mode shareMode; + + // Output. 
+ mal_IAudioClient* pAudioClient; + mal_IAudioRenderClient* pRenderClient; + mal_IAudioCaptureClient* pCaptureClient; + mal_format formatOut; + mal_uint32 channelsOut; + mal_uint32 sampleRateOut; + mal_channel channelMapOut[MAL_MAX_CHANNELS]; + mal_uint32 bufferSizeInFramesOut; + mal_uint32 periodsOut; + mal_bool32 exclusiveMode; + char deviceName[256]; +} mal_device_init_internal_data__wasapi; + +mal_result mal_device_init_internal__wasapi(mal_context* pContext, mal_device_type type, const mal_device_id* pDeviceID, mal_device_init_internal_data__wasapi* pData) { (void)pContext; - mal_assert(pDevice != NULL); - mal_zero_object(&pDevice->wasapi); + mal_assert(pContext != NULL); + mal_assert(pData != NULL); + + pData->pAudioClient = NULL; + pData->pRenderClient = NULL; + pData->pCaptureClient = NULL; + HRESULT hr; mal_result result = MAL_SUCCESS; const char* errorMsg = ""; MAL_AUDCLNT_SHAREMODE shareMode = MAL_AUDCLNT_SHAREMODE_SHARED; - WAVEFORMATEXTENSIBLE* pBestFormatTemp = NULL; MAL_REFERENCE_TIME bufferDurationInMicroseconds; - + mal_bool32 wasInitializedUsingIAudioClient3 = MAL_FALSE; + WAVEFORMATEXTENSIBLE wf; #ifdef MAL_WIN32_DESKTOP mal_IMMDevice* pMMDevice = NULL; @@ -6125,40 +6519,39 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, goto done; } - hr = mal_IMMDevice_Activate(pMMDevice, &MAL_IID_IAudioClient, CLSCTX_ALL, NULL, &pDevice->wasapi.pAudioClient); + hr = mal_IMMDevice_Activate(pMMDevice, &MAL_IID_IAudioClient, CLSCTX_ALL, NULL, (void**)&pData->pAudioClient); if (FAILED(hr)) { errorMsg = "[WASAPI] Failed to activate device.", result = MAL_FAILED_TO_OPEN_BACKEND_DEVICE; goto done; } #else - IUnknown* pActivatedInterface = NULL; - result = mal_context_get_IAudioClient_UWP__wasapi(pContext, type, pDeviceID, (mal_IAudioClient**)&pDevice->wasapi.pAudioClient, &pActivatedInterface); + mal_IUnknown* pActivatedInterface = NULL; + result = mal_context_get_IAudioClient_UWP__wasapi(pContext, type, pDeviceID, &pData->pAudioClient, &pActivatedInterface); if (result != MAL_SUCCESS) { goto done; } #endif - WAVEFORMATEXTENSIBLE wf; - mal_zero_object(&wf); - wf.Format.cbSize = sizeof(wf); - wf.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE; - wf.Format.nChannels = (WORD)pDevice->channels; - wf.Format.nSamplesPerSec = (DWORD)pDevice->sampleRate; - wf.Format.wBitsPerSample = (WORD)mal_get_bytes_per_sample(pDevice->format)*8; - wf.Format.nBlockAlign = (wf.Format.nChannels * wf.Format.wBitsPerSample) / 8; - wf.Format.nAvgBytesPerSec = wf.Format.nBlockAlign * wf.Format.nSamplesPerSec; - wf.Samples.wValidBitsPerSample = /*(pDevice->format == mal_format_s24_32) ? 24 :*/ wf.Format.wBitsPerSample; - wf.dwChannelMask = mal_channel_map_to_channel_mask__win32(pDevice->channelMap, pDevice->channels); - if (pDevice->format == mal_format_f32) { - wf.SubFormat = MAL_GUID_KSDATAFORMAT_SUBTYPE_IEEE_FLOAT; - } else { - wf.SubFormat = MAL_GUID_KSDATAFORMAT_SUBTYPE_PCM; + // Try enabling hardware offloading. 
+ mal_IAudioClient2* pAudioClient2; + hr = mal_IAudioClient_QueryInterface(pData->pAudioClient, &MAL_IID_IAudioClient2, (void**)&pAudioClient2); + if (SUCCEEDED(hr)) { + BOOL isHardwareOffloadingSupported = 0; + hr = mal_IAudioClient2_IsOffloadCapable(pAudioClient2, MAL_AudioCategory_Other, &isHardwareOffloadingSupported); + if (SUCCEEDED(hr) && isHardwareOffloadingSupported) { + mal_AudioClientProperties clientProperties; + mal_zero_object(&clientProperties); + clientProperties.cbSize = sizeof(clientProperties); + clientProperties.bIsOffload = 1; + clientProperties.eCategory = MAL_AudioCategory_Other; + mal_IAudioClient2_SetClientProperties(pAudioClient2, &clientProperties); + } } // Here is where we try to determine the best format to use with the device. If the client if wanting exclusive mode, first try finding the best format for that. If this fails, fall back to shared mode. result = MAL_FORMAT_NOT_SUPPORTED; - if (pConfig->shareMode == mal_share_mode_exclusive) { + if (pData->shareMode == mal_share_mode_exclusive) { #ifdef MAL_WIN32_DESKTOP // In exclusive mode on desktop we always use the backend's native format. mal_IPropertyStore* pStore = NULL; @@ -6169,19 +6562,23 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, hr = mal_IPropertyStore_GetValue(pStore, &MAL_PKEY_AudioEngine_DeviceFormat, &prop); if (SUCCEEDED(hr)) { WAVEFORMATEX* pActualFormat = (WAVEFORMATEX*)prop.blob.pBlobData; - hr = mal_IAudioClient_IsFormatSupported((mal_IAudioClient*)pDevice->wasapi.pAudioClient, MAL_AUDCLNT_SHAREMODE_EXCLUSIVE, pActualFormat, NULL); + hr = mal_IAudioClient_IsFormatSupported((mal_IAudioClient*)pData->pAudioClient, MAL_AUDCLNT_SHAREMODE_EXCLUSIVE, pActualFormat, NULL); if (SUCCEEDED(hr)) { mal_copy_memory(&wf, pActualFormat, sizeof(WAVEFORMATEXTENSIBLE)); } - mal_PropVariantClear(pDevice->pContext, &prop); + mal_PropVariantClear(pContext, &prop); } mal_IPropertyStore_Release(pStore); } #else - // With non-Desktop builds we just try using the requested format. - hr = mal_IAudioClient_IsFormatSupported((mal_IAudioClient*)pDevice->wasapi.pAudioClient, MAL_AUDCLNT_SHAREMODE_EXCLUSIVE, (WAVEFORMATEX*)&wf, NULL); + // I do not know how to query the device's native format on UWP so for now I'm just disabling support for + // exclusive mode. The alternative is to enumerate over different formats and check IsFormatSupported() + // until you find one that works. + // + // TODO: Add support for exclusive mode to UWP. + hr = S_FALSE; #endif if (hr == S_OK) { @@ -6192,42 +6589,18 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, // Fall back to shared mode if necessary. if (result != MAL_SUCCESS) { + // In shared mode we are always using the format reported by the operating system. 
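// (Why the mix format: in shared mode the Windows audio engine mixes all clients at a single
//  device-wide format, and IAudioClient::GetMixFormat() reports exactly that format. mini_al adopts
//  it as the device's internal format and performs any needed conversion on its side. GetMixFormat()
//  allocates the returned WAVEFORMATEX with CoTaskMemAlloc(), which is why the copy below is paired
//  with mal_CoTaskMemFree().)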
WAVEFORMATEXTENSIBLE* pNativeFormat = NULL; - hr = mal_IAudioClient_GetMixFormat((mal_IAudioClient*)pDevice->wasapi.pAudioClient, (WAVEFORMATEX**)&pNativeFormat); - if (hr == S_OK) { - if (pDevice->usingDefaultFormat) { - wf.Format.wBitsPerSample = pNativeFormat->Format.wBitsPerSample; - wf.Format.nBlockAlign = pNativeFormat->Format.nBlockAlign; - wf.Format.nAvgBytesPerSec = pNativeFormat->Format.nAvgBytesPerSec; - wf.Samples.wValidBitsPerSample = pNativeFormat->Samples.wValidBitsPerSample; - wf.SubFormat = pNativeFormat->SubFormat; - } - if (pDevice->usingDefaultChannels) { - wf.Format.nChannels = pNativeFormat->Format.nChannels; - } - if (pDevice->usingDefaultSampleRate) { - wf.Format.nSamplesPerSec = pNativeFormat->Format.nSamplesPerSec; - } - if (pDevice->usingDefaultChannelMap) { - wf.dwChannelMask = pNativeFormat->dwChannelMask; - } - - mal_CoTaskMemFree(pDevice->pContext, pNativeFormat); - pNativeFormat = NULL; - } - - hr = mal_IAudioClient_IsFormatSupported((mal_IAudioClient*)pDevice->wasapi.pAudioClient, MAL_AUDCLNT_SHAREMODE_SHARED, (WAVEFORMATEX*)&wf, (WAVEFORMATEX**)&pBestFormatTemp); - if (hr != S_OK && hr != S_FALSE) { - hr = mal_IAudioClient_GetMixFormat((mal_IAudioClient*)pDevice->wasapi.pAudioClient, (WAVEFORMATEX**)&pBestFormatTemp); - if (hr != S_OK) { - result = MAL_FORMAT_NOT_SUPPORTED; - } else { - result = MAL_SUCCESS; - } + hr = mal_IAudioClient_GetMixFormat((mal_IAudioClient*)pData->pAudioClient, (WAVEFORMATEX**)&pNativeFormat); + if (hr != S_OK) { + result = MAL_FORMAT_NOT_SUPPORTED; } else { + mal_copy_memory(&wf, pNativeFormat, sizeof(wf)); result = MAL_SUCCESS; } + mal_CoTaskMemFree(pContext, pNativeFormat); + shareMode = MAL_AUDCLNT_SHAREMODE_SHARED; } @@ -6237,25 +6610,22 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, goto done; } - if (pBestFormatTemp != NULL) { - mal_copy_memory(&wf, pBestFormatTemp, sizeof(wf)); - mal_CoTaskMemFree(pDevice->pContext, pBestFormatTemp); - } - - - pDevice->internalFormat = mal_format_from_WAVEFORMATEX((WAVEFORMATEX*)&wf); - pDevice->internalChannels = wf.Format.nChannels; - pDevice->internalSampleRate = wf.Format.nSamplesPerSec; + pData->formatOut = mal_format_from_WAVEFORMATEX((WAVEFORMATEX*)&wf); + pData->channelsOut = wf.Format.nChannels; + pData->sampleRateOut = wf.Format.nSamplesPerSec; // Get the internal channel map based on the channel mask. - mal_channel_mask_to_channel_map__win32(wf.dwChannelMask, pDevice->internalChannels, pDevice->internalChannelMap); + mal_channel_mask_to_channel_map__win32(wf.dwChannelMask, pData->channelsOut, pData->channelMapOut); // If we're using a default buffer size we need to calculate it based on the efficiency of the system. - if (pDevice->bufferSizeInFrames == 0) { - pDevice->bufferSizeInFrames = mal_calculate_buffer_size_in_frames_from_milliseconds(pDevice->bufferSizeInMilliseconds, pDevice->internalSampleRate); + pData->periodsOut = pData->periodsIn; + pData->bufferSizeInFramesOut = pData->bufferSizeInFramesIn; + if (pData->bufferSizeInFramesOut == 0) { + pData->bufferSizeInFramesOut = mal_calculate_buffer_size_in_frames_from_milliseconds(pData->bufferSizeInMillisecondsIn, pData->sampleRateOut); } - bufferDurationInMicroseconds = ((mal_uint64)pDevice->bufferSizeInFrames * 1000 * 1000) / pDevice->internalSampleRate; + bufferDurationInMicroseconds = ((mal_uint64)pData->bufferSizeInFramesOut * 1000 * 1000) / pData->sampleRateOut; + // Slightly different initialization for shared and exclusive modes. 
We try exclusive mode first, and if it fails, fall back to shared mode. if (shareMode == MAL_AUDCLNT_SHAREMODE_EXCLUSIVE) { @@ -6266,7 +6636,7 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, // it and trying it again. hr = E_FAIL; for (;;) { - hr = mal_IAudioClient_Initialize((mal_IAudioClient*)pDevice->wasapi.pAudioClient, shareMode, MAL_AUDCLNT_STREAMFLAGS_EVENTCALLBACK, bufferDuration, bufferDuration, (WAVEFORMATEX*)&wf, NULL); + hr = mal_IAudioClient_Initialize((mal_IAudioClient*)pData->pAudioClient, shareMode, MAL_AUDCLNT_STREAMFLAGS_EVENTCALLBACK, bufferDuration, bufferDuration, (WAVEFORMATEX*)&wf, NULL); if (hr == MAL_AUDCLNT_E_INVALID_DEVICE_PERIOD) { if (bufferDuration > 500*10000) { break; @@ -6285,21 +6655,21 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, if (hr == MAL_AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED) { UINT bufferSizeInFrames; - hr = mal_IAudioClient_GetBufferSize((mal_IAudioClient*)pDevice->wasapi.pAudioClient, &bufferSizeInFrames); + hr = mal_IAudioClient_GetBufferSize((mal_IAudioClient*)pData->pAudioClient, &bufferSizeInFrames); if (SUCCEEDED(hr)) { bufferDuration = (MAL_REFERENCE_TIME)((10000.0 * 1000 / wf.Format.nSamplesPerSec * bufferSizeInFrames) + 0.5); // Unfortunately we need to release and re-acquire the audio client according to MSDN. Seems silly - why not just call IAudioClient_Initialize() again?! - mal_IAudioClient_Release((mal_IAudioClient*)pDevice->wasapi.pAudioClient); + mal_IAudioClient_Release((mal_IAudioClient*)pData->pAudioClient); #ifdef MAL_WIN32_DESKTOP - hr = mal_IMMDevice_Activate(pMMDevice, &MAL_IID_IAudioClient, CLSCTX_ALL, NULL, &pDevice->wasapi.pAudioClient); + hr = mal_IMMDevice_Activate(pMMDevice, &MAL_IID_IAudioClient, CLSCTX_ALL, NULL, (void**)&pData->pAudioClient); #else - hr = pActivatedInterface->QueryInterface(MAL_IID_IAudioClient, &pDevice->wasapi.pAudioClient); + hr = mal_IUnknown_QueryInterface(pActivatedInterface, &MAL_IID_IAudioClient, (void**)&pData->pAudioClient); #endif if (SUCCEEDED(hr)) { - hr = mal_IAudioClient_Initialize((mal_IAudioClient*)pDevice->wasapi.pAudioClient, shareMode, MAL_AUDCLNT_STREAMFLAGS_EVENTCALLBACK, bufferDuration, bufferDuration, (WAVEFORMATEX*)&wf, NULL); + hr = mal_IAudioClient_Initialize((mal_IAudioClient*)pData->pAudioClient, shareMode, MAL_AUDCLNT_STREAMFLAGS_EVENTCALLBACK, bufferDuration, bufferDuration, (WAVEFORMATEX*)&wf, NULL); } } } @@ -6307,37 +6677,72 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, if (FAILED(hr)) { // Failed to initialize in exclusive mode. We don't return an error here, but instead fall back to shared mode. shareMode = MAL_AUDCLNT_SHAREMODE_SHARED; - - //errorMsg = "[WASAPI] Failed to initialize device.", result = MAL_FAILED_TO_OPEN_BACKEND_DEVICE; - //goto done; } } if (shareMode == MAL_AUDCLNT_SHAREMODE_SHARED) { // Shared. - MAL_REFERENCE_TIME bufferDuration = bufferDurationInMicroseconds*10; - hr = mal_IAudioClient_Initialize((mal_IAudioClient*)pDevice->wasapi.pAudioClient, shareMode, MAL_AUDCLNT_STREAMFLAGS_EVENTCALLBACK, bufferDuration, 0, (WAVEFORMATEX*)&wf, NULL); - if (FAILED(hr)) { - if (hr == E_ACCESSDENIED) { - errorMsg = "[WASAPI] Failed to initialize device. Access denied.", result = MAL_ACCESS_DENIED; - } else { - errorMsg = "[WASAPI] Failed to initialize device.", result = MAL_FAILED_TO_OPEN_BACKEND_DEVICE; + + // Low latency shared mode via IAudioClient3. 
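// (Sketch of the period arithmetic performed in the block below. GetSharedModeEnginePeriod() reports
//  the engine's default/fundamental/min/max periods in frames; before being passed to
//  InitializeSharedAudioStream(), the requested period must be an integral multiple of the
//  fundamental period and must lie within [min, max]. With hypothetical numbers:
//
//      desiredPeriodInFrames = bufferSizeInFramesOut / periodsOut;                    // e.g. 1024 / 2 = 512
//      desiredPeriodInFrames = (desiredPeriodInFrames / fundamental) * fundamental;   // e.g. fundamental = 48  ->  480
//      desiredPeriodInFrames = mal_clamp(desiredPeriodInFrames, min, max);            // e.g. [144, 480]       ->  480
//      bufferSizeInFramesOut = desiredPeriodInFrames * periodsOut;                    // e.g. 480 * 2 = 960
//
//  If any of this fails, the code falls back to the regular IAudioClient::Initialize() path.)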
+ mal_IAudioClient3* pAudioClient3 = NULL; + hr = mal_IAudioClient_QueryInterface(pData->pAudioClient, &MAL_IID_IAudioClient3, (void**)&pAudioClient3); + if (SUCCEEDED(hr)) { + UINT32 defaultPeriodInFrames; + UINT32 fundamentalPeriodInFrames; + UINT32 minPeriodInFrames; + UINT32 maxPeriodInFrames; + hr = mal_IAudioClient3_GetSharedModeEnginePeriod(pAudioClient3, (WAVEFORMATEX*)&wf, &defaultPeriodInFrames, &fundamentalPeriodInFrames, &minPeriodInFrames, &maxPeriodInFrames); + if (SUCCEEDED(hr)) { + UINT32 desiredPeriodInFrames = pData->bufferSizeInFramesOut / pData->periodsOut; + + // Make sure the period size is a multiple of fundamentalPeriodInFrames. + desiredPeriodInFrames = desiredPeriodInFrames / fundamentalPeriodInFrames; + desiredPeriodInFrames = desiredPeriodInFrames * fundamentalPeriodInFrames; + + // The period needs to be clamped between minPeriodInFrames and maxPeriodInFrames. + desiredPeriodInFrames = mal_clamp(desiredPeriodInFrames, minPeriodInFrames, maxPeriodInFrames); + + hr = mal_IAudioClient3_InitializeSharedAudioStream(pAudioClient3, MAL_AUDCLNT_STREAMFLAGS_EVENTCALLBACK, desiredPeriodInFrames, (WAVEFORMATEX*)&wf, NULL); + if (SUCCEEDED(hr)) { + wasInitializedUsingIAudioClient3 = MAL_TRUE; + pData->bufferSizeInFramesOut = desiredPeriodInFrames * pData->periodsOut; + } } + mal_IAudioClient3_Release(pAudioClient3); + pAudioClient3 = NULL; + } + + // If we don't have an IAudioClient3 then we need to use the normal initialization routine. + if (!wasInitializedUsingIAudioClient3) { + MAL_REFERENCE_TIME bufferDuration = bufferDurationInMicroseconds*10; + hr = mal_IAudioClient_Initialize((mal_IAudioClient*)pData->pAudioClient, shareMode, MAL_AUDCLNT_STREAMFLAGS_EVENTCALLBACK, bufferDuration, 0, (WAVEFORMATEX*)&wf, NULL); + if (FAILED(hr)) { + if (hr == E_ACCESSDENIED) { + errorMsg = "[WASAPI] Failed to initialize device. Access denied.", result = MAL_ACCESS_DENIED; + } else if (hr == MAL_AUDCLNT_E_DEVICE_IN_USE) { + errorMsg = "[WASAPI] Failed to initialize device. 
Device in use.", result = MAL_DEVICE_BUSY; + } else { + errorMsg = "[WASAPI] Failed to initialize device.", result = MAL_FAILED_TO_OPEN_BACKEND_DEVICE; + } + + goto done; + } + } + } + + if (!wasInitializedUsingIAudioClient3) { + hr = mal_IAudioClient_GetBufferSize((mal_IAudioClient*)pData->pAudioClient, &pData->bufferSizeInFramesOut); + if (FAILED(hr)) { + errorMsg = "[WASAPI] Failed to get audio client's actual buffer size.", result = MAL_FAILED_TO_OPEN_BACKEND_DEVICE; goto done; } } - hr = mal_IAudioClient_GetBufferSize((mal_IAudioClient*)pDevice->wasapi.pAudioClient, &pDevice->bufferSizeInFrames); - if (FAILED(hr)) { - errorMsg = "[WASAPI] Failed to get audio client's actual buffer size.", result = MAL_FAILED_TO_OPEN_BACKEND_DEVICE; - goto done; - } - if (type == mal_device_type_playback) { - hr = mal_IAudioClient_GetService((mal_IAudioClient*)pDevice->wasapi.pAudioClient, &MAL_IID_IAudioRenderClient, &pDevice->wasapi.pRenderClient); + hr = mal_IAudioClient_GetService((mal_IAudioClient*)pData->pAudioClient, &MAL_IID_IAudioRenderClient, (void**)&pData->pRenderClient); } else { - hr = mal_IAudioClient_GetService((mal_IAudioClient*)pDevice->wasapi.pAudioClient, &MAL_IID_IAudioCaptureClient, &pDevice->wasapi.pCaptureClient); + hr = mal_IAudioClient_GetService((mal_IAudioClient*)pData->pAudioClient, &MAL_IID_IAudioCaptureClient, (void**)&pData->pCaptureClient); } if (FAILED(hr)) { @@ -6347,9 +6752,9 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, if (shareMode == MAL_AUDCLNT_SHAREMODE_SHARED) { - pDevice->exclusiveMode = MAL_FALSE; + pData->exclusiveMode = MAL_FALSE; } else /*if (shareMode == MAL_AUDCLNT_SHAREMODE_EXCLUSIVE)*/ { - pDevice->exclusiveMode = MAL_TRUE; + pData->exclusiveMode = MAL_TRUE; } @@ -6362,7 +6767,7 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, mal_PropVariantInit(&varName); hr = mal_IPropertyStore_GetValue(pProperties, &MAL_PKEY_Device_FriendlyName, &varName); if (SUCCEEDED(hr)) { - WideCharToMultiByte(CP_UTF8, 0, varName.pwszVal, -1, pDevice->name, sizeof(pDevice->name), 0, FALSE); + WideCharToMultiByte(CP_UTF8, 0, varName.pwszVal, -1, pData->deviceName, sizeof(pData->deviceName), 0, FALSE); mal_PropVariantClear(pContext, &varName); } @@ -6370,6 +6775,157 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, } #endif +done: + // Clean up. 
+#ifdef MAL_WIN32_DESKTOP + if (pMMDevice != NULL) { + mal_IMMDevice_Release(pMMDevice); + } +#else + if (pActivatedInterface != NULL) { + mal_IUnknown_Release(pActivatedInterface); + } +#endif + + if (result != MAL_SUCCESS) { + if (pData->pRenderClient) { + mal_IAudioRenderClient_Release((mal_IAudioRenderClient*)pData->pRenderClient); + pData->pRenderClient = NULL; + } + if (pData->pCaptureClient) { + mal_IAudioCaptureClient_Release((mal_IAudioCaptureClient*)pData->pCaptureClient); + pData->pCaptureClient = NULL; + } + if (pData->pAudioClient) { + mal_IAudioClient_Release((mal_IAudioClient*)pData->pAudioClient); + pData->pAudioClient = NULL; + } + + return mal_context_post_error(pContext, NULL, MAL_LOG_LEVEL_ERROR, errorMsg, result); + } else { + return MAL_SUCCESS; + } +} + +mal_result mal_device_reinit__wasapi(mal_device* pDevice) +{ + mal_device_init_internal_data__wasapi data; + data.formatIn = pDevice->format; + data.channelsIn = pDevice->channels; + data.sampleRateIn = pDevice->sampleRate; + mal_copy_memory(data.channelMapIn, pDevice->channelMap, sizeof(pDevice->channelMap)); + data.bufferSizeInFramesIn = pDevice->bufferSizeInFrames; + data.bufferSizeInMillisecondsIn = pDevice->bufferSizeInMilliseconds; + data.periodsIn = pDevice->periods; + data.usingDefaultFormat = pDevice->usingDefaultFormat; + data.usingDefaultChannels = pDevice->usingDefaultChannels; + data.usingDefaultSampleRate = pDevice->usingDefaultSampleRate; + data.usingDefaultChannelMap = pDevice->usingDefaultChannelMap; + data.shareMode = pDevice->initConfig.shareMode; + mal_result result = mal_device_init_internal__wasapi(pDevice->pContext, pDevice->type, NULL, &data); + if (result != MAL_SUCCESS) { + return result; + } + + // At this point we have some new objects ready to go. We need to uninitialize the previous ones and then set the new ones. 
+ if (pDevice->wasapi.pRenderClient) { + mal_IAudioRenderClient_Release((mal_IAudioRenderClient*)pDevice->wasapi.pRenderClient); + pDevice->wasapi.pRenderClient = NULL; + } + if (pDevice->wasapi.pCaptureClient) { + mal_IAudioCaptureClient_Release((mal_IAudioCaptureClient*)pDevice->wasapi.pCaptureClient); + pDevice->wasapi.pCaptureClient = NULL; + } + if (pDevice->wasapi.pAudioClient) { + mal_IAudioClient_Release((mal_IAudioClient*)pDevice->wasapi.pAudioClient); + pDevice->wasapi.pAudioClient = NULL; + } + + pDevice->wasapi.pAudioClient = data.pAudioClient; + pDevice->wasapi.pRenderClient = data.pRenderClient; + pDevice->wasapi.pCaptureClient = data.pCaptureClient; + + pDevice->internalFormat = data.formatOut; + pDevice->internalChannels = data.channelsOut; + pDevice->internalSampleRate = data.sampleRateOut; + mal_copy_memory(pDevice->internalChannelMap, data.channelMapOut, sizeof(data.channelMapOut)); + pDevice->bufferSizeInFrames = data.bufferSizeInFramesOut; + pDevice->periods = data.periodsOut; + pDevice->exclusiveMode = data.exclusiveMode; + mal_strcpy_s(pDevice->name, sizeof(pDevice->name), data.deviceName); + + mal_IAudioClient_SetEventHandle((mal_IAudioClient*)pDevice->wasapi.pAudioClient, pDevice->wasapi.hEvent); + + return MAL_SUCCESS; +} + +mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, const mal_device_id* pDeviceID, const mal_device_config* pConfig, mal_device* pDevice) +{ + (void)pContext; + (void)pConfig; + + mal_assert(pDevice != NULL); + mal_zero_object(&pDevice->wasapi); + + mal_result result = MAL_SUCCESS; + const char* errorMsg = ""; + + mal_device_init_internal_data__wasapi data; + data.formatIn = pDevice->format; + data.channelsIn = pDevice->channels; + data.sampleRateIn = pDevice->sampleRate; + mal_copy_memory(data.channelMapIn, pDevice->channelMap, sizeof(pDevice->channelMap)); + data.bufferSizeInFramesIn = pDevice->bufferSizeInFrames; + data.bufferSizeInMillisecondsIn = pDevice->bufferSizeInMilliseconds; + data.periodsIn = pDevice->periods; + data.usingDefaultFormat = pDevice->usingDefaultFormat; + data.usingDefaultChannels = pDevice->usingDefaultChannels; + data.usingDefaultSampleRate = pDevice->usingDefaultSampleRate; + data.usingDefaultChannelMap = pDevice->usingDefaultChannelMap; + data.shareMode = pDevice->initConfig.shareMode; + result = mal_device_init_internal__wasapi(pDevice->pContext, type, pDeviceID, &data); + if (result != MAL_SUCCESS) { + return result; + } + + pDevice->wasapi.pAudioClient = data.pAudioClient; + pDevice->wasapi.pRenderClient = data.pRenderClient; + pDevice->wasapi.pCaptureClient = data.pCaptureClient; + + pDevice->internalFormat = data.formatOut; + pDevice->internalChannels = data.channelsOut; + pDevice->internalSampleRate = data.sampleRateOut; + mal_copy_memory(pDevice->internalChannelMap, data.channelMapOut, sizeof(data.channelMapOut)); + pDevice->bufferSizeInFrames = data.bufferSizeInFramesOut; + pDevice->periods = data.periodsOut; + pDevice->exclusiveMode = data.exclusiveMode; + mal_strcpy_s(pDevice->name, sizeof(pDevice->name), data.deviceName); + + + + // We need to get notifications of when the default device changes. We do this through a device enumerator by + // registering a IMMNotificationClient with it. We only care about this if it's the default device. 
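// (The vtable assigned below, g_malNotificationCientVtbl, is defined earlier in this file. For
//  orientation, the part of IMMNotificationClient that matters for stream routing is
//  OnDefaultDeviceChanged(); a handler consistent with how the worker loop consumes the notification
//  would look roughly like the sketch here: it raises the hasDefaultDeviceChanged flag and signals
//  hBreakEvent so that WaitForMultipleObjects() wakes up and the device can be re-initialized.
//  Type and parameter names are illustrative and the COM boilerplate is omitted:
//
//      static HRESULT mal_on_default_device_changed__sketch(mal_IMMNotificationClient* pThis,
//                                                           mal_EDataFlow flow, mal_ERole role, LPCWSTR pDefaultDeviceID)
//      {
//          (void)flow; (void)role; (void)pDefaultDeviceID;
//          mal_device* pDevice = (mal_device*)pThis->pDevice;
//          mal_atomic_exchange_32(&pDevice->wasapi.hasDefaultDeviceChanged, MAL_TRUE);
//          SetEvent(pDevice->wasapi.hBreakEvent);  // Wake the worker thread.
//          return S_OK;
//      }
//  )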
+#ifdef MAL_WIN32_DESKTOP + mal_IMMDeviceEnumerator* pDeviceEnumerator; + HRESULT hr = mal_CoCreateInstance(pContext, MAL_CLSID_MMDeviceEnumerator, NULL, CLSCTX_ALL, MAL_IID_IMMDeviceEnumerator, (void**)&pDeviceEnumerator); + if (FAILED(hr)) { + errorMsg = "[WASAPI] Failed to create device enumerator.", result = MAL_FAILED_TO_OPEN_BACKEND_DEVICE; + goto done; + } + + pDevice->wasapi.notificationClient.lpVtbl = (void*)&g_malNotificationCientVtbl; + pDevice->wasapi.notificationClient.counter = 1; + pDevice->wasapi.notificationClient.pDevice = pDevice; + + hr = pDeviceEnumerator->lpVtbl->RegisterEndpointNotificationCallback(pDeviceEnumerator, &pDevice->wasapi.notificationClient); + if (SUCCEEDED(hr)) { + pDevice->wasapi.pDeviceEnumerator = (mal_ptr)pDeviceEnumerator; + } else { + // Not the end of the world if we fail to register the notification callback. We just won't support automatic stream routing. + mal_IMMDeviceEnumerator_Release(pDeviceEnumerator); + } +#endif + // We need to create and set the event for event-driven mode. This event is signalled whenever a new chunk of audio // data needs to be written or read from the device. @@ -6384,9 +6940,9 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, // When the device is playing the worker thread will be waiting on a bunch of notification events. To return from // this wait state we need to signal a special event. - pDevice->wasapi.hStopEvent = CreateEventA(NULL, FALSE, FALSE, NULL); - if (pDevice->wasapi.hStopEvent == NULL) { - errorMsg = "[WASAPI] Failed to create stop event for main loop break notification.", result = MAL_FAILED_TO_CREATE_EVENT; + pDevice->wasapi.hBreakEvent = CreateEventA(NULL, FALSE, FALSE, NULL); + if (pDevice->wasapi.hBreakEvent == NULL) { + errorMsg = "[WASAPI] Failed to create break event for main loop break notification.", result = MAL_FAILED_TO_CREATE_EVENT; goto done; } @@ -6394,16 +6950,6 @@ mal_result mal_device_init__wasapi(mal_context* pContext, mal_device_type type, done: // Clean up. -#ifdef MAL_WIN32_DESKTOP - if (pMMDevice != NULL) { - mal_IMMDevice_Release(pMMDevice); - } -#else - if (pActivatedInterface != NULL) { - pActivatedInterface->Release(); - } -#endif - if (result != MAL_SUCCESS) { mal_device_uninit__wasapi(pDevice); return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, errorMsg, result); @@ -6444,6 +6990,10 @@ mal_result mal_device__stop_backend__wasapi(mal_device* pDevice) { mal_assert(pDevice != NULL); + if (pDevice->wasapi.pAudioClient == NULL) { + return MAL_DEVICE_NOT_INITIALIZED; + } + HRESULT hr = mal_IAudioClient_Stop((mal_IAudioClient*)pDevice->wasapi.pAudioClient); if (FAILED(hr)) { return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[WASAPI] Failed to stop internal device.", MAL_FAILED_TO_STOP_BACKEND_DEVICE); @@ -6465,7 +7015,7 @@ mal_result mal_device__break_main_loop__wasapi(mal_device* pDevice) // The main loop will be waiting on a bunch of events via the WaitForMultipleObjects() API. One of those events // is a special event we use for forcing that function to return. 
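// (Context for the rename visible in this hunk: the worker thread waits on two events at once,
//  hEvent for "a period of audio data is ready" and hBreakEvent for "return from the wait now".
//  The event was previously called hStopEvent; it is presumably renamed because stopping is no
//  longer the only reason to break out of the wait: the loop now also breaks so the device can be
//  re-initialized when the system's default device changes.)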
pDevice->wasapi.breakFromMainLoop = MAL_TRUE; - SetEvent(pDevice->wasapi.hStopEvent); + SetEvent(pDevice->wasapi.hBreakEvent); return MAL_SUCCESS; } @@ -6476,32 +7026,6 @@ mal_result mal_device__get_available_frames__wasapi(mal_device* pDevice, mal_uin *pFrameCount = 0; -#if 0 - if (pDevice->type == mal_device_type_playback) { - mal_uint32 paddingFramesCount; - HRESULT hr = mal_IAudioClient_GetCurrentPadding((mal_IAudioClient*)pDevice->wasapi.pAudioClient, &paddingFramesCount); - if (FAILED(hr)) { - return MAL_ERROR; - } - - if (pDevice->exclusiveMode) { - *pFrameCount = paddingFramesCount; - return MAL_SUCCESS; - } else { - *pFrameCount = pDevice->bufferSizeInFrames - paddingFramesCount; - return MAL_SUCCESS; - } - } else { - mal_uint32 framesAvailable; - HRESULT hr = mal_IAudioCaptureClient_GetNextPacketSize((mal_IAudioCaptureClient*)pDevice->wasapi.pCaptureClient, &framesAvailable); - if (FAILED(hr)) { - return MAL_ERROR; - } - - *pFrameCount = framesAvailable; - return MAL_SUCCESS; - } -#else mal_uint32 paddingFramesCount; HRESULT hr = mal_IAudioClient_GetCurrentPadding((mal_IAudioClient*)pDevice->wasapi.pAudioClient, &paddingFramesCount); if (FAILED(hr)) { @@ -6520,7 +7044,6 @@ mal_result mal_device__get_available_frames__wasapi(mal_device* pDevice, mal_uin } return MAL_SUCCESS; -#endif } mal_result mal_device__wait_for_frames__wasapi(mal_device* pDevice, mal_uint32* pFrameCount) @@ -6533,7 +7056,7 @@ mal_result mal_device__wait_for_frames__wasapi(mal_device* pDevice, mal_uint32* // Wait for a buffer to become available or for the stop event to be signalled. HANDLE hEvents[2]; hEvents[0] = (HANDLE)pDevice->wasapi.hEvent; - hEvents[1] = (HANDLE)pDevice->wasapi.hStopEvent; + hEvents[1] = (HANDLE)pDevice->wasapi.hBreakEvent; if (WaitForMultipleObjects(mal_countof(hEvents), hEvents, FALSE, INFINITE) == WAIT_FAILED) { break; } @@ -6544,11 +7067,58 @@ mal_result mal_device__wait_for_frames__wasapi(mal_device* pDevice, mal_uint32* break; } - result = mal_device__get_available_frames__wasapi(pDevice, pFrameCount); - if (result != MAL_SUCCESS) { - return result; + // Make sure we break from the main loop if requested from an external factor. + if (pDevice->wasapi.breakFromMainLoop) { + break; } + // We may want to reinitialize the device. Only do this if this device is the default. + mal_bool32 needDeviceReinit = MAL_FALSE; + + mal_bool32 hasDefaultDeviceChanged = pDevice->wasapi.hasDefaultDeviceChanged; + if (hasDefaultDeviceChanged && pDevice->isDefaultDevice) { + needDeviceReinit = MAL_TRUE; + } + + if (!needDeviceReinit) { + result = mal_device__get_available_frames__wasapi(pDevice, pFrameCount); + if (result != MAL_SUCCESS) { + if (!pDevice->exclusiveMode) { + needDeviceReinit = MAL_TRUE; + } else { + return result; + } + } + } + + + mal_atomic_exchange_32(&pDevice->wasapi.hasDefaultDeviceChanged, MAL_FALSE); + + // Here is where the device is re-initialized if required. + if (needDeviceReinit) { + #ifdef MAL_DEBUG_OUTPUT + printf("=== CHANGING DEVICE ===\n"); + #endif + + if (pDevice->pContext->onDeviceReinit) { + mal_result reinitResult = pDevice->pContext->onDeviceReinit(pDevice); + if (reinitResult != MAL_SUCCESS) { + return reinitResult; + } + + mal_device__post_init_setup(pDevice); + + // Start playing the device again, and then continue the loop from the top. 
+ if (mal_device__get_state(pDevice) == MAL_STATE_STARTED) { + if (pDevice->pContext->onDeviceStart) { + pDevice->pContext->onDeviceStart(pDevice); + } + continue; + } + } + } + + if (*pFrameCount > 0) { return MAL_SUCCESS; } @@ -6562,8 +7132,8 @@ mal_result mal_device__main_loop__wasapi(mal_device* pDevice) { mal_assert(pDevice != NULL); - // Make sure the stop event is not signaled to ensure we don't end up immediately returning from WaitForMultipleObjects(). - ResetEvent(pDevice->wasapi.hStopEvent); + // Make sure the break event is not signaled to ensure we don't end up immediately returning from WaitForMultipleObjects(). + ResetEvent(pDevice->wasapi.hBreakEvent); pDevice->wasapi.breakFromMainLoop = MAL_FALSE; while (!pDevice->wasapi.breakFromMainLoop) { @@ -6671,6 +7241,7 @@ mal_result mal_context_init__wasapi(mal_context* pContext) pContext->onGetDeviceInfo = mal_context_get_device_info__wasapi; pContext->onDeviceInit = mal_device_init__wasapi; pContext->onDeviceUninit = mal_device_uninit__wasapi; + pContext->onDeviceReinit = mal_device_reinit__wasapi; pContext->onDeviceStart = mal_device__start_backend__wasapi; pContext->onDeviceStop = mal_device__stop_backend__wasapi; pContext->onDeviceBreakMainLoop = mal_device__break_main_loop__wasapi; @@ -8431,6 +9002,8 @@ void mal_device_uninit__winmm(mal_device* pDevice) mal_free(pDevice->winmm._pHeapData); CloseHandle((HANDLE)pDevice->winmm.hEvent); + + mal_zero_object(&pDevice->winmm); // Safety. } mal_result mal_device_init__winmm(mal_context* pContext, mal_device_type type, const mal_device_id* pDeviceID, const mal_device_config* pConfig, mal_device* pDevice) @@ -8615,6 +9188,10 @@ mal_result mal_device__start_backend__winmm(mal_device* pDevice) { mal_assert(pDevice != NULL); + if (pDevice->winmm.hDevice == NULL) { + return MAL_INVALID_ARGS; + } + if (pDevice->type == mal_device_type_playback) { // Playback. The device is started when we call waveOutWrite() with a block of data. 
From MSDN: // @@ -8679,6 +9256,10 @@ mal_result mal_device__stop_backend__winmm(mal_device* pDevice) { mal_assert(pDevice != NULL); + if (pDevice->winmm.hDevice == NULL) { + return MAL_INVALID_ARGS; + } + if (pDevice->type == mal_device_type_playback) { MMRESULT resultMM = ((MAL_PFN_waveOutReset)pDevice->pContext->winmm.waveOutReset)((HWAVEOUT)pDevice->winmm.hDevice); if (resultMM != MMSYSERR_NOERROR) { @@ -8891,7 +9472,6 @@ mal_result mal_context_init__winmm(mal_context* pContext) /////////////////////////////////////////////////////////////////////////////// #ifdef MAL_HAS_ALSA -#include #ifdef MAL_NO_RUNTIME_LINKING #include typedef snd_pcm_uframes_t mal_snd_pcm_uframes_t; @@ -13062,6 +13642,7 @@ typedef Boolean (* mal_CFStringGetCString_proc)(CFStringRef theString, char* buf typedef OSStatus (* mal_AudioObjectGetPropertyData_proc)(AudioObjectID inObjectID, const AudioObjectPropertyAddress* inAddress, UInt32 inQualifierDataSize, const void* inQualifierData, UInt32* ioDataSize, void* outData); typedef OSStatus (* mal_AudioObjectGetPropertyDataSize_proc)(AudioObjectID inObjectID, const AudioObjectPropertyAddress* inAddress, UInt32 inQualifierDataSize, const void* inQualifierData, UInt32* outDataSize); typedef OSStatus (* mal_AudioObjectSetPropertyData_proc)(AudioObjectID inObjectID, const AudioObjectPropertyAddress* inAddress, UInt32 inQualifierDataSize, const void* inQualifierData, UInt32 inDataSize, const void* inData); +typedef OSStatus (* mal_AudioObjectAddPropertyListener_proc)(AudioObjectID inObjectID, const AudioObjectPropertyAddress* inAddress, AudioObjectPropertyListenerProc inListener, void* inClientData); #endif // AudioToolbox @@ -13080,6 +13661,8 @@ typedef OSStatus (* mal_AudioUnitRender_proc)(AudioUnit inUnit, AudioUnitRenderA #define MAL_COREAUDIO_OUTPUT_BUS 0 #define MAL_COREAUDIO_INPUT_BUS 1 +mal_result mal_device_reinit_internal__coreaudio(mal_device* pDevice, mal_bool32 disposePreviousAudioUnit); + // Core Audio // @@ -13893,21 +14476,17 @@ mal_result mal_find_AudioObjectID(mal_context* pContext, mal_device_type type, c } -mal_result mal_device_find_best_format__coreaudio(const mal_device* pDevice, AudioStreamBasicDescription* pFormat) +mal_result mal_find_best_format__coreaudio(mal_context* pContext, AudioObjectID deviceObjectID, mal_device_type deviceType, mal_format format, mal_uint32 channels, mal_uint32 sampleRate, mal_bool32 usingDefaultFormat, mal_bool32 usingDefaultChannels, mal_bool32 usingDefaultSampleRate, AudioStreamBasicDescription* pFormat) { - mal_assert(pDevice != NULL); - - AudioObjectID deviceObjectID = (AudioObjectID)pDevice->coreaudio.deviceObjectID; - UInt32 deviceFormatDescriptionCount; AudioStreamRangedDescription* pDeviceFormatDescriptions; - mal_result result = mal_get_AudioObject_stream_descriptions(pDevice->pContext, deviceObjectID, pDevice->type, &deviceFormatDescriptionCount, &pDeviceFormatDescriptions); + mal_result result = mal_get_AudioObject_stream_descriptions(pContext, deviceObjectID, deviceType, &deviceFormatDescriptionCount, &pDeviceFormatDescriptions); if (result != MAL_SUCCESS) { return result; } - mal_uint32 desiredSampleRate = pDevice->sampleRate; - if (pDevice->usingDefaultSampleRate) { + mal_uint32 desiredSampleRate = sampleRate; + if (usingDefaultSampleRate) { // When using the device's default sample rate, we get the highest priority standard rate supported by the device. Otherwise // we just use the pre-set rate. 
for (mal_uint32 iStandardRate = 0; iStandardRate < mal_countof(g_malStandardSampleRatePriorities); ++iStandardRate) { @@ -13930,13 +14509,13 @@ mal_result mal_device_find_best_format__coreaudio(const mal_device* pDevice, Aud } } - mal_uint32 desiredChannelCount = pDevice->channels; - if (pDevice->usingDefaultChannels) { - mal_get_AudioObject_channel_count(pDevice->pContext, deviceObjectID, pDevice->type, &desiredChannelCount); // <-- Not critical if this fails. + mal_uint32 desiredChannelCount = channels; + if (usingDefaultChannels) { + mal_get_AudioObject_channel_count(pContext, deviceObjectID, deviceType, &desiredChannelCount); // <-- Not critical if this fails. } - mal_format desiredFormat = pDevice->format; - if (pDevice->usingDefaultFormat) { + mal_format desiredFormat = format; + if (usingDefaultFormat) { desiredFormat = g_malFormatPriorities[0]; } @@ -14405,6 +14984,30 @@ void on_start_stop__coreaudio(void* pUserData, AudioUnit audioUnit, AudioUnitPro } if (!isRunning) { + // The stop event is a bit annoying in Core Audio because it will be called when we automatically switch the default device. Some scenarios to consider: + // + // 1) When the device is unplugged, this will be called _before_ the default device change notification. + // 2) When the device is changed via the default device change notification, this will be called _after_ the switch. + // + // For case #1, we just check if there's a new default device available. If so, we just ignore the stop event. For case #2 we check a flag. + if (pDevice->isDefaultDevice && mal_device__get_state(pDevice) != MAL_STATE_STOPPING && mal_device__get_state(pDevice) != MAL_STATE_STOPPED) { + // It looks like the device is switching through an external event, such as the user unplugging the device or changing the default device + // via the operating system's sound settings. If we're re-initializing the device, we just terminate because we want the stopping of the + // device to be seamless to the client (we don't want them receiving the onStop event and thinking that the device has stopped when it + // hasn't!). + if (pDevice->coreaudio.isSwitchingDevice) { + return; + } + + // Getting here means the device is not reinitializing which means it may have been unplugged. From what I can see, it looks like Core Audio + // will try switching to the new default device seamlessly. We need to somehow find a way to determine whether or not Core Audio will most + // likely be successful in switching to the new device. + // + // TODO: Try to predict if Core Audio will switch devices. If not, the onStop callback needs to be posted. + return; + } + + // Getting here means we need to stop the device. mal_stop_proc onStop = pDevice->onStop; if (onStop) { onStop(pDevice); @@ -14412,14 +15015,92 @@ void on_start_stop__coreaudio(void* pUserData, AudioUnit audioUnit, AudioUnitPro } } +#if defined(MAL_APPLE_DESKTOP) +OSStatus mal_default_output_device_changed__coreaudio(AudioObjectID objectID, UInt32 addressCount, const AudioObjectPropertyAddress* pAddresses, void* pUserData) +{ + (void)objectID; -mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type deviceType, const mal_device_id* pDeviceID, const mal_device_config* pConfig, mal_device* pDevice) + mal_device* pDevice = (mal_device*)pUserData; + mal_assert(pDevice != NULL); + + if (pDevice->isDefaultDevice) { + // Not sure if I really need to check this, but it makes me feel better. 
+ if (addressCount == 0) { + return noErr; + } + + if ((pDevice->type == mal_device_type_playback && pAddresses[0].mSelector == kAudioHardwarePropertyDefaultOutputDevice) || + (pDevice->type == mal_device_type_capture && pAddresses[0].mSelector == kAudioHardwarePropertyDefaultInputDevice)) { +#ifdef MAL_DEBUG_OUTPUT + printf("Device Changed: addressCount=%d, pAddresses[0].mElement=%d\n", addressCount, pAddresses[0].mElement); +#endif + pDevice->coreaudio.isSwitchingDevice = MAL_TRUE; + mal_result reinitResult = mal_device_reinit_internal__coreaudio(pDevice, MAL_TRUE); + pDevice->coreaudio.isSwitchingDevice = MAL_FALSE; + + if (reinitResult == MAL_SUCCESS) { + mal_device__post_init_setup(pDevice); + + // Make sure we resume the device if applicable. + if (mal_device__get_state(pDevice) == MAL_STATE_STARTED) { + mal_result startResult = pDevice->pContext->onDeviceStart(pDevice); + if (startResult != MAL_SUCCESS) { + mal_device__set_state(pDevice, MAL_STATE_STOPPED); + } + } + } + } + } + + return noErr; +} +#endif + +typedef struct +{ + // Input. + mal_format formatIn; + mal_uint32 channelsIn; + mal_uint32 sampleRateIn; + mal_channel channelMapIn[MAL_MAX_CHANNELS]; + mal_uint32 bufferSizeInFramesIn; + mal_uint32 bufferSizeInMillisecondsIn; + mal_uint32 periodsIn; + mal_bool32 usingDefaultFormat; + mal_bool32 usingDefaultChannels; + mal_bool32 usingDefaultSampleRate; + mal_bool32 usingDefaultChannelMap; + mal_share_mode shareMode; + + // Output. +#if defined(MAL_APPLE_DESKTOP) + AudioObjectID deviceObjectID; +#endif + AudioComponent component; + AudioUnit audioUnit; + AudioBufferList* pAudioBufferList; // Only used for input devices. + mal_format formatOut; + mal_uint32 channelsOut; + mal_uint32 sampleRateOut; + mal_channel channelMapOut[MAL_MAX_CHANNELS]; + mal_uint32 bufferSizeInFramesOut; + mal_uint32 periodsOut; + mal_bool32 exclusiveMode; + char deviceName[256]; +} mal_device_init_internal_data__coreaudio; + +mal_result mal_device_init_internal__coreaudio(mal_context* pContext, mal_device_type deviceType, const mal_device_id* pDeviceID, mal_device_init_internal_data__coreaudio* pData, void* pDevice_DoNotReference) /* <-- pDevice is typed as void* intentionally so as to avoid accidentally referencing it. */ { mal_assert(pContext != NULL); - mal_assert(pConfig != NULL); - mal_assert(pDevice != NULL); mal_assert(deviceType == mal_device_type_playback || deviceType == mal_device_type_capture); +#if defined(MAL_APPLE_DESKTOP) + pData->deviceObjectID = 0; +#endif + pData->component = NULL; + pData->audioUnit = NULL; + pData->pAudioBufferList = NULL; + mal_result result; #if defined(MAL_APPLE_DESKTOP) @@ -14429,15 +15110,16 @@ mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type dev return result; } - pDevice->coreaudio.deviceObjectID = deviceObjectID; + pData->deviceObjectID = deviceObjectID; #endif // Core audio doesn't really use the notion of a period so we can leave this unmodified, but not too over the top. 
- if (pDevice->periods < 1) { - pDevice->periods = 1; + pData->periodsOut = pData->periodsIn; + if (pData->periodsOut < 1) { + pData->periodsOut = 1; } - if (pDevice->periods > 16) { - pDevice->periods = 16; + if (pData->periodsOut > 16) { + pData->periodsOut = 16; } @@ -14453,14 +15135,14 @@ mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type dev desc.componentFlags = 0; desc.componentFlagsMask = 0; - pDevice->coreaudio.component = ((mal_AudioComponentFindNext_proc)pContext->coreaudio.AudioComponentFindNext)(NULL, &desc); - if (pDevice->coreaudio.component == NULL) { + pData->component = ((mal_AudioComponentFindNext_proc)pContext->coreaudio.AudioComponentFindNext)(NULL, &desc); + if (pData->component == NULL) { return MAL_FAILED_TO_INIT_BACKEND; } // Audio unit. - OSStatus status = ((mal_AudioComponentInstanceNew_proc)pContext->coreaudio.AudioComponentInstanceNew)((AudioComponent)pDevice->coreaudio.component, (AudioUnit*)&pDevice->coreaudio.audioUnit); + OSStatus status = ((mal_AudioComponentInstanceNew_proc)pContext->coreaudio.AudioComponentInstanceNew)(pData->component, (AudioUnit*)&pData->audioUnit); if (status != noErr) { return mal_result_from_OSStatus(status); } @@ -14472,25 +15154,25 @@ mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type dev enableIOFlag = 0; } - status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, MAL_COREAUDIO_OUTPUT_BUS, &enableIOFlag, sizeof(enableIOFlag)); + status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)(pData->audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, MAL_COREAUDIO_OUTPUT_BUS, &enableIOFlag, sizeof(enableIOFlag)); if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return mal_result_from_OSStatus(status); } enableIOFlag = (enableIOFlag == 0) ? 1 : 0; - status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, MAL_COREAUDIO_INPUT_BUS, &enableIOFlag, sizeof(enableIOFlag)); + status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)(pData->audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, MAL_COREAUDIO_INPUT_BUS, &enableIOFlag, sizeof(enableIOFlag)); if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return mal_result_from_OSStatus(status); } // Set the device to use with this audio unit. This is only used on desktop since we are using defaults on mobile. #if defined(MAL_APPLE_DESKTOP) - status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioOutputUnitProperty_CurrentDevice, kAudioUnitScope_Global, (deviceType == mal_device_type_playback) ? 
MAL_COREAUDIO_OUTPUT_BUS : MAL_COREAUDIO_INPUT_BUS, &deviceObjectID, sizeof(AudioDeviceID)); + status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)(pData->audioUnit, kAudioOutputUnitProperty_CurrentDevice, kAudioUnitScope_Global, (deviceType == mal_device_type_playback) ? MAL_COREAUDIO_OUTPUT_BUS : MAL_COREAUDIO_INPUT_BUS, &deviceObjectID, sizeof(AudioDeviceID)); if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return mal_result_from_OSStatus(result); } #endif @@ -14511,9 +15193,9 @@ mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type dev AudioUnitElement formatElement = (deviceType == mal_device_type_playback) ? MAL_COREAUDIO_OUTPUT_BUS : MAL_COREAUDIO_INPUT_BUS; #if defined(MAL_APPLE_DESKTOP) - result = mal_device_find_best_format__coreaudio(pDevice, &bestFormat); + result = mal_find_best_format__coreaudio(pContext, deviceObjectID, deviceType, pData->formatIn, pData->channelsIn, pData->sampleRateIn, pData->usingDefaultFormat, pData->usingDefaultChannels, pData->usingDefaultSampleRate, &bestFormat); if (result != MAL_SUCCESS) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return result; } @@ -14521,28 +15203,28 @@ mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type dev AudioStreamBasicDescription origFormat; UInt32 origFormatSize = sizeof(origFormat); if (deviceType == mal_device_type_playback) { - status = ((mal_AudioUnitGetProperty_proc)pContext->coreaudio.AudioUnitGetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, MAL_COREAUDIO_OUTPUT_BUS, &origFormat, &origFormatSize); + status = ((mal_AudioUnitGetProperty_proc)pContext->coreaudio.AudioUnitGetProperty)(pData->audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, MAL_COREAUDIO_OUTPUT_BUS, &origFormat, &origFormatSize); } else { - status = ((mal_AudioUnitGetProperty_proc)pContext->coreaudio.AudioUnitGetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, MAL_COREAUDIO_INPUT_BUS, &origFormat, &origFormatSize); + status = ((mal_AudioUnitGetProperty_proc)pContext->coreaudio.AudioUnitGetProperty)(pData->audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, MAL_COREAUDIO_INPUT_BUS, &origFormat, &origFormatSize); } if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return result; } bestFormat.mSampleRate = origFormat.mSampleRate; - status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioUnitProperty_StreamFormat, formatScope, formatElement, &bestFormat, sizeof(bestFormat)); + status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)(pData->audioUnit, kAudioUnitProperty_StreamFormat, formatScope, formatElement, &bestFormat, sizeof(bestFormat)); if (status != noErr) { // We failed to set the 
format, so fall back to the current format of the audio unit. bestFormat = origFormat; } #else UInt32 propSize = sizeof(bestFormat); - status = ((mal_AudioUnitGetProperty_proc)pContext->coreaudio.AudioUnitGetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioUnitProperty_StreamFormat, formatScope, formatElement, &bestFormat, &propSize); + status = ((mal_AudioUnitGetProperty_proc)pContext->coreaudio.AudioUnitGetProperty)(pData->audioUnit, kAudioUnitProperty_StreamFormat, formatScope, formatElement, &bestFormat, &propSize); if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return mal_result_from_OSStatus(status); } @@ -14554,54 +15236,54 @@ mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type dev AVAudioSession* pAudioSession = [AVAudioSession sharedInstance]; mal_assert(pAudioSession != NULL); - [pAudioSession setPreferredSampleRate:(double)pDevice->sampleRate error:nil]; + [pAudioSession setPreferredSampleRate:(double)pData->sampleRateIn error:nil]; bestFormat.mSampleRate = pAudioSession.sampleRate; } - status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioUnitProperty_StreamFormat, formatScope, formatElement, &bestFormat, sizeof(bestFormat)); + status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)(pData->audioUnit, kAudioUnitProperty_StreamFormat, formatScope, formatElement, &bestFormat, sizeof(bestFormat)); if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return mal_result_from_OSStatus(status); } #endif - result = mal_format_from_AudioStreamBasicDescription(&bestFormat, &pDevice->internalFormat); + result = mal_format_from_AudioStreamBasicDescription(&bestFormat, &pData->formatOut); if (result != MAL_SUCCESS) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return result; } - if (pDevice->internalFormat == mal_format_unknown) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + if (pData->formatOut == mal_format_unknown) { + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return MAL_FORMAT_NOT_SUPPORTED; } - pDevice->internalChannels = bestFormat.mChannelsPerFrame; - pDevice->internalSampleRate = bestFormat.mSampleRate; + pData->channelsOut = bestFormat.mChannelsPerFrame; + pData->sampleRateOut = bestFormat.mSampleRate; } // Internal channel map. #if defined(MAL_APPLE_DESKTOP) - result = mal_get_AudioObject_channel_map(pContext, deviceObjectID, deviceType, pDevice->internalChannelMap); + result = mal_get_AudioObject_channel_map(pContext, deviceObjectID, deviceType, pData->channelMapOut); if (result != MAL_SUCCESS) { return result; } #else // TODO: Figure out how to get the channel map using AVAudioSession. 
- mal_get_standard_channel_map(mal_standard_channel_map_default, pDevice->internalChannels, pDevice->internalChannelMap); + mal_get_standard_channel_map(mal_standard_channel_map_default, pData->channelsOut, pData->channelMapOut); #endif // Buffer size. Not allowing this to be configurable on iOS. - mal_uint32 actualBufferSizeInFrames = pDevice->bufferSizeInFrames; + mal_uint32 actualBufferSizeInFrames = pData->bufferSizeInFramesIn; #if defined(MAL_APPLE_DESKTOP) if (actualBufferSizeInFrames == 0) { - actualBufferSizeInFrames = mal_calculate_buffer_size_in_frames_from_milliseconds(pDevice->bufferSizeInMilliseconds, pDevice->internalSampleRate); + actualBufferSizeInFrames = mal_calculate_buffer_size_in_frames_from_milliseconds(pData->bufferSizeInMillisecondsIn, pData->sampleRateOut); } - actualBufferSizeInFrames = actualBufferSizeInFrames / pDevice->periods; + actualBufferSizeInFrames = actualBufferSizeInFrames / pData->periodsOut; result = mal_set_AudioObject_buffer_size_in_frames(pContext, deviceObjectID, deviceType, &actualBufferSizeInFrames); if (result != MAL_SUCCESS) { return result; @@ -14610,7 +15292,7 @@ mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type dev actualBufferSizeInFrames = 4096; #endif - pDevice->bufferSizeInFrames = actualBufferSizeInFrames * pDevice->periods; + pData->bufferSizeInFramesOut = actualBufferSizeInFrames * pData->periodsOut; // During testing I discovered that the buffer size can be too big. You'll get an error like this: // @@ -14622,47 +15304,19 @@ mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type dev /*AudioUnitScope propScope = (deviceType == mal_device_type_playback) ? kAudioUnitScope_Input : kAudioUnitScope_Output; AudioUnitElement propBus = (deviceType == mal_device_type_playback) ? MAL_COREAUDIO_OUTPUT_BUS : MAL_COREAUDIO_INPUT_BUS; - status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioUnitProperty_MaximumFramesPerSlice, propScope, propBus, &actualBufferSizeInFrames, sizeof(actualBufferSizeInFrames)); + status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)(pData->audioUnit, kAudioUnitProperty_MaximumFramesPerSlice, propScope, propBus, &actualBufferSizeInFrames, sizeof(actualBufferSizeInFrames)); if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return mal_result_from_OSStatus(status); }*/ - status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &actualBufferSizeInFrames, sizeof(actualBufferSizeInFrames)); + status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)(pData->audioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &actualBufferSizeInFrames, sizeof(actualBufferSizeInFrames)); if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return mal_result_from_OSStatus(status); } } - - // Callbacks. 
- AURenderCallbackStruct callbackInfo; - callbackInfo.inputProcRefCon = pDevice; - if (deviceType == mal_device_type_playback) { - callbackInfo.inputProc = mal_on_output__coreaudio; - status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, MAL_COREAUDIO_OUTPUT_BUS, &callbackInfo, sizeof(callbackInfo)); - if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); - return mal_result_from_OSStatus(status); - } - } else { - callbackInfo.inputProc = mal_on_input__coreaudio; - status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, MAL_COREAUDIO_INPUT_BUS, &callbackInfo, sizeof(callbackInfo)); - if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); - return mal_result_from_OSStatus(status); - } - } - - // We need to listen for stop events. - status = ((mal_AudioUnitAddPropertyListener_proc)pContext->coreaudio.AudioUnitAddPropertyListener)((AudioUnit)pDevice->coreaudio.audioUnit, kAudioOutputUnitProperty_IsRunning, on_start_stop__coreaudio, pDevice); - if (status != noErr) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); - return mal_result_from_OSStatus(status); - } - - // We need a buffer list if this is an input device. We render into this in the input callback. if (deviceType == mal_device_type_capture) { mal_bool32 isInterleaved = (bestFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved) == 0; @@ -14671,49 +15325,194 @@ mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type dev if (isInterleaved) { // Interleaved case. This is the simple case because we just have one buffer. allocationSize += sizeof(AudioBuffer) * 1; - allocationSize += actualBufferSizeInFrames * mal_get_bytes_per_frame(pDevice->internalFormat, pDevice->internalChannels); + allocationSize += actualBufferSizeInFrames * mal_get_bytes_per_frame(pData->formatOut, pData->channelsOut); } else { // Non-interleaved case. This is the more complex case because there's more than one buffer. 
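// (Layout of the single allocation built in this block: one malloc'd region holds an AudioBufferList
//  whose mBuffers[] array has one entry (interleaved) or one entry per channel (non-interleaved),
//  followed by the raw sample data; each mData pointer set below is an offset into that same region.
//  With hypothetical values of f32 samples, 2 channels and 4096 frames per period, the data portion is
//  4096 * 4 * 2 = 32768 bytes in one buffer when interleaved, or two separate 4096 * 4 = 16384 byte
//  buffers when non-interleaved.)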
- allocationSize += sizeof(AudioBuffer) * pDevice->internalChannels; - allocationSize += actualBufferSizeInFrames * mal_get_bytes_per_sample(pDevice->internalFormat) * pDevice->internalChannels; + allocationSize += sizeof(AudioBuffer) * pData->channelsOut; + allocationSize += actualBufferSizeInFrames * mal_get_bytes_per_sample(pData->formatOut) * pData->channelsOut; } AudioBufferList* pBufferList = (AudioBufferList*)mal_malloc(allocationSize); if (pBufferList == NULL) { - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return MAL_OUT_OF_MEMORY; } if (isInterleaved) { pBufferList->mNumberBuffers = 1; - pBufferList->mBuffers[0].mNumberChannels = pDevice->internalChannels; - pBufferList->mBuffers[0].mDataByteSize = actualBufferSizeInFrames * mal_get_bytes_per_frame(pDevice->internalFormat, pDevice->internalChannels); + pBufferList->mBuffers[0].mNumberChannels = pData->channelsOut; + pBufferList->mBuffers[0].mDataByteSize = actualBufferSizeInFrames * mal_get_bytes_per_frame(pData->formatOut, pData->channelsOut); pBufferList->mBuffers[0].mData = (mal_uint8*)pBufferList + sizeof(AudioBufferList); } else { - pBufferList->mNumberBuffers = pDevice->internalChannels; + pBufferList->mNumberBuffers = pData->channelsOut; for (mal_uint32 iBuffer = 0; iBuffer < pBufferList->mNumberBuffers; ++iBuffer) { pBufferList->mBuffers[iBuffer].mNumberChannels = 1; - pBufferList->mBuffers[iBuffer].mDataByteSize = actualBufferSizeInFrames * mal_get_bytes_per_sample(pDevice->internalFormat); - pBufferList->mBuffers[iBuffer].mData = (mal_uint8*)pBufferList + ((sizeof(AudioBufferList) - sizeof(AudioBuffer)) + (sizeof(AudioBuffer) * pDevice->internalChannels)) + (actualBufferSizeInFrames * mal_get_bytes_per_sample(pDevice->internalFormat) * iBuffer); + pBufferList->mBuffers[iBuffer].mDataByteSize = actualBufferSizeInFrames * mal_get_bytes_per_sample(pData->formatOut); + pBufferList->mBuffers[iBuffer].mData = (mal_uint8*)pBufferList + ((sizeof(AudioBufferList) - sizeof(AudioBuffer)) + (sizeof(AudioBuffer) * pData->channelsOut)) + (actualBufferSizeInFrames * mal_get_bytes_per_sample(pData->formatOut) * iBuffer); } } - pDevice->coreaudio.pAudioBufferList = pBufferList; + pData->pAudioBufferList = pBufferList; } + // Callbacks. 
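// (mal_on_output__coreaudio and mal_on_input__coreaudio are defined earlier in this file. For
//  reference, a callback registered through kAudioUnitProperty_SetRenderCallback follows the standard
//  AURenderCallback shape sketched here; inRefCon receives whatever is stored in inputProcRefCon
//  below, which is the device pointer (passed into this function as pDevice_DoNotReference):
//
//      static OSStatus my_render_callback__sketch(void* inRefCon, AudioUnitRenderActionFlags* ioActionFlags,
//                                                 const AudioTimeStamp* inTimeStamp, UInt32 inBusNumber,
//                                                 UInt32 inNumberFrames, AudioBufferList* ioData)
//      {
//          mal_device* pDevice = (mal_device*)inRefCon;
//          // For playback: read inNumberFrames frames from the client and write them into ioData->mBuffers[].
//          return noErr;
//      }
//
//  The capture callback registered via kAudioOutputUnitProperty_SetInputCallback has the same
//  signature but pulls the captured frames with AudioUnitRender() into its own buffer list.)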
+ AURenderCallbackStruct callbackInfo; + callbackInfo.inputProcRefCon = pDevice_DoNotReference; + if (deviceType == mal_device_type_playback) { + callbackInfo.inputProc = mal_on_output__coreaudio; + status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)(pData->audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, MAL_COREAUDIO_OUTPUT_BUS, &callbackInfo, sizeof(callbackInfo)); + if (status != noErr) { + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); + return mal_result_from_OSStatus(status); + } + } else { + callbackInfo.inputProc = mal_on_input__coreaudio; + status = ((mal_AudioUnitSetProperty_proc)pContext->coreaudio.AudioUnitSetProperty)(pData->audioUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, MAL_COREAUDIO_INPUT_BUS, &callbackInfo, sizeof(callbackInfo)); + if (status != noErr) { + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); + return mal_result_from_OSStatus(status); + } + } - // Initialize the audio unit. - status = ((mal_AudioUnitInitialize_proc)pContext->coreaudio.AudioUnitInitialize)((AudioUnit)pDevice->coreaudio.audioUnit); + // We need to listen for stop events. + status = ((mal_AudioUnitAddPropertyListener_proc)pContext->coreaudio.AudioUnitAddPropertyListener)(pData->audioUnit, kAudioOutputUnitProperty_IsRunning, on_start_stop__coreaudio, pDevice_DoNotReference); if (status != noErr) { - mal_free(pDevice->coreaudio.pAudioBufferList); - ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); return mal_result_from_OSStatus(status); } + // Initialize the audio unit. + status = ((mal_AudioUnitInitialize_proc)pContext->coreaudio.AudioUnitInitialize)(pData->audioUnit); + if (status != noErr) { + mal_free(pData->pAudioBufferList); + pData->pAudioBufferList = NULL; + ((mal_AudioComponentInstanceDispose_proc)pContext->coreaudio.AudioComponentInstanceDispose)(pData->audioUnit); + return mal_result_from_OSStatus(status); + } + + // Grab the name. +#if defined(MAL_APPLE_DESKTOP) + mal_get_AudioObject_name(pContext, deviceObjectID, sizeof(pData->deviceName), pData->deviceName); +#endif + + return result; +} + +mal_result mal_device_reinit_internal__coreaudio(mal_device* pDevice, mal_bool32 disposePreviousAudioUnit) +{ + mal_device_init_internal_data__coreaudio data; + data.formatIn = pDevice->format; + data.channelsIn = pDevice->channels; + data.sampleRateIn = pDevice->sampleRate; + mal_copy_memory(data.channelMapIn, pDevice->channelMap, sizeof(pDevice->channelMap)); + data.bufferSizeInFramesIn = pDevice->bufferSizeInFrames; + data.bufferSizeInMillisecondsIn = pDevice->bufferSizeInMilliseconds; + data.periodsIn = pDevice->periods; + data.usingDefaultFormat = pDevice->usingDefaultFormat; + data.usingDefaultChannels = pDevice->usingDefaultChannels; + data.usingDefaultSampleRate = pDevice->usingDefaultSampleRate; + data.usingDefaultChannelMap = pDevice->usingDefaultChannelMap; + data.shareMode = pDevice->initConfig.shareMode; + + mal_result result = mal_device_init_internal__coreaudio(pDevice->pContext, pDevice->type, NULL, &data, (void*)pDevice); + if (result != MAL_SUCCESS) { + return result; + } + + // We have successfully initialized the new objects. 
We now need to uninitialize the previous objects and re-set them. + if (disposePreviousAudioUnit) { + pDevice->pContext->onDeviceStop(pDevice); + ((mal_AudioComponentInstanceDispose_proc)pDevice->pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit); + } + if (pDevice->coreaudio.pAudioBufferList) { + mal_free(pDevice->coreaudio.pAudioBufferList); + } + +#if defined(MAL_APPLE_DESKTOP) + pDevice->coreaudio.deviceObjectID = (mal_uint32)data.deviceObjectID; +#endif + pDevice->coreaudio.component = (mal_ptr)data.component; + pDevice->coreaudio.audioUnit = (mal_ptr)data.audioUnit; + pDevice->coreaudio.pAudioBufferList = (mal_ptr)data.pAudioBufferList; + + pDevice->internalFormat = data.formatOut; + pDevice->internalChannels = data.channelsOut; + pDevice->internalSampleRate = data.sampleRateOut; + mal_copy_memory(pDevice->internalChannelMap, data.channelMapOut, sizeof(data.channelMapOut)); + pDevice->bufferSizeInFrames = data.bufferSizeInFramesOut; + pDevice->periods = data.periodsOut; + pDevice->exclusiveMode = MAL_FALSE; + mal_strcpy_s(pDevice->name, sizeof(pDevice->name), data.deviceName); + + return MAL_SUCCESS; +} + + +mal_result mal_device_init__coreaudio(mal_context* pContext, mal_device_type deviceType, const mal_device_id* pDeviceID, const mal_device_config* pConfig, mal_device* pDevice) +{ + (void)pConfig; + + mal_assert(pContext != NULL); + mal_assert(pConfig != NULL); + mal_assert(pDevice != NULL); + mal_assert(deviceType == mal_device_type_playback || deviceType == mal_device_type_capture); + + mal_device_init_internal_data__coreaudio data; + data.formatIn = pDevice->format; + data.channelsIn = pDevice->channels; + data.sampleRateIn = pDevice->sampleRate; + mal_copy_memory(data.channelMapIn, pDevice->channelMap, sizeof(pDevice->channelMap)); + data.bufferSizeInFramesIn = pDevice->bufferSizeInFrames; + data.bufferSizeInMillisecondsIn = pDevice->bufferSizeInMilliseconds; + data.periodsIn = pDevice->periods; + data.usingDefaultFormat = pDevice->usingDefaultFormat; + data.usingDefaultChannels = pDevice->usingDefaultChannels; + data.usingDefaultSampleRate = pDevice->usingDefaultSampleRate; + data.usingDefaultChannelMap = pDevice->usingDefaultChannelMap; + data.shareMode = pDevice->initConfig.shareMode; + + mal_result result = mal_device_init_internal__coreaudio(pDevice->pContext, pDevice->type, NULL, &data, (void*)pDevice); + if (result != MAL_SUCCESS) { + return result; + } + + // We have successfully initialized the new objects. We now need to uninitialize the previous objects and re-set them. 
+ pDevice->pContext->onDeviceStop(pDevice);
+ ((mal_AudioComponentInstanceDispose_proc)pDevice->pContext->coreaudio.AudioComponentInstanceDispose)((AudioUnit)pDevice->coreaudio.audioUnit);
+ if (pDevice->coreaudio.pAudioBufferList) {
+ mal_free(pDevice->coreaudio.pAudioBufferList);
+ }
+
+#if defined(MAL_APPLE_DESKTOP)
+ pDevice->coreaudio.deviceObjectID = (mal_uint32)data.deviceObjectID;
+#endif
+ pDevice->coreaudio.component = (mal_ptr)data.component;
+ pDevice->coreaudio.audioUnit = (mal_ptr)data.audioUnit;
+ pDevice->coreaudio.pAudioBufferList = (mal_ptr)data.pAudioBufferList;
+
+ pDevice->internalFormat = data.formatOut;
+ pDevice->internalChannels = data.channelsOut;
+ pDevice->internalSampleRate = data.sampleRateOut;
+ mal_copy_memory(pDevice->internalChannelMap, data.channelMapOut, sizeof(data.channelMapOut));
+ pDevice->bufferSizeInFrames = data.bufferSizeInFramesOut;
+ pDevice->periods = data.periodsOut;
+ pDevice->exclusiveMode = MAL_FALSE;
+ mal_strcpy_s(pDevice->name, sizeof(pDevice->name), data.deviceName);
+
+#if defined(MAL_APPLE_DESKTOP)
+ // If we are using the default device we'll need to listen for changes to the system's default device so we can seamlessly
+ // switch the device in the background.
+ AudioObjectPropertyAddress propAddress;
+ propAddress.mSelector = (deviceType == mal_device_type_playback) ? kAudioHardwarePropertyDefaultOutputDevice : kAudioHardwarePropertyDefaultInputDevice;
+ propAddress.mScope = kAudioObjectPropertyScopeGlobal;
+ propAddress.mElement = kAudioObjectPropertyElementMaster;
+ ((mal_AudioObjectAddPropertyListener_proc)pDevice->pContext->coreaudio.AudioObjectAddPropertyListener)(kAudioObjectSystemObject, &propAddress, &mal_default_output_device_changed__coreaudio, pDevice);
+#endif
 return MAL_SUCCESS;
 }
+
 mal_result mal_device__start_backend__coreaudio(mal_device* pDevice)
 {
 mal_assert(pDevice != NULL);
@@ -14776,7 +15575,8 @@ mal_result mal_context_init__coreaudio(mal_context* pContext)
 pContext->coreaudio.AudioObjectGetPropertyData = mal_dlsym(pContext->coreaudio.hCoreAudio, "AudioObjectGetPropertyData");
 pContext->coreaudio.AudioObjectGetPropertyDataSize = mal_dlsym(pContext->coreaudio.hCoreAudio, "AudioObjectGetPropertyDataSize");
 pContext->coreaudio.AudioObjectSetPropertyData = mal_dlsym(pContext->coreaudio.hCoreAudio, "AudioObjectSetPropertyData");
-
+ pContext->coreaudio.AudioObjectAddPropertyListener = mal_dlsym(pContext->coreaudio.hCoreAudio, "AudioObjectAddPropertyListener");
+
 // It looks like Apple has moved some APIs from AudioUnit into AudioToolbox on more recent versions of macOS. They are still
 // defined in AudioUnit, but just in case they decide to remove them from there entirely I'm going to implement a fallback.
@@ -14817,6 +15617,7 @@ mal_result mal_context_init__coreaudio(mal_context* pContext)
 pContext->coreaudio.AudioObjectGetPropertyData = (mal_proc)AudioObjectGetPropertyData;
 pContext->coreaudio.AudioObjectGetPropertyDataSize = (mal_proc)AudioObjectGetPropertyDataSize;
 pContext->coreaudio.AudioObjectSetPropertyData = (mal_proc)AudioObjectSetPropertyData;
+ pContext->coreaudio.AudioObjectAddPropertyListener = (mal_proc)AudioObjectAddPropertyListener;
 #endif
 pContext->coreaudio.AudioComponentFindNext = (mal_proc)AudioComponentFindNext;
@@ -15445,6 +16246,9 @@ mal_result mal_device_init__sndio(mal_context* pContext, mal_device_type deviceT
 mal_get_standard_channel_map(mal_standard_channel_map_sndio, pDevice->internalChannels, pDevice->internalChannelMap);
+ // The device is always shared with sndio.
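// The MAL_APPLE_DESKTOP block above is what enables stream routing: it watches the system default
// device so the stream can be moved when the user changes it. Stripped of the dynamic symbol
// loading, the registration reduces to the following plain CoreAudio calls (desktop macOS only;
// the callback name and body here are illustrative, not mini_al's):
#include <CoreAudio/CoreAudio.h>
#include <stdio.h>

static OSStatus on_default_output_changed(AudioObjectID objectID, UInt32 addressCount,
                                          const AudioObjectPropertyAddress* pAddresses, void* pUserData)
{
    (void)objectID; (void)addressCount; (void)pAddresses; (void)pUserData;
    printf("System default output device changed.\n");    // a real handler would re-route the stream here
    return noErr;
}

static OSStatus watch_default_output_device(void* pUserData)
{
    AudioObjectPropertyAddress propAddress;
    propAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice;
    propAddress.mScope    = kAudioObjectPropertyScopeGlobal;
    propAddress.mElement  = kAudioObjectPropertyElementMaster;
    return AudioObjectAddPropertyListener(kAudioObjectSystemObject, &propAddress, on_default_output_changed, pUserData);
}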
+ pDevice->exclusiveMode = MAL_FALSE; + pDevice->sndio.pIntermediaryBuffer = mal_malloc(pDevice->sndio.fragmentSizeInFrames * mal_get_bytes_per_frame(pDevice->internalFormat, pDevice->internalChannels)); if (pDevice->sndio.pIntermediaryBuffer == NULL) { ((mal_sio_close_proc)pContext->sndio.sio_close)((struct mal_sio_hdl*)pDevice->sndio.handle); @@ -15622,6 +16426,8 @@ mal_result mal_context_init__sndio(mal_context* pContext) /////////////////////////////////////////////////////////////////////////////// #ifdef MAL_HAS_AUDIO4 #include +#include +#include #include #include #include @@ -15934,6 +16740,7 @@ mal_result mal_device_init__audio4(mal_context* pContext, mal_device_type device mal_assert(pDevice != NULL); mal_zero_object(&pDevice->audio4); + pDevice->audio4.fd = -1; // The first thing to do is open the file. const char* deviceName = "/dev/audio"; @@ -15941,7 +16748,7 @@ mal_result mal_device_init__audio4(mal_context* pContext, mal_device_type device deviceName = pDeviceID->audio4; } - pDevice->audio4.fd = open(deviceName, (deviceType == mal_device_type_playback) ? O_WRONLY : O_RDONLY, 0); + pDevice->audio4.fd = open(deviceName, ((deviceType == mal_device_type_playback) ? O_WRONLY : O_RDONLY) | O_NONBLOCK, 0); if (pDevice->audio4.fd == -1) { return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to open device.", MAL_FAILED_TO_OPEN_BACKEND_DEVICE); } @@ -16107,6 +16914,16 @@ mal_result mal_device_init__audio4(mal_context* pContext, mal_device_type device // using the channels defined in FreeBSD's sound(4) man page. mal_get_standard_channel_map(mal_standard_channel_map_sound4, pDevice->internalChannels, pDevice->internalChannelMap); + + // The version of the operating system dictates whether or not the device is exclusive or shared. NetBSD + // introduced in-kernel mixing which means it's shared. All other BSD flavours are exclusive as far as + // I'm aware. +#if defined(__NetBSD_Version__) && __NetBSD_Version__ >= 800000000 + pDevice->exclusiveMode = MAL_FALSE; +#else + pDevice->exclusiveMode = MAL_TRUE; +#endif + // When not using MMAP mode we need to use an intermediary buffer to the data transfer between the client // and device. Everything is done by the size of a fragment. @@ -16124,6 +16941,10 @@ mal_result mal_device__start_backend__audio4(mal_device* pDevice) { mal_assert(pDevice != NULL); + if (pDevice->audio4.fd == -1) { + return MAL_INVALID_ARGS; + } + // The device is started by the next calls to read() and write(). For playback it's simple - just read // data from the client, then write it to the device with write() which will in turn start the device. // For capture it's a bit less intuitive - we do nothing (it'll be started automatically by the first @@ -16149,13 +16970,17 @@ mal_result mal_device__stop_backend__audio4(mal_device* pDevice) { mal_assert(pDevice != NULL); -#if defined(__NetBSD__) + if (pDevice->audio4.fd == -1) { + return MAL_INVALID_ARGS; + } + +#if !defined(MAL_AUDIO4_USE_NEW_API) if (ioctl(pDevice->audio4.fd, AUDIO_FLUSH, 0) < 0) { return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to stop device. AUDIO_FLUSH failed.", MAL_FAILED_TO_STOP_BACKEND_DEVICE); } #else if (ioctl(pDevice->audio4.fd, AUDIO_STOP, 0) < 0) { - return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to stop device. AUDIO_FLUSH failed.", MAL_FAILED_TO_STOP_BACKEND_DEVICE); + return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to stop device. 
AUDIO_STOP failed.", MAL_FAILED_TO_STOP_BACKEND_DEVICE); } #endif @@ -16170,6 +16995,44 @@ mal_result mal_device__break_main_loop__audio4(mal_device* pDevice) return MAL_SUCCESS; } +mal_result mal_device__wait__audio4(mal_device* pDevice) +{ + mal_assert(pDevice != NULL); + + struct pollfd fds[1]; + fds[0].fd = pDevice->audio4.fd; + fds[0].events = (pDevice->type == mal_device_type_playback) ? (POLLOUT | POLLWRBAND) : (POLLIN | POLLPRI); + int timeout = 2 * 1000; + int ioresult = poll(fds, mal_countof(fds), timeout); + if (ioresult < 0) { + #ifdef MAL_DEBUG_OUTPUT + printf("poll() failed: timeout=%d, ioresult=%d\n", pDevice->bufferSizeInMilliseconds, ioresult); + #endif + return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to wait for device.", MAL_ERROR); + } + + // Check for a timeout. This has been annoying in my testing. In my testing, when the device is unplugged it will just + // hang on the next calls to write(), ioctl(), etc. The only way I have figured out how to handle this is to wait for + // a timeout from poll(). In the unplugging case poll() will timeout, however there's no indication that the device is + // unusable - no flags are set, no errors are reported, nothing. To work around this I have decided to outright fail + // in the event of a timeout. + if (ioresult == 0) { + // Check for errors. + if ((fds[0].revents & (POLLERR | POLLNVAL)) != 0) { + return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to wait for device.", MAL_NO_DEVICE); + } + if ((fds[0].revents & (POLLHUP)) != 0) { + return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to wait for device. Disconnected.", MAL_NO_DEVICE); + } + + // A return value of 0 from poll indicates a timeout. + return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Timeout while waiting for device.", MAL_TIMEOUT); + } + + mal_assert(ioresult > 0); + return MAL_SUCCESS; +} + mal_result mal_device__main_loop__audio4(mal_device* pDevice) { mal_assert(pDevice != NULL); @@ -16186,18 +17049,53 @@ mal_result mal_device__main_loop__audio4(mal_device* pDevice) // Playback. mal_device__read_frames_from_client(pDevice, pDevice->audio4.fragmentSizeInFrames, pDevice->audio4.pIntermediaryBuffer); - int bytesWritten = write(pDevice->audio4.fd, pDevice->audio4.pIntermediaryBuffer, pDevice->audio4.fragmentSizeInFrames * pDevice->internalChannels * mal_get_bytes_per_sample(pDevice->internalFormat)); - if (bytesWritten < 0) { - return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to send data from the client to the device.", MAL_FAILED_TO_SEND_DATA_TO_DEVICE); + // Wait for data to become available. + mal_result result = mal_device__wait__audio4(pDevice); + if (result != MAL_SUCCESS) { + return result; + } + + size_t bytesToWrite = pDevice->audio4.fragmentSizeInFrames * mal_get_bytes_per_frame(pDevice->internalFormat, pDevice->internalChannels); + while (bytesToWrite > 0) { + ssize_t bytesWritten = write(pDevice->audio4.fd, pDevice->audio4.pIntermediaryBuffer, bytesToWrite); + if (bytesWritten < 0) { + return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to send data from the client to the device.", MAL_FAILED_TO_SEND_DATA_TO_DEVICE); + } + + if (bytesWritten == 0) { + return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to write any data to the device.", MAL_FAILED_TO_SEND_DATA_TO_DEVICE); + } + + bytesToWrite -= bytesWritten; } } else { // Capture. 
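// The playback path above polls the descriptor and then loops until the whole fragment has been
// accepted by write(). A generic sketch of that non-blocking "write it all" pattern (illustrative
// names; note that a robust loop advances the source pointer after a partial write so retries
// continue from where the previous write stopped):
#include <poll.h>
#include <unistd.h>
#include <errno.h>

static int write_all_nonblocking(int fd, const unsigned char* pData, size_t bytesToWrite, int timeoutMs)
{
    while (bytesToWrite > 0) {
        struct pollfd fds[1];
        fds[0].fd = fd;
        fds[0].events = POLLOUT;
        int ioresult = poll(fds, 1, timeoutMs);
        if (ioresult <= 0) return -1;              // error, or a timeout (possibly an unplugged device)

        ssize_t bytesWritten = write(fd, pData, bytesToWrite);
        if (bytesWritten < 0) {
            if (errno == EAGAIN) continue;         // not actually ready; poll() again
            return -1;
        }
        pData += bytesWritten;                     // advance past the bytes the device accepted
        bytesToWrite -= (size_t)bytesWritten;
    }
    return 0;
}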
- int bytesRead = read(pDevice->audio4.fd, pDevice->audio4.pIntermediaryBuffer, pDevice->audio4.fragmentSizeInFrames * mal_get_bytes_per_sample(pDevice->internalFormat)); - if (bytesRead < 0) { - return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to read data from the device to be sent to the client.", MAL_FAILED_TO_READ_DATA_FROM_DEVICE); + mal_result result = mal_device__wait__audio4(pDevice); + if (result != MAL_SUCCESS) { + return result; } - mal_uint32 framesRead = (mal_uint32)bytesRead / pDevice->internalChannels / mal_get_bytes_per_sample(pDevice->internalFormat); + size_t totalBytesRead = 0; + size_t bytesToRead = pDevice->audio4.fragmentSizeInFrames * mal_get_bytes_per_frame(pDevice->internalFormat, pDevice->internalChannels); + while (bytesToRead > 0) { + ssize_t bytesRead = read(pDevice->audio4.fd, pDevice->audio4.pIntermediaryBuffer, bytesToRead); + if (bytesRead < 0) { + if (errno == EAGAIN) { + break; + } + + return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to read data from the device to be sent to the client.", MAL_FAILED_TO_READ_DATA_FROM_DEVICE); + } + + if (bytesRead == 0) { + return mal_post_error(pDevice, MAL_LOG_LEVEL_ERROR, "[audio4] Failed to read any data from the device.", MAL_FAILED_TO_READ_DATA_FROM_DEVICE); + } + + bytesToRead -= bytesRead; + totalBytesRead += bytesRead; + } + + mal_uint32 framesRead = (mal_uint32)totalBytesRead / mal_get_bytes_per_frame(pDevice->internalFormat, pDevice->internalChannels); mal_device__send_frames_to_client(pDevice, framesRead, pDevice->audio4.pIntermediaryBuffer); } } @@ -16563,6 +17461,8 @@ mal_result mal_device_init__oss(mal_context* pContext, mal_device_type type, con // Set the internal channel map. Not sure if this can be queried. For now just using the channel layouts defined in FreeBSD's sound(4) man page. mal_get_standard_channel_map(mal_standard_channel_map_sound4, pDevice->internalChannels, pDevice->internalChannelMap); + // OSS seems to be shared. + pDevice->exclusiveMode = MAL_FALSE; // When not using MMAP mode, we need to use an intermediary buffer for the client <-> device transfer. We do // everything by the size of a fragment. @@ -19063,6 +19963,65 @@ mal_bool32 mal__is_channel_map_valid(const mal_channel* channelMap, mal_uint32 c } +void mal_device__post_init_setup(mal_device* pDevice) +{ + mal_assert(pDevice != NULL); + + // Make sure the internal channel map was set correctly by the backend. If it's not valid, just fall back to defaults. + if (!mal_channel_map_valid(pDevice->internalChannels, pDevice->internalChannelMap)) { + mal_get_standard_channel_map(mal_standard_channel_map_default, pDevice->internalChannels, pDevice->internalChannelMap); + } + + + // If the format/channels/rate is using defaults we need to set these to be the same as the internal config. + if (pDevice->usingDefaultFormat) { + pDevice->format = pDevice->internalFormat; + } + if (pDevice->usingDefaultChannels) { + pDevice->channels = pDevice->internalChannels; + } + if (pDevice->usingDefaultSampleRate) { + pDevice->sampleRate = pDevice->internalSampleRate; + } + if (pDevice->usingDefaultChannelMap) { + mal_copy_memory(pDevice->channelMap, pDevice->internalChannelMap, sizeof(pDevice->channelMap)); + } + + // Buffer size. The backend will have set bufferSizeInFrames. We need to calculate bufferSizeInMilliseconds here. 
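// The conversions used throughout this section, spelled out: a frame is one sample per channel,
// so bytesPerFrame = channels * bytesPerSample, and a rate of R frames per second is R/1000 frames
// per millisecond. Worked example with assumed values: stereo f32 is 2*4 = 8 bytes per frame, so a
// 1024-frame fragment is 8192 bytes, and at 48000 Hz it covers 1024/(48000/1000) = 21 ms of audio.
#include <stdint.h>
#include <stddef.h>

static uint32_t bytes_per_frame(uint32_t channels, uint32_t bytesPerSample)
{
    return channels*bytesPerSample;
}

static uint32_t bytes_to_frames(size_t bytes, uint32_t channels, uint32_t bytesPerSample)
{
    return (uint32_t)(bytes/bytes_per_frame(channels, bytesPerSample));
}

static uint32_t frames_to_milliseconds(uint32_t frames, uint32_t sampleRate)
{
    return frames/(sampleRate/1000);    // same integer arithmetic as the calculation just below
}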
+ pDevice->bufferSizeInMilliseconds = pDevice->bufferSizeInFrames / (pDevice->internalSampleRate/1000); + + + // We need a DSP object which is where samples are moved through in order to convert them to the + // format required by the backend. + mal_dsp_config dspConfig = mal_dsp_config_init_new(); + dspConfig.neverConsumeEndOfInput = MAL_TRUE; + dspConfig.pUserData = pDevice; + if (pDevice->type == mal_device_type_playback) { + dspConfig.formatIn = pDevice->format; + dspConfig.channelsIn = pDevice->channels; + dspConfig.sampleRateIn = pDevice->sampleRate; + mal_copy_memory(dspConfig.channelMapIn, pDevice->channelMap, sizeof(dspConfig.channelMapIn)); + dspConfig.formatOut = pDevice->internalFormat; + dspConfig.channelsOut = pDevice->internalChannels; + dspConfig.sampleRateOut = pDevice->internalSampleRate; + mal_copy_memory(dspConfig.channelMapOut, pDevice->internalChannelMap, sizeof(dspConfig.channelMapOut)); + dspConfig.onRead = mal_device__on_read_from_client; + mal_dsp_init(&dspConfig, &pDevice->dsp); + } else { + dspConfig.formatIn = pDevice->internalFormat; + dspConfig.channelsIn = pDevice->internalChannels; + dspConfig.sampleRateIn = pDevice->internalSampleRate; + mal_copy_memory(dspConfig.channelMapIn, pDevice->internalChannelMap, sizeof(dspConfig.channelMapIn)); + dspConfig.formatOut = pDevice->format; + dspConfig.channelsOut = pDevice->channels; + dspConfig.sampleRateOut = pDevice->sampleRate; + mal_copy_memory(dspConfig.channelMapOut, pDevice->channelMap, sizeof(dspConfig.channelMapOut)); + dspConfig.onRead = mal_device__on_read_from_device; + mal_dsp_init(&dspConfig, &pDevice->dsp); + } +} + + mal_thread_result MAL_THREADCALL mal_worker_thread(void* pData) { mal_device* pDevice = (mal_device*)pData; @@ -19072,7 +20031,6 @@ mal_thread_result MAL_THREADCALL mal_worker_thread(void* pData) mal_CoInitializeEx(pDevice->pContext, NULL, MAL_COINIT_VALUE); #endif -#if 1 // When the device is being initialized it's initial state is set to MAL_STATE_UNINITIALIZED. Before returning from // mal_device_init(), the state needs to be set to something valid. In mini_al the device's default state immediately // after initialization is stopped, so therefore we need to mark the device as such. mini_al will wait on the worker @@ -19111,11 +20069,43 @@ mal_thread_result MAL_THREADCALL mal_worker_thread(void* pData) // Now we just enter the main loop. When the main loop is terminated the device needs to be marked as stopped. This can // be broken with mal_device__break_main_loop(). - pDevice->pContext->onDeviceMainLoop(pDevice); + mal_result mainLoopResult = pDevice->pContext->onDeviceMainLoop(pDevice); + if (mainLoopResult != MAL_SUCCESS && pDevice->isDefaultDevice && mal_device__get_state(pDevice) == MAL_STATE_STARTED && !pDevice->exclusiveMode) { + // Something has failed during the main loop. It could be that the device has been lost. If it's the default device, + // we can try switching over to the new default device by uninitializing and reinitializing. 
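// The mal_dsp object configured above is mini_al's data conversion pipeline: for playback it
// converts the client's format/channels/sample rate to the device's internal ones, and for capture
// it runs the other way. As a flavour of the kind of per-sample work such a pipeline performs, a
// conversion of 32-bit float samples to signed 16-bit looks roughly like this (illustrative sketch
// only, not mini_al's converter):
#include <stdint.h>
#include <stddef.h>

static void convert_f32_to_s16(int16_t* pOut, const float* pIn, size_t sampleCount)
{
    for (size_t i = 0; i < sampleCount; ++i) {
        float x = pIn[i];
        if (x < -1.0f) x = -1.0f;              // clamp to the valid float range
        if (x >  1.0f) x =  1.0f;
        pOut[i] = (int16_t)(x*32767.0f);       // scale to the signed 16-bit range
    }
}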
+ mal_result reinitResult = MAL_ERROR; + if (pDevice->pContext->onDeviceReinit) { + reinitResult = pDevice->pContext->onDeviceReinit(pDevice); + } else { + pDevice->pContext->onDeviceStop(pDevice); + mal_device__set_state(pDevice, MAL_STATE_STOPPED); + + pDevice->pContext->onDeviceUninit(pDevice); + mal_device__set_state(pDevice, MAL_STATE_UNINITIALIZED); + + reinitResult = pDevice->pContext->onDeviceInit(pDevice->pContext, pDevice->type, NULL, &pDevice->initConfig, pDevice); + } + + // Perform the post initialization setup just in case the data conversion pipeline needs to be reinitialized. + if (reinitResult == MAL_SUCCESS) { + mal_device__post_init_setup(pDevice); + } + + // If reinitialization was successful, loop back to the start. + if (reinitResult == MAL_SUCCESS) { + mal_device__set_state(pDevice, MAL_STATE_STARTING); // <-- The device is restarting. + mal_event_signal(&pDevice->wakeupEvent); + continue; + } + } - // Getting here means we have broken from the main loop which happens the application has requested that device be stopped. - pDevice->pContext->onDeviceStop(pDevice); + // Getting here means we have broken from the main loop which happens the application has requested that device be stopped. Note that this + // may have actually already happened above if the device was lost and mini_al has attempted to re-initialize the device. In this case we + // don't want to be doing this a second time. + if (mal_device__get_state(pDevice) != MAL_STATE_UNINITIALIZED) { + pDevice->pContext->onDeviceStop(pDevice); + } // After the device has stopped, make sure an event is posted. mal_stop_proc onStop = pDevice->onStop; @@ -19123,63 +20113,14 @@ mal_thread_result MAL_THREADCALL mal_worker_thread(void* pData) onStop(pDevice); } - // A function somewhere is waiting for the device to have stopped for real so we need to signal an event to allow it to continue. - mal_device__set_state(pDevice, MAL_STATE_STOPPED); - mal_event_signal(&pDevice->stopEvent); + // A function somewhere is waiting for the device to have stopped for real so we need to signal an event to allow it to continue. Note that + // it's possible that the device has been uninitialized which means we need to _not_ change the status to stopped. We cannot go from an + // uninitialized state to stopped state. + if (mal_device__get_state(pDevice) != MAL_STATE_UNINITIALIZED) { + mal_device__set_state(pDevice, MAL_STATE_STOPPED); + mal_event_signal(&pDevice->stopEvent); + } } -#else - // This is only used to prevent posting onStop() when the device is first initialized. - mal_bool32 skipNextStopEvent = MAL_TRUE; - - for (;;) { - // At the start of iteration the device is stopped - we must explicitly mark it as such. - pDevice->pContext->onDeviceStop(pDevice); - - if (!skipNextStopEvent) { - mal_stop_proc onStop = pDevice->onStop; - if (onStop) { - onStop(pDevice); - } - } else { - skipNextStopEvent = MAL_FALSE; - } - - - // Let the other threads know that the device has stopped. - mal_device__set_state(pDevice, MAL_STATE_STOPPED); - mal_event_signal(&pDevice->stopEvent); - - // We use an event to wait for a request to wake up. - mal_event_wait(&pDevice->wakeupEvent); - - // Default result code. - pDevice->workResult = MAL_SUCCESS; - - // Just break if we're terminating. - if (mal_device__get_state(pDevice) == MAL_STATE_UNINITIALIZED) { - break; - } - - - // Getting here means we just started the device and we need to wait for the device to - // either deliver us data (recording) or request more data (playback). 
- mal_assert(mal_device__get_state(pDevice) == MAL_STATE_STARTING); - - pDevice->workResult = pDevice->pContext->onDeviceStart(pDevice); - if (pDevice->workResult != MAL_SUCCESS) { - mal_event_signal(&pDevice->startEvent); - continue; - } - - // The thread that requested the device to start playing is waiting for this thread to start the - // device for real, which is now. - mal_device__set_state(pDevice, MAL_STATE_STARTED); - mal_event_signal(&pDevice->startEvent); - - // Now we just enter the main loop. The main loop can be broken with mal_device__break_main_loop(). - pDevice->pContext->onDeviceMainLoop(pDevice); - } -#endif // Make sure we aren't continuously waiting on a stop event. mal_event_signal(&pDevice->stopEvent); // <-- Is this still needed? @@ -19690,6 +20631,7 @@ mal_result mal_device_init(mal_context* pContext, mal_device_type type, mal_devi mal_zero_object(pDevice); pDevice->pContext = pContext; + pDevice->initConfig = config; // Set the user data and log callback ASAP to ensure it is available for the entire initialization process. pDevice->pUserData = pUserData; @@ -19703,6 +20645,10 @@ mal_result mal_device_init(mal_context* pContext, mal_device_type type, mal_devi } } + if (pDeviceID == NULL) { + pDevice->isDefaultDevice = MAL_TRUE; + } + // When passing in 0 for the format/channels/rate/chmap it means the device will be using whatever is chosen by the backend. If everything is set // to defaults it means the format conversion pipeline will run on a fast path where data transfer is just passed straight through to the backend. @@ -19781,6 +20727,8 @@ mal_result mal_device_init(mal_context* pContext, mal_device_type type, mal_devi return MAL_NO_BACKEND; // The error message will have been posted with mal_post_error() by the source of the error so don't bother calling it here. } + mal_device__post_init_setup(pDevice); + // If the backend did not fill out a name for the device, try a generic method. if (pDevice->name[0] == '\0') { @@ -19803,61 +20751,6 @@ mal_result mal_device_init(mal_context* pContext, mal_device_type type, mal_devi } - // Make sure the internal channel map was set correctly by the backend. If it's not valid, just fall back to defaults. - if (!mal_channel_map_valid(pDevice->internalChannels, pDevice->internalChannelMap)) { - mal_get_standard_channel_map(mal_standard_channel_map_default, pDevice->internalChannels, pDevice->internalChannelMap); - } - - - // If the format/channels/rate is using defaults we need to set these to be the same as the internal config. - if (pDevice->usingDefaultFormat) { - pDevice->format = pDevice->internalFormat; - } - if (pDevice->usingDefaultChannels) { - pDevice->channels = pDevice->internalChannels; - } - if (pDevice->usingDefaultSampleRate) { - pDevice->sampleRate = pDevice->internalSampleRate; - } - if (pDevice->usingDefaultChannelMap) { - mal_copy_memory(pDevice->channelMap, pDevice->internalChannelMap, sizeof(pDevice->channelMap)); - } - - // Buffer size. The backend will have set bufferSizeInFrames. We need to calculate bufferSizeInMilliseconds here. - pDevice->bufferSizeInMilliseconds = pDevice->bufferSizeInFrames / (pDevice->internalSampleRate/1000); - - - // We need a DSP object which is where samples are moved through in order to convert them to the - // format required by the backend. 
- mal_dsp_config dspConfig = mal_dsp_config_init_new(); - dspConfig.neverConsumeEndOfInput = MAL_TRUE; - dspConfig.pUserData = pDevice; - if (type == mal_device_type_playback) { - dspConfig.formatIn = pDevice->format; - dspConfig.channelsIn = pDevice->channels; - dspConfig.sampleRateIn = pDevice->sampleRate; - mal_copy_memory(dspConfig.channelMapIn, pDevice->channelMap, sizeof(dspConfig.channelMapIn)); - dspConfig.formatOut = pDevice->internalFormat; - dspConfig.channelsOut = pDevice->internalChannels; - dspConfig.sampleRateOut = pDevice->internalSampleRate; - mal_copy_memory(dspConfig.channelMapOut, pDevice->internalChannelMap, sizeof(dspConfig.channelMapOut)); - dspConfig.onRead = mal_device__on_read_from_client; - mal_dsp_init(&dspConfig, &pDevice->dsp); - } else { - dspConfig.formatIn = pDevice->internalFormat; - dspConfig.channelsIn = pDevice->internalChannels; - dspConfig.sampleRateIn = pDevice->internalSampleRate; - mal_copy_memory(dspConfig.channelMapIn, pDevice->internalChannelMap, sizeof(dspConfig.channelMapIn)); - dspConfig.formatOut = pDevice->format; - dspConfig.channelsOut = pDevice->channels; - dspConfig.sampleRateOut = pDevice->sampleRate; - mal_copy_memory(dspConfig.channelMapOut, pDevice->channelMap, sizeof(dspConfig.channelMapOut)); - dspConfig.onRead = mal_device__on_read_from_device; - mal_dsp_init(&dspConfig, &pDevice->dsp); - } - - - // Some backends don't require the worker thread. if (!mal_context_is_backend_asynchronous(pContext)) { // The worker thread. @@ -19872,6 +20765,15 @@ mal_result mal_device_init(mal_context* pContext, mal_device_type type, mal_devi mal_device__set_state(pDevice, MAL_STATE_STOPPED); } + +#ifdef MAL_DEBUG_OUTPUT + printf("[WASAPI] %s (%s)\n", pDevice->name, (pDevice->type == mal_device_type_playback) ? "Playback" : "Capture"); + printf(" Format: %s -> %s\n", mal_get_format_name(pDevice->format), mal_get_format_name(pDevice->internalFormat)); + printf(" Channels: %d -> %d\n", pDevice->channels, pDevice->internalChannels); + printf(" Sample Rate: %d -> %d\n", pDevice->sampleRate, pDevice->internalSampleRate); +#endif + + mal_assert(mal_device__get_state(pDevice) == MAL_STATE_STOPPED); return MAL_SUCCESS; } @@ -27394,6 +28296,27 @@ mal_uint64 mal_sine_wave_read_ex(mal_sine_wave* pSineWave, mal_uint64 frameCount // REVISION HISTORY // ================ // +// v0.8.8 - 2018-09-14 +// - Fix Linux build with the ALSA backend. +// - Minor documentation fix. +// +// v0.8.7 - 2018-09-12 +// - Fix a bug with UWP detection. +// +// v0.8.6 - 2018-08-26 +// - Automatically switch the internal device when the default device is unplugged. Note that this is still in the +// early stages and not all backends handle this the same way. As of this version, this will not detect a default +// device switch when changed from the operating system's audio preferences (unless the backend itself handles +// this automatically). This is not supported in exclusive mode. +// - WASAPI and Core Audio: Add support for stream routing. When the application is using a default device and the +// user switches the default device via the operating system's audio preferences, mini_al will automatically switch +// the internal device to the new default. This is not supported in exclusive mode. +// - WASAPI: Add support for hardware offloading via IAudioClient2. Only supported on Windows 8 and newer. +// - WASAPI: Add support for low-latency shared mode via IAudioClient3. Only supported on Windows 10 and newer. +// - Add support for compiling the UWP build as C. 
+// - mal_device_set_recv_callback() and mal_device_set_send_callback() have been deprecated. You must now set this
+// when the device is initialized with mal_device_init*(). These will be removed in version 0.9.0.
+//
 // v0.8.5 - 2018-08-12
 // - Add support for specifying the size of a device's buffer in milliseconds. You can still set the buffer size in
 // frames if that suits you. When bufferSizeInFrames is 0, bufferSizeInMilliseconds will be used. If both are non-0
diff --git a/src/models.c b/src/models.c
index b1abe66d..8a498139 100644
--- a/src/models.c
+++ b/src/models.c
@@ -5,10 +5,10 @@
 * CONFIGURATION:
 *
 * #define SUPPORT_FILEFORMAT_OBJ
-* Selected desired fileformats to be supported for loading.
-*
 * #define SUPPORT_FILEFORMAT_MTL
-* Selected desired fileformats to be supported for loading.
+* #define SUPPORT_FILEFORMAT_IQM
+* #define SUPPORT_FILEFORMAT_GLTF
+* Selected desired fileformats to be supported for model data loading.
 *
 * #define SUPPORT_MESH_GENERATION
 * Support procedural mesh generation functions, uses external par_shapes.h library
@@ -48,8 +48,20 @@
 #include "rlgl.h" // raylib OpenGL abstraction layer to OpenGL 1.1, 2.1, 3.3+ or ES2
-#define PAR_SHAPES_IMPLEMENTATION
-#include "external/par_shapes.h" // Shapes 3d parametric generation
+#if defined(SUPPORT_FILEFORMAT_IQM)
+ #define RIQM_IMPLEMENTATION
+ #include "external/riqm.h" // IQM file format loading
+#endif
+
+#if defined(SUPPORT_FILEFORMAT_GLTF)
+ #define CGLTF_IMPLEMENTATION
+ #include "external/cgltf.h" // glTF file format loading
+#endif
+
+#if defined(SUPPORT_MESH_GENERATION)
+ #define PAR_SHAPES_IMPLEMENTATION
+ #include "external/par_shapes.h" // Shapes 3d parametric generation
+#endif
 //----------------------------------------------------------------------------------
 // Defines and Macros
@@ -75,6 +87,12 @@ static Mesh LoadOBJ(const char *fileName); // Load OBJ mesh data
 #if defined(SUPPORT_FILEFORMAT_MTL)
 static Material LoadMTL(const char *fileName); // Load MTL material data
 #endif
+#if defined(SUPPORT_FILEFORMAT_IQM)
+static Mesh LoadIQM(const char *fileName); // Load IQM mesh data
+#endif
+#if defined(SUPPORT_FILEFORMAT_GLTF)
+static Mesh LoadGLTF(const char *fileName); // Load GLTF mesh data
+#endif
 //----------------------------------------------------------------------------------
 // Module Functions Definition
@@ -646,45 +664,54 @@ void UnloadMesh(Mesh *mesh)
 rlUnloadMesh(mesh);
 }
-// Export mesh as an OBJ file
-void ExportMesh(const char *fileName, Mesh mesh)
+// Export mesh data to file
+void ExportMesh(Mesh mesh, const char *fileName)
 {
- FILE *objFile = fopen(fileName, "wt");
+ bool success = false;
- fprintf(objFile, "# raylib Mesh OBJ exporter v1.0\n\n");
- fprintf(objFile, "# Mesh exported as triangle faces and not optimized.\n");
- fprintf(objFile, "# Vertex Count: %i\n", mesh.vertexCount);
- fprintf(objFile, "# Triangle Count: %i\n\n", mesh.triangleCount);
- fprintf(objFile, "# LICENSE: zlib/libpng\n");
- fprintf(objFile, "# Copyright (c) 2018 Ramon Santamaria (@raysan5)\n\n");
-
- fprintf(objFile, "g mesh\n");
-
- for (int i = 0, v = 0; i < mesh.vertexCount; i++, v += 3)
+ if (IsFileExtension(fileName, ".obj"))
 {
- fprintf(objFile, "v %.2f %.2f %.2f\n", mesh.vertices[v], mesh.vertices[v + 1], mesh.vertices[v + 2]);
+ FILE *objFile = fopen(fileName, "wt");
+
+ fprintf(objFile, "# raylib Mesh OBJ exporter v1.0\n\n");
+ fprintf(objFile, "# Mesh exported as triangle faces and not optimized.\n");
+ fprintf(objFile, "# Vertex Count: %i\n", mesh.vertexCount);
+ fprintf(objFile, "# 
Triangle Count: %i\n\n", mesh.triangleCount); + fprintf(objFile, "# LICENSE: zlib/libpng\n"); + fprintf(objFile, "# Copyright (c) 2018 Ramon Santamaria (@raysan5)\n\n"); + + fprintf(objFile, "g mesh\n"); + + for (int i = 0, v = 0; i < mesh.vertexCount; i++, v += 3) + { + fprintf(objFile, "v %.2f %.2f %.2f\n", mesh.vertices[v], mesh.vertices[v + 1], mesh.vertices[v + 2]); + } + + for (int i = 0, v = 0; i < mesh.vertexCount; i++, v += 2) + { + fprintf(objFile, "vt %.2f %.2f\n", mesh.texcoords[v], mesh.texcoords[v + 1]); + } + + for (int i = 0, v = 0; i < mesh.vertexCount; i++, v += 3) + { + fprintf(objFile, "vn %.2f %.2f %.2f\n", mesh.normals[v], mesh.normals[v + 1], mesh.normals[v + 2]); + } + + for (int i = 0; i < mesh.triangleCount; i += 3) + { + fprintf(objFile, "f %i/%i/%i %i/%i/%i %i/%i/%i\n", i, i, i, i + 1, i + 1, i + 1, i + 2, i + 2, i + 2); + } + + fprintf(objFile, "\n"); + + fclose(objFile); + + success = true; } - - for (int i = 0, v = 0; i < mesh.vertexCount; i++, v += 2) - { - fprintf(objFile, "vt %.2f %.2f\n", mesh.texcoords[v], mesh.texcoords[v + 1]); - } - - for (int i = 0, v = 0; i < mesh.vertexCount; i++, v += 3) - { - fprintf(objFile, "vn %.2f %.2f %.2f\n", mesh.normals[v], mesh.normals[v + 1], mesh.normals[v + 2]); - } - - for (int i = 0; i < mesh.triangleCount; i += 3) - { - fprintf(objFile, "f %i/%i/%i %i/%i/%i %i/%i/%i\n", i, i, i, i + 1, i + 1, i + 1, i + 2, i + 2, i + 2); - } - - fprintf(objFile, "\n"); - - fclose(objFile); + else if (IsFileExtension(fileName, ".raw")) { } // TODO: Support additional file formats to export mesh vertex data - TraceLog(LOG_INFO, "Mesh saved: %s", fileName); + if (success) TraceLog(LOG_INFO, "Mesh exported successfully: %s", fileName); + else TraceLog(LOG_WARNING, "Mesh could not be exported."); } #if defined(SUPPORT_MESH_GENERATION) @@ -699,7 +726,7 @@ Mesh GenMeshPlane(float width, float length, int resX, int resZ) resZ++; // Vertices definition - int vertexCount = resX*resZ*6; // 6 vertex by quad + int vertexCount = resX*resZ; // vertices get reused for the faces Vector3 *vertices = (Vector3 *)malloc(vertexCount*sizeof(Vector3)); for (int z = 0; z < resZ; z++) @@ -718,7 +745,7 @@ Mesh GenMeshPlane(float width, float length, int resX, int resZ) Vector3 *normals = (Vector3 *)malloc(vertexCount*sizeof(Vector3)); for (int n = 0; n < vertexCount; n++) normals[n] = (Vector3){ 0.0f, 1.0f, 0.0f }; // Vector3.up; - // TexCoords definition + // TexCoords definition Vector2 *texcoords = (Vector2 *)malloc(vertexCount*sizeof(Vector2)); for (int v = 0; v < resZ; v++) { @@ -741,7 +768,7 @@ Mesh GenMeshPlane(float width, float length, int resX, int resZ) triangles[t++] = i + 1; triangles[t++] = i; - triangles[t++] = i + resX; + triangles[t++] = i + resX; triangles[t++] = i + resX + 1; triangles[t++] = i + 1; } @@ -2206,9 +2233,8 @@ void MeshBinormals(Mesh *mesh) Vector3 tangent = { mesh->tangents[i*4 + 0], mesh->tangents[i*4 + 1], mesh->tangents[i*4 + 2] }; float tangentW = mesh->tangents[i*4 + 3]; - // TODO: Register computed binormal in mesh->binormal ? 
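// Note on the OBJ face section of ExportMesh() above: OBJ indices are 1-based and each triangle
// gets its own "f" record, so for an un-indexed mesh (three consecutive vertices per triangle,
// mesh.indices == NULL) the face loop would typically be written as in this sketch (a sketch only,
// not the shipped code):
#include <stdio.h>

static void WriteObjFaces(FILE *objFile, int triangleCount)
{
    for (int t = 0; t < triangleCount; t++)
    {
        int v = t*3 + 1;    // first vertex of this triangle, converted to 1-based OBJ indexing
        fprintf(objFile, "f %i/%i/%i %i/%i/%i %i/%i/%i\n", v, v, v, v + 1, v + 1, v + 1, v + 2, v + 2, v + 2);
    }
}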
- // Vector3 binormal = Vector3Multiply( Vector3CrossProduct( normal, tangent ), tangentW );
+ // Vector3 binormal = Vector3Multiply(Vector3CrossProduct(normal, tangent), tangentW);
 }
 }
@@ -2222,7 +2248,7 @@ static Mesh LoadOBJ(const char *fileName)
 {
 Mesh mesh = { 0 };
- char dataType;
+ char dataType = 0;
 char comments[200];
 int vertexCount = 0;
@@ -2245,7 +2271,7 @@ static Mesh LoadOBJ(const char *fileName)
 // NOTE: faces MUST be defined as TRIANGLES (3 vertex per face)
 while (!feof(objFile))
 {
- dataType = '\0';
+ dataType = 0;
 fscanf(objFile, "%c", &dataType);
 switch (dataType)
@@ -2631,3 +2657,59 @@ static Material LoadMTL(const char *fileName)
 return material;
 }
 #endif
+
+#if defined(SUPPORT_FILEFORMAT_IQM)
+// Load IQM mesh data
+static Mesh LoadIQM(const char *fileName)
+{
+ Mesh mesh = { 0 };
+
+ // TODO: Load IQM file
+
+ return mesh;
+}
+#endif
+
+#if defined(SUPPORT_FILEFORMAT_GLTF)
+// Load GLTF mesh data
+static Mesh LoadGLTF(const char *fileName)
+{
+ Mesh mesh = { 0 };
+
+ // GLTF file loading
+ FILE *gltfFile = fopen(fileName, "rb");
+
+ if (gltfFile == NULL)
+ {
+ TraceLog(LOG_WARNING, "[%s] GLTF file could not be opened", fileName);
+ return mesh;
+ }
+
+ fseek(gltfFile, 0, SEEK_END);
+ int size = ftell(gltfFile);
+ fseek(gltfFile, 0, SEEK_SET);
+
+ void *buffer = malloc(size);
+ fread(buffer, size, 1, gltfFile);
+
+ fclose(gltfFile);
+
+ // GLTF data loading
+ cgltf_options options = {0};
+ cgltf_data data;
+ cgltf_result result = cgltf_parse(&options, buffer, size, &data);
+
+ if (result == cgltf_result_success)
+ {
+ printf("Type: %u\n", data.file_type);
+ printf("Version: %d\n", data.version);
+ printf("Meshes: %lu\n", data.meshes_count);
+ }
+ else TraceLog(LOG_WARNING, "[%s] GLTF data could not be loaded", fileName);
+
+ free(buffer);
+ cgltf_free(&data);
+
+ return mesh;
+}
+#endif
diff --git a/src/raylib.h b/src/raylib.h
index 1c0207ac..a44b77ee 100644
--- a/src/raylib.h
+++ b/src/raylib.h
@@ -1,6 +1,6 @@
 /**********************************************************************************************
 *
-* raylib - A simple and easy-to-use library to learn videogames programming (www.raylib.com)
+* raylib - A simple and easy-to-use library to enjoy videogames programming (www.raylib.com)
 *
 * FEATURES:
 * - NO external dependencies, all required libraries included with raylib
@@ -442,6 +442,7 @@ typedef struct RenderTexture2D {
 // RenderTexture type, same as RenderTexture2D
 typedef RenderTexture2D RenderTexture;
+// N-Patch layout info
 typedef struct NPatchInfo {
 Rectangle sourceRec; // Region in the texture
 int left; // left border offset
@@ -720,6 +721,13 @@ typedef enum {
 WRAP_MIRROR
 } TextureWrapMode;
+// Font type, defines generation method
+typedef enum {
+ FONT_DEFAULT = 0, // Default font generation, anti-aliased
+ FONT_BITMAP, // Bitmap font generation, no anti-aliasing
+ FONT_SDF // SDF font generation, requires external shader
+} FontType;
+
 // Color blending modes (pre-defined)
 typedef enum {
 BLEND_ALPHA = 0,
@@ -980,7 +988,7 @@ RLAPI Image LoadImage(const char *fileName);
 RLAPI Image LoadImageEx(Color *pixels, int width, int height); // Load image from Color array data (RGBA - 32bit)
 RLAPI Image LoadImagePro(void *data, int width, int height, int format); // Load image from raw data with parameters
 RLAPI Image LoadImageRaw(const char *fileName, int width, int height, int format, int headerSize); // Load image from RAW file data
-RLAPI void ExportImage(const char *fileName, Image image); // Export image as a PNG file
+RLAPI void ExportImage(Image image, 
const char *fileName); // Export image data to file RLAPI Texture2D LoadTexture(const char *fileName); // Load texture from file into GPU memory (VRAM) RLAPI Texture2D LoadTextureFromImage(Image image); // Load texture from image data RLAPI RenderTexture2D LoadRenderTexture(int width, int height); // Load texture for rendering (framebuffer) @@ -1055,7 +1063,7 @@ RLAPI void DrawTextureNPatch(Texture2D texture, NPatchInfo nPatchInfo, Rectangle RLAPI Font GetFontDefault(void); // Get the default Font RLAPI Font LoadFont(const char *fileName); // Load font from file into GPU memory (VRAM) RLAPI Font LoadFontEx(const char *fileName, int fontSize, int charsCount, int *fontChars); // Load font from file with extended parameters -RLAPI CharInfo *LoadFontData(const char *fileName, int fontSize, int *fontChars, int charsCount, bool sdf); // Load font data for further use +RLAPI CharInfo *LoadFontData(const char *fileName, int fontSize, int *fontChars, int charsCount, int type); // Load font data for further use RLAPI Image GenImageFontAtlas(CharInfo *chars, int fontSize, int charsCount, int padding, int packMethod); // Generate image font atlas using chars info RLAPI void UnloadFont(Font font); // Unload Font from GPU memory (VRAM) @@ -1105,7 +1113,7 @@ RLAPI void UnloadModel(Model model); // Mesh loading/unloading functions RLAPI Mesh LoadMesh(const char *fileName); // Load mesh from file RLAPI void UnloadMesh(Mesh *mesh); // Unload mesh from memory (RAM and/or VRAM) -RLAPI void ExportMesh(const char *fileName, Mesh mesh); // Export mesh as an OBJ file +RLAPI void ExportMesh(Mesh mesh, const char *fileName); // Export mesh data to file // Mesh manipulation functions RLAPI BoundingBox MeshBoundingBox(Mesh mesh); // Compute mesh bounding box limits @@ -1213,6 +1221,7 @@ RLAPI Sound LoadSoundFromWave(Wave wave); // Load so RLAPI void UpdateSound(Sound sound, const void *data, int samplesCount);// Update sound buffer with new data RLAPI void UnloadWave(Wave wave); // Unload wave data RLAPI void UnloadSound(Sound sound); // Unload sound +RLAPI void ExportWave(Wave wave, const char *fileName); // Export wave data to file // Wave/Sound management functions RLAPI void PlaySound(Sound sound); // Play a sound diff --git a/src/raymath.h b/src/raymath.h index c5c6588f..33116532 100644 --- a/src/raymath.h +++ b/src/raymath.h @@ -229,6 +229,13 @@ RMDEF Vector2 Vector2Scale(Vector2 v, float scale) return result; } +// Multiply vector by vector +RMDEF Vector2 Vector2MultiplyV(Vector2 v1, Vector2 v2) +{ + Vector2 result = { v1.x*v2.x, v1.y*v2.y }; + return result; +} + // Negate vector RMDEF Vector2 Vector2Negate(Vector2 v) { @@ -243,6 +250,13 @@ RMDEF Vector2 Vector2Divide(Vector2 v, float div) return result; } +// Divide vector by vector +RMDEF Vector2 Vector2DivideV(Vector2 v1, Vector2 v2) +{ + Vector2 result = { v1.x/v2.x, v1.y/v2.y }; + return result; +} + // Normalize provided vector RMDEF Vector2 Vector2Normalize(Vector2 v) { @@ -378,6 +392,20 @@ RMDEF Vector3 Vector3Negate(Vector3 v) return result; } +// Divide vector by a float value +RMDEF Vector3 Vector3Divide(Vector3 v, float div) +{ + Vector3 result = { v.x / div, v.y / div, v.z / div }; + return result; +} + +// Divide vector by vector +RMDEF Vector3 Vector3DivideV(Vector3 v1, Vector3 v2) +{ + Vector3 result = { v1.x/v2.x, v1.y/v2.y, v1.z/v2.z }; + return result; +} + // Normalize provided vector RMDEF Vector3 Vector3Normalize(Vector3 v) { diff --git a/src/text.c b/src/text.c index 9ed283b2..3dbb261b 100644 --- a/src/text.c +++ b/src/text.c @@ -86,9 
+86,6 @@ static Font LoadImageFont(Image image, Color key, int firstChar); // Load a Imag #if defined(SUPPORT_FILEFORMAT_FNT) static Font LoadBMFont(const char *fileName); // Load a BMFont file (AngelCode font file) #endif -#if defined(SUPPORT_FILEFORMAT_TTF) -//static Font LoadTTF(const char *fileName, int fontSize, int charsCount, int *fontChars); // Load spritefont from TTF data -#endif #if defined(SUPPORT_DEFAULT_FONT) extern void LoadDefaultFont(void); @@ -221,7 +218,7 @@ extern void LoadDefaultFont(void) defaultFont.chars[i].value = 32 + i; // First char is 32 defaultFont.chars[i].rec.x = (float)currentPosX; - defaultFont.chars[i].rec.y = (float)charsDivisor + currentLine*(charsHeight + charsDivisor); + defaultFont.chars[i].rec.y = (float)(charsDivisor + currentLine*(charsHeight + charsDivisor)); defaultFont.chars[i].rec.width = (float)charsWidth[i]; defaultFont.chars[i].rec.height = (float)charsHeight; @@ -234,7 +231,7 @@ extern void LoadDefaultFont(void) testPosX = currentPosX; defaultFont.chars[i].rec.x = (float)charsDivisor; - defaultFont.chars[i].rec.y = (float)charsDivisor + currentLine*(charsHeight + charsDivisor); + defaultFont.chars[i].rec.y = (float)(charsDivisor + currentLine*(charsHeight + charsDivisor)); } else currentPosX = testPosX; @@ -244,7 +241,7 @@ extern void LoadDefaultFont(void) defaultFont.chars[i].advanceX = 0; } - defaultFont.baseSize = (int) defaultFont.chars[0].rec.height; + defaultFont.baseSize = (int)defaultFont.chars[0].rec.height; TraceLog(LOG_INFO, "[TEX ID %i] Default font loaded successfully", defaultFont.texture.id); } @@ -283,7 +280,7 @@ Font LoadFont(const char *fileName) { font.baseSize = DEFAULT_TTF_FONTSIZE; font.charsCount = DEFAULT_TTF_NUMCHARS; - font.chars = LoadFontData(fileName, font.baseSize, NULL, font.charsCount, false); + font.chars = LoadFontData(fileName, font.baseSize, NULL, font.charsCount, FONT_DEFAULT); Image atlas = GenImageFontAtlas(font.chars, font.charsCount, font.baseSize, 4, 0); font.texture = LoadTextureFromImage(atlas); UnloadImage(atlas); @@ -319,8 +316,8 @@ Font LoadFontEx(const char *fileName, int fontSize, int charsCount, int *fontCha font.baseSize = fontSize; font.charsCount = (charsCount > 0) ? charsCount : 95; - font.chars = LoadFontData(fileName, font.baseSize, fontChars, font.charsCount, false); - Image atlas = GenImageFontAtlas(font.chars, font.charsCount, font.baseSize, 0, 0); + font.chars = LoadFontData(fileName, font.baseSize, fontChars, font.charsCount, FONT_DEFAULT); + Image atlas = GenImageFontAtlas(font.chars, font.charsCount, font.baseSize, 2, 0); font.texture = LoadTextureFromImage(atlas); UnloadImage(atlas); @@ -329,7 +326,7 @@ Font LoadFontEx(const char *fileName, int fontSize, int charsCount, int *fontCha // Load font data for further use // NOTE: Requires TTF font and can generate SDF data -CharInfo *LoadFontData(const char *fileName, int fontSize, int *fontChars, int charsCount, bool sdf) +CharInfo *LoadFontData(const char *fileName, int fontSize, int *fontChars, int charsCount, int type) { // NOTE: Using some SDF generation default values, // trades off precision with ability to handle *smaller* sizes @@ -337,6 +334,8 @@ CharInfo *LoadFontData(const char *fileName, int fontSize, int *fontChars, int c #define SDF_ON_EDGE_VALUE 128 #define SDF_PIXEL_DIST_SCALE 64.0f + #define BITMAP_ALPHA_THRESHOLD 80 + // In case no chars count provided, default to 95 charsCount = (charsCount > 0) ? 
charsCount : 95; @@ -367,8 +366,6 @@ CharInfo *LoadFontData(const char *fileName, int fontSize, int *fontChars, int c // NOTE: ascent is equivalent to font baseline int ascent, descent, lineGap; stbtt_GetFontVMetrics(&fontInfo, &ascent, &descent, &lineGap); - ascent *= (int) scaleFactor; - descent *= (int) scaleFactor; // Fill fontChars in case not provided externally // NOTE: By default we fill charsCount consecutevely, starting at 32 (Space) @@ -392,22 +389,33 @@ CharInfo *LoadFontData(const char *fileName, int fontSize, int *fontChars, int c // stbtt_GetCodepointBitmapBox() -- how big the bitmap must be // stbtt_MakeCodepointBitmap() -- renders into bitmap you provide - if (!sdf) chars[i].data = stbtt_GetCodepointBitmap(&fontInfo, scaleFactor, scaleFactor, ch, &chw, &chh, &chars[i].offsetX, &chars[i].offsetY); + if (type != FONT_SDF) chars[i].data = stbtt_GetCodepointBitmap(&fontInfo, scaleFactor, scaleFactor, ch, &chw, &chh, &chars[i].offsetX, &chars[i].offsetY); else if (ch != 32) chars[i].data = stbtt_GetCodepointSDF(&fontInfo, scaleFactor, ch, SDF_CHAR_PADDING, SDF_ON_EDGE_VALUE, SDF_PIXEL_DIST_SCALE, &chw, &chh, &chars[i].offsetX, &chars[i].offsetY); + if (type == FONT_BITMAP) + { + // Aliased bitmap (black & white) font generation, avoiding anti-aliasing + // NOTE: For optimum results, bitmap font should be generated at base pixel size + for (int p = 0; p < chw*chh; p++) + { + if (chars[i].data[p] < BITMAP_ALPHA_THRESHOLD) chars[i].data[p] = 0; + else chars[i].data[p] = 255; + } + } + chars[i].rec.width = (float)chw; chars[i].rec.height = (float)chh; - chars[i].offsetY += ascent; + chars[i].offsetY += (int)((float)ascent*scaleFactor); // Get bounding box for character (may be offset to account for chars that dip above or below the line) int chX1, chY1, chX2, chY2; stbtt_GetCodepointBitmapBox(&fontInfo, ch, scaleFactor, scaleFactor, &chX1, &chY1, &chX2, &chY2); TraceLog(LOG_DEBUG, "Character box measures: %i, %i, %i, %i", chX1, chY1, chX2 - chX1, chY2 - chY1); - TraceLog(LOG_DEBUG, "Character offsetY: %i", ascent + chY1); + TraceLog(LOG_DEBUG, "Character offsetY: %i", (int)((float)ascent*scaleFactor) + chY1); stbtt_GetCodepointHMetrics(&fontInfo, ch, &chars[i].advanceX, NULL); - chars[i].advanceX *= (int) scaleFactor; + chars[i].advanceX *= scaleFactor; } free(fontBuffer); @@ -660,11 +668,11 @@ const char *SubText(const char *text, int position, int length) for (int c = 0 ; c < length ; c++) { - *(buffer+c) = *(text+position); + *(buffer + c) = *(text + position); text++; } - *(buffer+length) = '\0'; + *(buffer + length) = '\0'; return buffer; } @@ -887,7 +895,7 @@ static Font LoadImageFont(Image image, Color key, int firstChar) spriteFont.chars[i].advanceX = 0; } - spriteFont.baseSize = (int) spriteFont.chars[0].rec.height; + spriteFont.baseSize = (int)spriteFont.chars[0].rec.height; TraceLog(LOG_INFO, "Image file loaded correctly as Font"); diff --git a/src/textures.c b/src/textures.c index 700b4be9..a7b9d3e2 100644 --- a/src/textures.c +++ b/src/textures.c @@ -19,6 +19,9 @@ * Selecte desired fileformats to be supported for image data loading. Some of those formats are * supported by default, to remove support, just comment unrequired #define in this module * +* #define SUPPORT_IMAGE_EXPORT +* Support image export in multiple file formats +* * #define SUPPORT_IMAGE_MANIPULATION * Support multiple image editing functions to scale, adjust colors, flip, draw on images, crop... 
* If not defined only three image editing functions supported: ImageFormat(), ImageAlphaMask(), ImageToPOT() @@ -103,6 +106,11 @@ // NOTE: Used to read image data (multiple formats support) #endif +#if defined(SUPPORT_IMAGE_EXPORT) + #define STB_IMAGE_WRITE_IMPLEMENTATION + #include "external/stb_image_write.h" // Required for: stbi_write_*() +#endif + #if defined(SUPPORT_IMAGE_MANIPULATION) #define STB_IMAGE_RESIZE_IMPLEMENTATION #include "external/stb_image_resize.h" // Required for: stbir_resize_uint8() @@ -706,15 +714,32 @@ void UpdateTexture(Texture2D texture, const void *pixels) rlUpdateTexture(texture.id, texture.width, texture.height, texture.format, pixels); } -// Export image as a PNG file -void ExportImage(const char *fileName, Image image) +// Export image data to file +// NOTE: File format depends on fileName extension +void ExportImage(Image image, const char *fileName) { + int success = 0; + // NOTE: Getting Color array as RGBA unsigned char values unsigned char *imgData = (unsigned char *)GetImageData(image); - // NOTE: SavePNG() not supported by some platforms: PLATFORM_WEB, PLATFORM_ANDROID - SavePNG(fileName, imgData, image.width, image.height, 4); - + if (IsFileExtension(fileName, ".png")) success = stbi_write_png(fileName, image.width, image.height, 4, imgData, image.width*4); + else if (IsFileExtension(fileName, ".bmp")) success = stbi_write_bmp(fileName, image.width, image.height, 4, imgData); + else if (IsFileExtension(fileName, ".tga")) success = stbi_write_tga(fileName, image.width, image.height, 4, imgData); + else if (IsFileExtension(fileName, ".jpg")) success = stbi_write_jpg(fileName, image.width, image.height, 4, imgData, 80); // Between 1 and 100 + else if (IsFileExtension(fileName, ".raw")) + { + // Export raw pixel data + // NOTE: It's up to the user to track image parameters + FILE *rawFile = fopen(fileName, "wb"); + fwrite(image.data, GetPixelDataSize(image.width, image.height, image.format), 1, rawFile); + fclose(rawFile); + } + else if (IsFileExtension(fileName, ".h")) { } // TODO: Export pixel data as an array of bytes + + if (success != 0) TraceLog(LOG_INFO, "Image exported successfully: %s", fileName); + else TraceLog(LOG_WARNING, "Image could not be exported."); + free(imgData); } @@ -1555,7 +1580,9 @@ Image ImageTextEx(Font font, const char *text, float fontSize, float spacing, Co // Define ImageFont struct? or include Image spritefont in Font struct? Image imFont = GetTextureData(font.texture); - ImageColorTint(&imFont, tint); // Apply color tint to font + ImageFormat(&imFont, UNCOMPRESSED_R8G8B8A8); // Make sure image format could be properly colored! + + ImageColorTint(&imFont, tint); // Apply color tint to font // Create image to store text Image imText = GenImageColor((int)imSize.x, (int)imSize.y, BLANK); @@ -2827,7 +2854,11 @@ static Image LoadKTX(const char *fileName) // GL_COMPRESSED_RGBA8_ETC2_EAC 0x9278 // KTX file Header (64 bytes) - // https://www.khronos.org/opengles/sdk/tools/KTX/file_format_spec/ + // v1.1 - https://www.khronos.org/opengles/sdk/tools/KTX/file_format_spec/ + // v2.0 - http://github.khronos.org/KTX-Specification/ + + // TODO: Support KTX 2.2 specs! 
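// ExportImage() above now picks the output format from the file name extension. Typical usage,
// saving the same image in several formats (the generated image and the file names here are
// illustrative):
#include "raylib.h"

int main(void)
{
    Image checked = GenImageChecked(256, 256, 32, 32, RED, WHITE);   // any CPU-side Image works here

    ExportImage(checked, "checked.png");    // written with stbi_write_png()
    ExportImage(checked, "checked.bmp");    // written with stbi_write_bmp()
    ExportImage(checked, "checked.raw");    // raw pixels only; caller must remember width/height/format

    UnloadImage(checked);
    return 0;
}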
+ typedef struct { char id[12]; // Identifier: "«KTX 11»\r\n\x1A\n" unsigned int endianness; // Little endian: 0x01 0x02 0x03 0x04 diff --git a/src/utils.c b/src/utils.c index f7c19afb..b2951040 100644 --- a/src/utils.c +++ b/src/utils.c @@ -4,21 +4,10 @@ * * CONFIGURATION: * -* #define SUPPORT_SAVE_PNG (defined by default) -* Support saving image data as PNG fileformat -* NOTE: Requires stb_image_write library -* -* #define SUPPORT_SAVE_BMP -* Support saving image data as BMP fileformat -* NOTE: Requires stb_image_write library -* * #define SUPPORT_TRACELOG * Show TraceLog() output messages * NOTE: By default LOG_DEBUG traces not shown * -* DEPENDENCIES: -* stb_image_write - BMP/PNG writting functions -* * * LICENSE: zlib/libpng * @@ -62,12 +51,6 @@ FILE *funopen(const void *cookie, int (*readfn)(void *, char *, int), int (*writefn)(void *, const char *, int), fpos_t (*seekfn)(void *, fpos_t, int), int (*closefn)(void *)); - -#if defined(PLATFORM_DESKTOP) || defined(PLATFORM_RPI) - #define STB_IMAGE_WRITE_IMPLEMENTATION - #include "external/stb_image_write.h" // Required for: stbi_write_bmp(), stbi_write_png() -#endif - //---------------------------------------------------------------------------------- // Global Variables Definition //---------------------------------------------------------------------------------- @@ -160,24 +143,6 @@ void TraceLog(int msgType, const char *text, ...) #endif // SUPPORT_TRACELOG } -// Creates a BMP image file from an array of pixel data -void SaveBMP(const char *fileName, unsigned char *imgData, int width, int height, int compSize) -{ -#if defined(SUPPORT_SAVE_BMP) && (defined(PLATFORM_DESKTOP) || defined(PLATFORM_RPI)) - stbi_write_bmp(fileName, width, height, compSize, imgData); - TraceLog(LOG_INFO, "BMP Image saved: %s", fileName); -#endif -} - -// Creates a PNG image file from an array of pixel data -void SavePNG(const char *fileName, unsigned char *imgData, int width, int height, int compSize) -{ -#if defined(SUPPORT_SAVE_PNG) && (defined(PLATFORM_DESKTOP) || defined(PLATFORM_RPI)) - stbi_write_png(fileName, width, height, compSize, imgData, width*compSize); - TraceLog(LOG_INFO, "PNG Image saved: %s", fileName); -#endif -} - // Keep track of memory allocated // NOTE: mallocType defines the type of data allocated /* diff --git a/src/utils.h b/src/utils.h index dfdb0c2a..08b33962 100644 --- a/src/utils.h +++ b/src/utils.h @@ -32,10 +32,6 @@ #include // Required for: AAssetManager #endif -#ifndef SUPPORT_SAVE_PNG -#define SUPPORT_SAVE_PNG 1 -#endif - //---------------------------------------------------------------------------------- // Some basic Defines //---------------------------------------------------------------------------------- @@ -58,13 +54,6 @@ extern "C" { // Prevents name mangling of functions //---------------------------------------------------------------------------------- // Module Functions Declaration //---------------------------------------------------------------------------------- -#if defined(SUPPORT_SAVE_BMP) -void SaveBMP(const char *fileName, unsigned char *imgData, int width, int height, int compSize); -#endif -#if defined(SUPPORT_SAVE_PNG) -void SavePNG(const char *fileName, unsigned char *imgData, int width, int height, int compSize); -#endif - #if defined(PLATFORM_ANDROID) void InitAssetManager(AAssetManager *manager); // Initialize asset manager from android app FILE *android_fopen(const char *fileName, const char *mode); // Replacement for fopen()
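// With SaveBMP()/SavePNG() removed above, code that used to write a raw RGBA pixel buffer through
// SavePNG() can go through ExportImage() instead by wrapping the buffer in an Image. A sketch,
// assuming a tightly packed 8-bit-per-channel RGBA buffer (function and file names illustrative):
#include "raylib.h"

void SaveRgbaPixelsAsPng(void *pixels, int width, int height)
{
    // Previously: SavePNG("output.png", pixels, width, height, 4);
    Image image = { 0 };
    image.data = pixels;                       // the Image just references the caller's buffer
    image.width = width;
    image.height = height;
    image.mipmaps = 1;
    image.format = UNCOMPRESSED_R8G8B8A8;      // RGBA, 32 bit

    ExportImage(image, "output.png");          // buffer ownership stays with the caller
}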