/**********************************************************************************************
*
*   raylib.audio
*
*   Basic functions to manage Audio: InitAudioDevice, LoadAudioFiles, PlayAudioFiles
*
*   Uses external lib:
*       OpenAL Soft - Audio device management lib (http://kcat.strangesoft.net/openal.html)
*       stb_vorbis - Ogg audio files loading (http://www.nothings.org/stb_vorbis/)
*
*   Copyright (c) 2014 Ramon Santamaria (@raysan5)
*
*   This software is provided "as-is", without any express or implied warranty. In no event
*   will the authors be held liable for any damages arising from the use of this software.
*
*   Permission is granted to anyone to use this software for any purpose, including commercial
*   applications, and to alter it and redistribute it freely, subject to the following restrictions:
*
*     1. The origin of this software must not be misrepresented; you must not claim that you
*     wrote the original software. If you use this software in a product, an acknowledgment
*     in the product documentation would be appreciated but is not required.
*
*     2. Altered source versions must be plainly marked as such, and must not be misrepresented
*     as being the original software.
*
*     3. This notice may not be removed or altered from any source distribution.
*
**********************************************************************************************/

//#define AUDIO_STANDALONE     // NOTE: To use the audio module as standalone lib, just uncomment this line

#if defined(AUDIO_STANDALONE)
    #include "audio.h"
#else
    #include "raylib.h"
#endif

#include "AL/al.h"          // OpenAL basic header
#include "AL/alc.h"         // OpenAL context header (like OpenGL, OpenAL requires a context to work)
#include "AL/alext.h"       // OpenAL extensions for other format types

#include <stdlib.h>         // Declares malloc() and free() for memory management
#include <string.h>         // Required for strcmp()
#include <stdio.h>          // Used for .WAV loading

#if defined(AUDIO_STANDALONE)
    #include <stdarg.h>     // Used for functions with variable number of parameters (TraceLog())
#else
    #include "utils.h"      // rRES data decompression utility function
                            // NOTE: Includes Android fopen function map
#endif

//#define STB_VORBIS_HEADER_ONLY
#include "stb_vorbis.h"     // OGG loading functions

#define JAR_XM_IMPLEMENTATION
#include "jar_xm.h"         // For playing .xm files

//----------------------------------------------------------------------------------
// Defines and Macros
//----------------------------------------------------------------------------------
#define MAX_STREAM_BUFFERS          2
#define MAX_AUDIO_CONTEXTS          4       // Number of open AL sources
#define MAX_MUSIC_STREAMS           2

#if defined(PLATFORM_RPI) || defined(PLATFORM_ANDROID)
    // NOTE: On RPI and Android should be lower to avoid frame-stalls
    #define MUSIC_BUFFER_SIZE_SHORT     4096*2      // PCM data buffer (short) - 16Kb (RPI)
    #define MUSIC_BUFFER_SIZE_FLOAT     4096        // PCM data buffer (float) - 16Kb (RPI)
#else
    // NOTE: On HTML5 (emscripten) this is allocated on heap, by default it's only 16MB!...just take care...
    #define MUSIC_BUFFER_SIZE_SHORT     4096*8      // PCM data buffer (short) - 64Kb
    #define MUSIC_BUFFER_SIZE_FLOAT     4096*4      // PCM data buffer (float) - 64Kb
#endif

//----------------------------------------------------------------------------------
// Types and Structures Definition
//----------------------------------------------------------------------------------

// Audio Context, used to create custom audio streams that are not bound to a sound file.
// There can be no more than 4 concurrent audio contexts in use, due to each active context
// being tied to a dedicated mix channel. All audio is 32bit floating point in stereo.
typedef struct AudioContext_t {
    unsigned short sampleRate;              // default is 48000
    unsigned char channels;                 // 1 = mono, 2 = stereo
    unsigned char mixChannel;               // 0-3 or mixA-mixD, each mix channel can receive up to one dedicated audio stream
    bool floatingPoint;                     // if false then the short data type is used instead
    bool playing;                           // false if paused
    ALenum alFormat;                        // OpenAL format specifier
    ALuint alSource;                        // OpenAL source
    ALuint alBuffer[MAX_STREAM_BUFFERS];    // OpenAL sample buffers
} AudioContext_t;

// Music type (file streaming from memory)
// NOTE: Anything longer than ~10 seconds should be streamed...
typedef struct Music {
    stb_vorbis *stream;
    jar_xm_context_t *chipctx;      // Stores jar_xm context
    AudioContext_t *ctx;            // audio context

    int totalSamplesLeft;
    float totalLengthSeconds;
    bool loop;
    bool chipTune;                  // True if chiptune is loaded
} Music;

#if defined(AUDIO_STANDALONE)
typedef enum { INFO = 0, ERROR, WARNING, DEBUG, OTHER } TraceLogType;
#endif

//----------------------------------------------------------------------------------
// Global Variables Definition
//----------------------------------------------------------------------------------
static AudioContext_t *mixChannelsActive_g[MAX_AUDIO_CONTEXTS];     // What mix channels are currently active
static bool musicEnabled_g = false;
static Music currentMusic[MAX_MUSIC_STREAMS];                       // Current music loaded, up to two can play at the same time

//----------------------------------------------------------------------------------
// Module specific Functions Declaration
//----------------------------------------------------------------------------------
static Wave LoadWAV(const char *fileName);          // Load WAV file
static Wave LoadOGG(char *fileName);                // Load OGG file
static void UnloadWave(Wave wave);                  // Unload wave data

static bool BufferMusicStream(int index);           // Fill music buffers with data
static void EmptyMusicStream(int index);            // Empty music buffers

static unsigned short FillAlBufferWithSilence(AudioContext_t *context, ALuint buffer); // Fill buffer with zeros, returns number processed
static void ResampleShortToFloat(short *shorts, float *floats, unsigned short len);    // Pass two arrays of the same length in
static void ResampleByteToFloat(char *chars, float *floats, unsigned short len);       // Pass two arrays of the same length in
static bool isMusicStreamReady(int index);          // Checks if music buffer is ready to be refilled

#if defined(AUDIO_STANDALONE)
const char *GetExtension(const char *fileName);     // Get the extension for a filename
void TraceLog(int msgType, const char *text, ...);  // Outputs a trace log message (INFO, ERROR, WARNING)
#endif

//----------------------------------------------------------------------------------
// Module Functions Definition - Audio Device initialization and Closing
//----------------------------------------------------------------------------------

// Initialize audio device and context
void InitAudioDevice(void)
{
    // Open and initialize a device with default settings
    ALCdevice *device = alcOpenDevice(NULL);

    if (!device) TraceLog(ERROR, "Audio device could not be opened");

    ALCcontext *context = alcCreateContext(device, NULL);

    if ((context == NULL) || (alcMakeContextCurrent(context) == ALC_FALSE))
    {
        if (context != NULL) alcDestroyContext(context);

        alcCloseDevice(device);

        TraceLog(ERROR, "Could not setup audio context");
    }

    TraceLog(INFO, "Audio device and context initialized successfully: %s", alcGetString(device, ALC_DEVICE_SPECIFIER));

    // Listener definition (just for 2D)
    alListener3f(AL_POSITION, 0, 0, 0);
    alListener3f(AL_VELOCITY, 0, 0, 0);
    alListener3f(AL_ORIENTATION, 0, 0, -1);
}

// Close the audio device for all contexts
void CloseAudioDevice(void)
{
    for (int index = 0; index < MAX_MUSIC_STREAMS; index++)
    {
        if (currentMusic[index].ctx) StopMusicStream(index);    // Stop music streaming and close current stream
    }

    ALCdevice *device;
    ALCcontext *context = alcGetCurrentContext();

    if (context == NULL) TraceLog(WARNING, "Could not get current audio context for closing");

    device = alcGetContextsDevice(context);

    alcMakeContextCurrent(NULL);
    alcDestroyContext(context);
    alcCloseDevice(device);
}

// True if call to InitAudioDevice() was successful and CloseAudioDevice() has not been called yet
bool IsAudioDeviceReady(void)
{
    ALCcontext *context = alcGetCurrentContext();

    if (context == NULL) return false;
    else
    {
        ALCdevice *device = alcGetContextsDevice(context);

        if (device == NULL) return false;
        else return true;
    }
}
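
// Example usage of the device functions (a minimal sketch; the surrounding game loop is an
// assumption made for illustration only, it is not part of this module):
//
//    InitAudioDevice();                      // Open the default device and create the OpenAL context
//    if (!IsAudioDeviceReady()) return;      // Device could not be initialized
//    ...                                     // Load and play sounds / music streams here
//    CloseAudioDevice();                     // Stops any music streams and closes the device
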
//----------------------------------------------------------------------------------
// Module Functions Definition - Custom audio output
//----------------------------------------------------------------------------------
// Audio contexts are for outputting custom audio waveforms; this will shut down any other sound sources currently playing.
// The mixChannel is the mix channel you want to operate on, 0-3 are the ones available. Each mix channel can only be used by one context at a time.
// Example usage: InitAudioContext(48000, 0, 2, true);    // 48kHz, mix channel 0, stereo, floating point
// (see the usage sketch after UpdateAudioContext() below)
AudioContext InitAudioContext(unsigned short sampleRate, unsigned char mixChannel, unsigned char channels, bool floatingPoint)
{
    if (mixChannel >= MAX_AUDIO_CONTEXTS) return NULL;
    if (!IsAudioDeviceReady()) InitAudioDevice();

    if (!mixChannelsActive_g[mixChannel])
    {
        AudioContext_t *ac = (AudioContext_t *)malloc(sizeof(AudioContext_t));
        ac->sampleRate = sampleRate;
        ac->channels = channels;
        ac->mixChannel = mixChannel;
        ac->floatingPoint = floatingPoint;
        mixChannelsActive_g[mixChannel] = ac;

        // Setup OpenAL format
        if (channels == 1)
        {
            if (floatingPoint) ac->alFormat = AL_FORMAT_MONO_FLOAT32;
            else ac->alFormat = AL_FORMAT_MONO16;
        }
        else if (channels == 2)
        {
            if (floatingPoint) ac->alFormat = AL_FORMAT_STEREO_FLOAT32;
            else ac->alFormat = AL_FORMAT_STEREO16;
        }

        // Create an audio source
        alGenSources(1, &ac->alSource);
        alSourcef(ac->alSource, AL_PITCH, 1);
        alSourcef(ac->alSource, AL_GAIN, 1);
        alSource3f(ac->alSource, AL_POSITION, 0, 0, 0);
        alSource3f(ac->alSource, AL_VELOCITY, 0, 0, 0);

        // Create buffers and fill them with silence
        alGenBuffers(MAX_STREAM_BUFFERS, ac->alBuffer);

        for (int x = 0; x < MAX_STREAM_BUFFERS; x++) FillAlBufferWithSilence(ac, ac->alBuffer[x]);

        alSourceQueueBuffers(ac->alSource, MAX_STREAM_BUFFERS, ac->alBuffer);
        alSourcePlay(ac->alSource);
        ac->playing = true;

        return ac;
    }

    return NULL;
}

// Frees audio context and its buffers
void CloseAudioContext(AudioContext ctx)
{
    AudioContext_t *context = (AudioContext_t *)ctx;

    if (context)
    {
        alSourceStop(context->alSource);
        context->playing = false;

        // Flush out all queued buffers
        ALuint buffer = 0;
        int queued = 0;
        alGetSourcei(context->alSource, AL_BUFFERS_QUEUED, &queued);

        while (queued > 0)
        {
            alSourceUnqueueBuffers(context->alSource, 1, &buffer);
            queued--;
        }

        // Delete source and buffers
        alDeleteSources(1, &context->alSource);
        alDeleteBuffers(MAX_STREAM_BUFFERS, context->alBuffer);
        mixChannelsActive_g[context->mixChannel] = NULL;
        free(context);
        ctx = NULL;
    }
}

// Pushes more audio data into the context mix channel, if none is ever pushed then zeros are fed in.
// Call "UpdateAudioContext(ctx, NULL, 0)" if you want to pause the audio.
// Returns the number of samples that were processed.
unsigned short UpdateAudioContext(AudioContext ctx, void *data, unsigned short numberElements)
{
    AudioContext_t *context = (AudioContext_t *)ctx;

    // When there are two channels there must be an even number of samples
    if (!context || ((context->channels == 2) && (numberElements%2 != 0))) return 0;

    if (!data || !numberElements)
    {
        // Pause audio until data is given
        alSourcePause(context->alSource);
        context->playing = false;
        return 0;
    }
    else
    {
        // Restart audio otherwise
        ALint state;
        alGetSourcei(context->alSource, AL_SOURCE_STATE, &state);

        if (state != AL_PLAYING)
        {
            alSourcePlay(context->alSource);
            context->playing = true;
        }
    }

    if (context && context->playing && (mixChannelsActive_g[context->mixChannel] == context))
    {
        ALint processed = 0;
        ALuint buffer = 0;
        unsigned short numberProcessed = 0;
        unsigned short numberRemaining = numberElements;

        alGetSourcei(context->alSource, AL_BUFFERS_PROCESSED, &processed);  // Get the number of already processed buffers (if any)

        if (!processed) return 0;   // Nothing to process, queue is still full

        while (processed > 0)
        {
            if (context->floatingPoint)         // Process float buffers
            {
                float *ptr = (float *)data;
                alSourceUnqueueBuffers(context->alSource, 1, &buffer);

                if (numberRemaining >= MUSIC_BUFFER_SIZE_FLOAT)
                {
                    alBufferData(buffer, context->alFormat, &ptr[numberProcessed], MUSIC_BUFFER_SIZE_FLOAT*sizeof(float), context->sampleRate);
                    numberProcessed += MUSIC_BUFFER_SIZE_FLOAT;
                    numberRemaining -= MUSIC_BUFFER_SIZE_FLOAT;
                }
                else
                {
                    alBufferData(buffer, context->alFormat, &ptr[numberProcessed], numberRemaining*sizeof(float), context->sampleRate);
                    numberProcessed += numberRemaining;
                    numberRemaining = 0;
                }

                alSourceQueueBuffers(context->alSource, 1, &buffer);
                processed--;
            }
            else if (!context->floatingPoint)   // Process short buffers
            {
                short *ptr = (short *)data;
                alSourceUnqueueBuffers(context->alSource, 1, &buffer);

                if (numberRemaining >= MUSIC_BUFFER_SIZE_SHORT)
                {
                    alBufferData(buffer, context->alFormat, &ptr[numberProcessed], MUSIC_BUFFER_SIZE_SHORT*sizeof(short), context->sampleRate);
                    numberProcessed += MUSIC_BUFFER_SIZE_SHORT;
                    numberRemaining -= MUSIC_BUFFER_SIZE_SHORT;
                }
                else
                {
                    alBufferData(buffer, context->alFormat, &ptr[numberProcessed], numberRemaining*sizeof(short), context->sampleRate);
                    numberProcessed += numberRemaining;
                    numberRemaining = 0;
                }

                alSourceQueueBuffers(context->alSource, 1, &buffer);
                processed--;
            }
            else break;
        }

        return numberProcessed;
    }

    return 0;
}
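
// Example usage of the audio context API (a minimal sketch; the sine-wave generator and the
// per-frame loop are assumptions made for illustration only, they are not part of this module):
//
//    AudioContext ctx = InitAudioContext(48000, 0, 1, true);    // mix channel 0, 48kHz, mono, float
//    float phase = 0.0f;
//    float samples[MUSIC_BUFFER_SIZE_FLOAT];
//
//    while (gameIsRunning)                                      // hypothetical game loop
//    {
//        for (int i = 0; i < MUSIC_BUFFER_SIZE_FLOAT; i++)
//        {
//            samples[i] = 0.2f*sinf(phase);                     // 440 Hz tone at reduced volume
//            phase += 2.0f*PI*440.0f/48000.0f;
//            if (phase > 2.0f*PI) phase -= 2.0f*PI;
//        }
//
//        // Returns 0 if the buffer queue is still full; samples are only consumed when a buffer is free
//        UpdateAudioContext(ctx, samples, MUSIC_BUFFER_SIZE_FLOAT);
//    }
//
//    CloseAudioContext(ctx);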

// Fill buffer with zeros, returns number processed
static unsigned short FillAlBufferWithSilence(AudioContext_t *context, ALuint buffer)
{
    if (context->floatingPoint)
    {
        float pcm[MUSIC_BUFFER_SIZE_FLOAT] = { 0.0f };
        alBufferData(buffer, context->alFormat, pcm, MUSIC_BUFFER_SIZE_FLOAT*sizeof(float), context->sampleRate);
        return MUSIC_BUFFER_SIZE_FLOAT;
    }
    else
    {
        short pcm[MUSIC_BUFFER_SIZE_SHORT] = { 0 };
        alBufferData(buffer, context->alFormat, pcm, MUSIC_BUFFER_SIZE_SHORT*sizeof(short), context->sampleRate);
        return MUSIC_BUFFER_SIZE_SHORT;
    }
}

// Example usage:
//    short sh[3] = {1, 2, 3}; float fl[3];
//    ResampleShortToFloat(sh, fl, 3);
static void ResampleShortToFloat(short *shorts, float *floats, unsigned short len)
{
    for (int x = 0; x < len; x++)
    {
        if (shorts[x] < 0) floats[x] = (float)shorts[x]/32766.0f;
        else floats[x] = (float)shorts[x]/32767.0f;
    }
}

// Example usage:
//    char ch[3] = {1, 2, 3}; float fl[3];
//    ResampleByteToFloat(ch, fl, 3);
static void ResampleByteToFloat(char *chars, float *floats, unsigned short len)
{
    for (int x = 0; x < len; x++)
    {
        if (chars[x] < 0) floats[x] = (float)chars[x]/127.0f;
        else floats[x] = (float)chars[x]/128.0f;
    }
}

//----------------------------------------------------------------------------------
// Module Functions Definition - Sounds loading and playing (.WAV)
//----------------------------------------------------------------------------------
// Load sound to memory
Sound LoadSound(char *fileName)
{
    Sound sound = { 0 };
    Wave wave = { 0 };

    // NOTE: The entire file is loaded to memory to play it all at once (no-streaming)

    // Audio file loading
    // NOTE: Buffer space is allocated inside function, Wave must be freed

    if (strcmp(GetExtension(fileName), "wav") == 0) wave = LoadWAV(fileName);
    else if (strcmp(GetExtension(fileName), "ogg") == 0) wave = LoadOGG(fileName);
    else TraceLog(WARNING, "[%s] Sound extension not recognized, it can't be loaded", fileName);

    if (wave.data != NULL)
    {
        ALenum format = 0;
        // The OpenAL format is worked out by looking at the number of channels and the bits per sample
        if (wave.channels == 1)
        {
            if (wave.bitsPerSample == 8) format = AL_FORMAT_MONO8;
            else if (wave.bitsPerSample == 16) format = AL_FORMAT_MONO16;
        }
        else if (wave.channels == 2)
        {
            if (wave.bitsPerSample == 8) format = AL_FORMAT_STEREO8;
            else if (wave.bitsPerSample == 16) format = AL_FORMAT_STEREO16;
        }

        // Create an audio source
        ALuint source;
        alGenSources(1, &source);           // Generate pointer to audio source

        alSourcef(source, AL_PITCH, 1);
        alSourcef(source, AL_GAIN, 1);
        alSource3f(source, AL_POSITION, 0, 0, 0);
        alSource3f(source, AL_VELOCITY, 0, 0, 0);
        alSourcei(source, AL_LOOPING, AL_FALSE);

        // Convert loaded data to OpenAL buffer
        //----------------------------------------
        ALuint buffer;
        alGenBuffers(1, &buffer);           // Generate pointer to buffer

        // Upload sound data to buffer
        alBufferData(buffer, format, wave.data, wave.dataSize, wave.sampleRate);

        // Attach sound buffer to source
        alSourcei(source, AL_BUFFER, buffer);

        TraceLog(INFO, "[%s] Sound file loaded successfully (SampleRate: %i, BitRate: %i, Channels: %i)", fileName, wave.sampleRate, wave.bitsPerSample, wave.channels);

        // Unallocate WAV data
        UnloadWave(wave);

        sound.source = source;
        sound.buffer = buffer;
    }

    return sound;
}

// Load sound from wave data
Sound LoadSoundFromWave(Wave wave)
{
    Sound sound = { 0 };

    if (wave.data != NULL)
    {
        ALenum format = 0;
        // The OpenAL format is worked out by looking at the number of channels and the bits per sample
        if (wave.channels == 1)
        {
            if (wave.bitsPerSample == 8) format = AL_FORMAT_MONO8;
            else if (wave.bitsPerSample == 16) format = AL_FORMAT_MONO16;
        }
        else if (wave.channels == 2)
        {
            if (wave.bitsPerSample == 8) format = AL_FORMAT_STEREO8;
            else if (wave.bitsPerSample == 16) format = AL_FORMAT_STEREO16;
        }

        // Create an audio source
        ALuint source;
        alGenSources(1, &source);           // Generate pointer to audio source
        alSourcef(source, AL_PITCH, 1);
        alSourcef(source, AL_GAIN, 1);
        alSource3f(source, AL_POSITION, 0, 0, 0);
        alSource3f(source, AL_VELOCITY, 0, 0, 0);
        alSourcei(source, AL_LOOPING, AL_FALSE);

        // Convert loaded data to OpenAL buffer
        //----------------------------------------
        ALuint buffer;
        alGenBuffers(1, &buffer);           // Generate pointer to buffer

        // Upload sound data to buffer
        alBufferData(buffer, format, wave.data, wave.dataSize, wave.sampleRate);

        // Attach sound buffer to source
        alSourcei(source, AL_BUFFER, buffer);

        // Unallocate WAV data
        UnloadWave(wave);

        TraceLog(INFO, "[Wave] Sound file loaded successfully (SampleRate: %i, BitRate: %i, Channels: %i)", wave.sampleRate, wave.bitsPerSample, wave.channels);

        sound.source = source;
        sound.buffer = buffer;
    }

    return sound;
}

// Load sound to memory from rRES file (raylib Resource)
// TODO: Maybe rresName could be directly a char array with all the data?
Sound LoadSoundFromRES(const char *rresName, int resId)
{
    Sound sound = { 0 };

#if defined(AUDIO_STANDALONE)
    TraceLog(WARNING, "Sound loading from rRES resource file not supported on standalone mode");
#else
    bool found = false;

    char id[4];             // rRES file identifier
    unsigned char version;  // rRES file version and subversion
    char useless;           // rRES header reserved data
    short numRes;

    ResInfoHeader infoHeader;

    FILE *rresFile = fopen(rresName, "rb");

    if (rresFile == NULL)
    {
        TraceLog(WARNING, "[%s] rRES raylib resource file could not be opened", rresName);
    }
    else
    {
        // Read rres file (basic file check - id)
        fread(&id[0], sizeof(char), 1, rresFile);
        fread(&id[1], sizeof(char), 1, rresFile);
        fread(&id[2], sizeof(char), 1, rresFile);
        fread(&id[3], sizeof(char), 1, rresFile);
        fread(&version, sizeof(char), 1, rresFile);
        fread(&useless, sizeof(char), 1, rresFile);

        if ((id[0] != 'r') && (id[1] != 'R') && (id[2] != 'E') && (id[3] != 'S'))
        {
            TraceLog(WARNING, "[%s] This is not a valid raylib resource file", rresName);
        }
        else
        {
            // Read number of resources embedded
            fread(&numRes, sizeof(short), 1, rresFile);

            for (int i = 0; i < numRes; i++)
            {
                fread(&infoHeader, sizeof(ResInfoHeader), 1, rresFile);

                if (infoHeader.id == resId)
                {
                    found = true;

                    // Check data is of valid SOUND type
                    if (infoHeader.type == 1)   // SOUND data type
                    {
                        // TODO: Check data compression type
                        // NOTE: We suppose compression type 2 (DEFLATE - default)

                        // Reading SOUND parameters
                        Wave wave;
                        short sampleRate, bps;
                        char channels, reserved;

                        fread(&sampleRate, sizeof(short), 1, rresFile);     // Sample rate (frequency)
                        fread(&bps, sizeof(short), 1, rresFile);            // Bits per sample
                        fread(&channels, 1, 1, rresFile);                   // Channels (1 - mono, 2 - stereo)
                        fread(&reserved, 1, 1, rresFile);                   // <reserved>

                        wave.sampleRate = sampleRate;
                        wave.dataSize = infoHeader.srcSize;
                        wave.bitsPerSample = bps;
                        wave.channels = (short)channels;

                        unsigned char *data = malloc(infoHeader.size);
                        fread(data, infoHeader.size, 1, rresFile);

                        wave.data = DecompressData(data, infoHeader.size, infoHeader.srcSize);

                        free(data);

                        // Convert wave to Sound (OpenAL)
                        ALenum format = 0;

                        // The OpenAL format is worked out by looking at the number of channels and the bits per sample
                        if (wave.channels == 1)
                        {
                            if (wave.bitsPerSample == 8) format = AL_FORMAT_MONO8;
                            else if (wave.bitsPerSample == 16) format = AL_FORMAT_MONO16;
                        }
                        else if (wave.channels == 2)
                        {
                            if (wave.bitsPerSample == 8) format = AL_FORMAT_STEREO8;
                            else if (wave.bitsPerSample == 16) format = AL_FORMAT_STEREO16;
                        }

                        // Create an audio source
                        ALuint source;
                        alGenSources(1, &source);       // Generate pointer to audio source

                        alSourcef(source, AL_PITCH, 1);
                        alSourcef(source, AL_GAIN, 1);
                        alSource3f(source, AL_POSITION, 0, 0, 0);
                        alSource3f(source, AL_VELOCITY, 0, 0, 0);
                        alSourcei(source, AL_LOOPING, AL_FALSE);

                        // Convert loaded data to OpenAL buffer
                        //----------------------------------------
                        ALuint buffer;
                        alGenBuffers(1, &buffer);       // Generate pointer to buffer

                        // Upload sound data to buffer
                        alBufferData(buffer, format, (void *)wave.data, wave.dataSize, wave.sampleRate);

                        // Attach sound buffer to source
                        alSourcei(source, AL_BUFFER, buffer);

                        TraceLog(INFO, "[%s] Sound loaded successfully from resource (SampleRate: %i, BitRate: %i, Channels: %i)", rresName, wave.sampleRate, wave.bitsPerSample, wave.channels);

                        // Unallocate WAV data
                        UnloadWave(wave);

                        sound.source = source;
                        sound.buffer = buffer;
                    }
                    else
                    {
                        TraceLog(WARNING, "[%s] Required resource does not seem to be a valid SOUND resource", rresName);
                    }
                }
                else
                {
                    // Depending on type, skip the right amount of parameters
                    switch (infoHeader.type)
                    {
                        case 0: fseek(rresFile, 6, SEEK_CUR); break;    // IMAGE: Jump 6 bytes of parameters
                        case 1: fseek(rresFile, 6, SEEK_CUR); break;    // SOUND: Jump 6 bytes of parameters
                        case 2: fseek(rresFile, 5, SEEK_CUR); break;    // MODEL: Jump 5 bytes of parameters (TODO: Review)
                        case 3: break;                                  // TEXT: No parameters
                        case 4: break;                                  // RAW: No parameters
                        default: break;
                    }

                    // Jump DATA to read next infoHeader
                    fseek(rresFile, infoHeader.size, SEEK_CUR);
                }
            }
        }

        fclose(rresFile);
    }

    if (!found) TraceLog(WARNING, "[%s] Required resource id [%i] could not be found in the raylib resource file", rresName, resId);
#endif

    return sound;
}

// Unload sound
void UnloadSound(Sound sound)
{
    alDeleteSources(1, &sound.source);
    alDeleteBuffers(1, &sound.buffer);

    TraceLog(INFO, "Unloaded sound data");
}

// Play a sound
void PlaySound(Sound sound)
{
    alSourcePlay(sound.source);         // Play the sound

    //TraceLog(INFO, "Playing sound");

    // Find the current position of the sound being played
    // NOTE: Only works when the entire file is in a single buffer
    //int byteOffset;
    //alGetSourcei(sound.source, AL_BYTE_OFFSET, &byteOffset);
    //
    //int sampleRate;
    //alGetBufferi(sound.buffer, AL_FREQUENCY, &sampleRate);    // AL_CHANNELS, AL_BITS (bps)

    //float seconds = (float)byteOffset/sampleRate;     // Number of seconds since the beginning of the sound
    //or
    //float result;
    //alGetSourcef(sound.source, AL_SEC_OFFSET, &result);   // AL_SAMPLE_OFFSET
}

// Pause a sound
void PauseSound(Sound sound)
{
    alSourcePause(sound.source);
}

// Stop reproducing a sound
void StopSound(Sound sound)
{
    alSourceStop(sound.source);
}

// Check if a sound is playing
bool IsSoundPlaying(Sound sound)
{
    bool playing = false;
    ALint state;

    alGetSourcei(sound.source, AL_SOURCE_STATE, &state);
    if (state == AL_PLAYING) playing = true;

    return playing;
}

// Set volume for a sound
void SetSoundVolume(Sound sound, float volume)
{
    alSourcef(sound.source, AL_GAIN, volume);
}

// Set pitch for a sound
void SetSoundPitch(Sound sound, float pitch)
{
    alSourcef(sound.source, AL_PITCH, pitch);
}
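
// Example usage of the sound functions (a minimal sketch; "resources/coin.wav" is a placeholder
// path used for illustration only):
//
//    InitAudioDevice();
//    Sound coin = LoadSound("resources/coin.wav");   // Whole file is decoded into a single OpenAL buffer
//    SetSoundVolume(coin, 0.8f);
//    PlaySound(coin);
//    ...
//    UnloadSound(coin);
//    CloseAudioDevice();
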
//----------------------------------------------------------------------------------
// Module Functions Definition - Music loading and stream playing (.OGG)
//----------------------------------------------------------------------------------
// Start music playing (open stream)
// Returns 0 on success, a non-zero error code otherwise
int PlayMusicStream(int musicIndex, char *fileName)
{
    int mixIndex;

    if (currentMusic[musicIndex].stream || currentMusic[musicIndex].chipctx) return 1;  // error: music stream already in use

    for (mixIndex = 0; mixIndex < MAX_AUDIO_CONTEXTS; mixIndex++)   // find empty mix channel slot
    {
        if (mixChannelsActive_g[mixIndex] == NULL) break;
        else if (mixIndex == (MAX_AUDIO_CONTEXTS - 1)) return 2;    // error: no free mix channel
    }

    if (strcmp(GetExtension(fileName), "ogg") == 0)
    {
        // Open audio stream
        currentMusic[musicIndex].stream = stb_vorbis_open_filename(fileName, NULL, NULL);

        if (currentMusic[musicIndex].stream == NULL)
        {
            TraceLog(WARNING, "[%s] OGG audio file could not be opened", fileName);
            return 3;   // error
        }
        else
        {
            // Get file info
            stb_vorbis_info info = stb_vorbis_get_info(currentMusic[musicIndex].stream);

            TraceLog(INFO, "[%s] Ogg sample rate: %i", fileName, info.sample_rate);
            TraceLog(INFO, "[%s] Ogg channels: %i", fileName, info.channels);
            TraceLog(DEBUG, "[%s] Temp memory required: %i", fileName, info.temp_memory_required);

            currentMusic[musicIndex].loop = true;   // We loop by default
            musicEnabled_g = true;

            currentMusic[musicIndex].totalSamplesLeft = stb_vorbis_stream_length_in_samples(currentMusic[musicIndex].stream)*info.channels;
            currentMusic[musicIndex].totalLengthSeconds = stb_vorbis_stream_length_in_seconds(currentMusic[musicIndex].stream);

            if (info.channels == 2) currentMusic[musicIndex].ctx = InitAudioContext(info.sample_rate, mixIndex, 2, false);
            else currentMusic[musicIndex].ctx = InitAudioContext(info.sample_rate, mixIndex, 1, false);

            if (!currentMusic[musicIndex].ctx) return 4;    // error

            currentMusic[musicIndex].ctx->playing = true;
        }
    }
    else if (strcmp(GetExtension(fileName), "xm") == 0)
    {
        // Only stereo is supported for xm
        if (!jar_xm_create_context_from_file(&currentMusic[musicIndex].chipctx, 48000, fileName))
        {
            currentMusic[musicIndex].chipTune = true;
            currentMusic[musicIndex].loop = true;
            jar_xm_set_max_loop_count(currentMusic[musicIndex].chipctx, 0);     // infinite number of loops
            currentMusic[musicIndex].totalSamplesLeft = jar_xm_get_remaining_samples(currentMusic[musicIndex].chipctx);
            currentMusic[musicIndex].totalLengthSeconds = ((float)currentMusic[musicIndex].totalSamplesLeft)/48000.0f;
            musicEnabled_g = true;

            TraceLog(INFO, "[%s] XM number of samples: %i", fileName, currentMusic[musicIndex].totalSamplesLeft);
            TraceLog(INFO, "[%s] XM track length: %11.6f sec", fileName, currentMusic[musicIndex].totalLengthSeconds);

            currentMusic[musicIndex].ctx = InitAudioContext(48000, mixIndex, 2, true);

            if (!currentMusic[musicIndex].ctx) return 5;    // error

            currentMusic[musicIndex].ctx->playing = true;
        }
        else
        {
            TraceLog(WARNING, "[%s] XM file could not be opened", fileName);
            return 6;   // error
        }
    }
    else
    {
        TraceLog(WARNING, "[%s] Music extension not recognized, it can't be loaded", fileName);
        return 7;   // error
    }

    return 0;   // normal return
}
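
// Example usage of the music streaming functions (a minimal sketch; "resources/track.ogg" and the
// game loop are assumptions made for illustration only):
//
//    InitAudioDevice();
//
//    if (PlayMusicStream(0, "resources/track.ogg") == 0)    // music index 0, returns 0 on success
//    {
//        while (gameIsRunning)
//        {
//            UpdateMusicStream(0);       // Refill stream buffers once per frame
//        }
//
//        StopMusicStream(0);
//    }
//
//    CloseAudioDevice();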

// Stop music playing for individual music index of currentMusic array (close stream)
void StopMusicStream(int index)
{
    if (index < MAX_MUSIC_STREAMS && currentMusic[index].ctx)
    {
        CloseAudioContext(currentMusic[index].ctx);

        if (currentMusic[index].chipTune)
        {
            jar_xm_free_context(currentMusic[index].chipctx);
        }
        else
        {
            stb_vorbis_close(currentMusic[index].stream);
        }

        if (!getMusicStreamCount()) musicEnabled_g = false;

        if (currentMusic[index].stream || currentMusic[index].chipctx)
        {
            currentMusic[index].stream = NULL;
            currentMusic[index].chipctx = NULL;
        }
    }
}

// Get number of music channels active at this time; this does not mean they are playing
int getMusicStreamCount(void)
{
    int musicCount = 0;

    for (int musicIndex = 0; musicIndex < MAX_MUSIC_STREAMS; musicIndex++)  // count used music slots
    {
        if ((currentMusic[musicIndex].stream != NULL) || currentMusic[musicIndex].chipTune) musicCount++;
    }

    return musicCount;
}

// Pause music playing
void PauseMusicStream(int index)
{
    // Pause music stream if music available!
    if (index < MAX_MUSIC_STREAMS && currentMusic[index].ctx && musicEnabled_g)
    {
        TraceLog(INFO, "Pausing music stream");
        alSourcePause(currentMusic[index].ctx->alSource);
        currentMusic[index].ctx->playing = false;
    }
}

// Resume music playing
void ResumeMusicStream(int index)
{
    // Resume music playing... if music available!
    ALenum state;

    if (index < MAX_MUSIC_STREAMS && currentMusic[index].ctx)
    {
        alGetSourcei(currentMusic[index].ctx->alSource, AL_SOURCE_STATE, &state);

        if (state == AL_PAUSED)
        {
            TraceLog(INFO, "Resuming music stream");
            alSourcePlay(currentMusic[index].ctx->alSource);
            currentMusic[index].ctx->playing = true;
        }
    }
}

// Check if music is playing for a given music index
bool IsMusicPlaying(int index)
{
    bool playing = false;
    ALint state;

    if (index < MAX_MUSIC_STREAMS && currentMusic[index].ctx)
    {
        alGetSourcei(currentMusic[index].ctx->alSource, AL_SOURCE_STATE, &state);
        if (state == AL_PLAYING) playing = true;
    }

    return playing;
}

// Set volume for music
void SetMusicVolume(int index, float volume)
{
    if (index < MAX_MUSIC_STREAMS && currentMusic[index].ctx)
    {
        alSourcef(currentMusic[index].ctx->alSource, AL_GAIN, volume);
    }
}

// Set pitch for music
void SetMusicPitch(int index, float pitch)
{
    if (index < MAX_MUSIC_STREAMS && currentMusic[index].ctx)
    {
        alSourcef(currentMusic[index].ctx->alSource, AL_PITCH, pitch);
    }
}

// Get current music time length (in seconds)
float GetMusicTimeLength(int index)
{
    float totalSeconds;

    if (currentMusic[index].chipTune)
    {
        totalSeconds = currentMusic[index].totalLengthSeconds;
    }
    else
    {
        totalSeconds = stb_vorbis_stream_length_in_seconds(currentMusic[index].stream);
    }

    return totalSeconds;
}

// Get current music time played (in seconds)
float GetMusicTimePlayed(int index)
{
    float secondsPlayed = 0.0f;

    if (index < MAX_MUSIC_STREAMS && currentMusic[index].ctx)
    {
        if (currentMusic[index].chipTune)
        {
            uint64_t samples;
            jar_xm_get_position(currentMusic[index].chipctx, NULL, NULL, NULL, &samples);
            secondsPlayed = (float)samples/(48000*currentMusic[index].ctx->channels);   // Not sure if this is the correct value
        }
        else
        {
            int totalSamples = stb_vorbis_stream_length_in_samples(currentMusic[index].stream)*currentMusic[index].ctx->channels;
            int samplesPlayed = totalSamples - currentMusic[index].totalSamplesLeft;
            secondsPlayed = (float)samplesPlayed/(currentMusic[index].ctx->sampleRate*currentMusic[index].ctx->channels);
        }
    }

    return secondsPlayed;
}

//----------------------------------------------------------------------------------
// Module specific Functions Definition
//----------------------------------------------------------------------------------
// Fill music buffers with new data from music stream
static bool BufferMusicStream(int index)
{
    short pcm[MUSIC_BUFFER_SIZE_SHORT];
    float pcmf[MUSIC_BUFFER_SIZE_FLOAT];

    int size = 0;           // Total size of data streamed (in L+R samples)
    bool active = true;     // We can get more data from stream (not finished)

    if (!currentMusic[index].ctx->playing && currentMusic[index].totalSamplesLeft > 0)
    {
        UpdateAudioContext(currentMusic[index].ctx, NULL, 0);
        return true;        // It is still active but it is paused
    }

    if (currentMusic[index].chipTune)   // There is no end of stream for xm files, once the end is reached zeros are generated for non-looped chiptunes
    {
        if (currentMusic[index].totalSamplesLeft >= MUSIC_BUFFER_SIZE_FLOAT/2) size = MUSIC_BUFFER_SIZE_FLOAT/2;
        else size = currentMusic[index].totalSamplesLeft/2;

        jar_xm_generate_samples(currentMusic[index].chipctx, pcmf, size);   // generates 'size' stereo frames (2*size float samples) into pcmf
        UpdateAudioContext(currentMusic[index].ctx, pcmf, size*2);
        currentMusic[index].totalSamplesLeft -= size*2;
    }
    else
    {
        if (currentMusic[index].totalSamplesLeft >= MUSIC_BUFFER_SIZE_SHORT) size = MUSIC_BUFFER_SIZE_SHORT;
        else size = currentMusic[index].totalSamplesLeft;

        int streamedBytes = stb_vorbis_get_samples_short_interleaved(currentMusic[index].stream, currentMusic[index].ctx->channels, pcm, size);
        UpdateAudioContext(currentMusic[index].ctx, pcm, streamedBytes*currentMusic[index].ctx->channels);
        currentMusic[index].totalSamplesLeft -= streamedBytes*currentMusic[index].ctx->channels;
    }

    TraceLog(DEBUG, "Buffering index:%i, chiptune:%i", index, (int)currentMusic[index].chipTune);

    if (currentMusic[index].totalSamplesLeft <= 0) active = false;

    return active;
}

// Empty music buffers
static void EmptyMusicStream(int index)
{
    ALuint buffer = 0;
    int queued = 0;

    alGetSourcei(currentMusic[index].ctx->alSource, AL_BUFFERS_QUEUED, &queued);

    while (queued > 0)
    {
        alSourceUnqueueBuffers(currentMusic[index].ctx->alSource, 1, &buffer);
        queued--;
    }
}

// Determine if a music stream is ready to be written to
static bool isMusicStreamReady(int index)
{
    ALint processed = 0;
    alGetSourcei(currentMusic[index].ctx->alSource, AL_BUFFERS_PROCESSED, &processed);

    if (processed) return true;

    return false;
}

// Update (re-fill) music buffers if data already processed
void UpdateMusicStream(int index)
{
    ALenum state;
    bool active = true;

    if (index < MAX_MUSIC_STREAMS && musicEnabled_g && currentMusic[index].ctx && isMusicStreamReady(index))
    {
        active = BufferMusicStream(index);

        if (!active && currentMusic[index].loop && currentMusic[index].ctx->playing)
        {
            if (currentMusic[index].chipTune)
            {
                currentMusic[index].totalSamplesLeft = currentMusic[index].totalLengthSeconds*currentMusic[index].ctx->sampleRate;
            }
            else
            {
                stb_vorbis_seek_start(currentMusic[index].stream);
                currentMusic[index].totalSamplesLeft = stb_vorbis_stream_length_in_samples(currentMusic[index].stream)*currentMusic[index].ctx->channels;
            }

            active = true;
        }

        if (alGetError() != AL_NO_ERROR) TraceLog(WARNING, "Error buffering data...");

        alGetSourcei(currentMusic[index].ctx->alSource, AL_SOURCE_STATE, &state);

        if ((state != AL_PLAYING) && active && currentMusic[index].ctx->playing) alSourcePlay(currentMusic[index].ctx->alSource);

        if (!active) StopMusicStream(index);
    }
}

// Load WAV file into Wave structure
static Wave LoadWAV(const char *fileName)
{
    // Basic WAV headers structs
    typedef struct {
        char chunkID[4];
        int chunkSize;
        char format[4];
    } RiffHeader;

    typedef struct {
        char subChunkID[4];
        int subChunkSize;
        short audioFormat;
        short numChannels;
        int sampleRate;
        int byteRate;
        short blockAlign;
        short bitsPerSample;
    } WaveFormat;

    typedef struct {
        char subChunkID[4];
        int subChunkSize;
    } WaveData;

    RiffHeader riffHeader;
    WaveFormat waveFormat;
    WaveData waveData;

    Wave wave = { 0 };
    FILE *wavFile;

    wavFile = fopen(fileName, "rb");

    if (wavFile == NULL)
    {
        TraceLog(WARNING, "[%s] WAV file could not be opened", fileName);
        wave.data = NULL;
    }
    else
    {
        // Read in the first chunk into the struct
        fread(&riffHeader, sizeof(RiffHeader), 1, wavFile);

        // Check for RIFF and WAVE tags
        if (strncmp(riffHeader.chunkID, "RIFF", 4) ||
            strncmp(riffHeader.format, "WAVE", 4))
        {
            TraceLog(WARNING, "[%s] Invalid RIFF or WAVE Header", fileName);
        }
        else
        {
            // Read in the 2nd chunk for the wave info
            fread(&waveFormat, sizeof(WaveFormat), 1, wavFile);

            // Check for fmt tag
            if ((waveFormat.subChunkID[0] != 'f') || (waveFormat.subChunkID[1] != 'm') ||
                (waveFormat.subChunkID[2] != 't') || (waveFormat.subChunkID[3] != ' '))
            {
                TraceLog(WARNING, "[%s] Invalid Wave format", fileName);
            }
            else
            {
                // Check for extra parameters;
                if (waveFormat.subChunkSize > 16) fseek(wavFile, sizeof(short), SEEK_CUR);

                // Read in the last byte of data before the sound file
                fread(&waveData, sizeof(WaveData), 1, wavFile);

                // Check for data tag
                if ((waveData.subChunkID[0] != 'd') || (waveData.subChunkID[1] != 'a') ||
                    (waveData.subChunkID[2] != 't') || (waveData.subChunkID[3] != 'a'))
                {
                    TraceLog(WARNING, "[%s] Invalid data header", fileName);
                }
                else
                {
                    // Allocate memory for data
                    wave.data = (unsigned char *)malloc(sizeof(unsigned char)*waveData.subChunkSize);

                    // Read in the sound data into the soundData variable
                    fread(wave.data, waveData.subChunkSize, 1, wavFile);

                    // Now we set the variables that we need later
                    wave.dataSize = waveData.subChunkSize;
                    wave.sampleRate = waveFormat.sampleRate;
                    wave.channels = waveFormat.numChannels;
                    wave.bitsPerSample = waveFormat.bitsPerSample;

                    TraceLog(INFO, "[%s] WAV file loaded successfully (SampleRate: %i, BitRate: %i, Channels: %i)", fileName, wave.sampleRate, wave.bitsPerSample, wave.channels);
                }
            }
        }

        fclose(wavFile);
    }

    return wave;
}

// Load OGG file into Wave structure
// NOTE: Using stb_vorbis library
static Wave LoadOGG(char *fileName)
{
    Wave wave;

    stb_vorbis *oggFile = stb_vorbis_open_filename(fileName, NULL, NULL);

    if (oggFile == NULL)
    {
        TraceLog(WARNING, "[%s] OGG file could not be opened", fileName);
        wave.data = NULL;
    }
    else
    {
        stb_vorbis_info info = stb_vorbis_get_info(oggFile);

        wave.sampleRate = info.sample_rate;
        wave.bitsPerSample = 16;
        wave.channels = info.channels;

        TraceLog(DEBUG, "[%s] Ogg sample rate: %i", fileName, info.sample_rate);
        TraceLog(DEBUG, "[%s] Ogg channels: %i", fileName, info.channels);

        int totalSamplesLength = (stb_vorbis_stream_length_in_samples(oggFile)*info.channels);

        wave.dataSize = totalSamplesLength*sizeof(short);   // Size must be in bytes

        TraceLog(DEBUG, "[%s] Samples length: %i", fileName, totalSamplesLength);

        float totalSeconds = stb_vorbis_stream_length_in_seconds(oggFile);

        TraceLog(DEBUG, "[%s] Total seconds: %f", fileName, totalSeconds);

        if (totalSeconds > 10) TraceLog(WARNING, "[%s] Ogg audio length is longer than 10 seconds (%f), that's a big file in memory, consider music streaming", fileName, totalSeconds);

        int totalSamples = totalSeconds*info.sample_rate*info.channels;

        TraceLog(DEBUG, "[%s] Total samples calculated: %i", fileName, totalSamples);

        wave.data = malloc(sizeof(short)*totalSamplesLength);

        int samplesObtained = stb_vorbis_get_samples_short_interleaved(oggFile, info.channels, wave.data, totalSamplesLength);

        TraceLog(DEBUG, "[%s] Samples obtained: %i", fileName, samplesObtained);

        TraceLog(INFO, "[%s] OGG file loaded successfully (SampleRate: %i, BitRate: %i, Channels: %i)", fileName, wave.sampleRate, wave.bitsPerSample, wave.channels);

        stb_vorbis_close(oggFile);
    }

    return wave;
}

// Unload Wave data
static void UnloadWave(Wave wave)
{
    free(wave.data);

    TraceLog(INFO, "Unloaded wave data");
}

// Some required functions for audio standalone module version
#if defined(AUDIO_STANDALONE)
// Get the extension for a filename
const char *GetExtension(const char *fileName)
{
    const char *dot = strrchr(fileName, '.');
    if (!dot || dot == fileName) return "";
    return (dot + 1);
}

// Outputs a trace log message (INFO, ERROR, WARNING)
// NOTE: If a file has been init, output log is written there
void TraceLog(int msgType, const char *text, ...)
{
    va_list args;
    int traceDebugMsgs = 0;

#ifdef DO_NOT_TRACE_DEBUG_MSGS
    traceDebugMsgs = 0;
#endif

    switch (msgType)
    {
        case INFO: fprintf(stdout, "INFO: "); break;
        case ERROR: fprintf(stdout, "ERROR: "); break;
        case WARNING: fprintf(stdout, "WARNING: "); break;
        case DEBUG: if (traceDebugMsgs) fprintf(stdout, "DEBUG: "); break;
        default: break;
    }

    if ((msgType != DEBUG) || ((msgType == DEBUG) && (traceDebugMsgs)))
    {
        va_start(args, text);
        vfprintf(stdout, text, args);
        va_end(args);
        fprintf(stdout, "\n");
    }

    if (msgType == ERROR) exit(1);  // If ERROR message, exit program
}
#endif