Optimize and unify audio processing

This commit is contained in:
sin365 2025-09-23 19:36:48 +08:00
parent baf2e5a8d6
commit da7944d521
5 changed files with 423 additions and 118 deletions

View File

@@ -1,9 +1,10 @@
+using AxibugEmuOnline.Client;
 using AxibugEmuOnline.Client.ClientCore;
 using System;
 using System.Collections.Generic;
 using UnityEngine;
-public class UEGSoundPlayer : MonoBehaviour
+public class UEGSoundPlayer : MonoBehaviour, AxiAudioPull
 {
     [SerializeField]
     private AudioSource m_as;
@@ -18,32 +19,72 @@ public class UEGSoundPlayer : MonoBehaviour
     void Awake()
     {
-        // Get the current audio configuration
-        AudioConfiguration config = AudioSettings.GetConfiguration();
-        // Set the target audio configuration
-        config.sampleRate = sampleRate;               // 44100 Hz sample rate
-        config.numRealVoices = 32;                    // max real voices (optional)
-        config.numVirtualVoices = 512;                // virtual voices (optional)
-        config.dspBufferSize = 1024;                  // DSP buffer size (optional)
-        config.speakerMode = AudioSpeakerMode.Stereo; // stereo (2 channels)
-        App.audioMgr.SetAudioConfig(config);
+        return;
+        //// Get the current audio configuration
+        //AudioConfiguration config = AudioSettings.GetConfiguration();
+        //// Set the target audio configuration
+        //config.sampleRate = sampleRate;               // 44100 Hz sample rate
+        //config.numRealVoices = 32;                    // max real voices (optional)
+        //config.numVirtualVoices = 512;                // virtual voices (optional)
+        //config.dspBufferSize = 1024;                  // DSP buffer size (optional)
+        //config.speakerMode = AudioSpeakerMode.Stereo; // stereo (2 channels)
+        //App.audioMgr.SetAudioConfig(config);
     }
+
+    private void OnEnable()
+    {
+        App.audioMgr.RegisterStream(nameof(UEssgee), AudioSettings.outputSampleRate, this);
+    }
+
+    void OnDisable()
+    {
+        App.audioMgr.ClearAudioData(nameof(UEssgee));
+    }

     private Queue<float> sampleQueue = new Queue<float>();

-    // Unity audio thread callback
-    void OnAudioFilterRead(float[] data, int channels)
+    public unsafe void PullAudio(float[] data, int channels)
     {
+        fixed (float* pData = data)
+        {
+            float* outputPtr = pData; // pointer to the start of the array
+            int dataLength = data.Length;
+            for (int i = 0; i < dataLength; i++)
+            {
+                float rawData;
+                if (_buffer.TryRead(out rawData))
+                    *outputPtr = rawData;
+                else
+                    *outputPtr = 0; // silence when there is no data
+                outputPtr++; // advance the pointer
+            }
+        }
+        /*
         for (int i = 0; i < data.Length; i++)
         {
             if (_buffer.TryRead(out float rawData))
                 data[i] = rawData;
             else
                 data[i] = 0; // silence when there is no data
-        }
+        }*/
     }
+
+    //// Unity audio thread callback
+    //void OnAudioFilterRead(float[] data, int channels)
+    //{
+    //    for (int i = 0; i < data.Length; i++)
+    //    {
+    //        if (_buffer.TryRead(out float rawData))
+    //            data[i] = rawData;
+    //        else
+    //            data[i] = 0; // silence when there is no data
+    //    }
+    //}

     public void Initialize()
     {
@@ -72,7 +113,7 @@ public class UEGSoundPlayer : MonoBehaviour
         {
             _buffer.Write(buffer[i] / 32767.0f);
         }
-        App.audioMgr.WriteToRecord(buffer, samples_a);
+        //App.audioMgr.WriteToRecord(buffer, samples_a);
     }

     public void BufferWirte(int Off, byte[] Data)
     {
@@ -91,4 +132,5 @@ public class UEGSoundPlayer : MonoBehaviour
             return;
         m_as.volume = Vol;
     }
 }
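The same pattern recurs in every sound player touched by this commit: the component implements AxiAudioPull (the interface is added to AudioMgr further down), registers its stream with App.audioMgr in OnEnable, unregisters in OnDisable, and fills whatever buffer PullAudio hands it from its own ring buffer, writing silence on underrun. A minimal sketch of that contract follows; ExamplePlayer and its ConcurrentQueue are illustrative stand-ins, not types from this repository.

using System.Collections.Concurrent;
using AxibugEmuOnline.Client;
using AxibugEmuOnline.Client.ClientCore;
using UnityEngine;

// Illustrative player showing the AxiAudioPull contract introduced by this commit.
public class ExamplePlayer : MonoBehaviour, AxiAudioPull
{
    // Stand-in for the per-core sound ring buffer used by the real players.
    readonly ConcurrentQueue<float> _samples = new ConcurrentQueue<float>();

    void OnEnable()
    {
        // AudioMgr keeps a single active stream; the channel id doubles as its key.
        App.audioMgr.RegisterStream(nameof(ExamplePlayer), AudioSettings.outputSampleRate, this);
    }

    void OnDisable()
    {
        App.audioMgr.ClearAudioData(nameof(ExamplePlayer));
    }

    // Invoked from AudioMgr.OnAudioFilterRead on Unity's audio thread.
    public void PullAudio(float[] data, int channels)
    {
        for (int i = 0; i < data.Length; i++)
            data[i] = _samples.TryDequeue(out float s) ? s : 0f; // silence on underrun
    }
}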

View File

@@ -1,8 +1,10 @@
+using AxibugEmuOnline.Client;
+using AxibugEmuOnline.Client.ClientCore;
 using MAME.Core;
 using System;
 using UnityEngine;
-public class UniSoundPlayer : MonoBehaviour, ISoundPlayer
+public class UniSoundPlayer : MonoBehaviour, ISoundPlayer, AxiAudioPull
 {
     [SerializeField]
     private AudioSource m_as;
@@ -13,14 +15,24 @@ public class UniSoundPlayer : MonoBehaviour, ISoundPlayer
     void Awake()
     {
-        //TODO the sample rate needs to be more accurate, and it has nothing to do with the clip
-        var dummy = AudioClip.Create("dummy", 1, 1, AudioSettings.outputSampleRate, false);
-        dummy.SetData(new float[] { 1 }, 0);
-        m_as.clip = dummy; //just to let unity play the audiosource
-        m_as.loop = true;
-        m_as.spatialBlend = 1;
-        m_as.Play();
+        ////TODO the sample rate needs to be more accurate, and it has nothing to do with the clip
+        //var dummy = AudioClip.Create("dummy", 1, 1, AudioSettings.outputSampleRate, false);
+        //dummy.SetData(new float[] { 1 }, 0);
+        //m_as.clip = dummy; //just to let unity play the audiosource
+        //m_as.loop = true;
+        //m_as.spatialBlend = 1;
+        //m_as.Play();
     }
+
+    private void OnEnable()
+    {
+        App.audioMgr.RegisterStream(nameof(UMAME), AudioSettings.outputSampleRate, this);
+    }
+
+    void OnDisable()
+    {
+        App.audioMgr.ClearAudioData(nameof(UMAME));
+    }

     public void GetAudioParams(out int frequency, out int channels)
     {
         frequency = m_as.clip.samples;
@@ -38,12 +50,62 @@ public class UniSoundPlayer : MonoBehaviour, ISoundPlayer
     public void StopPlay()
     {
         if (m_as.isPlaying)
         {
             m_as.Stop();
         }
     }

-    void OnAudioFilterRead(float[] data, int channels)
+    public unsafe void PullAudio(float[] data, int channels)
+    {
+        if (!UMAME.bInGame) return;
+
+        fixed (float* pData = data)
+        {
+            float* outputPtr = pData; // pointer to the start of the array
+            int dataLength = data.Length;
+            for (int i = 0; i < dataLength; i += channels)
+            {
+                float rawFloat = lastData;
+                float rawData;
+                if (_buffer.TryRead(out rawData))
+                {
+                    rawFloat = rawData;
+                }
+                *outputPtr = rawFloat;
+                outputPtr++; // advance the pointer
+
+                // fill the remaining channels (fake stereo / multi-channel)
+                for (int fill = 1; fill < channels; fill++)
+                {
+                    *outputPtr = rawFloat;
+                    outputPtr++; // advance the pointer
+                }
+                lastData = rawFloat;
+            }
+        }
+        /*
+        int step = channels;
+        for (int i = 0; i < data.Length; i += step)
+        {
+            float rawFloat = lastData;
+            float rawData;
+            if (_buffer.TryRead(out rawData))
+            {
+                rawFloat = rawData;
+            }
+            data[i] = rawFloat;
+            for (int fill = 1; fill < step; fill++)
+                data[i + fill] = rawFloat;
+            lastData = rawFloat;
+        }*/
+    }
+
+    /*void OnAudioFilterRead(float[] data, int channels)
     {
         if (!UMAME.bInGame) return;
         int step = channels;
@@ -61,7 +123,7 @@ public class UniSoundPlayer : MonoBehaviour, ISoundPlayer
                 data[i + fill] = rawFloat;
             lastData = rawFloat;
         }
-    }
+    }*/

     public void SubmitSamples(byte[] buffer, int samples_a)
     {
@@ -96,4 +158,5 @@ public class UniSoundPlayer : MonoBehaviour, ISoundPlayer
             return;
         m_as.volume = Vol;
     }
 }
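Because UMAME delivers one mono sample per frame, PullAudio above walks data in strides of channels, copies the sample into every channel slot, and repeats the previous sample when the buffer runs dry instead of dropping to zero (which would click). The same logic in safe code, as a rough sketch with a plain Queue<float> standing in for the lock-free sound buffer:

using System.Collections.Generic;

static class MonoUpmix
{
    // Duplicate a mono stream across all interleaved output channels,
    // holding the last sample on underrun to avoid audible clicks.
    public static void Fill(float[] data, int channels, Queue<float> mono, ref float lastSample)
    {
        for (int i = 0; i < data.Length; i += channels)
        {
            if (mono.Count > 0)
                lastSample = mono.Dequeue();

            for (int ch = 0; ch < channels; ch++)
                data[i + ch] = lastSample; // same value in every channel slot
        }
    }
}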

View File

@@ -4,7 +4,7 @@ using VirtualNes.Core;
 namespace AxibugEmuOnline.Client
 {
-    public class AudioProvider : MonoBehaviour
+    public class AudioProvider : MonoBehaviour, AxiAudioPull
     {
         public NesEmulator NesEmu { get; set; }
@@ -14,7 +14,7 @@ namespace AxibugEmuOnline.Client
         private SoundBuffer _buffer = new SoundBuffer(4096);

         public void Start()
         {
+            return;
             //// Get the current audio configuration
             //AudioConfiguration config = AudioSettings.GetConfiguration();
             //// Set the target audio configuration
@@ -25,22 +25,33 @@ namespace AxibugEmuOnline.Client
             //config.speakerMode = AudioSpeakerMode.Stereo; // stereo (2 channels)
             //App.audioMgr.SetAudioConfig(new AudioConfiguration());

-            //TODO the sample rate needs to be more accurate, and it has nothing to do with the clip
-            var dummy = AudioClip.Create("dummy", 1, 1, AudioSettings.outputSampleRate, false);
-            dummy.SetData(new float[] { 1 }, 0);
-            m_as.clip = dummy; //just to let unity play the audiosource
-            m_as.loop = true;
-            m_as.spatialBlend = 1;
-            m_as.Play();
+            ////TODO the sample rate needs to be more accurate, and it has nothing to do with the clip
+            //var dummy = AudioClip.Create("dummy", 1, 1, AudioSettings.outputSampleRate, false);
+            //dummy.SetData(new float[] { 1 }, 0);
+            //m_as.clip = dummy; //just to let unity play the audiosource
+            //m_as.loop = true;
+            //m_as.spatialBlend = 1;
+            //m_as.Play();
         }
+
+        private void OnEnable()
+        {
+            App.audioMgr.RegisterStream(nameof(NesEmulator), AudioSettings.outputSampleRate, this);
+        }
+
+        void OnDisable()
+        {
+            App.audioMgr.ClearAudioData(nameof(NesEmulator));
+        }

         public void GetAudioParams(out int frequency, out int channels)
         {
             frequency = m_as.clip.samples;
             channels = m_as.clip.channels;
         }

-        void OnAudioFilterRead(float[] data, int channels)
+        public unsafe void PullAudio(float[] data, int channels)
         {
             int step = channels;
@@ -49,6 +60,29 @@ namespace AxibugEmuOnline.Client
             ProcessSound(NesEmu.NesCore, (uint)(data.Length / channels));

+            fixed (float* pData = data)
+            {
+                float* outputPtr = pData; // pointer to the start of the data array
+                int dataLength = data.Length;
+                for (int i = 0; i < dataLength; i += step)
+                {
+                    byte rawData;
+                    float rawFloat = 0f;
+                    if (_buffer.TryRead(out rawData))
+                        rawFloat = rawData / 255f;
+                    *outputPtr = rawFloat;
+                    outputPtr++; // advance the pointer
+                    for (int fill = 1; fill < step; fill++)
+                    {
+                        *outputPtr = rawFloat;
+                        outputPtr++; // advance the pointer
+                    }
+                }
+            }
+            /*
             for (int i = 0; i < data.Length; i += step)
             {
                 float rawFloat = 0;
@@ -59,13 +93,36 @@ namespace AxibugEmuOnline.Client
                 data[i] = rawFloat;
                 for (int fill = 1; fill < step; fill++)
                     data[i + fill] = rawFloat;
-            }
+            }*/
         }
+
+        //void OnAudioFilterRead(float[] data, int channels)
+        //{
+        //    int step = channels;
+        //    if (NesEmu == null || NesEmu.NesCore == null) return;
+        //    if (NesEmu.IsPause) return;
+        //    ProcessSound(NesEmu.NesCore, (uint)(data.Length / channels));
+        //    for (int i = 0; i < data.Length; i += step)
+        //    {
+        //        float rawFloat = 0;
+        //        byte rawData;
+        //        if (_buffer.TryRead(out rawData))
+        //            rawFloat = rawData / 255f;
+        //        data[i] = rawFloat;
+        //        for (int fill = 1; fill < step; fill++)
+        //            data[i + fill] = rawFloat;
+        //    }
+        //}

         void ProcessSound(NES nes, uint feedCount)
         {
             nes.apu.Process(_buffer, feedCount);
         }
     }
 }
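The NES provider differs from the other players in one respect: it drives the core on demand, asking the APU for exactly data.Length / channels mono frames per callback before draining the buffer, and its samples arrive as bytes that are scaled by 1/255. A sketch of that sizing and expansion, with a Queue<byte> standing in for the APU output buffer:

using System.Collections.Generic;

static class NesStyleFill
{
    public static void Fill(float[] data, int channels, Queue<byte> apuOut)
    {
        int framesNeeded = data.Length / channels; // frames, not interleaved samples
        // In the real provider this count is what ProcessSound passes to nes.apu.Process.

        for (int frame = 0; frame < framesNeeded; frame++)
        {
            float value = apuOut.Count > 0 ? apuOut.Dequeue() / 255f : 0f; // maps 0..255 to 0..1
            for (int ch = 0; ch < channels; ch++)
                data[frame * channels + ch] = value;
        }
    }
}

Note that rawData / 255f leaves the signal in the 0..1 range; if a zero-centered signal were ever wanted, rawData / 127.5f - 1f would map it to -1..1, but this commit keeps the original scaling.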

View File

@@ -1,8 +1,10 @@
+using AxibugEmuOnline.Client;
+using AxibugEmuOnline.Client.ClientCore;
 using System;
 using System.Collections.Generic;
 using UnityEngine;
-public class SGSoundPlayer : MonoBehaviour//, ISoundPlayer
+public class SGSoundPlayer : MonoBehaviour, AxiAudioPull
 {
     [SerializeField]
     private AudioSource m_as;
@@ -18,44 +20,82 @@ public class SGSoundPlayer : MonoBehaviour//, ISoundPlayer
     void Awake()
     {
-        // Get the current audio configuration
-        AudioConfiguration config = AudioSettings.GetConfiguration();
-
-        // Set the target audio configuration
-        config.sampleRate = 44100;                    // 44100 Hz sample rate
-        config.numRealVoices = 32;                    // max real voices (optional)
-        config.numVirtualVoices = 512;                // virtual voices (optional)
-        config.dspBufferSize = 1024;                  // DSP buffer size (optional)
-        config.speakerMode = AudioSpeakerMode.Stereo; // stereo (2 channels)
-
-        // Apply the new audio configuration
-        if (AudioSettings.Reset(config))
-        {
-            Debug.Log("Audio settings updated successfully.");
-            Debug.Log("Sample Rate: " + config.sampleRate + "Hz");
-            Debug.Log("Speaker Mode: " + config.speakerMode);
-        }
-        else
-        {
-            Debug.LogError("Failed to update audio settings.");
-        }
+        return;
+        //// Get the current audio configuration
+        //AudioConfiguration config = AudioSettings.GetConfiguration();
+
+        //// Set the target audio configuration
+        //config.sampleRate = 44100;                    // 44100 Hz sample rate
+        //config.numRealVoices = 32;                    // max real voices (optional)
+        //config.numVirtualVoices = 512;                // virtual voices (optional)
+        //config.dspBufferSize = 1024;                  // DSP buffer size (optional)
+        //config.speakerMode = AudioSpeakerMode.Stereo; // stereo (2 channels)
+
+        //// Apply the new audio configuration
+        //if (AudioSettings.Reset(config))
+        //{
+        //    Debug.Log("Audio settings updated successfully.");
+        //    Debug.Log("Sample Rate: " + config.sampleRate + "Hz");
+        //    Debug.Log("Speaker Mode: " + config.speakerMode);
+        //}
+        //else
+        //{
+        //    Debug.LogError("Failed to update audio settings.");
+        //}
     }
+
+    private void OnEnable()
+    {
+        App.audioMgr.RegisterStream(nameof(UStoicGoose), AudioSettings.outputSampleRate, this);
+    }
+
+    void OnDisable()
+    {
+        App.audioMgr.ClearAudioData(nameof(UStoicGoose));
+    }

     private Queue<float> sampleQueue = new Queue<float>();

-    // Unity audio thread callback
-    void OnAudioFilterRead(float[] data, int channels)
+    public unsafe void PullAudio(float[] data, int channels)
     {
+        fixed (float* pData = data)
+        {
+            float* outputPtr = pData; // pointer to the start of the array
+            int dataLength = data.Length;
+            for (int i = 0; i < dataLength; i++)
+            {
+                float rawData;
+                if (_buffer.TryRead(out rawData))
+                    *outputPtr = rawData;
+                else
+                    *outputPtr = 0; // silence when there is no data
+                outputPtr++; // advance the pointer
+            }
+        }
+        /*
         for (int i = 0; i < data.Length; i++)
         {
             if (_buffer.TryRead(out float rawData))
                 data[i] = rawData;
             else
                 data[i] = 0; // silence when there is no data
-        }
+        }*/
     }
+
+    //// Unity audio thread callback
+    //void OnAudioFilterRead(float[] data, int channels)
+    //{
+    //    for (int i = 0; i < data.Length; i++)
+    //    {
+    //        if (_buffer.TryRead(out float rawData))
+    //            data[i] = rawData;
+    //        else
+    //            data[i] = 0; // silence when there is no data
+    //    }
    //}

     public void Initialize()
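On the producer side the players feed their ring buffers the same way (see _buffer.Write(buffer[i] / 32767.0f) in the UEssgee hunk above): signed 16-bit PCM from the core is normalized to roughly -1..1 floats before the audio thread consumes it. A hedged sketch of that submit path, again with Queue<float> standing in for the real sound buffer and with the 16-bit sample layout assumed rather than taken from the diff:

using System.Collections.Generic;

static class PcmSubmit
{
    // Convert signed 16-bit samples from an emulator core into floats
    // for the ring buffer that PullAudio later drains.
    public static void Submit(short[] pcm, int count, Queue<float> buffer)
    {
        for (int i = 0; i < count; i++)
            buffer.Enqueue(pcm[i] / 32767.0f); // -32768..32767 -> roughly -1..1
    }

    // If the core hands over raw little-endian bytes instead, pair them up first.
    public static void SubmitBytes(byte[] raw, Queue<float> buffer)
    {
        for (int i = 0; i + 1 < raw.Length; i += 2)
        {
            short sample = (short)(raw[i] | (raw[i + 1] << 8)); // little-endian 16-bit
            buffer.Enqueue(sample / 32767.0f);
        }
    }
}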

View File

@@ -4,9 +4,15 @@ using System.Collections.Generic;
 using System.Linq;
 using System.Text;
 using UnityEngine;
+using UnityEngine.Audio;

 namespace AxibugEmuOnline.Client
 {
+    public interface AxiAudioPull
+    {
+        public void PullAudio(float[] data, int channels);
+    }
+
     public class AudioMgr : MonoBehaviour
     {
         public enum E_SFXTYPE
@@ -19,33 +25,48 @@ namespace AxibugEmuOnline.Client
             system_ok
         }

-        public Dictionary<E_SFXTYPE, AudioClip> dictAudioClip = new Dictionary<E_SFXTYPE, AudioClip>();
-        private AudioSource mSource;
-        private void Awake()
+        void Awake()
         {
-            mSource = this.gameObject.AddComponent<AudioSource>();
-            LoadAudioClip();
-            PlaySFX(E_SFXTYPE.Launch);
+            DontDestroyOnLoad(gameObject);
+            InitializeAudioSystem();
         }

-        /// <summary>
-        /// Manually set the AudioConfiguration; mainly used to align the sample rates of the emulator cores
-        /// </summary>
-        /// <param name="config"></param>
-        public void SetAudioConfig(AudioConfiguration config)
+        #region
+        Dictionary<E_SFXTYPE, AudioClip> dictAudioClip = new Dictionary<E_SFXTYPE, AudioClip>();
+        void LoadAudioClip()
         {
-            // Apply the new audio configuration
-            if (AudioSettings.Reset(config))
-            {
-                Debug.Log("Audio settings updated successfully.");
-                Debug.Log("Sample Rate: " + config.sampleRate + "Hz");
-                Debug.Log("Speaker Mode: " + config.speakerMode);
-            }
-            else
-            {
-                Debug.LogError("Failed to update audio settings.");
-            }
+            dictAudioClip[E_SFXTYPE.Cancel] = Resources.Load<AudioClip>("Sound/XMBSFX/cancel");
+            dictAudioClip[E_SFXTYPE.Cursor] = Resources.Load<AudioClip>("Sound/XMBSFX/cursor");
+            dictAudioClip[E_SFXTYPE.Option] = Resources.Load<AudioClip>("Sound/XMBSFX/option");
+            dictAudioClip[E_SFXTYPE.Launch] = Resources.Load<AudioClip>("Sound/XMBSFX/StartPSP");
+            dictAudioClip[E_SFXTYPE.system_ng] = Resources.Load<AudioClip>("Sound/XMBSFX/system_ng");
+            dictAudioClip[E_SFXTYPE.system_ok] = Resources.Load<AudioClip>("Sound/XMBSFX/system_ok");
+        }
+        #endregion
+
+        [SerializeField] private AudioMixerGroup _staticGroup; // output group for static SFX (UI, etc.)
+        [Header("静态音效")]
+        [SerializeField] private AudioSource _staticAudioSource; // source used to play static SFX
+        AudioStreamData _audioStreams;
+        private int _targetOutputSampleRate; // output sample rate of Unity's audio system
+
+        /// <summary>
+        /// Initialize the audio system
+        /// </summary>
+        private void InitializeAudioSystem()
+        {
+            AudioSettings.OnAudioConfigurationChanged += OnAudioConfigurationChanged;
+            _targetOutputSampleRate = AudioSettings.outputSampleRate;
+            if (_staticAudioSource == null)
+            {
+                _staticAudioSource = this.gameObject.AddComponent<AudioSource>();
+                _staticAudioSource.outputAudioMixerGroup = _staticGroup;
+            }
+            // set the initial volume
+            SetStaticVolume(0.9f);
+            Debug.Log($"Audio System Initialized. Output Sample Rate: {_targetOutputSampleRate}Hz");
+            LoadAudioClip();
         }

         /// <summary>
@@ -57,28 +78,117 @@ namespace AxibugEmuOnline.Client
             // This function only handles device changes; anything else is not handled here,
             // to avoid a loop between core sample-rate changes and this callback
             if (deviceWasChanged)
             {
-                AudioConfiguration config = AudioSettings.GetConfiguration();
-                AudioSettings.Reset(config);
+                ResetAudioCfg();
+                //AudioConfiguration config = AudioSettings.GetConfiguration();
+                //AudioSettings.Reset(config);
                 //TODO replay the SFX (the DSP does not need it); if UI BGM is added later, add the replay here
             }
         }

-        public void LoadAudioClip()
-        {
-            dictAudioClip[E_SFXTYPE.Cancel] = Resources.Load<AudioClip>("Sound/XMBSFX/cancel");
-            dictAudioClip[E_SFXTYPE.Cursor] = Resources.Load<AudioClip>("Sound/XMBSFX/cursor");
-            dictAudioClip[E_SFXTYPE.Option] = Resources.Load<AudioClip>("Sound/XMBSFX/option");
-            dictAudioClip[E_SFXTYPE.Launch] = Resources.Load<AudioClip>("Sound/XMBSFX/StartPSP");
-            dictAudioClip[E_SFXTYPE.system_ng] = Resources.Load<AudioClip>("Sound/XMBSFX/system_ng");
-            dictAudioClip[E_SFXTYPE.system_ok] = Resources.Load<AudioClip>("Sound/XMBSFX/system_ok");
-        }
+        #region

         public void PlaySFX(E_SFXTYPE type, bool isLoop = false)
         {
-            mSource.clip = dictAudioClip[type];
-            mSource.loop = isLoop;
-            mSource.Play();
+            PlayStaticSound(dictAudioClip[type], 1, 1);
         }
+
+        /// <summary>
+        /// Play a static audio clip (UI SFX, etc.)
+        /// </summary>
+        void PlayStaticSound(AudioClip clip, float volume = 1.0f, float pitch = 1.0f)
+        {
+            if (clip == null) return;
+            _staticAudioSource.pitch = Mathf.Clamp(pitch, 0.5f, 2.0f);
+            _staticAudioSource.PlayOneShot(clip, Mathf.Clamp01(volume));
+        }
+
+        /// <summary>
+        /// Set the static audio volume (linear 0.0 - 1.0)
+        /// </summary>
+        public void SetStaticVolume(float volumeLinear)
+        {
+            if (_staticGroup != null && _staticGroup.audioMixer != null)
+            {
+                float volumeDB = ConvertLinearToDecibel(Mathf.Clamp01(volumeLinear));
+                _staticGroup.audioMixer.SetFloat("StaticVolume", volumeDB);
+            }
+        }
+        #endregion
+
+        #region
+        /// <summary>
+        /// Register a dynamic audio stream channel (an emulator core)
+        /// </summary>
+        /// <param name="channelId">Channel identifier (e.g., "NES", "MAME")</param>
+        /// <param name="inputSampleRate">The channel's source sample rate</param>
+        public void RegisterStream(string channelId, int inputSampleRate, AxiAudioPull audioPullHandle)
+        {
+            _audioStreams = null;
+            _audioStreams = new AudioStreamData(channelId, inputSampleRate, audioPullHandle);
+            ResetAudioCfg();
+        }
+
+        private void ResetAudioCfg()
+        {
+            // Get the current audio configuration
+            AudioConfiguration config = AudioSettings.GetConfiguration();
+            // Set the target audio configuration
+            config.sampleRate = 48000;                    // 48000 Hz sample rate
+            config.numRealVoices = 32;                    // max real voices (optional)
+            config.numVirtualVoices = 512;                // virtual voices (optional)
+            config.dspBufferSize = 1024;                  // DSP buffer size (optional)
+            config.speakerMode = AudioSpeakerMode.Stereo; // stereo (2 channels)
+            // Apply the new audio configuration
+            if (AudioSettings.Reset(config))
+            {
+                Debug.Log("Audio settings updated successfully.");
+                Debug.Log("Sample Rate: " + config.sampleRate + "Hz");
+                Debug.Log("Speaker Mode: " + config.speakerMode);
+            }
+            else
+            {
+                Debug.LogError("Failed to update audio settings.");
+            }
+            _staticAudioSource.Play(); // only to keep the DSP running
+        }
+
+        /// <summary>
+        /// Clear the audio data of the specified channel
+        /// </summary>
+        public void ClearAudioData(string channelId)
+        {
+            if (_audioStreams == null || _audioStreams.channelid != channelId)
+                return;
+            _audioStreams = null;
+        }
+        #endregion
+
+        #region Core Audio Processing (Called automatically by Unity)
+        /// <summary>
+        /// Unity audio-thread callback; all dynamic audio streams are processed and mixed here
+        /// </summary>
+        void OnAudioFilterRead(float[] data, int channels)
+        {
+            if (_audioStreams == null) return;
+            _audioStreams.AxiAudioPullHandle.PullAudio(data, channels);
+            //TODO handle the sample-rate difference here if it ever matters
+            if (_audioStreams.NeedsResampling) { }
+        }
+        #endregion
+
+        #region Helper Methods
+        /// <summary>
+        /// Convert a linear volume value to decibels (dB)
+        /// </summary>
+        private float ConvertLinearToDecibel(float linear)
+        {
+            if (linear <= 0.0001f) return -80.0f; // avoid log10(0)
+            return Mathf.Log10(linear) * 20.0f;
+        }
+        #endregion

         #region
@@ -138,14 +248,6 @@ namespace AxibugEmuOnline.Client
                 ms.Write(dataChunk.GetBytes(), 0, (int)dataChunk.Length());
                 AxiIO.File.WriteAllBytesFromStream(filename, ms);
             }
-
-            //using (FileStream file = new FileStream(filename, FileMode.Create, FileAccess.Write, FileShare.ReadWrite))
-            //{
-            //    file.Write(waveHeader.GetBytes(), 0, (int)waveHeader.Length());
-            //    file.Write(formatChunk.GetBytes(), 0, (int)formatChunk.Length());
-            //    file.Write(dataChunk.GetBytes(), 0, (int)dataChunk.Length());
-            //}
             IsRecording = false;
             OverlayManager.PopTip("录音结束");
         }
@@ -159,22 +261,35 @@ namespace AxibugEmuOnline.Client
         #endregion
     }

+    // Data structure describing a single dynamic audio stream
+    public class AudioStreamData
+    {
+        public string channelid;
+        public int SourceSampleRate;
+        public bool NeedsResampling;
+        public AxiAudioPull AxiAudioPullHandle;
+
+        public AudioStreamData(string channelid, int SourceSampleRate, AxiAudioPull audiohandle)
+        {
+            this.channelid = channelid;
+            this.SourceSampleRate = SourceSampleRate;
+            this.AxiAudioPullHandle = audiohandle;
+            NeedsResampling = SourceSampleRate != AudioSettings.outputSampleRate;
+            AudioSettings.GetDSPBufferSize(out int bufferLength, out int numBuffers);
+        }
+    }
     class WaveHeader
     {
         const string fileTypeId = "RIFF";
         const string mediaTypeId = "WAVE";
         public string FileTypeId { get; private set; }
         public uint FileLength { get; set; }
         public string MediaTypeId { get; private set; }
         public WaveHeader()
         {
             FileTypeId = fileTypeId;
             MediaTypeId = mediaTypeId;
             FileLength = 4; /* Minimum size is always 4 bytes */
         }
         public byte[] GetBytes()
         {
             List<byte> chunkData = new List<byte>();
@@ -185,7 +300,6 @@ namespace AxibugEmuOnline.Client
             return chunkData.ToArray();
         }
         public uint Length()
         {
             return (uint)GetBytes().Length;
@@ -195,35 +309,28 @@ namespace AxibugEmuOnline.Client
     class FormatChunk
     {
         const string chunkId = "fmt ";
         ushort bitsPerSample, channels;
         uint frequency;
         public string ChunkId { get; private set; }
         public uint ChunkSize { get; private set; }
         public ushort FormatTag { get; private set; }
         public ushort Channels
         {
             get { return channels; }
             set { channels = value; RecalcBlockSizes(); }
         }
         public uint Frequency
         {
             get { return frequency; }
             set { frequency = value; RecalcBlockSizes(); }
         }
         public uint AverageBytesPerSec { get; private set; }
         public ushort BlockAlign { get; private set; }
         public ushort BitsPerSample
         {
             get { return bitsPerSample; }
             set { bitsPerSample = value; RecalcBlockSizes(); }
         }
         public FormatChunk()
         {
             ChunkId = chunkId;
@@ -234,20 +341,17 @@ namespace AxibugEmuOnline.Client
             BitsPerSample = 16; /* Default to 16bits */
             RecalcBlockSizes();
         }
         public FormatChunk(int frequency, int channels) : this()
         {
             Channels = (ushort)channels;
             Frequency = (ushort)frequency;
             RecalcBlockSizes();
         }
         private void RecalcBlockSizes()
         {
             BlockAlign = (ushort)(channels * (bitsPerSample / 8));
             AverageBytesPerSec = frequency * BlockAlign;
         }
         public byte[] GetBytes()
         {
             List<byte> chunkBytes = new List<byte>();
@@ -263,7 +367,6 @@ namespace AxibugEmuOnline.Client
             return chunkBytes.ToArray();
         }
         public uint Length()
         {
             return (uint)GetBytes().Length;
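One loose end in the new AudioMgr: AudioStreamData computes NeedsResampling, but the TODO in OnAudioFilterRead leaves the conversion empty and instead relies on ResetAudioCfg forcing a common 48000 Hz output. If the rate difference ever has to be handled in the callback, a per-block linear-interpolation resampler is the usual minimal approach; the sketch below is an assumption about how that TODO could be filled, not code from this commit.

static class LinearResampler
{
    // Resample one interleaved float block from srcRate to dstRate by linear interpolation.
    // src and dst must use the same channel count; dst length decides how many frames to produce.
    public static void Resample(float[] src, int srcRate, float[] dst, int dstRate, int channels)
    {
        int srcFrames = src.Length / channels;
        int dstFrames = dst.Length / channels;
        double step = (double)srcRate / dstRate; // source frames consumed per output frame

        for (int frame = 0; frame < dstFrames; frame++)
        {
            double pos = frame * step;
            int i0 = (int)pos;
            float t = (float)(pos - i0);
            if (i0 >= srcFrames - 1) { i0 = srcFrames - 1; t = 0f; } // clamp at the end of the block
            int i1 = System.Math.Min(i0 + 1, srcFrames - 1);

            for (int ch = 0; ch < channels; ch++)
            {
                float a = src[i0 * channels + ch];
                float b = src[i1 * channels + ch];
                dst[frame * channels + ch] = a + (b - a) * t; // lerp between neighbouring frames
            }
        }
    }
}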