WASAPI adapter

Rewriting a lot
Julia 2024-10-25 13:49:43 -05:00
parent 4864dbc767
commit 49b7a9f7ed
32 changed files with 903 additions and 515 deletions

@@ -63,6 +63,10 @@
"fumocommon.h": "c",
"terminal.h": "c",
"ios": "cpp"
},
"workbench.colorCustomizations": {
"minimap.background": "#00000000",
"scrollbar.shadow": "#00000000"
}
}
}

@@ -3,13 +3,26 @@ const std = @import("std");
pub fn build(b: *std.Build) void {
const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{});
const i_audio = b.addModule("audio_interface", .{
.root_source_file = b.path("libs/audio_interface.zig"),
});
const i_minimal_input = b.addModule("minimal_input_interface", .{
.root_source_file = b.path("libs/minimal_input_interface.zig"),
});
const exe = b.addExecutable(.{
.name = "Fumofumotris",
.root_source_file = b.path("src/main.zig"),
.target = target,
.optimize = optimize,
});
exe.root_module.addImport("audio_interface", i_audio);
exe.root_module.addImport("minimal_input_interface", i_minimal_input);
exe.linkLibC();
exe.linkSystemLibrary("ole32");
b.installArtifact(exe);
}

12
libs/audio_interface.zig Normal file
@@ -0,0 +1,12 @@
const std = @import("std");
pub const IAdapter = struct {
getOutputCount: *const fn (ptr: *anyopaque) usize,
getOutputName: *const fn (ptr: *anyopaque, index: usize) []u8,
setOutput: *const fn (output: usize) void,
};
pub const IUser = struct {
setFormat: *const fn (ptr: *anyopaque, channels: u32, sample_rate: u32) void,
render: *const fn (ptr: *anyopaque, buf: []f32) void,
};
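
A note on the interface above: IUser is the callback table a platform audio adapter drives through a type-erased pointer. A minimal hypothetical caller (the pumpOnce helper and the 2-channel / 48 kHz values are illustrative only, not part of this commit) could look like this:

const audio_interface = @import("audio_interface");

// Hypothetical sketch, not part of this commit: how a platform adapter would
// drive the type-erased user callbacks declared in audio_interface.zig.
fn pumpOnce(user_vt: *const audio_interface.IUser, user_ptr: *anyopaque, buf: []f32) void {
    user_vt.setFormat(user_ptr, 2, 48000); // announce channel count and sample rate first
    user_vt.render(user_ptr, buf); // then ask the engine to fill interleaved f32 samples
}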

@@ -0,0 +1,22 @@
const std = @import("std");
pub const Joystick = struct {
x: i32,
y: i32,
};
pub const Keyboard = struct {
state: u128 = 0,
last_pressed: [128]u32 = std.mem.zeroes([128]u32),
last_released: [128]u32 = std.mem.zeroes([128]u32),
};
pub const Mouse = struct {
pos: Joystick,
wheel: Joystick,
};
pub const Client = struct {
keyboard: Keyboard,
mouse: Mouse,
};

@@ -0,0 +1,16 @@
const Controller = struct {
keyboard: u128,
mouse_x: i16,
mouse_y: i16,
wheel_v: i8,
wheel_h: i8,
pub inline fn keyboardSet(this: *Controller, code: u8, value: u1) void {
const bit: u7 = @intCast(code);
this.keyboard &= ~(@as(u128, 1) << bit);
this.keyboard |= @as(u128, value) << bit;
}
pub inline fn keyboardGet(this: *Controller, code: u8) bool {
const bit: u7 = @intCast(code);
return (this.keyboard >> bit) & 1 == 1;
}
};
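
A quick hypothetical test for the bit-packed keyboard state above (not part of this commit; it sits alongside Controller and imports std inline):

test "keyboard bit set and get" {
    var pad = Controller{ .keyboard = 0, .mouse_x = 0, .mouse_y = 0, .wheel_v = 0, .wheel_h = 0 };
    pad.keyboardSet(0x41, 1); // virtual-key 0x41 ('A') pressed
    try @import("std").testing.expect(pad.keyboardGet(0x41));
    pad.keyboardSet(0x41, 0); // released again
    try @import("std").testing.expect(!pad.keyboardGet(0x41));
}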

65
src/audio.zig Normal file
@@ -0,0 +1,65 @@
const std = @import("std");
const AtomicOrder = std.builtin.AtomicOrder;
const audio_interface = @import("audio_interface");
const platform = @import("platform.zig");
pub const Engine = struct {
const Error = error{PlatformInitializationFailed};
const adapter_user_vt: audio_interface.IUser = .{
.setFormat = &setFormat,
.render = &render,
};
adapter: platform.audio.Adapter = undefined,
channels: u32 = 0,
sample_rate: u32 = 0,
theta: f32 = 0,
pub fn init(engine: *Engine) Error!void {
engine.* = .{};
if (!engine.adapter.init(&adapter_user_vt, engine))
return Error.PlatformInitializationFailed;
}
fn setFormat(ptr: *anyopaque, channels: u32, sample_rate: u32) void {
const engine: *Engine = @alignCast(@ptrCast(ptr));
@atomicStore(u32, &engine.channels, channels, AtomicOrder.unordered);
@atomicStore(u32, &engine.sample_rate, sample_rate, AtomicOrder.unordered);
}
fn render(ptr: *anyopaque, buf: []f32) void {
const engine: *Engine = @alignCast(@ptrCast(ptr));
var i: usize = 0;
while (i < buf.len) : (i += 2) {
buf[i] = testSin(engine.theta) * 0.25;
buf[i + 1] = testSin(engine.theta) * 0.25;
engine.theta += (1.0 / @as(f32, @floatFromInt(engine.sample_rate))) * 440;
}
}
};
pub const PluginMetadata = struct {
name: []u8,
author: []u8,
version: u64,
};
pub const PluginVTable = struct {
init: *const fn (engine: *Engine) anyerror!void,
deinit: *const fn () void,
render: *const fn (buf_in: []f32, buf_out: []f32) void,
};
fn testSin(x: f32) f32 {
const sign: f32 = if (x - @floor(x) > 0.5) -1 else 1;
const mod: f32 = (2 * x - @floor(2 * x)) / 2 - 0.25;
const abs: f32 = 1.25 * (1 / (mod * mod + 0.25)) - 4;
return abs * sign;
}
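
testSin above takes its argument in cycles rather than radians (one full period per unit of x), so its extrema fall on quarter-cycle boundaries. A hypothetical sanity check, not part of this commit, that could sit in the same file:

test "testSin approximates sin(2*pi*x)" {
    const expectApproxEqAbs = std.testing.expectApproxEqAbs;
    try expectApproxEqAbs(@as(f32, 0.0), testSin(0.0), 0.01);
    try expectApproxEqAbs(@as(f32, 1.0), testSin(0.25), 0.01);
    try expectApproxEqAbs(@as(f32, 0.0), testSin(0.5), 0.01);
    try expectApproxEqAbs(@as(f32, -1.0), testSin(0.75), 0.01);
}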

@@ -1,81 +0,0 @@
const Allocator = @import("std").mem.Allocator;
pub const DictionaryError = error{Full};
pub fn Dictionary(comptime K: type, comptime V: type) type {
return struct {
pub const Bucket = struct {
key: K = 0,
value: V = undefined,
};
buckets: []Bucket,
filled: usize = 0,
pub fn init(alloc: Allocator, n: usize) !@This() {
const dict = @This(){
.buckets = try alloc.alloc(Bucket, n),
};
@memset(dict.buckets, Bucket{});
return dict;
}
pub fn find(dict: @This(), key: K) ?*V {
const index: usize = key % dict.buckets.len;
const probe: ?*Bucket = dict.linearProbe(index, key);
if (probe) |bucket| {
return &bucket.value;
} else {
return null;
}
}
pub fn set(hashMap: *@This(), key: K, value: V) !void {
const index: usize = key % hashMap.buckets.len;
const probe: ?*Bucket = hashMap.linProbeEmpty(index, key);
if (probe) |bucket| {
bucket.* = Bucket{ .key = key, .value = value };
hashMap.filled += 1;
} else {
return DictionaryError.Full;
}
}
fn linearProbe(hashMap: @This(), start: usize, hash: usize) ?*Bucket {
var i: usize = 0;
var index: usize = start;
while (i < hashMap.buckets.len) : ({
i += 1;
index = (start + i) % hashMap.buckets.len;
}) {
if (hashMap.buckets[index].key == hash)
return &hashMap.buckets[index];
}
return null;
}
fn linProbeEmpty(hashMap: @This(), start: usize, hash: usize) ?*Bucket {
var i: usize = 0;
var index: usize = start;
while (i < hashMap.buckets.len) : ({
i += 1;
index = (start + i) % hashMap.buckets.len;
}) {
const cur: usize = hashMap.buckets[index].key;
if (cur == 0 or cur == hash)
return &hashMap.buckets[index];
}
return null;
}
};
}

Binary file not shown.

@@ -1,22 +0,0 @@
#include <windows.h>
#include <mmdeviceapi.h>
#include <audioclient.h>
#include <iostream>
using namespace std;
int main()
{
GUID sex = __uuidof(IAudioClient3);
cout << ".{ ";
cout << ".Data1 = 0x" << hex << sex.Data1 << ", ";
cout << ".Data2 = 0x" << hex << sex.Data2 << ", ";
cout << ".Data3 = 0x" << hex << sex.Data3 << ", ";
cout << ".Data4 = .{ ";
for (int i = 0; i < 8; i++)
cout << "0x" << hex << (int)sex.Data4[i] << ", ";
cout << "}}";
return 0;
}

@@ -1,9 +1,20 @@
const std = @import("std");
const Allocator = @import("std").mem.Allocator;
const time = @import("time.zig");
const platform = @import("platform.zig");
pub const Manager = struct {
adapter: platform.input.Adapter = undefined,
keyboard: Keyboard,
mouse: Mouse,
pub fn init(manager: *Manager) !void {
manager.adapter.init();
}
pub fn update(manager: *Manager) void {}
};
pub const EventBuffer = struct {
pub const buf_size = std.atomic.cache_line / 5;
@@ -17,17 +28,12 @@ pub const Joystick = struct {
};
pub const Keyboard = struct {
state: u128,
last_pressed: [128]time.mysec32,
last_released: [128]time.mysec32,
state: u128 = 0,
last_pressed: [128]u32 = std.mem.zeroes([128]u32),
last_released: [128]u32 = std.mem.zeroes([128]u32),
};
pub const Mouse = struct {
pos: Joystick,
wheel: Joystick,
};
pub const Manager = struct {
keyboard: Keyboard,
mouse: Mouse,
};

@@ -1,5 +1,18 @@
const os = @import("os.zig");
const std = @import("std");
const audio = @import("audio.zig");
// const input = @import("input.zig");
pub fn main() !void {
var platform: os.Platform = try os.Platform.init();
var audio_engine: audio.Engine = undefined;
try audio_engine.init();
// var input_manager: input.Manager = undefined;
// try input_manager.init();
// while (true) {
// input_manager.update();
// }
audio_engine.adapter.deinit();
}

@@ -1,30 +0,0 @@
const std = @import("std");
fn fastSin(x: f32) f32 {
const sign: f32 = @as(f32, @floatFromInt(@intFromBool(x < 0.5))) * 2 - 1;
const restricted: f32 = @mod(x, 0.5) - 0.25;
const absolute: f32 = 1.25 * (1 / (restricted * restricted + 0.25)) - 4;
return absolute * sign;
}
const AudioOutput = struct {
sample_rate: f32 = 48000,
buf: [256]f32 = undefined,
sinTheta: f32 = 0,
fn generate(this: *AudioOutput) !void {
var i: usize = 0;
while (i < 128) : (i += 1) {
const sample = fastSin(this.sinTheta);
this.sinTheta += 440 / this.sample_rate;
this.buf[i * 2] = sample;
this.buf[i * 2 + 1] = sample;
}
}
};
fn main() !void {}

@@ -1,5 +0,0 @@
pub usingnamespace switch (@import("builtin").os.tag) {
.windows => @import("windows.zig"),
.linux => @import("linux.zig"),
else => @compileError("Unsupported operating system"),
};

11
src/platform.zig Normal file
@@ -0,0 +1,11 @@
const tag = @import("builtin").os.tag;
const os = switch (tag) {
.windows => @import("platform/windows.zig"),
.linux => @import("platform/linux.zig"),
else => @compileError("Unsupported operating system"),
};
pub const audio = os.audio;
pub const input = os.input;
pub const display = os.display;

2
src/platform/windows.zig Normal file
@@ -0,0 +1,2 @@
pub const audio = @import("windows/wasapi.zig");
pub const input = @import("windows/wincon.zig");
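
Note that platform.zig above re-exports os.audio, os.input, and os.display, while windows.zig currently defines only audio and input, so a display backend (or dropping that re-export) is still needed. A purely hypothetical placeholder, not part of this commit:

// Hypothetical stub so that `pub const display = os.display;` in platform.zig has something to resolve to.
pub const display = struct {};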

@@ -0,0 +1,105 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const win = @import("win_common.zig");
const api = win.api;
pub const Notifier = extern struct {
const c_alloc: Allocator = std.heap.c_allocator;
const vt: api.IMMNotificationClientVtbl = .{
.QueryInterface = &QueryInterface,
.AddRef = &AddRef,
.Release = &Release,
.OnDeviceStateChanged = &OnDeviceStateChanged,
.OnDeviceAdded = &OnDeviceAdded,
.OnDeviceRemoved = &OnDeviceRemoved,
.OnDefaultDeviceChanged = &OnDefaultDeviceChanged,
.OnPropertyValueChanged = &OnPropertyValueChanged,
};
client: api.IMMNotificationClient,
ref_count: u32,
pub fn init() !*Notifier {
var notif: *Notifier = try c_alloc.create(Notifier);
errdefer c_alloc.destroy(notif);
notif.client.lpVtbl = try c_alloc.create(api.IMMNotificationClientVtbl);
notif.client.lpVtbl.* = vt;
notif.ref_count = 0;
return notif;
}
fn QueryInterface(
client: *api.IMMNotificationClient,
iid: *api.IID,
object_out: **anyopaque,
) callconv(.C) api.HRESULT {
if (api.IsEqualIID(iid, &api.IID_IUnknown) or api.IsEqualIID(iid, &api.IID_IMMNotificationClient)) {
_ = AddRef(client);
object_out.* = client;
return api.S_OK;
} else {
object_out.* = null;
return api.E_NOINTERFACE;
}
}
fn AddRef(client: *api.IMMNotificationClient) callconv(.C) u32 {
const notif: *Notifier = @alignCast(@ptrCast(client));
return api.InterlockedIncrement(&notif.ref_count);
}
fn Release(client: *api.IMMNotificationClient) callconv(.C) u32 {
const notif: *Notifier = @alignCast(@ptrCast(client));
const dec: u32 = api.InterlockedDecrement(&notif.ref_count);
if (dec == 0)
c_alloc.destroy(notif);
return dec;
}
fn OnDeviceStateChanged(_: *api.IMMNotificationClient, _: ?[*:0]u16, _: u32) callconv(.C) api.HRESULT {
return api.S_OK;
}
fn OnDeviceAdded(_: *api.IMMNotificationClient, _: ?[*:0]u16) callconv(.C) api.HRESULT {
return api.S_OK;
}
fn OnDeviceRemoved(_: *api.IMMNotificationClient, _: ?[*:0]u16) callconv(.C) api.HRESULT {
return api.S_OK;
}
fn OnDefaultDeviceChanged(
client: *api.IMMNotificationClient,
flow: api.EDataFlow,
role: api.ERole,
id: ?[*:0]u16,
) callconv(.C) api.HRESULT {
if ((flow != api.eRender) or (role != api.eConsole))
return api.S_OK;
const notif: *Notifier = @alignCast(@ptrCast(client));
_ = notif; // TODO: @atomicStore the new default device id
return api.S_OK;
}
fn OnPropertyValueChanged(
client: *api.IMMNotificationClient,
id: ?[*:0]u16,
key: api.PROPERTYKEY,
) callconv(.C) api.HRESULT {
if (api.IsEqualPropertyKey(key, api.PKEY_AudioEngine_DeviceFormat)) {
}
return api.S_OK;
}
};

@@ -0,0 +1,100 @@
const std = @import("std");
const audio_interface = @import("audio_interface");
const Notifier = @import("notification_client.zig").Notifier;
const win = @import("win_common.zig");
const api = win.api;
pub const Adapter = struct {
const Flags = packed struct(u8) {
on: bool = false,
failed: bool = false,
use_default_output: bool = true,
signal_off: bool = false,
signal_output_changed: bool = false,
signal_format_changed: bool = false,
_reserved: u2 = 0, // pad the packed struct to its declared u8 backing integer
};
user_vt: *const audio_interface.IUser,
user_ptr: *anyopaque,
thread: api.HANDLE = undefined,
output_id: [*:0]u16 = undefined,
output_index: u32 = undefined,
flags: Flags = .{},
pub fn init(
adapter: *Adapter,
user_vt: *const audio_interface.IUser,
user_ptr: *anyopaque,
) bool {
adapter.* = Adapter{
.user_vt = user_vt,
.user_ptr = user_ptr,
.thread = api.CreateThread(null, 4096, &threadEntry, adapter, 0, null) orelse
return false,
};
return true;
}
pub fn deinit(adapter: *Adapter) void {
@atomicStore(bool, &adapter.flags.signal_off, true, .seq_cst);
_ = api.WaitForSingleObject(adapter.thread, api.INFINITE);
}
pub fn setOutputIndex(adapter: *Adapter, index: u32) void {
@atomicStore(u32, &adapter.output_index, index, .seq_cst);
@atomicStore(bool, &adapter.flags.signal_output_changed, true, .seq_cst);
}
pub fn setUseDefaultOutput(adapter: *Adapter, use_default_output: bool) void {
@atomicStore(bool, &adapter.flags.use_default_output, use_default_output, .seq_cst);
@atomicStore(bool, &adapter.flags.signal_output_changed, true, .seq_cst);
}
fn threadEntry(ptr: ?*anyopaque) callconv(.C) api.DWORD {
const adapter: *Adapter = @alignCast(@ptrCast(ptr));
@atomicStore(bool, &adapter.flags.on, true, .release);
var hr: api.HRESULT = api.S_OK;
thread(adapter, &hr) catch {};
if (api.FAILED(hr)) {
@atomicStore(bool, &adapter.flags.failed, true, .release);
}
@atomicStore(bool, &adapter.flags.on, false, .release);
return @bitCast(hr);
}
fn thread(adapter: *Adapter, hr: *api.HRESULT) !void {
const client: facade.Client = try facade.Client.init();
// var output: Output = try Output.init(
// hr,
// &facade,
// @atomicLoad(bool, &adapter.use_default_out, .acquire),
// @atomicLoad(u32, &adapter.out_index, .acquire),
// );
_ = hr;
while (@atomicLoad(Request, &adapter.request, .acquire) != Request.turn_off) {}
}
};
const facade = struct {
pub const Client = struct {
hr: api.HRESULT,
device_enumerator: *api.IMMDeviceEnumerator,
notif_client: *Notifier,
buf_event: *anyopaque,
};
pub const Output = struct {
audio_device: *api.IMMDevice,
audio_client: *api.IAudioClient3,
render_client: *api.IAudioRenderClient,
};
};

@@ -0,0 +1,388 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const AtomicOrder = std.builtin.AtomicOrder;
const audio_interface = @import("audio_interface");
const win = @import("win_common.zig");
const api = win.api;
const Error = error{
CreateEvent,
CoInitialize,
CreateDeviceEnumerator,
GetDefaultAudioEndpoint,
RegisterEndpointNotificationCallback,
EnumAudioEndpoints,
DeviceCollectionGetCount,
InvalidOutputDevice,
DeviceCollectionItem,
AudioDeviceGetId,
ActivateAudioClient,
GetMixFormat,
GetSharedModeEnginePeriod,
InitializeSharedAudioStream,
};
pub const Adapter = struct {
const Status = enum(u8) { off, on, failed };
user_vt: *const audio_interface.IUser,
user_ptr: *anyopaque,
thread: api.HANDLE = undefined,
status: Status = .off,
output_id: [*]u16 = undefined,
output_index: u32 = undefined,
use_default_output: bool = true,
pub fn init(adapter: *Adapter, user_vt: *const audio_interface.IUser, user_ptr: *anyopaque) bool {
adapter.* = .{
.user_vt = user_vt,
.user_ptr = user_ptr,
.thread = api.CreateThread(null, 4096, &threadEntry, adapter, 0, null) orelse
return false,
};
return true;
}
pub fn deinit(adapter: *Adapter) void {
@atomicStore(bool, &adapter.is_active, false, AtomicOrder.unordered);
_ = api.WaitForSingleObject(adapter.thread, 0);
}
fn threadEntry(ptr: ?*anyopaque) callconv(.C) api.DWORD {
const adapter: *Adapter = @alignCast(@ptrCast(ptr));
@atomicStore(Status, &adapter.status, .on, AtomicOrder.unordered);
const hr: api.HRESULT = thread(adapter);
if (api.FAILED(hr)) {
@atomicStore(Status, &adapter.status, .failed, AtomicOrder.unordered);
return @bitCast(hr);
}
return 0;
}
fn thread(adapter: *Adapter, hr: *api.HRESULT) !void {
hr = api.CoInitialize(null);
defer api.CoUninitialize();
if (api.FAILED(hr))
return Error.CoInitialize;
const client: WasapiClient = try WasapiClient.init(hr);
const output: WasapiOutput = try WasapiOutput.init(hr, &client, adapter);
// get id of audio device
hr = audio_device.lpVtbl.*.GetId.?(audio_device, &id);
if (api.FAILED(hr)) return Error.AudioDeviceGetId;
errdefer api.CoTaskMemFree(id);
// get audio client
hr = audio_device.lpVtbl.*.Activate.?(audio_device, @ptrCast(&api.IID_IAudioClient3), api.CLSCTX_ALL, null, @ptrCast(&audio_client));
if (api.FAILED(hr)) return Error.ActivateAudioClient;
errdefer _ = audio_client.lpVtbl.*.Release(audio_client);
// negotiate format
var negotiated_fmt: api.WAVEFORMATEX = undefined;
{
var device_fmt: *api.WAVEFORMATEX = undefined;
hr = audio_client.lpVtbl.*.GetMixFormat.?(audio_client, @ptrCast(&device_fmt));
if (api.FAILED(hr)) return Error.GetMixFormat;
negotiated_fmt = .{
.wFormatTag = api.WAVE_FORMAT_IEEE_FLOAT,
.nChannels = device_fmt.nChannels,
.nSamplesPerSec = device_fmt.nSamplesPerSec,
.nAvgBytesPerSec = device_fmt.nSamplesPerSec * device_fmt.nChannels * @sizeOf(f32),
.nBlockAlign = device_fmt.nChannels * @sizeOf(f32),
.wBitsPerSample = @bitSizeOf(f32),
.cbSize = 0,
};
api.CoTaskMemFree(device_fmt);
}
// get default period (the others aren't used)
var default_period: u32 = undefined;
var fundamental_period: u32 = undefined;
var min_period: u32 = undefined;
var max_period: u32 = undefined;
hr = audio_client.lpVtbl.*.GetSharedModeEnginePeriod.?(audio_client, &negotiated_fmt, &default_period, &fundamental_period, &min_period, &max_period);
if (api.FAILED(hr)) return Error.GetSharedModeEnginePeriod;
// FINALLY we initialize the audio stream
hr = audio_client.lpVtbl.*.InitializeSharedAudioStream.?(audio_client, api.AUDCLNT_STREAMFLAGS_EVENTCALLBACK, default_period, &negotiated_fmt, null);
if (api.FAILED(hr)) return Error.InitializeSharedAudioStream;
hr = audio_client.lpVtbl.*.SetEventHandle.?(audio_client, buf_event);
if (api.FAILED(hr)) return hr;
// Get render client
hr = audio_client.lpVtbl.*.GetService.?(audio_client, @ptrCast(&api.IID_IAudioRenderClient), @ptrCast(&render_client));
if (api.FAILED(hr)) return hr;
// Start audio client
hr = audio_client.lpVtbl.*.Start.?(audio_client);
if (api.FAILED(hr)) return hr;
var buf_frames: u32 = undefined;
hr = audio_client.lpVtbl.*.GetBufferSize.?(audio_client, &buf_frames);
if (api.FAILED(hr)) return hr;
adapter.channels = negotiated_fmt.nChannels;
adapter.sample_rate = negotiated_fmt.nSamplesPerSec;
while (true) : ({
if (@atomicLoad(bool, &adapter.is_active, AtomicOrder.unordered) == false)
break;
}) {
var pad_frames: u32 = undefined;
var buf: [*]f32 = undefined;
if (api.WaitForSingleObject(buf_event, api.INFINITE) != api.STATUS_WAIT_0)
return win.getLastHresult();
hr = audio_client.lpVtbl.*.GetCurrentPadding.?(audio_client, &pad_frames);
if (api.FAILED(hr)) return hr;
const avail_frames: u32 = buf_frames - pad_frames;
if (avail_frames == 0)
continue;
hr = render_client.lpVtbl.*.GetBuffer.?(render_client, avail_frames, @ptrCast(&buf));
if (api.FAILED(hr)) return hr;
adapter.vt.render(adapter.ptr, buf[0 .. avail_frames * negotiated_fmt.nChannels]);
hr = render_client.lpVtbl.*.ReleaseBuffer.?(render_client, avail_frames, 0);
if (api.FAILED(hr)) return hr;
}
_ = device_enumerator.lpVtbl.*.Release.?(device_enumerator);
_ = audio_client.lpVtbl.*.Release.?(audio_client);
_ = render_client.lpVtbl.*.Release.?(render_client);
_ = api.CoUninitialize();
return api.S_OK;
}
};
const WasapiClient = struct {
device_enumerator: *api.IMMDeviceEnumerator,
notif_client: *NotifClient,
buf_event: *anyopaque,
pub fn init(hr: *api.HRESULT) Error!WasapiClient {
var client: WasapiClient = undefined;
hr = api.CoCreateInstance(
&api.CLSID_MMDeviceEnumerator,
null,
api.CLSCTX_ALL,
&api.IID_IMMDeviceEnumerator,
&client.device_enumerator,
);
if (api.FAILED(hr))
return Error.CreateDeviceEnumerator;
client.notif_client = try NotifClient.init();
hr = client.device_enumerator.lpVtbl.*.RegisterEndpointNotificationCallback(
client.device_enumerator,
@ptrCast(client.notif_client),
);
if (api.FAILED(hr))
return Error.RegisterEndpointNotificationCallback;
client.buf_event = api.CreateEventW(null, 0, 0, null) orelse
return Error.CreateEvent;
return client;
}
};
const WasapiOutput = struct {
audio_device: *api.IMMDevice,
id: [*:0]u16,
audio_client: *api.IAudioClient3,
render_client: *api.IAudioRenderClient,
pub fn init(hr: *api.HRESULT, client: *WasapiClient, adapter: *Adapter) Error!WasapiOutput {
var output: WasapiOutput = undefined;
var use_default_output = @atomicLoad(bool, &adapter.use_default_output, AtomicOrder.acquire);
if (!use_default_output) {
const index = @atomicLoad(u32, &adapter.output_index, AtomicOrder.acquire);
output.audio_device = getNumberedAudioDevice(hr, index) catch |err| switch (err) {};
}
if (use_default_output) {}
}
fn getNumberedAudioDevice(hr: *api.HRESULT, index: u32) Error!*api.IMMDevice {
var device_collection: api.IMMDeviceCollection = undefined;
hr = client.device_enumerator.lpVtbl.*.EnumAudioEndpoints.?(
client.device_enumerator,
api.eRender,
api.DEVICE_STATE_ACTIVE,
&device_collection,
);
if (api.FAILED(hr))
return Error.EnumAudioEndpoints;
var device_count: u32 = undefined;
hr = device_collection.lpVtbl.*.GetCount.?(
device_collection,
&device_count,
);
if (api.FAILED(hr))
return Error.DeviceCollectionGetCount;
if (index >= device_count)
return Error.InvalidOutputDevice;
hr = device_collection.lpVtbl.*.Item.?(
device_collection,
index,
@ptrCast(&output.audio_client),
);
if (api.FAILED(hr))
return Error.DeviceCollectionItem;
}
fn getDefaultAudioDevice(
hr: *api.HRESULT,
device_enumerator: *api.IMMDeviceEnumerator,
) Error!*api.IMMDevice {
var audio_device: *api.IMMDevice = undefined;
hr = device_enumerator.lpVtbl.*.GetDefaultAudioEndpoint.?(
device_enumerator,
api.eRender,
api.eConsole,
@ptrCast(&audio_device),
);
if (api.FAILED(hr))
return Error.GetDefaultAudioEndpoint;
return audio_device;
}
};
const NotifClient = extern struct {
const c_alloc: Allocator = std.heap.c_allocator;
const imm_notif_client_vt: api.IMMNotificationClientVtbl = .{
.QueryInterface = &QueryInterface,
.AddRef = &AddRef,
.Release = &Release,
.OnDeviceStateChanged = &OnDeviceStateChanged,
.OnDeviceAdded = &OnDeviceAdded,
.OnDeviceRemoved = &OnDeviceRemoved,
.OnDefaultDeviceChanged = &OnDefaultDeviceChanged,
.OnPropertyValueChanged = &OnPropertyValueChanged,
};
imm_client: api.IMMNotificationClient = undefined,
ref_count: u32 = 0,
pub fn init() !*NotifClient {
var notif: *NotifClient = try c_alloc.create(NotifClient);
errdefer c_alloc.destroy(notif);
notif.* = NotifClient{};
notif.imm_client.lpVtbl = try c_alloc.create(api.IMMNotificationClientVtbl);
notif.imm_client.lpVtbl.* = imm_notif_client_vt;
return notif;
}
fn QueryInterface(
ptr: *api.IMMNotificationClient,
iid: *api.IID,
obj: **anyopaque,
) callconv(.C) api.HRESULT {
if (api.IsEqualIID(iid, &api.IID_IUnknown) or
api.IsEqualIID(iid, &api.IID_IMMNotificationClient))
{
_ = AddRef(ptr);
obj.* = ptr;
return api.S_OK;
} else {
obj.* = null;
return api.E_NOINTERFACE;
}
}
fn AddRef(ptr: *api.IMMNotificationClient) callconv(.C) u32 {
const notif: *NotifClient = @alignCast(@ptrCast(ptr));
return api.InterlockedIncrement(&notif.ref_count);
}
fn Release(ptr: *api.IMMNotificationClient) callconv(.C) u32 {
const notif: *NotifClient = @alignCast(@ptrCast(ptr));
const dec: u32 = api.InterlockedDecrement(&notif.ref_count);
if (dec == 0)
c_alloc.destroy(notif);
return dec;
}
fn OnDeviceStateChanged(
ptr: *api.IMMNotificationClient,
id: [*]u16,
state: u32,
) callconv(.C) api.HRESULT {
// recheck sample rate and channels
_ = ptr;
_ = id;
_ = state;
return api.S_OK;
}
fn OnDeviceAdded(
_: *api.IMMNotificationClient,
_: [*]u16,
) callconv(.C) api.HRESULT {
return api.S_OK;
}
fn OnDeviceRemoved(
_: *api.IMMNotificationClient,
_: [*]u16,
) callconv(.C) api.HRESULT {
return api.S_OK;
}
fn OnDefaultDeviceChanged(
client: *api.IMMNotificationClient,
flow: api.EDataFlow,
role: api.ERole,
id: [*]u16,
) callconv(.C) api.HRESULT {
_ = client;
_ = flow;
_ = role;
_ = id;
return 0;
}
fn OnPropertyValueChanged(
client: *api.IMMNotificationClient,
id: [*]u16,
key: api.PROPERTYKEY,
) callconv(.C) api.HRESULT {
_ = client;
_ = id;
_ = key;
return 0;
}
};
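
One detail worth spelling out from the render loop in Adapter.thread above: GetCurrentPadding and GetBuffer count frames, while the user render callback receives a slice of interleaved f32 samples, so the slice length is frames times channels. A small hypothetical check (the numbers and the test itself are illustrative, not part of this commit):

const testing = @import("std").testing;

test "frames versus interleaved samples" {
    const channels: u32 = 2; // stereo
    const buf_frames: u32 = 480; // e.g. one 10 ms period at 48 kHz
    const pad_frames: u32 = 128; // frames still queued, as reported by GetCurrentPadding
    const avail_frames = buf_frames - pad_frames;
    try testing.expectEqual(@as(u32, 352), avail_frames);
    try testing.expectEqual(@as(u32, 704), avail_frames * channels); // f32 samples handed to render()
}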

@@ -0,0 +1,43 @@
const std = @import("std");
const W = std.unicode.utf8ToUtf16LeStringLiteral;
pub const api = @cImport({
@cInclude("initguid.h");
@cInclude("windows.h");
@cInclude("mmdeviceapi.h");
@cInclude("audioclient.h");
@cInclude("strsafe.h");
});
pub fn panic(err: api.DWORD) void {
var buf: [256]u16 = undefined;
var end: [*]u16 = undefined;
var remaining: usize = undefined;
_ = api.StringCchPrintfExW(
&buf,
buf.len,
@ptrCast(&end),
&remaining,
0,
W("Windows error 0x%x: "),
err,
);
_ = api.FormatMessageW(
api.FORMAT_MESSAGE_FROM_SYSTEM | api.FORMAT_MESSAGE_IGNORE_INSERTS,
null,
err,
api.MAKELANGID(api.LANG_NEUTRAL, api.SUBLANG_DEFAULT),
end,
@intCast(remaining),
null,
);
api.FatalAppExitW(0, &buf);
}
pub inline fn getLastHresult() api.HRESULT {
const err = api.GetLastError();
if (err == 0) return api.S_OK;
return @bitCast((err & 0x0000ffff) | (api.FACILITY_WIN32 << 16) | 0x80000000);
}

@@ -0,0 +1,88 @@
// const std = @import("std");
// const win = @import("win_common.zig");
// const api = win.api;
// pub const Joystick = struct {
// x: i32,
// y: i32,
// };
// pub const Keyboard = struct {
// state: u128 = 0,
// last_pressed: [128]u32 = std.mem.zeroes(),
// last_released: [128]u32 = std.mem.zeroes(),
// };
// pub const Mouse = struct {
// pos: Joystick,
// wheel: Joystick,
// };
// pub const Adapter = struct {
// stdin_handle: *anyopaque,
// pub fn init(adapter: *Adapter) bool {
// const console_mode_flags: win.DWORD =
// win.ENABLE_EXTENDED_FLAGS |
// win.ENABLE_PROCESSED_INPUT |
// win.ENABLE_PROCESSED_OUTPUT |
// win.ENABLE_MOUSE_INPUT |
// win.ENABLE_WINDOW_INPUT;
// adapter.stdin_handle = api.GetStdHandle(api.STD_INPUT_HANDLE) orelse
// return false;
// if (api.SetConsoleMode(adapter.stdin_handle, console_mode_flags) == 0)
// return false;
// return true;
// }
// pub fn readInput(adapter: *Adapter) bool {
// }
// };
// fn inputWorker(platform_void: *anyopaque) callconv(.C) win.DWORD {
// const platform: *Windows = @ptrCast(platform_void);
// while (true) {
// var event_buf: [6]win.INPUT_RECORD = undefined;
// var events_read: c_ulong = undefined;
// if (win.ReadConsoleInputW(
// platform.std_handle,
// &event_buf,
// event_buf.len,
// &events_read,
// ) == 0) {
// return hresultFromWin32(win.GetLastError());
// }
// var uhh: @import("terminal_input.zig").Controller = 0;
// for (event_buf[0..events_read]) |*record| {
// switch (record.EventType) {
// win.KEY_EVENT => uhh.keyboardSet(@intCast(record.Event.KeyEvent.wVirtualKeyCode), record.Event.KeyEvent.bKeyDown),
// win.MOUSE_EVENT => switch (record.Event.MouseEvent.dwEventFlags) {
// win.MOUSE_MOVED => {
// uhh.mouse_x = record.Event.MouseEvent.dwMousePosition.X;
// uhh.mouse_y = record.Event.MouseEvent.dwMousePosition.Y;
// },
// win.MOUSE_WHEELED => {
// uhh.wheel_v = @intCast(@as(i16, (@bitCast(@as(u16, @intCast(record.Event.MouseEvent.dwButtonState >> 16))))));
// },
// win.MOUSE_HWHEELED => {
// uhh.wheel_h = @intCast(@as(i16, (@bitCast(@as(u16, @intCast(record.Event.MouseEvent.dwButtonState >> 16))))));
// },
// else => {},
// },
// else => {}, // implement this!
// }
// }
// }
// return 0;
// }
// };

4
src/plugintest.zig Normal file
@@ -0,0 +1,4 @@
const std = @import("std");
const audio = @import("audio.zig");
export fn load() audio.PluginMetadata {}

@@ -1,2 +0,0 @@
pub const mysec32 = u32;
pub const mysec64 = u64;

@@ -1,30 +0,0 @@
struct WavFileHeader { // Offset Size Type Description
// ----------------------------------------------------------------------------------------------------------------
unsigned RiffChunk; // 0 4 FourCC 'RIFF'
unsigned ChunkSize; // 4 4 DWord size of the riff chunk (should be always filesize - 8)
unsigned FileFormat; // 8 4 FourCC 'WAVE'
unsigned FormatChunk; // 12 4 FourCC 'fmt '
unsigned FormatSize; // 16 4 DWord size of Format structure (should be always 16 byte)
unsigned short PcmFlags; // 20 2 Word bit 1: Signed data, bit 2: Float data..
unsigned short Channels; // 22 2 Word samples per frame (example: one stereo frame consist from 2 samples)
unsigned SampleRate; // 24 4 DWord frames per second (example: 44100 stereo-frames are played back per seccond)
unsigned ByteRate; // 28 4 DWord bytes per second (example: one second float32 stereo-track data: 44100frames * 2channels * 4bytes )
unsigned short BlockAlign; // 32 2 Word byte per frame (example: each float32 stero frame is 8 byte in size - one float is 4byte - 2 channels are 2 floats, each 4byte)
unsigned short BitDepth; // 34 2 Word bits per sample (example: one float32 is 4 byte where each byte has 8 bit... so: 32 bit per sample ) */
unsigned DataChunk; // 36 4 FourCC 'data'
unsigned DataSize; // 40 4 DWord size of of payload data (should be the total file size minus this headers size of 44 byte)
};
const WavHeader = packed struct {
riff_id: [4]u8, // "RIFF"
riff_size: u32,
riff_fmt: [4]u8, // "WAVE"
fmt_id: [4]u8, // "fmt "
fmt_size: u32,
audio_format: u16,
channels: u16,
sample_rate: u32,
};

@@ -1,103 +0,0 @@
const win = @cImport(@cInclude("windows.h"));
const input = @import("input.zig");
pub const Platform = struct {
pub const WindowsError = error{
GetStdHandle,
CreateWaitableTimerW,
ReadConsoleInputW,
SetConsoleMode,
};
const max_events = input.EventBuffer.buf_size / 2;
const console_mode_flags =
win.ENABLE_EXTENDED_FLAGS |
win.ENABLE_PROCESSED_INPUT |
win.ENABLE_PROCESSED_OUTPUT |
win.ENABLE_MOUSE_INPUT |
win.ENABLE_WINDOW_INPUT;
std_handle: *anyopaque,
timer_handle: *anyopaque,
pub fn init() WindowsError!Platform {
var platform: Platform = undefined;
platform.std_handle = win.GetStdHandle(win.STD_INPUT_HANDLE) orelse return WindowsError.GetStdHandle;
platform.timer_handle = win.CreateWaitableTimerW(null, 1, null) orelse return WindowsError.CreateWaitableTimerW;
if (win.SetConsoleMode(platform.std_handle, console_mode_flags) == 0) {
return WindowsError.SetConsoleMode;
}
return platform;
}
pub fn readInput(platform: *Platform, buf: *input.EventBuffer) WindowsError!usize {
var event_buf: [max_events]win.INPUT_RECORD = undefined;
var events_read: c_ulong = undefined;
if (win.ReadConsoleInputW(
platform.std_handle,
&event_buf,
max_events,
&events_read,
) == 0) {
return WindowsError.ReadConsoleInputW;
}
var game_events: usize = 0;
for (event_buf[0..events_read]) |*record| {
game_events += translateEvent(record, buf);
}
return game_events;
}
// ugh
fn translateEvent(record: *win.INPUT_RECORD, buf: *input.EventBuffer) usize {
return switch (record.EventType) {
win.KEY_EVENT => {
buf.codes[n] = record.Event.KeyEvent.wVirtualKeyCode;
buf[0] = input.Event{
record.Event.KeyEvent.wVirtualKeyCode,
@intCast(record.Event.KeyEvent.bKeyDown),
};
return 1;
},
win.MOUSE_EVENT => switch (record.Event.MouseEvent.dwEventFlags) {
win.MOUSE_MOVED => {
buf[0] = input.Event{
@intFromEnum(VkCode.mouse_x),
record.Event.MouseEvent.dwMousePosition.X,
};
buf[1] = input.Event{
@intFromEnum(VkCode.mouse_y),
record.Event.MouseEvent.dwMousePosition.Y,
};
return 2;
},
win.MOUSE_WHEELED => {
buf[0] = input.Event{
@intFromEnum(VkCode.mouse_vwheel),
@as(i16, @bitCast(@as(u16, @intCast(record.Event.MouseEvent.dwButtonState >> 16)))),
};
return 1;
},
win.MOUSE_HWHEELED => {
buf[0] = input.Event{
@intFromEnum(VkCode.mouse_hwheel),
@intCast(record.Event.MouseEvent.dwButtonState), // this is broken, haven't tested
};
return 1;
},
else => 0,
},
win.WINDOW_BUFFER_SIZE_EVENT => unreachable, // implement this!!!
else => 0,
};
}
};

Binary file not shown.

Binary file not shown.

Binary file not shown.

@@ -1,231 +0,0 @@
const std = @import("std");
const W = std.unicode.utf8ToUtf16LeStringLiteral;
const win = @cImport({
@cInclude("windows.h");
@cInclude("mmdeviceapi.h");
});
const wasapi = @cImport(@cInclude("audioclient.h"));
const CLSID_MMDeviceEnumerator: win.CLSID = .{ .Data1 = 0xbcde0395, .Data2 = 0xe52f, .Data3 = 0x467c, .Data4 = .{ 0x8e, 0x3d, 0xc4, 0x57, 0x92, 0x91, 0x69, 0x2e } };
const IID_IMMDeviceEnumerator: win.IID = .{ .Data1 = 0xa95664d2, .Data2 = 0x9614, .Data3 = 0x4f35, .Data4 = .{ 0xa7, 0x46, 0xde, 0x8d, 0xb6, 0x36, 0x17, 0xe6 } };
const IID_IAudioClient: win.IID = .{ .Data1 = 0x1cb9ad4c, .Data2 = 0xdbfa, .Data3 = 0x4c32, .Data4 = .{ 0xb1, 0x78, 0xc2, 0xf5, 0x68, 0xa7, 0x3, 0xb2 } };
const IID_IAudioClient3: win.IID = .{ .Data1 = 0x7ed4ee07, .Data2 = 0x8e67, .Data3 = 0x4cd4, .Data4 = .{ 0x8c, 0x1a, 0x2b, 0x7a, 0x59, 0x87, 0xad, 0x42 } };
const IID_IAudioRenderClient: win.IID = .{ .Data1 = 0xf294acfc, .Data2 = 0x3146, .Data3 = 0x4483, .Data4 = .{ 0xa7, 0xbf, 0xad, 0xdc, 0xa7, 0xc2, 0x60, 0xe2 } };
pub export fn WindowProc(window: win.HWND, msg: c_uint, w_param: win.WPARAM, l_param: win.LPARAM) callconv(.C) win.LRESULT {
switch (msg) {
win.WM_KEYDOWN => {
std.debug.print("keydown: {}\n", .{w_param});
},
else => {},
}
return win.DefWindowProcW(window, msg, w_param, l_param);
}
pub fn main() !void {
var rid: [1]win.RAWINPUTDEVICE = undefined;
rid[0].usUsagePage = 0x01;
rid[0].usUsage = 0x06;
rid[0].dwFlags = 0; // win.RIDEV_INPUTSINK;
rid[0].hwndTarget = null;
if (win.RegisterRawInputDevices(@ptrCast(&rid), 1, @sizeOf(win.RAWINPUTDEVICE)) == 0) {
std.debug.print("FUCK {}\n", .{win.GetLastError()});
return;
}
var message: win.MSG = undefined;
while (win.GetMessageW(&message, null, 0, 0) != 0) {
std.debug.print("meow\n", .{});
_ = win.TranslateMessage(&message);
_ = win.DispatchMessageW(&message);
}
}
pub const WindowsAudio = struct {
pub fn init() i32 {}
fn audioWorker() i32 {
if (failed(win.CoInitialize(null))) |err| return err;
const buf_event: *anyopaque = win.CreateEventW(
null,
win.FALSE,
win.FALSE,
null,
) orelse return hresultFromWin32(win.GetLastError());
// Get default audio device
var device_enumerator: *win.IMMDeviceEnumerator = undefined;
var audio_device: *win.IMMDevice = undefined;
if (failed(win.CoCreateInstance(
@ptrCast(&CLSID_MMDeviceEnumerator),
null,
win.CLSCTX_ALL,
@ptrCast(&IID_IMMDeviceEnumerator),
@ptrCast(&device_enumerator),
))) |err| return err;
if (failed(device_enumerator.lpVtbl.*.GetDefaultAudioEndpoint.?(
device_enumerator,
win.eRender,
win.eConsole,
@ptrCast(&audio_device),
))) |err| return err;
// Initialize audio client
var wasapi_audio_client: *wasapi.IAudioClient3 = undefined;
var device_format: *wasapi.WAVEFORMATEX = undefined;
var default_period: u32 = undefined;
var fundamental_period: u32 = undefined;
var min_period: u32 = undefined;
var max_period: u32 = undefined;
if (failed(audio_device.lpVtbl.*.Activate.?(
audio_device,
@ptrCast(&IID_IAudioClient3),
win.CLSCTX_ALL,
null,
@ptrCast(&wasapi_audio_client),
))) |err| return err;
if (failed(wasapi_audio_client.lpVtbl.*.GetMixFormat.?(
wasapi_audio_client,
@ptrCast(&device_format),
))) |err| return err;
const negotiated_format: wasapi.WAVEFORMATEX = .{
.wFormatTag = wasapi.WAVE_FORMAT_IEEE_FLOAT,
.nChannels = 2,
.nSamplesPerSec = device_format.nSamplesPerSec,
.nAvgBytesPerSec = device_format.nSamplesPerSec * 2 * @sizeOf(f32),
.nBlockAlign = 2 * @sizeOf(f32),
.wBitsPerSample = @bitSizeOf(f32),
.cbSize = 0,
};
win.CoTaskMemFree(device_format);
if (failed(wasapi_audio_client.lpVtbl.*.GetSharedModeEnginePeriod.?(
wasapi_audio_client,
&negotiated_format,
&default_period,
&fundamental_period,
&min_period,
&max_period,
))) |err| return err;
if (failed(wasapi_audio_client.lpVtbl.*.InitializeSharedAudioStream.?(
wasapi_audio_client,
wasapi.AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
default_period,
&negotiated_format,
null,
))) |err| return err;
if (failed(wasapi_audio_client.lpVtbl.*.SetEventHandle.?(
wasapi_audio_client,
buf_event,
))) |err| return err;
// Get render client
var wasapi_render_client: *wasapi.IAudioRenderClient = undefined;
if (failed(wasapi_audio_client.lpVtbl.*.GetService.?(
wasapi_audio_client,
@ptrCast(&IID_IAudioRenderClient),
@ptrCast(&wasapi_render_client),
))) |err| return err;
// Start audio client
var buf_frame_size: u32 = undefined;
if (failed(wasapi_audio_client.lpVtbl.*.Start.?(
wasapi_audio_client,
))) |err| return err;
if (failed(wasapi_audio_client.lpVtbl.*.GetBufferSize.?(
wasapi_audio_client,
&buf_frame_size,
))) |err| return err;
// Audio worker loop
var theta: f32 = 0;
var i: usize = 0;
while (i < (48000 * 5) / buf_frame_size) : (i += 1) {
if (win.WaitForSingleObject(buf_event, win.INFINITE) != win.WAIT_OBJECT_0) {
return hresultFromWin32(win.GetLastError());
}
var pad_frame_size: u32 = undefined;
if (failed(wasapi_audio_client.lpVtbl.*.GetCurrentPadding.?(
wasapi_audio_client,
&pad_frame_size,
))) |err| return err;
const available_frame_size: u32 = buf_frame_size - pad_frame_size;
std.debug.print("available: {}\n", .{available_frame_size});
if (available_frame_size == 0)
continue;
var buf: [*]f32 = undefined;
if (failed(wasapi_render_client.lpVtbl.*.GetBuffer.?(
wasapi_render_client,
available_frame_size,
@ptrCast(&buf),
))) |err| return err;
var j: usize = 0;
while (j < available_frame_size) : (j += 1) {
theta += 1.0 / 48000.0;
const sample: f32 = (fastSin(theta * 261.63) + fastSin(theta * 329.63) + fastSin(theta * 390)) * 0.25;
buf[j * 2] = sample;
buf[j * 2 + 1] = sample;
}
if (failed(wasapi_render_client.lpVtbl.*.ReleaseBuffer.?(
wasapi_render_client,
available_frame_size,
0,
))) |err| return err;
}
return 0;
}
inline fn failed(hresult: win.HRESULT) ?i32 {
if (win.FAILED(hresult)) {
return hresult;
} else {
return null;
}
}
inline fn hresultFromWin32(x: u32) i32 {
if (x == 0) {
return 0;
} else {
return @bitCast((x & 0x0000ffff) | (win.FACILITY_WIN32 << 16) | 0x80000000);
}
}
};
fn fastSin(x: f32) f32 {
const sign: f32 = if (x - @floor(x) > 0.5) -1 else 1;
const mod: f32 = (2 * x - @floor(2 * x)) / 2 - 0.25;
const abs: f32 = 1.25 * (1 / (mod * mod + 0.25)) - 4;
return abs * sign;
}
// pub fn main() !void {
// std.debug.print("exiting with code: {x}\n", .{@as(u32, @bitCast(WindowsAudio.audioWorker()))});
// }

Binary file not shown.

Binary file not shown.