diff --git a/.gitignore b/.gitignore
index ee15be5..b18daed 100755
--- a/.gitignore
+++ b/.gitignore
@@ -33,6 +33,8 @@ images/
lib/flutter_soloud_FFIGEN.dart
web/build
+web/worker.dart.js.deps
+web/worker.dart.js.map
**/android/caches
**/android/.tmp
diff --git a/.vscode/c_cpp_properties.json b/.vscode/c_cpp_properties.json
index d112e0a..7195ddf 100755
--- a/.vscode/c_cpp_properties.json
+++ b/.vscode/c_cpp_properties.json
@@ -15,6 +15,20 @@
"intelliSenseMode": "linux-clang-x64",
"configurationProvider": "ms-vscode.cmake-tools"
},
+ {
+ "name": "Chrome",
+ "includePath": [
+ "${workspaceFolder}/**",
+ "${workspaceFolder}/src",
+ "/usr/lib/emscripten/system/include"
+ ],
+ "defines": ["WITH_MINIAUDIO", "DR_MP3_IMPLEMENTATION", "__EMSCRIPTEN__"], // to see the code in between "#if defined"
+ "compilerPath": "/usr/bin/clang",
+ "cStandard": "c17",
+ "cppStandard": "c++17",
+ "intelliSenseMode": "${default}",
+ "configurationProvider": "ms-vscode.cmake-tools"
+ },
{
"name": "Win32",
"includePath": [
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 8e76531..72ab720 100755
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -78,6 +78,12 @@
"request": "launch",
"program": "${workspaceFolder}/example/build/linux/x64/debug/bundle/flutter_soloud_example",
"cwd": "${workspaceFolder}"
+ },
+ {
+ "name": "Chrome",
+ "type": "chrome",
+ "preLaunchTask": "compile web debug",
+ "request": "launch"
}
]
}
\ No newline at end of file
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
index 3c6802d..729c81a 100755
--- a/.vscode/tasks.json
+++ b/.vscode/tasks.json
@@ -10,49 +10,27 @@
{
"label": "compile linux debug",
"command": "cd ${workspaceFolder}/example; flutter build linux -t lib/main.dart --debug",
- // "args": ["build", "linux"],
"type": "shell"
},
{
"label": "compile linux test debug",
"command": "cd ${workspaceFolder}/example; flutter build linux -t tests/tests.dart --debug",
- // "args": ["build", "linux"],
"type": "shell"
},
{
"label": "compile windows debug verbose",
"command": "cd ${workspaceFolder}/example; flutter build windows -t lib/main.dart --debug --verbose",
- // "args": ["build", "linux"],
"type": "shell"
},
{
"label": "compile windows debug",
"command": "cd ${workspaceFolder}/example; flutter build windows -t lib/main.dart --debug",
- // "args": ["build", "linux"],
"type": "shell"
},
{
- "type": "cppbuild",
- "label": "C/C++: gcc build active file",
- "command": "/usr/bin/gcc",
- "args": [
- "-fdiagnostics-color=always",
- "-g",
- "${file}",
- "-o",
- "${fileDirname}/${fileBasenameNoExtension}"
- ],
- "options": {
- "cwd": "${fileDirname}"
- },
- "problemMatcher": [
- "$gcc"
- ],
- "group": {
- "kind": "build",
- "isDefault": true
- },
- "detail": "Task generated by Debugger."
+ "label": "compile web debug",
+ "command": "cd ${workspaceFolder}/example; flutter run -d chrome --web-renderer canvaskit --web-browser-flag '--disable-web-security' -t lib/main.dart --release",
+ "type": "shell"
}
]
}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 51168a9..05d1f4f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
### 2.1.0
- added `getPan()`, `setPan()` and `setPanAbsolute()`.
+- added support for the Web platform.
+- added `loadMem()` to load an audio file from its bytes buffer (not RAW data). Useful for the web platform.
+- fixed `getFilterParamNames()`.
+- added `AudioData` class to manage audio samples.
### 2.0.2 (23 May 2024)
- Fixed wrong exception raised by `setVolume()` when a handle is no longer valid.
diff --git a/README.md b/README.md
index d4806c8..acb9b37 100755
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@ A low-level audio plugin for Flutter.
|Linux|Windows|Android|MacOS|iOS|web|
|:-:|:-:|:-:|:-:|:-:|:-:|
-|💙|💙|💙|💙|💙| WIP |
+|💙|💙|💙|💙|💙|💙|
### Select features:
@@ -42,6 +42,7 @@ with the [miniaudio](https://miniaud.io/) backend
through [Dart's C interop](https://dart.dev/interop/c-interop) (`dart:ffi`).
In other words, it is calling the C/C++ methods of the underlying audio engine
directly – there are no method channels in use.
+To use this plugin on the **Web platform**, please refer to [WEB_NOTES](https://github.com/alnitak/flutter_soloud/blob/main/WEB_NOTES.md).
## Example
diff --git a/WEB_NOTES.md b/WEB_NOTES.md
new file mode 100644
index 0000000..31b945a
--- /dev/null
+++ b/WEB_NOTES.md
@@ -0,0 +1,51 @@
+# Web Notes
+
+
+## Description
+
+The web platform is now supported, but further testing is welcome.
+
+## How to use
+
+To add the plugin to a web app, add the following line to the `<head>` section of `web/index.html`:
+```
+<script src="assets/packages/flutter_soloud/web/libflutter_soloud_plugin.js" defer></script>
+```
+
+---
+
+**`loadUrl()`** may produce the following error when the app is run:
+> Cross-Origin Request Blocked: The Same Origin Policy disallows reading the remote resource at https://www.learningcontainer.com/wp-content/uploads/2020/02/Kalimba.mp3. (Reason: CORS header ‘Access-Control-Allow-Origin’ missing). Status code: 200.
+
+This is due to the default behavior of HTTP servers, which don't allow cross-origin requests unless explicitly enabled. Refer [here](https://enable-cors.org/server.html) to learn how to enable your server to handle this situation.
+If you run the app locally instead, you can launch it with something like the following command:
+```
+flutter run -d chrome --web-renderer canvaskit --web-browser-flag '--disable-web-security' -t lib/main.dart --release
+```
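+
+As a minimal sketch (the URL and the error handling are illustrative only), loading a remote file looks like this:
+```dart
+import 'package:flutter/foundation.dart';
+import 'package:flutter_soloud/flutter_soloud.dart';
+
+Future<AudioSource?> loadRemote() async {
+  try {
+    // On the web this throws if the server does not send the CORS headers.
+    return await SoLoud.instance.loadUrl('https://example.com/sample.mp3');
+  } on Exception catch (e) {
+    debugPrint('loadUrl failed (is CORS enabled on the server?): $e');
+    return null;
+  }
+}
+```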
+
+---
+
+***It is not possible to read a local audio file directly*** on the web. For this reason, `loadMem()` has been added, which requires the `Uint8List` byte buffer of the audio file.
+***IMPORTANT***: `loadMem()` with mode `LoadMode.memory` used on the web platform will freeze the UI for the time needed to decompress the audio file. Please use it with mode `LoadMode.disk` instead, or load your sounds when the app starts. A minimal usage sketch follows.
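+
+A minimal sketch of the flow described above (assuming `loadMem()` accepts the same `mode` parameter as `loadAsset()`; obtaining `bytes`, e.g. from a file picker, is up to you):
+```dart
+import 'package:flutter/foundation.dart';
+import 'package:flutter_soloud/flutter_soloud.dart';
+
+Future<void> playBytes(String fileName, Uint8List bytes) async {
+  // LoadMode.disk reads the audio on the fly instead of decompressing it
+  // all upfront, which would freeze the UI on the web (see the note above).
+  final source = await SoLoud.instance.loadMem(
+    fileName,
+    bytes,
+    mode: LoadMode.disk, // assumption: same `mode` parameter as loadAsset()
+  );
+  await SoLoud.instance.play(source);
+}
+```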
+
+## For developers
+
+In the `web` directory, there is a `compile_wasm.sh` script that generates the `.js` and `.wasm` files for the native C code located in the `src` dir. Run it after installing *emscripten*. There is also a `compile_web.sh` script that compiles the web worker needed by the native code to communicate with Dart. The generated files are already provided, but if you modify the C/C++ code or `web/worker.dart`, the scripts must be run again to reflect the changes.
+
+The `compile_wasm.sh` script uses the `-O3` code optimization flag.
+For more detailed error logs, use `-O0 -g -s ASSERTIONS=1` in `compile_wasm.sh` instead.
+
+---
+
+The `AudioIsolate` [has been removed](https://github.com/alnitak/flutter_soloud/pull/89) and all the logic has been implemented natively. Events like `voice ended` are sent from C back to Dart. However, since it is not possible to call Dart from a native thread (the audio thread), a new web worker is created using the WASM `EM_ASM` directive. This allows sending the `voice ended` event back to Dart via the worker.
+
+Here is a sketch showing the steps used:
+![sketch](img/wasmWorker.png)
+
+**#1.** This function is called while initializing the player with `FlutterSoLoudWeb.setDartEventCallbacks()`.
+It creates a Web Worker in the [WASM Module](https://emscripten.org/docs/api_reference/module.html) using the compiled `web/worker.dart`. After calling this, the WASM Module will have a new variable called `Module.wasmWorker`, which will be used in Dart to receive messages.
+This makes it easy to use the Worker to send messages from within the C++ code.
+**#2.** This function, like #1, uses [EM_ASM](https://emscripten.org/docs/porting/connecting_cpp_and_javascript/Interacting-with-code.html#interacting-with-code-call-javascript-from-native) to inline JS. This JS code uses the `Module.wasmWorker` created in #1 to send a message.
+**#3.** This is the JS worker code created in #1. Every message sent by #2 is handled here and forwarded to #4.
+**#4.** When the event message is received here, a new event is added to a Stream, which the SoLoud API listens to.
+**#5.** Here we listen to the event messages coming from the `WorkerController` stream. Currently, only the "voice ended" event is supported. The Stream is listened to in `SoLoud._initializeNativeCallbacks()`.
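+
+For reference, a simplified sketch of the Dart side of steps #3-#5 (only `WorkerController` is a name taken from the codebase; the members shown are illustrative, not the actual internal API):
+```dart
+import 'dart:async';
+
+/// Receives the messages posted by the JS worker created in step #1
+/// and exposes them as a Stream (step #4).
+class WorkerController {
+  final _events = StreamController<String>.broadcast();
+
+  /// Called by the JS interop layer whenever the worker posts a message.
+  void onWorkerMessage(String message) => _events.add(message);
+
+  /// Listened to by the SoLoud API (step #5), e.g. for the "voice ended" event.
+  Stream<String> get events => _events.stream;
+}
+```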
diff --git a/example/.metadata b/example/.metadata
index 730798a..68b331f 100644
--- a/example/.metadata
+++ b/example/.metadata
@@ -1,11 +1,11 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
-# This file should be version controlled.
+# This file should be version controlled and should not be manually edited.
version:
- revision: f468f3366c26a5092eb964a230ce7892fda8f2f8
- channel: stable
+ revision: "761747bfc538b5af34aa0d3fac380f1bc331ec49"
+ channel: "stable"
project_type: app
@@ -13,11 +13,11 @@ project_type: app
migration:
platforms:
- platform: root
- create_revision: f468f3366c26a5092eb964a230ce7892fda8f2f8
- base_revision: f468f3366c26a5092eb964a230ce7892fda8f2f8
- - platform: windows
- create_revision: f468f3366c26a5092eb964a230ce7892fda8f2f8
- base_revision: f468f3366c26a5092eb964a230ce7892fda8f2f8
+ create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
+ base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
+ - platform: web
+ create_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
+ base_revision: 761747bfc538b5af34aa0d3fac380f1bc331ec49
# User provided section
diff --git a/example/assets/audio/Tropical Beeper.mp3 b/example/assets/audio/TropicalBeeper.mp3
similarity index 100%
rename from example/assets/audio/Tropical Beeper.mp3
rename to example/assets/audio/TropicalBeeper.mp3
diff --git a/example/assets/audio/X trackTure.mp3 b/example/assets/audio/XtrackTure.mp3
similarity index 100%
rename from example/assets/audio/X trackTure.mp3
rename to example/assets/audio/XtrackTure.mp3
diff --git a/example/assets/shaders/test9.frag b/example/assets/shaders/test9.frag
index 5b80c31..8cd31a8 100644
--- a/example/assets/shaders/test9.frag
+++ b/example/assets/shaders/test9.frag
@@ -174,11 +174,11 @@ void mainImage( out vec4 fragColor, in vec2 fragCoord )
// rotate view
float a;
- a = -0.6;
+ a = -0.8;
rd = rotateX(rd, a);
ro = rotateX(ro, a);
- a = 1.5;
+ a = 1.8;
// a = sin(iTime)*.5 + 1.570796327;
rd = rotateY(rd, a);
ro = rotateY(ro, a);
diff --git a/example/lib/controls.dart b/example/lib/controls.dart
index fce548e..2eb8da6 100644
--- a/example/lib/controls.dart
+++ b/example/lib/controls.dart
@@ -1,6 +1,6 @@
import 'dart:async';
-import 'dart:io';
+import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_soloud/flutter_soloud.dart';
import 'package:permission_handler/permission_handler.dart';
@@ -29,8 +29,8 @@ class _ControlsState extends State<Controls> {
// ignore: avoid_positional_boolean_parameters
ButtonStyle buttonStyle(bool enabled) {
return enabled
- ? ButtonStyle(backgroundColor: MaterialStateProperty.all(Colors.green))
- : ButtonStyle(backgroundColor: MaterialStateProperty.all(Colors.red));
+ ? ButtonStyle(backgroundColor: WidgetStateProperty.all(Colors.green))
+ : ButtonStyle(backgroundColor: WidgetStateProperty.all(Colors.red));
}
@override
@@ -49,7 +49,8 @@ class _ControlsState extends State<Controls> {
ElevatedButton(
onPressed: () async {
/// Ask recording permission on mobile
- if (Platform.isAndroid || Platform.isIOS) {
+ if (defaultTargetPlatform == TargetPlatform.android ||
+ defaultTargetPlatform == TargetPlatform.iOS) {
final p = await Permission.microphone.isGranted;
if (!p) {
unawaited(Permission.microphone.request());
@@ -76,9 +77,8 @@ class _ControlsState extends State<Controls> {
blurSigmaX: 6,
blurSigmaY: 6,
),
- linearShapeParams: LinearShapeParams(
+ linearShapeParams: const LinearShapeParams(
angle: -90,
- space: Platform.isAndroid || Platform.isIOS ? -10 : 10,
alignment: LinearAlignment.left,
),
),
diff --git a/example/lib/main.dart b/example/lib/main.dart
index a0d2496..cbc0e62 100644
--- a/example/lib/main.dart
+++ b/example/lib/main.dart
@@ -82,7 +82,6 @@ class MyHomePage extends StatelessWidget {
Widget build(BuildContext context) {
return DefaultTabController(
length: 5,
- initialIndex: 2,
child: SafeArea(
child: Scaffold(
body: Column(
diff --git a/example/lib/page_3d_audio.dart b/example/lib/page_3d_audio.dart
index 51f176f..33f84e2 100644
--- a/example/lib/page_3d_audio.dart
+++ b/example/lib/page_3d_audio.dart
@@ -70,11 +70,12 @@ class _Page3DAudioState extends State<Page3DAudio> {
/// load the audio file
currentSound = await SoLoud.instance.loadUrl(
- 'https://www.learningcontainer.com/wp-content/uploads/2020/02/Kalimba.mp3',
+ // From https://freetestdata.com/audio-files/mp3/
+ 'https://marcobavagnoli.com/Free_Test_Data_500KB_MP3.mp3',
);
/// play it
- await SoLoud.instance.play3d(currentSound!, 0, 0, 0);
+ await SoLoud.instance.play3d(currentSound!, 0, 0, 0, looping: true);
spinAround = true;
diff --git a/example/lib/page_hello_flutter.dart b/example/lib/page_hello_flutter.dart
index 1ed3a77..39c9f2e 100644
--- a/example/lib/page_hello_flutter.dart
+++ b/example/lib/page_hello_flutter.dart
@@ -1,11 +1,11 @@
import 'dart:async';
-import 'dart:ffi' as ffi;
-import 'dart:typed_data';
+import 'dart:io';
-import 'package:ffi/ffi.dart';
import 'package:file_picker/file_picker.dart';
+import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
+import 'package:flutter/services.dart';
import 'package:flutter_soloud/flutter_soloud.dart';
import 'package:logging/logging.dart';
@@ -31,6 +31,26 @@ class _PageHelloFlutterSoLoudState extends State<PageHelloFlutterSoLoud> {
child: Column(
mainAxisAlignment: MainAxisAlignment.spaceAround,
children: [
+ /// pick audio file
+ ElevatedButton(
+ onPressed: () async {
+ final paths = (await FilePicker.platform.pickFiles(
+ type: FileType.custom,
+ allowedExtensions: ['mp3', 'wav', 'ogg', 'flac'],
+ onFileLoading: print,
+ dialogTitle: 'Pick audio file\n(not for web)',
+ ))
+ ?.files;
+ if (paths != null) {
+ unawaited(playFile(paths.first.path!));
+ }
+ },
+ child: const Text(
+ 'pick audio\n(not for web)',
+ textAlign: TextAlign.center,
+ ),
+ ),
+
/// pick audio file
ElevatedButton(
onPressed: () async {
@@ -41,11 +61,21 @@ class _PageHelloFlutterSoLoudState extends State<PageHelloFlutterSoLoud> {
dialogTitle: 'Pick audio file',
))
?.files;
+
if (paths != null) {
- unawaited(play(paths.first.path!));
+ if (kIsWeb) {
+ unawaited(playBuffer(paths.first.name, paths.first.bytes!));
+ } else {
+ final f = File(paths.first.path!);
+ final buffer = f.readAsBytesSync();
+ unawaited(playBuffer(paths.first.path!, buffer));
+ }
}
},
- child: const Text('pick audio'),
+ child: const Text(
+ 'pick audio using "loadMem()"\n(all platforms)',
+ textAlign: TextAlign.center,
+ ),
),
Column(
children: [
@@ -56,7 +86,7 @@ class _PageHelloFlutterSoLoudState extends State<PageHelloFlutterSoLoud> {
SoLoudCapture.instance.stopCapture();
if (context.mounted) setState(() {});
} else {
- final a = SoLoudCapture.instance.initialize();
+ final a = SoLoudCapture.instance.init();
final b = SoLoudCapture.instance.startCapture();
if (context.mounted &&
a == CaptureErrors.captureNoError &&
@@ -82,7 +112,7 @@ class _PageHelloFlutterSoLoudState extends State<PageHelloFlutterSoLoud> {
}
/// play file
- Future<void> play(String file) async {
+ Future<void> playFile(String file) async {
/// stop any previous sound loaded
if (currentSound != null) {
try {
@@ -107,6 +137,33 @@ class _PageHelloFlutterSoLoudState extends State<PageHelloFlutterSoLoud> {
/// play it
await SoLoud.instance.play(currentSound!);
}
+
+ /// play bytes for web.
+ Future<void> playBuffer(String fileName, Uint8List bytes) async {
+ /// stop any previous sound loaded
+ if (currentSound != null) {
+ try {
+ await SoLoud.instance.disposeSource(currentSound!);
+ } catch (e) {
+ _log.severe('dispose error', e);
+ return;
+ }
+ }
+
+ /// load the audio file
+ final AudioSource newSound;
+ try {
+ newSound = await SoLoud.instance.loadMem(fileName, bytes);
+ } catch (e) {
+ _log.severe('load error', e);
+ return;
+ }
+
+ currentSound = newSound;
+
+ /// play it
+ await SoLoud.instance.play(currentSound!);
+ }
}
/// widget that uses a ticker to read and provide audio
@@ -127,27 +184,32 @@ class MicAudioWidget extends StatefulWidget {
class _MicAudioWidgetState extends State<MicAudioWidget>
with SingleTickerProviderStateMixin {
- late Ticker ticker;
- late ffi.Pointer<ffi.Pointer<ffi.Float>> audioData;
+ Ticker? ticker;
+ final audioData = AudioData(
+ GetSamplesFrom.microphone,
+ GetSamplesKind.wave,
+ );
@override
void initState() {
super.initState();
- audioData = calloc();
- SoLoudCapture.instance.getCaptureAudioTexture2D(audioData);
ticker = createTicker((Duration elapsed) {
- if (mounted) {
- SoLoudCapture.instance.getCaptureAudioTexture2D(audioData);
- setState(() {});
+ if (context.mounted) {
+ try {
+ audioData.updateSamples();
+ setState(() {});
+ } on Exception catch (e) {
+ debugPrint('$e');
+ }
}
});
- ticker.start();
+ ticker?.start();
}
@override
void dispose() {
- ticker.stop();
- calloc.free(audioData);
+ ticker?.stop();
+ audioData.dispose();
super.dispose();
}
@@ -168,7 +230,7 @@ class MicAudioPainter extends CustomPainter {
const MicAudioPainter({
required this.audioData,
});
- final ffi.Pointer<ffi.Pointer<ffi.Float>> audioData;
+ final AudioData audioData;
@override
void paint(Canvas canvas, Size size) {
@@ -188,9 +250,7 @@ class MicAudioPainter extends CustomPainter {
for (var n = 0; n < 32; n++) {
var f = 0.0;
for (var i = 0; i < 8; i++) {
- /// audioData[n * 8 + i] is the FFT data
- /// If you want wave data, add 256 to the index
- f += audioData.value[n * 8 + i + 256];
+ f += audioData.getWave(SampleWave(n * 8 + i));
}
data[n] = f / 8;
}
diff --git a/example/lib/page_multi_track.dart b/example/lib/page_multi_track.dart
index 25a1fae..dbb27e8 100644
--- a/example/lib/page_multi_track.dart
+++ b/example/lib/page_multi_track.dart
@@ -1,12 +1,9 @@
// ignore_for_file: public_member_api_docs, sort_constructors_first
import 'dart:async';
-import 'dart:io';
import 'package:flutter/material.dart';
-import 'package:flutter/services.dart';
import 'package:flutter_soloud/flutter_soloud.dart';
import 'package:logging/logging.dart';
-import 'package:path_provider/path_provider.dart';
class PageMultiTrack extends StatefulWidget {
const PageMultiTrack({super.key});
@@ -175,15 +172,8 @@ class _PlaySoundWidgetState extends State<PlaySoundWidget> {
}
Future<bool> loadAsset() async {
- final path = (await getAssetFile(widget.assetsAudio)).path;
-
final AudioSource? newSound;
- try {
- newSound = await SoLoud.instance.loadFile(path);
- } catch (e) {
- _log.severe('Load sound asset failed', e);
- return false;
- }
+ newSound = await SoLoud.instance.loadAsset(widget.assetsAudio);
soundLength = SoLoud.instance.getLength(newSound);
sound = newSound;
@@ -260,24 +250,6 @@ class _PlaySoundWidgetState extends State<PlaySoundWidget> {
isPaused[newHandle] = ValueNotifier(false);
soundPosition[newHandle] = ValueNotifier(0);
}
-
- /// get the assets file and copy it to the temp dir
- Future<File> getAssetFile(String assetsFile) async {
- final tempDir = await getTemporaryDirectory();
- final tempPath = tempDir.path;
- final filePath = '$tempPath/$assetsFile';
- final file = File(filePath);
- if (file.existsSync()) {
- return file;
- } else {
- final byteData = await rootBundle.load(assetsFile);
- final buffer = byteData.buffer;
- await file.create(recursive: true);
- return file.writeAsBytes(
- buffer.asUint8List(byteData.offsetInBytes, byteData.lengthInBytes),
- );
- }
- }
}
/// row widget containing play/pause and time slider
diff --git a/example/lib/page_visualizer.dart b/example/lib/page_visualizer.dart
index 27d645b..16a04b6 100644
--- a/example/lib/page_visualizer.dart
+++ b/example/lib/page_visualizer.dart
@@ -1,15 +1,12 @@
import 'dart:async';
-import 'dart:io';
-import 'dart:ui' as ui;
import 'package:file_picker/file_picker.dart';
+import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
-import 'package:flutter/services.dart';
import 'package:flutter_soloud/flutter_soloud.dart';
import 'package:flutter_soloud_example/controls.dart';
import 'package:flutter_soloud_example/visualizer/visualizer.dart';
import 'package:logging/logging.dart';
-import 'package:path_provider/path_provider.dart';
import 'package:star_menu/star_menu.dart';
class PageVisualizer extends StatefulWidget {
@@ -40,19 +37,35 @@ class _PageVisualizerState extends State<PageVisualizer> {
'assets/audio/12Bands/audiocheck.net_sin_16000Hz_-3dBFS_2s.wav',
'assets/audio/12Bands/audiocheck.net_sin_20000Hz_-3dBFS_2s.wav',
];
- final ValueNotifier<TextureType> textureType =
- ValueNotifier(TextureType.fft2D);
+ late final ValueNotifier<GetSamplesKind> samplesKind;
final ValueNotifier<double> fftSmoothing = ValueNotifier(0.8);
final ValueNotifier<bool> isVisualizerForPlayer = ValueNotifier(true);
final ValueNotifier<bool> isVisualizerEnabled = ValueNotifier(true);
- final ValueNotifier<RangeValues> fftImageRange =
- ValueNotifier(const RangeValues(0, 255));
- final ValueNotifier<int> maxFftImageRange = ValueNotifier(255);
+ late ValueNotifier<RangeValues> fftImageRange;
final ValueNotifier<double> soundLength = ValueNotifier(0);
final ValueNotifier<double> soundPosition = ValueNotifier(0);
Timer? timer;
AudioSource? currentSound;
- FftController visualizerController = FftController();
+ late final VisualizerController visualizerController;
+
+ @override
+ void initState() {
+ super.initState();
+ samplesKind = ValueNotifier(GetSamplesKind.linear);
+ visualizerController = VisualizerController(samplesKind: samplesKind.value);
+ fftImageRange = ValueNotifier(
+ RangeValues(
+ visualizerController.minRange.toDouble(),
+ visualizerController.maxRange.toDouble(),
+ ),
+ );
+ }
+
+ @override
+ void dispose() {
+ visualizerController.audioData.dispose();
+ super.dispose();
+ }
@override
Widget build(BuildContext context) {
@@ -66,14 +79,14 @@ class _PageVisualizerState extends State<PageVisualizer> {
SoLoudCapture.instance.stopCapture();
visualizerController.changeIsCaptureStarted(false);
} else {
- SoLoudCapture.instance.initialize(deviceID: deviceID);
+ SoLoudCapture.instance.init(deviceID: deviceID);
SoLoudCapture.instance.startCapture();
visualizerController.changeIsCaptureStarted(true);
}
},
onDeviceIdChanged: (deviceID) {
SoLoudCapture.instance.stopCapture();
- SoLoudCapture.instance.initialize(deviceID: deviceID);
+ SoLoudCapture.instance.init(deviceID: deviceID);
SoLoudCapture.instance.startCapture();
},
),
@@ -93,7 +106,10 @@ class _PageVisualizerState extends State<PageVisualizer> {
),
linearShapeParams: LinearShapeParams(
angle: -90,
- space: Platform.isAndroid || Platform.isIOS ? -10 : 10,
+ space: defaultTargetPlatform == TargetPlatform.android ||
+ defaultTargetPlatform == TargetPlatform.iOS
+ ? -10
+ : 10,
alignment: LinearAlignment.left,
),
),
@@ -111,14 +127,14 @@ class _PageVisualizerState extends State<PageVisualizer> {
ActionChip(
backgroundColor: Colors.blue,
onPressed: () {
- playAsset('assets/audio/Tropical Beeper.mp3');
+ playAsset('assets/audio/TropicalBeeper.mp3');
},
label: const Text('Tropical Beeper'),
),
ActionChip(
backgroundColor: Colors.blue,
onPressed: () {
- playAsset('assets/audio/X trackTure.mp3');
+ playAsset('assets/audio/XtrackTure.mp3');
},
label: const Text('X trackTure'),
),
@@ -179,7 +195,7 @@ class _PageVisualizerState extends State<PageVisualizer> {
),
const SizedBox(width: 10),
- /// texture type
+ /// texture kind
StarMenu(
params: StarMenuParameters(
shape: MenuShape.linear,
@@ -198,48 +214,52 @@ class _PageVisualizerState extends State<PageVisualizer> {
controller.closeMenu!();
},
items: [
- /// frequencies on 1st 256 px row
- /// wave on 2nd 256 px row
+ /// wave data (amplitudes)
ActionChip(
backgroundColor: Colors.blue,
onPressed: () {
- textureType.value = TextureType.both1D;
+ samplesKind.value = GetSamplesKind.wave;
+ visualizerController
+ .changeSamplesKind(GetSamplesKind.wave);
+ fftImageRange.value = const RangeValues(0, 255);
},
- label: const Text('both 1D'),
+ label: const Text('wave data'),
),
- /// frequencies (FFT)
+ /// frequencies on 1st 256 px row
+ /// wave on 2nd 256 px row
ActionChip(
backgroundColor: Colors.blue,
onPressed: () {
- textureType.value = TextureType.fft2D;
+ samplesKind.value = GetSamplesKind.linear;
+ visualizerController
+ .changeSamplesKind(GetSamplesKind.linear);
+ fftImageRange.value = const RangeValues(0, 255);
},
- label: const Text('frequencies'),
+ label: const Text('linear'),
),
- /// wave data (amplitudes)
+ /// both fft and wave
ActionChip(
backgroundColor: Colors.blue,
onPressed: () {
- textureType.value = TextureType.wave2D;
+ samplesKind.value = GetSamplesKind.texture;
+ visualizerController
+ .changeSamplesKind(GetSamplesKind.texture);
+ fftImageRange.value = const RangeValues(0, 511);
},
- label: const Text('wave data'),
+ label: const Text('texture'),
),
-
- /// both fft and wave
- /// not implemented yet
- // ActionChip(
- // backgroundColor: Colors.blue,
- // onPressed: () {
- // textureType.value = TextureType.both2D;
- // },
- // label: const Text('both'),
- // ),
],
- child: const Chip(
- label: Text('texture'),
- backgroundColor: Colors.blue,
- avatar: Icon(Icons.arrow_drop_down),
+ child: ValueListenableBuilder(
+ valueListenable: samplesKind,
+ builder: (_, type, __) {
+ return Chip(
+ label: Text(type.name),
+ backgroundColor: Colors.blue,
+ avatar: const Icon(Icons.arrow_drop_down),
+ );
+ },
),
),
],
@@ -270,8 +290,17 @@ class _PageVisualizerState extends State<PageVisualizer> {
dialogTitle: 'Pick audio file',
))
?.files;
+
if (paths != null) {
- unawaited(play(paths.first.path!));
+ final AudioSource audioFile;
+ if (kIsWeb) {
+ audioFile = await SoLoud.instance
+ .loadMem(paths.first.name, paths.first.bytes!);
+ } else {
+ audioFile =
+ await SoLoud.instance.loadFile(paths.first.path!);
+ }
+ unawaited(play(audioFile));
}
},
child: const Text('pick audio'),
@@ -279,47 +308,51 @@ class _PageVisualizerState extends State<PageVisualizer> {
],
),
- /// Seek slider
- ValueListenableBuilder(
- valueListenable: soundLength,
- builder: (_, length, __) {
- return ValueListenableBuilder(
- valueListenable: soundPosition,
- builder: (_, position, __) {
- if (position >= length) {
- position = 0;
- if (length == 0) length = 1;
- }
-
- return Row(
- children: [
- Text(position.toInt().toString()),
- Expanded(
- child: Slider.adaptive(
- value: position,
- max: length < position ? position : length,
- onChanged: (value) {
- if (currentSound == null) return;
- stopTimer();
- final position = Duration(
- milliseconds:
- (value * Duration.millisecondsPerSecond)
- .round(),
- );
- SoLoud.instance
- .seek(currentSound!.handles.last, position);
- soundPosition.value = value;
- startTimer();
- },
+ /// Seek slider.
+ /// Not used on web platforms because [LoadMode.disk]
+ /// is used with `loadMem()`. Otherwise the seek problem will
+ /// be noticeable while seeking. See [SoLoud.seek] note.
+ if (!kIsWeb)
+ ValueListenableBuilder(
+ valueListenable: soundLength,
+ builder: (_, length, __) {
+ return ValueListenableBuilder(
+ valueListenable: soundPosition,
+ builder: (_, position, __) {
+ if (position >= length) {
+ position = 0;
+ if (length == 0) length = 1;
+ }
+
+ return Row(
+ children: [
+ Text(position.toInt().toString()),
+ Expanded(
+ child: Slider.adaptive(
+ value: position,
+ max: length < position ? position : length,
+ onChanged: (value) {
+ if (currentSound == null) return;
+ stopTimer();
+ final position = Duration(
+ milliseconds:
+ (value * Duration.millisecondsPerSecond)
+ .round(),
+ );
+ SoLoud.instance
+ .seek(currentSound!.handles.last, position);
+ soundPosition.value = value;
+ startTimer();
+ },
+ ),
),
- ),
- Text(length.toInt().toString()),
- ],
- );
- },
- );
- },
- ),
+ Text(length.toInt().toString()),
+ ],
+ );
+ },
+ );
+ },
+ ),
/// fft range slider values to put into the texture
ValueListenableBuilder(
@@ -330,14 +363,13 @@ class _PageVisualizerState extends State<PageVisualizer> {
Text('FFT range ${fftRange.start.toInt()}'),
Expanded(
child: RangeSlider(
- max: 255,
- divisions: 256,
+ max: visualizerController.maxRangeLimit.toDouble() + 1,
values: fftRange,
onChanged: (values) {
fftImageRange.value = values;
visualizerController
- ..changeMinFreq(values.start.toInt())
- ..changeMaxFreq(values.end.toInt());
+ ..changeMin(values.start.toInt())
+ ..changeMax(values.end.toInt());
},
),
),
@@ -387,7 +419,7 @@ class _PageVisualizerState extends State<PageVisualizer> {
.changeIsVisualizerForPlayer(!value);
},
),
- const Text('show capture data'),
+ const Text('show mic data'),
Checkbox(
value: forPlayer,
onChanged: (value) {
@@ -401,53 +433,11 @@ class _PageVisualizerState extends State<PageVisualizer> {
},
),
- /// switch to enable / disable retrieving audio data
- ValueListenableBuilder(
- valueListenable: isVisualizerEnabled,
- builder: (_, isEnabled, __) {
- return Row(
- children: [
- Switch(
- materialTapTargetSize: MaterialTapTargetSize.shrinkWrap,
- value: isEnabled,
- onChanged: (value) {
- isVisualizerEnabled.value = value;
- visualizerController.changeIsVisualizerEnabled(value);
- },
- ),
- const Text('FFT data'),
- ],
- );
- },
- ),
-
/// VISUALIZER
- FutureBuilder(
- future: loadShader(),
- builder: (context, snapshot) {
- if (snapshot.hasData) {
- return ValueListenableBuilder(
- valueListenable: textureType,
- builder: (_, type, __) {
- return Visualizer(
- key: UniqueKey(),
- controller: visualizerController,
- shader: snapshot.data!,
- textureType: type,
- );
- },
- );
- } else {
- if (snapshot.data == null) {
- return const Placeholder(
- child: Align(
- child: Text('Error compiling shader.\nSee log'),
- ),
- );
- }
- return const CircularProgressIndicator();
- }
- },
+ Visualizer(
+ controller: visualizerController,
+ shader: shader,
),
],
),
@@ -455,19 +445,8 @@ class _PageVisualizerState extends State<PageVisualizer> {
);
}
- /// load asynchronously the fragment shader
- Future<ui.FragmentShader?> loadShader() async {
- try {
- final program = await ui.FragmentProgram.fromAsset(shader);
- return program.fragmentShader();
- } catch (e) {
- _log.severe('error compiling the shader', e);
- }
- return null;
- }
-
/// play file
- Future<void> play(String file) async {
+ Future<void> play(AudioSource source) async {
if (currentSound != null) {
try {
await SoLoud.instance.disposeSource(currentSound!);
@@ -477,9 +456,7 @@ class _PageVisualizerState extends State<PageVisualizer> {
}
stopTimer();
}
-
- /// load the file
- currentSound = await SoLoud.instance.loadFile(file);
+ currentSound = source;
/// play it
await SoLoud.instance.play(currentSound!);
@@ -494,10 +471,12 @@ class _PageVisualizerState extends State<PageVisualizer> {
(event) {
stopTimer();
- /// It's needed to call dispose when it end else it will
+ /// It's needed to call dispose when it ends else it will
/// not be cleared
- SoLoud.instance.disposeSource(currentSound!);
- currentSound = null;
+ if (currentSound != null) {
+ SoLoud.instance.disposeSource(currentSound!);
+ currentSound = null;
+ }
},
);
startTimer();
@@ -505,26 +484,12 @@ class _PageVisualizerState extends State<PageVisualizer> {
/// plays an assets file
Future<void> playAsset(String assetsFile) async {
- final audioFile = await getAssetFile(assetsFile);
- return play(audioFile.path);
- }
-
- /// get the assets file and copy it to the temp dir
- Future<File> getAssetFile(String assetsFile) async {
- final tempDir = await getTemporaryDirectory();
- final tempPath = tempDir.path;
- final filePath = '$tempPath/$assetsFile';
- final file = File(filePath);
- if (file.existsSync()) {
- return file;
- } else {
- final byteData = await rootBundle.load(assetsFile);
- final buffer = byteData.buffer;
- await file.create(recursive: true);
- return file.writeAsBytes(
- buffer.asUint8List(byteData.offsetInBytes, byteData.lengthInBytes),
- );
- }
+ final audioFile = await SoLoud.instance.loadAsset(
+ assetsFile,
+ mode: kIsWeb ? LoadMode.disk : LoadMode.memory,
+ );
+ return play(audioFile);
}
/// start timer to update the audio position slider
diff --git a/example/lib/page_waveform.dart b/example/lib/page_waveform.dart
index 7f7ce77..3e13a60 100644
--- a/example/lib/page_waveform.dart
+++ b/example/lib/page_waveform.dart
@@ -1,12 +1,10 @@
-import 'dart:io';
-
import 'package:flutter/material.dart';
import 'package:flutter_soloud/flutter_soloud.dart';
-import 'package:flutter_soloud_example/waveform/bars.dart';
-import 'package:flutter_soloud_example/waveform/filter_fx.dart';
-import 'package:flutter_soloud_example/waveform/keyboard_widget.dart';
-import 'package:flutter_soloud_example/waveform/knobs_groups.dart';
-import 'package:flutter_soloud_example/waveform/text_slider.dart';
+import 'package:flutter_soloud_example/ui/bars.dart';
+import 'package:flutter_soloud_example/ui/filter_fx.dart';
+import 'package:flutter_soloud_example/ui/keyboard_widget.dart';
+import 'package:flutter_soloud_example/ui/knobs_groups.dart';
+import 'package:flutter_soloud_example/ui/text_slider.dart';
import 'package:star_menu/star_menu.dart';
/// Example to demonstrate how waveforms work with a keyboard
@@ -82,13 +80,8 @@ class _PageWaveformState extends State<PageWaveform> {
await SoLoud.instance.disposeSource(sound!);
}
- /// text created by ChatGPT :)
await SoLoud.instance
- .speechText('Flutter and So Loud audio plugin are the '
- "tech tag team you never knew you needed - they're "
- 'like Batman and Robin, swooping in to save your '
- 'app with style and sound effects that would make '
- 'even Gotham jealous!')
+ .speechText('Hello Flutter Soloud!')
.then((value) => sound = value);
},
child: const Text('T2S'),
@@ -290,9 +283,8 @@ class _PageWaveformState extends State<PageWaveform> {
blurSigmaX: 6,
blurSigmaY: 6,
),
- linearShapeParams: LinearShapeParams(
+ linearShapeParams: const LinearShapeParams(
angle: -90,
- space: Platform.isAndroid || Platform.isIOS ? -10 : 10,
alignment: LinearAlignment.left,
),
),
diff --git a/example/lib/waveform/bars.dart b/example/lib/ui/bars.dart
similarity index 76%
rename from example/lib/waveform/bars.dart
rename to example/lib/ui/bars.dart
index e2d49aa..37465be 100644
--- a/example/lib/waveform/bars.dart
+++ b/example/lib/ui/bars.dart
@@ -1,6 +1,3 @@
-import 'dart:ffi' as ffi;
-
-import 'package:ffi/ffi.dart';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
import 'package:flutter_soloud/flutter_soloud.dart';
@@ -8,8 +5,9 @@ import 'package:flutter_soloud_example/visualizer/bars_fft_widget.dart';
import 'package:flutter_soloud_example/visualizer/bars_wave_widget.dart';
/// Visualizer for FFT and wave data
-///
class Bars extends StatefulWidget {
const Bars({super.key});
@override
@@ -18,12 +16,13 @@ class Bars extends StatefulWidget {
class BarsState extends State<Bars> with SingleTickerProviderStateMixin {
late final Ticker ticker;
- ffi.Pointer<ffi.Pointer<ffi.Float>> playerData = ffi.nullptr;
-
+ final AudioData audioData = AudioData(
+ GetSamplesFrom.player,
+ GetSamplesKind.linear,
+ );
@override
void initState() {
super.initState();
- playerData = calloc();
ticker = createTicker(_tick);
ticker.start();
}
@@ -31,15 +30,18 @@ class BarsState extends State<Bars> with SingleTickerProviderStateMixin {
@override
void dispose() {
ticker.stop();
- calloc.free(playerData);
- playerData = ffi.nullptr;
+ audioData.dispose();
super.dispose();
}
void _tick(Duration elapsed) {
- if (mounted) {
- SoLoud.instance.getAudioTexture2D(playerData);
- setState(() {});
+ if (context.mounted) {
+ try {
+ audioData.updateSamples();
+ setState(() {});
+ } on Exception catch (e) {
+ debugPrint('$e');
+ }
}
}
@@ -50,7 +52,7 @@ class BarsState extends State<Bars> with SingleTickerProviderStateMixin {
child: Row(
children: [
BarsFftWidget(
- audioData: playerData.value,
+ audioData: audioData,
minFreq: 0,
maxFreq: 128,
width: MediaQuery.sizeOf(context).width / 2 - 17,
@@ -58,7 +60,7 @@ class BarsState extends State with SingleTickerProviderStateMixin {
),
const SizedBox(width: 6),
BarsWaveWidget(
- audioData: playerData.value,
+ audioData: audioData,
width: MediaQuery.sizeOf(context).width / 2 - 17,
height: MediaQuery.sizeOf(context).width / 6,
),
diff --git a/example/lib/waveform/filter_fx.dart b/example/lib/ui/filter_fx.dart
similarity index 97%
rename from example/lib/waveform/filter_fx.dart
rename to example/lib/ui/filter_fx.dart
index 99c66f1..c768e18 100644
--- a/example/lib/waveform/filter_fx.dart
+++ b/example/lib/ui/filter_fx.dart
@@ -1,6 +1,6 @@
import 'package:flutter/material.dart';
import 'package:flutter_soloud/flutter_soloud.dart';
-import 'package:flutter_soloud_example/waveform/touch_slider.dart';
+import 'package:flutter_soloud_example/ui/touch_slider.dart';
class FilterFx extends StatefulWidget {
const FilterFx({
@@ -9,6 +9,7 @@ class FilterFx extends StatefulWidget {
});
final FilterType filterType;
+
@override
State<FilterFx> createState() => _FilterFxState();
}
diff --git a/example/lib/waveform/keyboard_widget.dart b/example/lib/ui/keyboard_widget.dart
similarity index 100%
rename from example/lib/waveform/keyboard_widget.dart
rename to example/lib/ui/keyboard_widget.dart
diff --git a/example/lib/waveform/knobs_groups.dart b/example/lib/ui/knobs_groups.dart
similarity index 95%
rename from example/lib/waveform/knobs_groups.dart
rename to example/lib/ui/knobs_groups.dart
index db2b71b..07a9cab 100644
--- a/example/lib/waveform/knobs_groups.dart
+++ b/example/lib/ui/knobs_groups.dart
@@ -1,5 +1,5 @@
import 'package:flutter/material.dart';
-import 'package:flutter_soloud_example/waveform/touch_slider.dart';
+import 'package:flutter_soloud_example/ui/touch_slider.dart';
class KnobsGroup extends StatefulWidget {
const KnobsGroup({
diff --git a/example/lib/waveform/text_slider.dart b/example/lib/ui/text_slider.dart
similarity index 100%
rename from example/lib/waveform/text_slider.dart
rename to example/lib/ui/text_slider.dart
diff --git a/example/lib/waveform/touch_slider.dart b/example/lib/ui/touch_slider.dart
similarity index 100%
rename from example/lib/waveform/touch_slider.dart
rename to example/lib/ui/touch_slider.dart
diff --git a/example/lib/visualizer/bars_fft_widget.dart b/example/lib/visualizer/bars_fft_widget.dart
index 3801c32..85920af 100644
--- a/example/lib/visualizer/bars_fft_widget.dart
+++ b/example/lib/visualizer/bars_fft_widget.dart
@@ -1,7 +1,8 @@
-import 'dart:ffi' as ffi;
-
+// ignore_for_file: public_member_api_docs
import 'package:flutter/material.dart';
+import 'package:flutter_soloud/flutter_soloud.dart';
+
/// Draw the audio FFT data
///
class BarsFftWidget extends StatelessWidget {
@@ -14,7 +15,7 @@ class BarsFftWidget extends StatelessWidget {
super.key,
});
- final ffi.Pointer<ffi.Float> audioData;
+ final AudioData audioData;
final int minFreq;
final int maxFreq;
final double width;
@@ -22,7 +23,9 @@ class BarsFftWidget extends StatelessWidget {
@override
Widget build(BuildContext context) {
- if (audioData.address == 0x0) return const SizedBox.shrink();
+ if (audioData.getSamplesKind == GetSamplesKind.wave) {
+ return const Placeholder();
+ }
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
@@ -55,20 +58,31 @@ class FftPainter extends CustomPainter {
required this.minFreq,
required this.maxFreq,
});
- final ffi.Pointer<ffi.Float> audioData;
+ final AudioData audioData;
final int minFreq;
final int maxFreq;
@override
void paint(Canvas canvas, Size size) {
- final barWidth = size.width / (maxFreq - minFreq);
+ final barWidth = size.width / (maxFreq - minFreq).clamp(0, 255);
final paint = Paint()
..color = Colors.yellow
..strokeWidth = barWidth * 0.8
..style = PaintingStyle.stroke;
- for (var i = minFreq; i <= maxFreq; i++) {
- final barHeight = size.height * audioData[i];
+ for (var i = minFreq; i <= maxFreq.clamp(0, 255); i++) {
+ late final double barHeight;
+ try {
+ final double data;
+ if (audioData.getSamplesKind == GetSamplesKind.linear) {
+ data = audioData.getLinearFft(SampleLinear(i));
+ } else {
+ data = audioData.getTexture(SampleRow(0), SampleColumn(i));
+ }
+ barHeight = size.height * data;
+ } on Exception {
+ barHeight = 0;
+ }
canvas.drawRect(
Rect.fromLTWH(
barWidth * (i - minFreq),
diff --git a/example/lib/visualizer/bars_wave_widget.dart b/example/lib/visualizer/bars_wave_widget.dart
index 0967064..14250c1 100644
--- a/example/lib/visualizer/bars_wave_widget.dart
+++ b/example/lib/visualizer/bars_wave_widget.dart
@@ -1,6 +1,5 @@
-import 'dart:ffi' as ffi;
-
import 'package:flutter/material.dart';
+import 'package:flutter_soloud/flutter_soloud.dart';
/// Draw the audio wave data
///
@@ -12,14 +11,12 @@ class BarsWaveWidget extends StatelessWidget {
super.key,
});
- final ffi.Pointer<ffi.Float> audioData;
+ final AudioData audioData;
final double width;
final double height;
@override
Widget build(BuildContext context) {
- if (audioData.address == 0x0) return const SizedBox.shrink();
-
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
@@ -45,7 +42,7 @@ class WavePainter extends CustomPainter {
const WavePainter({
required this.audioData,
});
- final ffi.Pointer<ffi.Float> audioData;
+ final AudioData audioData;
@override
void paint(Canvas canvas, Size size) {
@@ -56,7 +53,20 @@ class WavePainter extends CustomPainter {
..style = PaintingStyle.stroke;
for (var i = 0; i < 256; i++) {
- final barHeight = size.height * audioData[i + 256];
+ late final double barHeight;
+ try {
+ final double data;
+ if (audioData.getSamplesKind == GetSamplesKind.wave) {
+ data = audioData.getWave(SampleWave(i));
+ } else if (audioData.getSamplesKind == GetSamplesKind.linear) {
+ data = audioData.getLinearWave(SampleLinear(i));
+ } else {
+ data = audioData.getTexture(SampleRow(0), SampleColumn(i + 256));
+ }
+ barHeight = size.height * data;
+ } on Exception {
+ barHeight = 0;
+ }
canvas.drawRect(
Rect.fromLTWH(
barWidth * i,
diff --git a/example/lib/visualizer/visualizer.dart b/example/lib/visualizer/visualizer.dart
index 23c0ce2..922c777 100644
--- a/example/lib/visualizer/visualizer.dart
+++ b/example/lib/visualizer/visualizer.dart
@@ -1,11 +1,9 @@
// ignore_for_file: avoid_positional_boolean_parameters
import 'dart:async';
-import 'dart:ffi' as ffi;
-import 'dart:typed_data';
import 'dart:ui' as ui;
-import 'package:ffi/ffi.dart';
+import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
import 'package:flutter_soloud/flutter_soloud.dart';
@@ -15,49 +13,52 @@ import 'package:flutter_soloud_example/visualizer/bars_wave_widget.dart';
import 'package:flutter_soloud_example/visualizer/bmp_header.dart';
import 'package:flutter_soloud_example/visualizer/paint_texture.dart';
-/// enum to tell [Visualizer] to build a texture as:
-/// [both1D] frequencies data on the 1st 256px row, wave on the 2nd 256px
-/// [fft2D] frequencies data 256x256 px
-/// [wave2D] wave data 256x256px
-/// [both2D] both frequencies & wave data interleaved 256x512px
-enum TextureType {
- both1D,
- fft2D,
- wave2D,
- both2D, // no implemented yet
-}
-
-class FftController extends ChangeNotifier {
- FftController({
- this.minFreqRange = 0,
- this.maxFreqRange = 255,
+class VisualizerController extends ChangeNotifier {
+ VisualizerController({
this.isVisualizerEnabled = true,
this.isVisualizerForPlayer = true,
this.isCaptureStarted = false,
- });
+ this.samplesKind = GetSamplesKind.texture,
+ }) : maxRangeLimit = samplesKind == GetSamplesKind.texture ? 511 : 255,
+ maxRange = samplesKind == GetSamplesKind.texture ? 511 : 255,
+ minRange = 0 {
+ audioData = AudioData(
+ isVisualizerForPlayer ? GetSamplesFrom.player : GetSamplesFrom.microphone,
+ samplesKind,
+ );
+ }
- int minFreqRange;
- int maxFreqRange;
+ int maxRangeLimit;
+ int minRange;
+ int maxRange;
bool isVisualizerEnabled;
bool isVisualizerForPlayer;
bool isCaptureStarted;
+ GetSamplesKind samplesKind;
+ late AudioData audioData;
- void changeMinFreq(int minFreq) {
- if (minFreq < 0) return;
- if (minFreq >= maxFreqRange) return;
- minFreqRange = minFreq;
- notifyListeners();
+ void changeMin(int min, {bool notify = true}) {
+ minRange = min.clamp(0, maxRange);
+ if (notify) {
+ notifyListeners();
+ }
}
- void changeMaxFreq(int maxFreq) {
- if (maxFreq > 255) return;
- if (maxFreq <= minFreqRange) return;
- maxFreqRange = maxFreq;
- notifyListeners();
+ void changeMax(int max, {bool notify = true}) {
+ final nMax = samplesKind == GetSamplesKind.texture ? 511 : 255;
+ maxRange = max.clamp(minRange, nMax);
+ if (notify) {
+ notifyListeners();
+ }
}
void changeIsVisualizerForPlayer(bool isForPlayer) {
isVisualizerForPlayer = isForPlayer;
+ audioData.dispose();
+ audioData = AudioData(
+ isVisualizerForPlayer ? GetSamplesFrom.player : GetSamplesFrom.microphone,
+ samplesKind,
+ );
notifyListeners();
}
@@ -71,51 +72,58 @@ class FftController extends ChangeNotifier {
isCaptureStarted = enabled;
notifyListeners();
}
+
+ void changeSamplesKind(GetSamplesKind kind) {
+ samplesKind = kind;
+ switch (kind) {
+ case GetSamplesKind.linear:
+ changeMin(0, notify: false);
+ changeMax(255, notify: false);
+ maxRangeLimit = 255;
+ case GetSamplesKind.texture:
+ changeMin(0, notify: false);
+ changeMax(511, notify: false);
+ maxRangeLimit = 511;
+ case GetSamplesKind.wave:
+ changeMin(0, notify: false);
+ changeMax(255, notify: false);
+ maxRangeLimit = 255;
+ }
+ audioData.changeType(
+ isVisualizerForPlayer ? GetSamplesFrom.player : GetSamplesFrom.microphone,
+ samplesKind,
+ );
+ notifyListeners();
+ }
}
class Visualizer extends StatefulWidget {
const Visualizer({
required this.controller,
required this.shader,
- this.textureType = TextureType.fft2D,
super.key,
});
- final FftController controller;
- final ui.FragmentShader shader;
- final TextureType textureType;
+ final VisualizerController controller;
+ final String shader;
@override
State<Visualizer> createState() => _VisualizerState();
}
-class _VisualizerState extends State<Visualizer>
- with SingleTickerProviderStateMixin {
+class _VisualizerState extends State<Visualizer> with TickerProviderStateMixin {
late Ticker ticker;
late Stopwatch sw;
- late Bmp32Header fftImageRow;
- late Bmp32Header fftImageMatrix;
- late int fftSize;
- late int halfFftSize;
- late int fftBitmapRange;
- ffi.Pointer<ffi.Pointer<ffi.Float>> playerData = ffi.nullptr;
- ffi.Pointer<ffi.Pointer<ffi.Float>> captureData = ffi.nullptr;
+ late Bmp32Header image;
+ late int bitmapRange;
late Future<ui.Image?> Function() buildImageCallback;
- late int Function(int row, int col) textureTypeCallback;
+ late int Function(SampleRow row, SampleColumn col) textureTypeCallback;
int nFrames = 0;
@override
void initState() {
super.initState();
- /// these constants must not be touched since SoLoud
- /// gives back a size of 256 values
- fftSize = 512;
- halfFftSize = fftSize >> 1;
-
- playerData = calloc();
- captureData = calloc();
-
ticker = createTicker(_tick);
sw = Stopwatch();
sw.start();
@@ -126,7 +134,10 @@ class _VisualizerState extends State<Visualizer>
SoLoudCapture.instance.isCaptureStarted();
widget.controller.addListener(() {
- ticker.stop();
+ ticker
+ ..stop()
+ ..dispose();
+ ticker = createTicker(_tick);
setupBitmapSize();
ticker.start();
sw.reset();
@@ -136,55 +147,65 @@ class _VisualizerState extends State<Visualizer>
@override
void dispose() {
- ticker.stop();
+ ticker
+ ..stop()
+ ..dispose();
sw.stop();
- calloc.free(playerData);
- playerData = ffi.nullptr;
- calloc.free(captureData);
- captureData = ffi.nullptr;
super.dispose();
}
void _tick(Duration elapsed) {
nFrames++;
- if (mounted) {
- setState(() {});
+ if (context.mounted) {
+ try {
+ widget.controller.audioData.updateSamples();
+ setState(() {});
+ } on Exception catch (e) {
+ debugPrint('$e');
+ }
}
}
- void setupBitmapSize() {
- fftBitmapRange =
- widget.controller.maxFreqRange - widget.controller.minFreqRange;
- fftImageRow = Bmp32Header.setHeader(fftBitmapRange, 2);
- fftImageMatrix = Bmp32Header.setHeader(fftBitmapRange, 256);
-
- switch (widget.textureType) {
- case TextureType.both1D:
- {
- buildImageCallback = buildImageFromLatestSamplesRow;
- break;
- }
- case TextureType.fft2D:
- {
- buildImageCallback = buildImageFromAllSamplesMatrix;
- textureTypeCallback = getFFTDataCallback;
- break;
- }
- case TextureType.wave2D:
- {
- buildImageCallback = buildImageFromAllSamplesMatrix;
- textureTypeCallback = getWaveDataCallback;
- break;
- }
- // TODO(marco): implement this
- case TextureType.both2D:
- {
- buildImageCallback = buildImageFromAllSamplesMatrix;
- textureTypeCallback = getWaveDataCallback;
- break;
- }
- }
- }
@override
Widget build(BuildContext context) {
@@ -193,19 +214,13 @@ class _VisualizerState extends State<Visualizer>
builder: (context, dataTexture) {
final fps = nFrames.toDouble() / (sw.elapsedMilliseconds / 1000.0);
if (!dataTexture.hasData || dataTexture.data == null) {
- return Placeholder(
- color: Colors.yellow,
- fallbackWidth: 100,
- fallbackHeight: 100,
+ return const Placeholder(
+ color: Colors.red,
strokeWidth: 0.5,
- child: Text("can't get audio samples\n"
- 'FPS: ${fps.toStringAsFixed(1)}'),
+ child: Text("\n can't get audio samples \n"),
);
}
- final nFft =
- widget.controller.maxFreqRange - widget.controller.minFreqRange;
-
return LayoutBuilder(
builder: (context, constraints) {
return Column(
@@ -244,12 +259,28 @@ class _VisualizerState extends State<Visualizer>
sw.reset();
nFrames = 0;
},
- child: AudioShader(
- width: constraints.maxWidth,
- height: constraints.maxWidth / 2.4,
- image: dataTexture.data!,
- shader: widget.shader,
- iTime: sw.elapsedMilliseconds / 1000.0,
+ child: FutureBuilder(
+ future: loadShader(),
+ builder: (context, snapshot) {
+ if (snapshot.hasData) {
+ return AudioShader(
+ width: constraints.maxWidth,
+ height: constraints.maxWidth / 2.4,
+ image: dataTexture.data!,
+ shader: snapshot.data!,
+ iTime: sw.elapsedMilliseconds / 1000.0,
+ );
+ } else {
+ if (snapshot.data == null) {
+ return const Placeholder(
+ child: Align(
+ child: Text('Error compiling shader.\nSee log'),
+ ),
+ );
+ }
+ return const CircularProgressIndicator();
+ }
+ },
),
),
@@ -257,9 +288,9 @@ class _VisualizerState extends State<Visualizer>
children: [
Column(
children: [
- Text(
- '$nFft FFT data',
- style: const TextStyle(fontWeight: FontWeight.bold),
+ const Text(
+ 'FFT data',
+ style: TextStyle(fontWeight: FontWeight.bold),
),
/// FFT bars
@@ -271,11 +302,9 @@ class _VisualizerState extends State<Visualizer>
nFrames = 0;
},
child: BarsFftWidget(
- audioData: widget.controller.isVisualizerForPlayer
- ? playerData.value
- : captureData.value,
- minFreq: widget.controller.minFreqRange,
- maxFreq: widget.controller.maxFreqRange,
+ audioData: widget.controller.audioData,
+ minFreq: widget.controller.minRange,
+ maxFreq: widget.controller.maxRange,
width: constraints.maxWidth / 2 - 3,
height: constraints.maxWidth / 6,
),
@@ -299,9 +328,7 @@ class _VisualizerState extends State<Visualizer>
nFrames = 0;
},
child: BarsWaveWidget(
- audioData: widget.controller.isVisualizerForPlayer
- ? playerData.value
- : captureData.value,
+ audioData: widget.controller.audioData,
width: constraints.maxWidth / 2 - 3,
height: constraints.maxWidth / 6,
),
@@ -318,142 +345,183 @@ class _VisualizerState extends State<Visualizer>
);
}
- /// build an image to be passed to the shader.
- /// The image is a matrix of 256x2 RGBA pixels representing:
- /// in the 1st row the frequencies data
- /// in the 2nd row the wave data
- Future<ui.Image?> buildImageFromLatestSamplesRow() async {
- if (!widget.controller.isVisualizerEnabled) {
- return null;
+ /// load asynchronously the fragment shader
+ Future<ui.FragmentShader?> loadShader() async {
+ try {
+ final program = await ui.FragmentProgram.fromAsset(widget.shader);
+ return program.fragmentShader();
+ } catch (e) {
+ debugPrint('error compiling the shader $e');
}
+ return null;
+ }
- /// get audio data from player or capture device
- if (widget.controller.isVisualizerForPlayer) {
- try {
- SoLoud.instance.getAudioTexture2D(playerData);
- } catch (e) {
- return null;
- }
- } else if (!widget.controller.isVisualizerForPlayer &&
- widget.controller.isCaptureStarted) {
- final ret = SoLoudCapture.instance.getCaptureAudioTexture2D(captureData);
- if (ret != CaptureErrors.captureNoError) {
- return null;
- }
- } else {
- return null;
+ void setupBitmapSize() {
+ bitmapRange = widget.controller.maxRange - widget.controller.minRange + 1;
+
+ switch (widget.controller.samplesKind) {
+ case GetSamplesKind.wave:
+ {
+ image = Bmp32Header.setHeader(bitmapRange, 1);
+ buildImageCallback = buildImageForWave;
+ break;
+ }
+ case GetSamplesKind.linear:
+ {
+ image = Bmp32Header.setHeader(bitmapRange, 2);
+ buildImageCallback = buildImageForLinear;
+ break;
+ }
+ case GetSamplesKind.texture:
+ {
+ image = Bmp32Header.setHeader(bitmapRange, 256);
+ buildImageCallback = buildImageForTexture;
+ break;
+ }
}
+ }
- if (!mounted) {
+ /// Build an image to be passed to the shader.
+ /// The image is a matrix of 256x1 RGBA pixels representing the wave data.
+ Future<ui.Image?> buildImageForWave() async {
+ if (!context.mounted) {
+ return null;
+ }
+ if (!(widget.controller.isVisualizerEnabled &&
+ SoLoud.instance.getVoiceCount() > 0) &&
+ !widget.controller.isCaptureStarted) {
return null;
}
final completer = Completer<ui.Image>();
- final bytes = Uint8List(fftBitmapRange * 2 * 4);
+ final bytes = Uint8List(bitmapRange * 4);
// Fill the texture bitmap
var col = 0;
- for (var i = widget.controller.minFreqRange;
- i < widget.controller.maxFreqRange;
+ for (var i = widget.controller.minRange;
+ i <= widget.controller.maxRange;
++i, ++col) {
- // fill 1st bitmap row with magnitude
- bytes[col * 4 + 0] = getFFTDataCallback(0, i);
+ // fill bitmap row with wave data
+ final z = getWave(SampleWave(i));
+ bytes[col * 4 + 0] = z;
bytes[col * 4 + 1] = 0;
bytes[col * 4 + 2] = 0;
bytes[col * 4 + 3] = 255;
- // fill 2nd bitmap row with amplitude
- bytes[(fftBitmapRange + col) * 4 + 0] = getWaveDataCallback(0, i);
- bytes[(fftBitmapRange + col) * 4 + 1] = 0;
- bytes[(fftBitmapRange + col) * 4 + 2] = 0;
- bytes[(fftBitmapRange + col) * 4 + 3] = 255;
}
- final img = fftImageRow.storeBitmap(bytes);
+ final img = image.storeBitmap(bytes);
ui.decodeImageFromList(img, completer.complete);
return completer.future;
}
- /// build an image to be passed to the shader.
- /// The image is a matrix of 256x256 RGBA pixels representing
- /// rows of wave data or frequencies data.
- /// Passing [getWaveDataCallback] as parameter, it will return wave data
- /// Passing [getFFTDataCallback] as parameter, it will return FFT data
- Future<ui.Image?> buildImageFromAllSamplesMatrix() async {
- if (!widget.controller.isVisualizerEnabled) {
+ /// Build an image to be passed to the shader.
+ /// The image is a matrix of 256x2 RGBA pixels representing:
+ /// in the 1st row the frequencies data
+ /// in the 2nd row the wave data
+ Future<ui.Image?> buildImageForLinear() async {
+ if (!context.mounted) {
return null;
}
-
- /// get audio data from player or capture device
- if (widget.controller.isVisualizerForPlayer) {
- try {
- SoLoud.instance.getAudioTexture2D(playerData);
- } catch (e) {
- return null;
- }
- } else if (!widget.controller.isVisualizerForPlayer &&
- widget.controller.isCaptureStarted) {
- final ret = SoLoudCapture.instance.getCaptureAudioTexture2D(captureData);
- if (ret != CaptureErrors.captureNoError) {
- return null;
- }
- } else {
+ if (!(widget.controller.isVisualizerEnabled &&
+ SoLoud.instance.getVoiceCount() > 0) &&
+ !widget.controller.isCaptureStarted) {
return null;
}
- if (!mounted) {
- return null;
+ final completer = Completer<ui.Image>();
+ final bytes = Uint8List(bitmapRange * 4 * 2);
+ var col = 0;
+ // Fill the texture bitmap
+ for (var i = widget.controller.minRange;
+ i <= widget.controller.maxRange;
+ ++i, ++col) {
+ // fill 1st bitmap row with FFT magnitude
+ bytes[col * 4 + 0] = getLinearFft(SampleLinear(i));
+ bytes[col * 4 + 1] = 0;
+ bytes[col * 4 + 2] = 0;
+ bytes[col * 4 + 3] = 255;
+ // fill 2nd bitmap row with wave amplitudes
+ bytes[col * 4 + 256 * 4 + 0] = getLinearWave(SampleLinear(i));
+ bytes[col * 4 + 256 * 4 + 1] = 0;
+ bytes[col * 4 + 256 * 4 + 2] = 0;
+ bytes[col * 4 + 256 * 4 + 3] = 255;
}
- /// IMPORTANT: if [mounted] is not checked here, could happens that
- /// dispose() is called before this is called but it is called!
- /// Since in dispose the [audioData] is freed, there will be a crash!
- /// I do not understand why this happens because the FutureBuilder
- /// seems has not finished before dispose()!?
- if (!mounted) {
+ final img = image.storeBitmap(bytes);
+ ui.decodeImageFromList(img, completer.complete);
+
+ return completer.future;
+ }
+
+ /// Build an image to be passed to the shader.
+ /// The image is a matrix of 256x256 RGBA pixels representing
+ /// rows of wave data or frequencies data.
+ Future<ui.Image?> buildImageForTexture() async {
+ if (!context.mounted) {
return null;
}
+ if (!(widget.controller.isVisualizerEnabled &&
+ SoLoud.instance.getVoiceCount() > 0) &&
+ !widget.controller.isCaptureStarted) {
+ return null;
+ }
+
+ final width = widget.controller.maxRange - widget.controller.minRange;
+
+ /// On the web, acquiring data is slower because a JS function must be
+ /// called for every single sample.
+ /// Use a height of 100 instead of 256 to improve performance.
+ const height = kIsWeb ? 100 : 256;
+
final completer = Completer<ui.Image>();
- final bytes = Uint8List(fftBitmapRange * 256 * 4);
+ final bytes = Uint8List(width * height * 4);
// Fill the texture bitmap with wave data
- for (var y = 0; y < 256; ++y) {
+ var row = 0;
+ for (var y = 0; y < height; ++y, ++row) {
var col = 0;
- for (var x = widget.controller.minFreqRange;
- x < widget.controller.maxFreqRange;
- ++x, ++col) {
- bytes[y * fftBitmapRange * 4 + col * 4 + 0] = textureTypeCallback(y, x);
- bytes[y * fftBitmapRange * 4 + col * 4 + 1] = 0;
- bytes[y * fftBitmapRange * 4 + col * 4 + 2] = 0;
- bytes[y * fftBitmapRange * 4 + col * 4 + 3] = 255;
+ for (var x = 0; x < width; ++x, ++col) {
+ final z = getTexture(SampleRow(y), SampleColumn(x));
+ bytes[row * width * 4 + col * 4 + 0] = z;
+ bytes[row * width * 4 + col * 4 + 1] = 0;
+ bytes[row * width * 4 + col * 4 + 2] = 0;
+ bytes[row * width * 4 + col * 4 + 3] = 255;
}
}
- final img = fftImageMatrix.storeBitmap(bytes);
+ image = Bmp32Header.setHeader(width, height);
+ final img = image.storeBitmap(bytes);
ui.decodeImageFromList(img, completer.complete);
+ // final ui.Codec codec = await ui.instantiateImageCodec(img);
+ // final ui.FrameInfo frameInfo = await codec.getNextFrame();
+ // completer.complete(frameInfo.image);
return completer.future;
}
- int getFFTDataCallback(int row, int col) {
- if (widget.controller.isVisualizerForPlayer) {
- return (playerData.value[row * fftSize + col] * 255.0).toInt();
- } else {
- return (captureData.value[row * fftSize + col] * 255.0).toInt();
- }
+ int getWave(SampleWave offset) {
+ final n = widget.controller.audioData.getWave(offset);
+ return (((n + 1.0) / 2.0).clamp(0, 1) * 128).toInt();
}
- int getWaveDataCallback(int row, int col) {
- if (widget.controller.isVisualizerForPlayer) {
- return (((playerData.value[row * fftSize + halfFftSize + col] + 1.0) /
- 2.0) *
- 128)
- .toInt();
- } else {
- return (((captureData.value[row * fftSize + halfFftSize + col] + 1.0) /
- 2.0) *
- 128)
- .toInt();
- }
+ int getLinearFft(SampleLinear offset) {
+ return (widget.controller.audioData.getLinearFft(offset).clamp(0, 1) * 255)
+ .toInt();
+ }
+
+ int getLinearWave(SampleLinear offset) {
+ final n = widget.controller.audioData.getLinearWave(offset).abs();
+ return (((n + 1.0) / 2.0).clamp(0, 1) * 128).toInt();
+ }
+
+ int getTexture(SampleRow row, SampleColumn col) {
+ final n = widget.controller.audioData.getTexture(row, col);
+
+ /// With col < 256 we are asking for FFT values.
+ if (col.value < 256) return (n.clamp(0, 1) * 255).toInt();
+
+ /// With col >= 256 we are asking for wave values.
+ return (((n + 1.0) / 2.0).clamp(0, 1) * 128).toInt();
}
}
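A quick sanity check of the byte mapping used in the getters above: FFT magnitudes arrive in [0, 1] and map to 0..255, while wave samples arrive in [-1, 1] and map to 0..128. The snippet below is illustrative only, not part of the patch:

```dart
void main() {
  // FFT: 0.5 in [0, 1] maps to roughly half of the 0..255 byte range.
  print((0.5.clamp(0, 1) * 255).toInt()); // 127

  // Wave: 0.0 (silence) sits at the midpoint of the 0..128 range.
  print((((0.0 + 1.0) / 2.0).clamp(0, 1) * 128).toInt()); // 64

  // Wave: a full negative excursion maps to 0.
  print((((-1.0 + 1.0) / 2.0).clamp(0, 1) * 128).toInt()); // 0
}
```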
diff --git a/example/test/widget_test.dart b/example/test/widget_test.dart
new file mode 100644
index 0000000..092d222
--- /dev/null
+++ b/example/test/widget_test.dart
@@ -0,0 +1,30 @@
+// This is a basic Flutter widget test.
+//
+// To perform an interaction with a widget in your test, use the WidgetTester
+// utility in the flutter_test package. For example, you can send tap and scroll
+// gestures. You can also use WidgetTester to find child widgets in the widget
+// tree, read text, and verify that the values of widget properties are correct.
+
+import 'package:flutter/material.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import 'package:example/main.dart';
+
+void main() {
+ testWidgets('Counter increments smoke test', (WidgetTester tester) async {
+ // Build our app and trigger a frame.
+ await tester.pumpWidget(const MyApp());
+
+ // Verify that our counter starts at 0.
+ expect(find.text('0'), findsOneWidget);
+ expect(find.text('1'), findsNothing);
+
+ // Tap the '+' icon and trigger a frame.
+ await tester.tap(find.byIcon(Icons.add));
+ await tester.pump();
+
+ // Verify that our counter has incremented.
+ expect(find.text('0'), findsNothing);
+ expect(find.text('1'), findsOneWidget);
+ });
+}
diff --git a/example/tests/tests.dart b/example/tests/tests.dart
index afae10a..5688e73 100644
--- a/example/tests/tests.dart
+++ b/example/tests/tests.dart
@@ -1,100 +1,258 @@
import 'dart:async';
-import 'dart:io';
+import 'dart:ui';
import 'package:flutter/material.dart';
-import 'package:flutter/services.dart';
import 'package:flutter_soloud/flutter_soloud.dart';
-import 'package:flutter_soloud/src/soloud_controller.dart';
-import 'package:logging/logging.dart';
+import 'package:flutter_soloud/src/bindings/soloud_controller.dart';
-/// An end-to-end test.
+enum TestStatus {
+ none,
+ passed,
+ failed,
+}
+
+typedef TestFunction = ({
+ String name,
+ Future<StringBuffer> Function() callback,
+ TestStatus status,
+});
+
+/// A GUI for tests.
///
/// Run this with `flutter run tests/tests.dart`.
-void main() async {
- // Make sure we can see logs from the engine, even in release mode.
- // ignore: avoid_print
- final errorsBuffer = StringBuffer();
- Logger.root.onRecord.listen((record) {
- debugPrint(record.toString(), wrapWidth: 80);
- if (record.level >= Level.WARNING) {
- // Exception for deiniting.
- if (record.error is SoLoudInitializationStoppedByDeinitException) {
- return;
- }
-
- // Make sure the warnings are visible.
- stderr.writeln('TEST error (${record.level} log): $record');
- errorsBuffer.writeln('- $record');
- // Set exit code but keep running to see all logs.
- exitCode = 1;
- }
- });
- Logger.root.level = Level.ALL;
-
+void main() {
WidgetsFlutterBinding.ensureInitialized();
- var tests = <Future<void> Function()>[
- testProtectVoice,
- testAllInstancesFinished,
- testCreateNotes,
- testPlaySeekPause,
- testPan,
- testHandles,
- loopingTests,
- ];
- for (final f in tests) {
- await runZonedGuarded(
- () async => f(),
- (error, stack) => printError,
+ runApp(
+ MaterialApp(
+ themeMode: ThemeMode.dark,
+ darkTheme: ThemeData.dark(useMaterial3: true),
+ scrollBehavior: const MaterialScrollBehavior().copyWith(
+ // enable mouse dragging
+ dragDevices: PointerDeviceKind.values.toSet(),
+ ),
+ home: const Padding(
+ padding: EdgeInsets.all(8),
+ child: MyHomePage(),
+ ),
+ ),
+ );
+}
+
+class MyHomePage extends StatefulWidget {
+ const MyHomePage({super.key});
+
+ @override
+ State<MyHomePage> createState() => _MyHomePageState();
+}
+
+class _MyHomePageState extends State<MyHomePage> {
+ final output = StringBuffer();
+ final List<TestFunction> tests = [];
+ final textEditingController = TextEditingController();
+
+ @override
+ void initState() {
+ super.initState();
+
+ /// Add all testing functions.
+ tests.addAll([
+ (
+ name: 'testProtectVoice',
+ status: TestStatus.none,
+ callback: testProtectVoice,
+ ),
+ (
+ name: 'testAllInstancesFinished',
+ status: TestStatus.none,
+ callback: testAllInstancesFinished,
+ ),
+ (
+ name: 'testCreateNotes',
+ status: TestStatus.none,
+ callback: testCreateNotes,
+ ),
+ (
+ name: 'testPlaySeekPause',
+ status: TestStatus.none,
+ callback: testPlaySeekPause,
+ ),
+ (
+ name: 'testPan',
+ status: TestStatus.none,
+ callback: testPan,
+ ),
+ (
+ name: 'testHandles',
+ status: TestStatus.none,
+ callback: testHandles,
+ ),
+ (
+ name: 'loopingTests',
+ status: TestStatus.none,
+ callback: loopingTests,
+ ),
+ (
+ name: 'testSynchronousDeinit',
+ status: TestStatus.none,
+ callback: testSynchronousDeinit,
+ ),
+ (
+ name: 'testAsynchronousDeinit',
+ status: TestStatus.none,
+ callback: testAsynchronousDeinit,
+ ),
+ ]);
+ }
+
+ @override
+ void dispose() {
+ super.dispose();
+ }
+
+ @override
+ Widget build(BuildContext context) {
+ return SafeArea(
+ child: Scaffold(
+ body: Column(
+ children: [
+ Column(
+ mainAxisSize: MainAxisSize.min,
+ children: [
+ OutlinedButton(
+ onPressed: () async {
+ for (var i = 0; i < tests.length; i++) {
+ await runTest(i);
+ }
+ },
+ child: const Text('Run All'),
+ ),
+ const SizedBox(height: 16),
+ Wrap(
+ spacing: 8,
+ runSpacing: 8,
+ children: List.generate(
+ tests.length,
+ (index) {
+ return OutlinedButton(
+ style: ButtonStyle(
+ backgroundColor: tests[index].status ==
+ TestStatus.failed
+ ? const WidgetStatePropertyAll(Colors.red)
+ : tests[index].status == TestStatus.passed
+ ? const WidgetStatePropertyAll(Colors.green)
+ : null,
+ ),
+ onPressed: () async {
+ await runTest(index);
+ },
+ child: Text(
+ tests[index].name,
+ ),
+ );
+ },
+ ),
+ ),
+ const SizedBox(height: 16),
+ ],
+ ),
+ Expanded(
+ child: TextField(
+ controller: textEditingController,
+ style: const TextStyle(color: Colors.black, fontSize: 12),
+ expands: true,
+ maxLines: null,
+ decoration: const InputDecoration(
+ fillColor: Colors.white,
+ filled: true,
+ ),
+ ),
+ ),
+ ],
+ ),
+ ),
);
}
- tests = <Future<void> Function()>[
- testSynchronousDeinit,
- testAsynchronousDeinit,
- ];
- for (final f in tests) {
- await runZonedGuarded(
- () async => f(),
+ /// Run the test with index [index].
+ ///
+ /// This outputs the assert logs and the `StringBuffer` returned by
+ /// the test functions.
+ /// It also updates the state of the test buttons.
+ Future<void> runTest(int index) async {
+ await runZonedGuarded<Future<void>>(
+ () async {
+ output
+ ..write('===== RUNNING "${tests[index].name}" =====\n')
+ ..write(await tests[index].callback())
+ ..write('===== PASSED! =====\n\n')
+ ..writeln();
+ tests[index] = (
+ name: tests[index].name,
+ status: TestStatus.passed,
+ callback: tests[index].callback,
+ );
+ textEditingController.text = output.toString();
+ debugPrint(output.toString());
+ if (context.mounted) setState(() {});
+ },
(error, stack) {
- if (error is SoLoudInitializationStoppedByDeinitException) {
- // This is to be expected in this test.
- return;
- }
- printError(error, stack);
+ // if (error is SoLoudInitializationStoppedByDeinitException) {
+ // // This is to be expected in this test.
+ // return;
+ // }
+ output
+ ..write('== TESTS "${tests[index].name}" FAILED with '
+ 'the following error(s) ==')
+ ..writeln()
+ ..writeAll([error, stack], '\n\n')
+ ..writeln()
+ ..writeln();
+ // ignore: parameter_assignments
+ tests[index] = (
+ name: tests[index].name,
+ status: TestStatus.failed,
+ callback: tests[index].callback,
+ );
+ textEditingController.text = output.toString();
+ debugPrint(output.toString());
+ if (context.mounted) setState(() {});
},
);
}
+}
- stdout.write('\n\n\n---\n\n\n');
-
- if (exitCode != 0) {
- // Since we're running this inside `flutter run`, the exit code
- // will be overridden to 0 by the Flutter tool.
- // The following is making sure that the errors are noticed.
- stderr
- ..writeln('===== TESTS FAILED with the following error(s) =====')
- ..writeln()
- ..writeln(errorsBuffer.toString())
- ..writeln()
- ..writeln('See logs above for details.')
- ..writeln();
- } else {
- debugPrint('===== TESTS PASSED! =====');
- stdout
- ..writeln('===== TESTS PASSED! =====')
- ..writeln();
- }
+// ////////////////////////////
+// / Common methods
+// ////////////////////////////
+
+Future<void> initialize() async {
+ await SoLoud.instance.init();
+ SoLoud.instance.setGlobalVolume(0.2);
+}
- // Cleanly close the app.
- await SystemChannels.platform.invokeMethod('SystemNavigator.pop');
+void deinit() {
+ SoLoud.instance.deinit();
}
-String output = '';
-AudioSource? currentSound;
+Future<void> delay(int ms) async {
+ await Future.delayed(Duration(milliseconds: ms), () {});
+}
+
+bool closeTo(num value, num expected, num epsilon) {
+ return (value - expected).abs() <= epsilon.abs();
+}
+
+Future<AudioSource> loadAsset() async {
+ return SoLoud.instance.loadAsset('assets/audio/explosion.mp3');
+}
+
+// ///////////////////////////
+// / Tests
+// ///////////////////////////
/// Test setMaxActiveVoiceCount, setProtectedVoice and getProtectedVoice
-Future<void> testProtectVoice() async {
+Future<StringBuffer> testProtectVoice() async {
await initialize();
final defaultVoiceCount = SoLoud.instance.getMaxActiveVoiceCount();
@@ -147,11 +305,13 @@ Future testProtectVoice() async {
'Max active voices are not reset to the default value after reinit!',
);
deinit();
+
+ return StringBuffer();
}
/// Test allInstancesFinished stream
-Future<void> testAllInstancesFinished() async {
-  final log = Logger('testAllInstancesFinished');
+Future<StringBuffer> testAllInstancesFinished() async {
+ final ret = StringBuffer();
await initialize();
await SoLoud.instance.disposeAllSources();
@@ -170,14 +330,14 @@ Future testAllInstancesFinished() async {
var songDisposed = false;
unawaited(
explosion.allInstancesFinished.first.then((_) async {
- log.info('All instances of explosion finished.');
+ ret.write('All instances of explosion finished.\n');
await SoLoud.instance.disposeSource(explosion);
explosionDisposed = true;
}),
);
unawaited(
song.allInstancesFinished.first.then((_) async {
- log.info('All instances of song finished.');
+ ret.write('All instances of song finished.\n');
await SoLoud.instance.disposeSource(song);
songDisposed = true;
}),
@@ -198,127 +358,12 @@ Future testAllInstancesFinished() async {
assert(songDisposed, "Song sound wasn't disposed.");
deinit();
-}
-
-/// Test asynchronous `init()`-`deinit()`
-Future<void> testAsynchronousDeinit() async {
- final log = Logger('testAsynchronousDeinit');
-
- /// test asynchronous init-deinit looping with a short decreasing time
- for (var t = 100; t >= 0; t--) {
- var error = '';
-
- /// Initialize the player
- unawaited(
- SoLoud.instance.init().then(
- (_) {},
- onError: (Object e) {
- if (e is SoLoudInitializationStoppedByDeinitException) {
- // This is to be expected.
- log.info('$e');
- return;
- }
- e = 'TEST FAILED delay: $t. Player starting error: $e';
- error = e.toString();
- },
- ),
- );
- assert(error.isEmpty, error);
-
- /// wait for [t] ms and deinit()
- await delay(t);
- SoLoud.instance.deinit();
- final after = SoLoudController().soLoudFFI.isInited();
-
- assert(
- after == false,
- 'TEST FAILED delay: $t. The player has not been deinited correctly!',
- );
-
- stderr.writeln('------------- awaited init delay $t passed\n');
- }
-}
-
-/// Test synchronous `init()`-`deinit()`
-Future<void> testSynchronousDeinit() async {
- final log = Logger('testSynchronousDeinit');
-
- /// test synchronous init-deinit looping with a short decreasing time
- /// waiting for `initialize()` to finish
- for (var t = 100; t >= 0; t--) {
- var error = '';
-
- /// Initialize the player
- await SoLoud.instance.init().then(
- (_) {},
- onError: (Object e) {
- if (e is SoLoudInitializationStoppedByDeinitException) {
- // This is to be expected.
- log.info('$e');
- return;
- }
- e = 'TEST FAILED delay: $t. Player starting error: $e';
- error = e.toString();
- },
- );
- assert(
- error.isEmpty,
- 'ASSERT FAILED delay: $t. The player has not been '
- 'inited correctly!',
- );
-
- SoLoud.instance.deinit();
-
- assert(
- !SoLoud.instance.isInitialized ||
- !SoLoudController().soLoudFFI.isInited(),
- 'ASSERT FAILED delay: $t. The player has not been '
- 'inited or deinited correctly!',
- );
-
- stderr.writeln('------------- awaited init #$t passed\n');
- }
-
- /// Try init-play-deinit and again init-play without disposing the sound
- await SoLoud.instance.init();
- SoLoud.instance.setGlobalVolume(0.2);
-
- await loadAsset();
- await SoLoud.instance.play(currentSound!);
- await delay(100);
- await SoLoud.instance.play(currentSound!);
- await delay(100);
- await SoLoud.instance.play(currentSound!);
-
- await delay(2000);
-
- SoLoud.instance.deinit();
-
- /// Initialize again and check if the sound has been
- /// disposed correctly by `deinit()`
- await SoLoud.instance.init();
- assert(
- SoLoudController()
- .soLoudFFI
- .getIsValidVoiceHandle(currentSound!.handles.first) ==
- false,
- 'getIsValidVoiceHandle(): sound not disposed by the engine',
- );
- assert(
- SoLoudController().soLoudFFI.countAudioSource(currentSound!.soundHash) == 0,
- 'getCountAudioSource(): sound not disposed by the engine',
- );
- assert(
- SoLoudController().soLoudFFI.getActiveVoiceCount() == 0,
- 'getActiveVoiceCount(): sound not disposed by the engine',
- );
- SoLoud.instance.deinit();
+ return ret;
}
/// Test waveform
-///
-Future<void> testCreateNotes() async {
+Future<StringBuffer> testCreateNotes() async {
await initialize();
final notes0 = await SoLoudTools.createNotes(
@@ -332,7 +377,7 @@ Future testCreateNotes() async {
);
assert(
notes0.length == 12 && notes1.length == 12 && notes2.length == 12,
- 'SoLoudTools.createNotes() failed!',
+ 'SoLoudTools.createNotes() failed!\n',
);
await SoLoud.instance.play(notes1[5]);
@@ -366,42 +411,46 @@ Future testCreateNotes() async {
await SoLoud.instance.stop(notes1[1].handles.first);
deinit();
+
+ return StringBuffer();
}
/// Test play, pause, seek, position
///
-Future<void> testPlaySeekPause() async {
+Future<StringBuffer> testPlaySeekPause() async {
/// Start audio isolate
await initialize();
/// Load sample
- await loadAsset();
+ final currentSound =
+ await SoLoud.instance.loadAsset('assets/audio/explosion.mp3');
/// pause, seek test
{
- await SoLoud.instance.play(currentSound!);
- final length = SoLoud.instance.getLength(currentSound!);
+ await SoLoud.instance.play(currentSound);
+ final length = SoLoud.instance.getLength(currentSound);
assert(
length.inMilliseconds == 3840,
- 'getLength() failed: ${length.inMilliseconds}!',
+ 'getLength() failed: ${length.inMilliseconds}!\n',
);
await delay(1000);
- SoLoud.instance.pauseSwitch(currentSound!.handles.first);
- final paused = SoLoud.instance.getPause(currentSound!.handles.first);
+ SoLoud.instance.pauseSwitch(currentSound.handles.first);
+ final paused = SoLoud.instance.getPause(currentSound.handles.first);
assert(paused, 'pauseSwitch() failed!');
/// seek
const wantedPosition = Duration(seconds: 2);
- SoLoud.instance.seek(currentSound!.handles.first, wantedPosition);
- final position = SoLoud.instance.getPosition(currentSound!.handles.first);
+ SoLoud.instance.seek(currentSound.handles.first, wantedPosition);
+ final position = SoLoud.instance.getPosition(currentSound.handles.first);
assert(position == wantedPosition, 'getPosition() failed!');
}
deinit();
+ return StringBuffer();
}
/// Test instancing playing handles and their disposal
-Future<void> testPan() async {
+Future<StringBuffer> testPan() async {
/// Start audio isolate
await initialize();
@@ -422,20 +471,33 @@ Future testPan() async {
await delay(1000);
deinit();
+ return StringBuffer();
}
/// Test instancing playing handles and their disposal
-Future<void> testHandles() async {
+Future<StringBuffer> testHandles() async {
+ var output = '';
+
/// Start audio isolate
await initialize();
/// Load sample
- await loadAsset();
+ final currentSound =
+ await SoLoud.instance.loadAsset('assets/audio/explosion.mp3');
+
+ currentSound.soundEvents.listen((event) {
+ if (event.event == SoundEventType.handleIsNoMoreValid) {
+ output = 'SoundEvent.handleIsNoMoreValid';
+ }
+ if (event.event == SoundEventType.soundDisposed) {
+ output = 'SoundEvent.soundDisposed';
+ }
+ });
/// Play sample
- await SoLoud.instance.play(currentSound!);
+ await SoLoud.instance.play(currentSound);
assert(
- currentSound!.soundHash.isValid && currentSound!.handles.length == 1,
+ currentSound.soundHash.isValid && currentSound.handles.length == 1,
'play() failed!',
);
@@ -447,12 +509,12 @@ Future testHandles() async {
);
/// Play 4 sample
- await SoLoud.instance.play(currentSound!);
- await SoLoud.instance.play(currentSound!);
- await SoLoud.instance.play(currentSound!);
- await SoLoud.instance.play(currentSound!);
+ await SoLoud.instance.play(currentSound);
+ await SoLoud.instance.play(currentSound);
+ await SoLoud.instance.play(currentSound);
+ await SoLoud.instance.play(currentSound);
assert(
- currentSound!.handles.length == 4,
+ currentSound.handles.length == 4,
'loadFromAssets() failed!',
);
@@ -461,77 +523,156 @@ Future testHandles() async {
/// 3798ms explosion.mp3 sample duration
await delay(4500);
assert(
- currentSound!.handles.isEmpty,
+ currentSound.handles.isEmpty,
'Play 4 sample handles failed!',
);
deinit();
+ return StringBuffer();
}
/// Test looping state and `loopingStartAt`
-Future<void> loopingTests() async {
+Future<StringBuffer> loopingTests() async {
await initialize();
- await loadAsset();
+ /// Load sample
+ final currentSound =
+ await SoLoud.instance.loadAsset('assets/audio/explosion.mp3');
await SoLoud.instance.play(
- currentSound!,
+ currentSound,
looping: true,
loopingStartAt: const Duration(seconds: 1),
);
assert(
- SoLoud.instance.getLooping(currentSound!.handles.first),
+ SoLoud.instance.getLooping(currentSound.handles.first),
'looping failed!',
);
/// Wait for the first loop to start at 1s
await delay(4100);
assert(
- SoLoud.instance.getLoopPoint(currentSound!.handles.first) ==
+ SoLoud.instance.getLoopPoint(currentSound.handles.first) ==
const Duration(seconds: 1) &&
- SoLoud.instance.getPosition(currentSound!.handles.first) >
+ SoLoud.instance.getPosition(currentSound.handles.first) >
const Duration(seconds: 1),
'looping start failed!',
);
deinit();
+ return StringBuffer();
}
-/// Common methods
-Future<void> initialize() async {
- await SoLoud.instance.init();
- SoLoud.instance.setGlobalVolume(0.2);
-}
+/// Test asynchronous `init()`-`deinit()`
+Future<StringBuffer> testAsynchronousDeinit() async {
+ /// test asynchronous init-deinit looping with a short decreasing time
+ for (var t = 100; t >= 0; t--) {
+ var error = '';
-void deinit() {
- SoLoud.instance.deinit();
-}
+ /// Initialize the player
+ unawaited(
+ SoLoud.instance.init().then(
+ (_) {},
+ onError: (Object e) {
+ if (e is SoLoudInitializationStoppedByDeinitException) {
+ // This is to be expected.
+ debugPrint('$e\n');
+ return;
+ }
+ debugPrint('TEST FAILED delay: $t. Player starting error: $e\n');
+ error = e.toString();
+ },
+ ),
+ );
-Future<void> delay(int ms) async {
- await Future.delayed(Duration(milliseconds: ms), () {});
+ assert(error.isEmpty, error);
+
+ /// wait for [t] ms and deinit()
+ await delay(t);
+ SoLoud.instance.deinit();
+ final after = SoLoudController().soLoudFFI.isInited();
+
+ assert(
+ after == false,
+ 'TEST FAILED delay: $t. The player has not been deinited correctly!',
+ );
+
+ debugPrint('------------- awaited init delay $t passed\n');
+ }
+ return StringBuffer();
}
-Future<void> loadAsset() async {
- if (currentSound != null) {
- await SoLoud.instance.disposeSource(currentSound!);
+/// Test synchronous `init()`-`deinit()`
+Future<StringBuffer> testSynchronousDeinit() async {
+ /// test synchronous init-deinit looping with a short decreasing time
+ /// waiting for `initialize()` to finish
+ for (var t = 100; t >= 0; t--) {
+ var error = '';
+
+ /// Initialize the player
+ await SoLoud.instance.init().then(
+ (_) {},
+ onError: (Object e) {
+ if (e is SoLoudInitializationStoppedByDeinitException) {
+ // This is to be expected.
+ debugPrint('$e\n');
+ return;
+ }
+ debugPrint('TEST FAILED delay: $t. Player starting error: $e');
+ error = e.toString();
+ },
+ );
+ assert(error.isEmpty, error);
+
+ SoLoud.instance.deinit();
+
+ assert(
+ !SoLoud.instance.isInitialized ||
+ !SoLoudController().soLoudFFI.isInited(),
+ 'ASSERT FAILED delay: $t. The player has not been '
+ 'inited or deinited correctly!',
+ );
+
+ debugPrint('------------- awaited init #$t passed\n');
}
- currentSound = await SoLoud.instance.loadAsset('assets/audio/explosion.mp3');
- currentSound!.soundEvents.listen((event) {
- if (event.event == SoundEventType.handleIsNoMoreValid) {
- output = 'SoundEvent.handleIsNoMoreValid';
- }
- if (event.event == SoundEventType.soundDisposed) {
- output = 'SoundEvent.soundDisposed';
- }
- });
-}
+ /// Try init-play-deinit and again init-play without disposing the sound
+ await SoLoud.instance.init();
+ SoLoud.instance.setGlobalVolume(0.2);
-bool closeTo(num value, num expected, num epsilon) {
- return (value - expected).abs() <= epsilon.abs();
-}
+ /// Load sample
+ final currentSound =
+ await SoLoud.instance.loadAsset('assets/audio/explosion.mp3');
+
+ await SoLoud.instance.play(currentSound);
+ await delay(100);
+ await SoLoud.instance.play(currentSound);
+ await delay(100);
+ await SoLoud.instance.play(currentSound);
+
+ await delay(2000);
+
+ SoLoud.instance.deinit();
+
+ /// Initialize again and check if the sound has been
+ /// disposed correctly by `deinit()`
+ await SoLoud.instance.init();
+ assert(
+ SoLoudController()
+ .soLoudFFI
+ .getIsValidVoiceHandle(currentSound.handles.first) ==
+ false,
+ 'getIsValidVoiceHandle(): sound not disposed by the engine',
+ );
+ assert(
+ SoLoudController().soLoudFFI.countAudioSource(currentSound.soundHash) == 0,
+ 'getCountAudioSource(): sound not disposed by the engine',
+ );
+ assert(
+ SoLoudController().soLoudFFI.getActiveVoiceCount() == 0,
+ 'getActiveVoiceCount(): sound not disposed by the engine',
+ );
+ SoLoud.instance.deinit();
-void printError(Object error, StackTrace stack) {
- stderr.writeln('TEST error: $error\nstack: $stack');
- exitCode = 1;
+ return StringBuffer();
}
diff --git a/example/web/favicon.png b/example/web/favicon.png
new file mode 100644
index 0000000..8aaa46a
Binary files /dev/null and b/example/web/favicon.png differ
diff --git a/example/web/icons/Icon-192.png b/example/web/icons/Icon-192.png
new file mode 100644
index 0000000..b749bfe
Binary files /dev/null and b/example/web/icons/Icon-192.png differ
diff --git a/example/web/icons/Icon-512.png b/example/web/icons/Icon-512.png
new file mode 100644
index 0000000..88cfd48
Binary files /dev/null and b/example/web/icons/Icon-512.png differ
diff --git a/example/web/icons/Icon-maskable-192.png b/example/web/icons/Icon-maskable-192.png
new file mode 100644
index 0000000..eb9b4d7
Binary files /dev/null and b/example/web/icons/Icon-maskable-192.png differ
diff --git a/example/web/icons/Icon-maskable-512.png b/example/web/icons/Icon-maskable-512.png
new file mode 100644
index 0000000..d69c566
Binary files /dev/null and b/example/web/icons/Icon-maskable-512.png differ
diff --git a/example/web/index.html b/example/web/index.html
new file mode 100644
index 0000000..94a97ee
--- /dev/null
+++ b/example/web/index.html
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ example
+
+
+
+
+
+
+
diff --git a/example/web/manifest.json b/example/web/manifest.json
new file mode 100644
index 0000000..096edf8
--- /dev/null
+++ b/example/web/manifest.json
@@ -0,0 +1,35 @@
+{
+ "name": "example",
+ "short_name": "example",
+ "start_url": ".",
+ "display": "standalone",
+ "background_color": "#0175C2",
+ "theme_color": "#0175C2",
+ "description": "A new Flutter project.",
+ "orientation": "portrait-primary",
+ "prefer_related_applications": false,
+ "icons": [
+ {
+ "src": "icons/Icon-192.png",
+ "sizes": "192x192",
+ "type": "image/png"
+ },
+ {
+ "src": "icons/Icon-512.png",
+ "sizes": "512x512",
+ "type": "image/png"
+ },
+ {
+ "src": "icons/Icon-maskable-192.png",
+ "sizes": "192x192",
+ "type": "image/png",
+ "purpose": "maskable"
+ },
+ {
+ "src": "icons/Icon-maskable-512.png",
+ "sizes": "512x512",
+ "type": "image/png",
+ "purpose": "maskable"
+ }
+ ]
+}
diff --git a/img/audacity_spectrum.png b/img/audacity_spectrum.png
deleted file mode 100644
index 0d2cffe..0000000
Binary files a/img/audacity_spectrum.png and /dev/null differ
diff --git a/img/flutter_soloud_spectrum.png b/img/flutter_soloud_spectrum.png
deleted file mode 100644
index 650467d..0000000
Binary files a/img/flutter_soloud_spectrum.png and /dev/null differ
diff --git a/img/wasmWorker.png b/img/wasmWorker.png
new file mode 100644
index 0000000..23ea479
Binary files /dev/null and b/img/wasmWorker.png differ
diff --git a/lib/flutter_soloud.dart b/lib/flutter_soloud.dart
index ddc7ec9..a860ad2 100644
--- a/lib/flutter_soloud.dart
+++ b/lib/flutter_soloud.dart
@@ -2,6 +2,8 @@
library flutter_soloud;
export 'src/audio_source.dart';
+export 'src/bindings/audio_data.dart';
+export 'src/bindings/audio_data_extensions.dart';
export 'src/enums.dart' hide PlayerErrors, PlayerStateNotification;
export 'src/exceptions/exceptions.dart';
export 'src/filter_params.dart';
diff --git a/lib/src/bindings/audio_data.dart b/lib/src/bindings/audio_data.dart
new file mode 100644
index 0000000..c581863
--- /dev/null
+++ b/lib/src/bindings/audio_data.dart
@@ -0,0 +1,262 @@
+import 'package:flutter_soloud/src/bindings/audio_data_extensions.dart';
+import 'package:flutter_soloud/src/bindings/audio_data_ffi.dart'
+ if (dart.library.js_interop) 'audio_data_web.dart';
+import 'package:flutter_soloud/src/bindings/soloud_controller.dart';
+import 'package:flutter_soloud/src/exceptions/exceptions.dart';
+import 'package:meta/meta.dart';
+
+/// Enum to tell [AudioData] where to get audio data from.
+/// Every time [AudioData.updateSamples] is called, the audio data will
+/// be acquired from the respective source.
+enum GetSamplesFrom {
+ /// Take data from the player.
+ player,
+
+ /// Take data from the microphone.
+ microphone,
+}
+
+/// The way the audio data should be acquired.
+///
+/// Every time [AudioData.updateSamples] is called it is possible to query the
+/// acquired new audio data using [AudioData.getLinearFft],
+/// [AudioData.getLinearWave], [AudioData.getTexture] or [AudioData.getWave].
+enum GetSamplesKind {
+ /// Get data in a linear manner: the first 256 floats are audio FFT values,
+ /// the other 256 are audio wave samples.
+ /// To get the audio data use [AudioData.getLinearFft] or
+ /// [AudioData.getLinearWave].
+ linear,
+
+ /// Get data in a 2D way. The resulting data will be a matrix of 256
+ /// [linear] rows. Each time the [AudioData.updateSamples] method is called,
+ /// the last row is discarded and the new one will be the first.
+ /// To get the audio data use [AudioData.getTexture].
+ texture,
+
+ /// Get the 256 float of wave audio data.
+ /// To get the audio data use [AudioData.getWave].
+ wave,
+}
+
+/// Class to manage audio samples.
+///
+/// The `visualization` must be enabled to be able to acquire data from the
+/// player. You can achieve this by calling
+/// `SoLoud.instance.setVisualizationEnabled(true);`.
+///
+/// Audio samples can be acquired from the player or from the microphone,
+/// either as a texture matrix or as a linear array.
+///
+/// IMPORTANT: remember to call the [dispose] method when [AudioData] is
+/// no longer needed, to prevent memory leaks.
+///
+/// After calling [updateSamples] it's possible to call the proper getter
+/// to read back the audio samples. For example, using a "Ticker"
+/// in a Widget that needs the audio data to be displayed:
+/// ```
+/// ...
+/// late final Ticker ticker;
+/// late final AudioData audioData;
+/// double waveData = 0;
+/// double fftData = 0;
+///
+/// @override
+/// void initState() {
+/// super.initState();
+/// audioData = AudioData(GetSamplesFrom.player, GetSamplesKind.linear);
+/// ticker = createTicker(_tick);
+/// ticker.start();
+/// }
+///
+/// @override
+/// void dispose() {
+/// ticker.stop();
+/// audioData.dispose();
+/// super.dispose();
+/// }
+///
+/// void _tick(Duration elapsed) {
+/// if (context.mounted) {
+/// try {
+/// audioData.updateSamples();
+/// setState(() {});
+/// } on Exception {
+/// debugPrint('Player not initialized or visualization is not enabled!');
+/// }
+/// }
+/// }
+/// ```
+/// Then in your "build" method, you can read the audio data:
+/// ```
+/// try {
+/// /// Use [getTexture] if you have initialized [AudioData]
+/// /// with [GetSamplesKind.texture].
+/// fftData = audioData.getLinearFft(SampleLinear(i));
+/// waveData = audioData.getLinearWave(SampleLinear(i));
+/// } on Exception {
+/// fftData = 0;
+/// waveData = 0;
+/// }
+/// ```
+///
+/// To smooth FFT values use [SoLoud.instance.setFftSmoothing] or
+/// [SoLoudCapture.instance.setCaptureFftSmoothing].
+///
+///
+// TODO(all): make AudioData singleton?
+@experimental
+class AudioData {
+ /// Initialize the way the audio data should be acquired.
+ AudioData(
+ this._getSamplesFrom,
+ this._getSamplesKind,
+ ) : ctrl = AudioDataCtrl() {
+ _init();
+ ctrl.allocSamples();
+ }
+
+ void _init() {
+ switch (_getSamplesFrom) {
+ case GetSamplesFrom.player:
+ switch (_getSamplesKind) {
+ case GetSamplesKind.wave:
+ _updateCallback = ctrl.waveCallback;
+ case GetSamplesKind.linear:
+ _updateCallback = ctrl.textureCallback;
+ case GetSamplesKind.texture:
+ _updateCallback = ctrl.texture2DCallback;
+ }
+ case GetSamplesFrom.microphone:
+ switch (_getSamplesKind) {
+ case GetSamplesKind.wave:
+ _updateCallback = ctrl.captureWaveCallback;
+ case GetSamplesKind.linear:
+ _updateCallback = ctrl.captureAudioTextureCallback;
+ case GetSamplesKind.texture:
+ _updateCallback = ctrl.captureTexture2DCallback;
+ }
+ }
+ }
+
+ /// The controller used to allocate, dispose and get audio data.
+ @internal
+ final AudioDataCtrl ctrl;
+
+ /// Where to get audio samples. See [GetSamplesFrom].
+ GetSamplesFrom _getSamplesFrom;
+
+ /// The current device to acquire data.
+ GetSamplesFrom get getSamplesFrom => _getSamplesFrom;
+
+ /// Kind of audio samples. See [GetSamplesKind].
+ GetSamplesKind _getSamplesKind;
+
+ /// The current type of data to acquire.
+ GetSamplesKind get getSamplesKind => _getSamplesKind;
+
+ /// The callback used to get new audio samples.
+ /// This callback is used in [updateSamples] to avoid doing the
+ /// [GetSamplesFrom] and [GetSamplesKind] checks on every call.
+ late void Function(AudioData) _updateCallback;
+
+ /// Update the content of the samples memory, to be read with [getWave],
+ /// [getLinearFft], [getLinearWave] or [getTexture].
+ ///
+ /// When using [GetSamplesFrom.microphone] throws
+ /// [SoLoudCaptureNotYetInitializededException] if the capture is
+ /// not initialized.
+ /// When using [GetSamplesFrom.player] throws [SoLoudNotInitializedException]
+ /// if the engine is not initialized.
+ /// When using [GetSamplesFrom.player] throws
+ /// [SoLoudVisualizationNotEnabledException] if the visualization
+ /// flag is not enabled. Please use `setVisualizationEnabled(true)`
+ /// when needed.
+ /// Throws [SoLoudNullPointerException] if something went wrong with the
+ /// player engine. Please open an issue on
+ /// [GitHub](https://github.com/alnitak/flutter_soloud/issues) providing
+ /// a simple working example.
+ void updateSamples() {
+ _updateCallback(this);
+ }
+
+ /// Changes the input device from which to retrieve audio data and its kind.
+ void changeType(GetSamplesFrom newFrom, GetSamplesKind newKind) {
+ _getSamplesKind = newKind;
+ _getSamplesFrom = newFrom;
+ _init();
+ }
+
+ /// Dispose the memory allocated to acquire audio data.
+ /// Must be called when [AudioData] is no longer needed, otherwise
+ /// memory leaks will occur.
+ void dispose() {
+ ctrl.dispose(_getSamplesKind);
+ }
+
+ /// Get the wave data at offset [offset].
+ ///
+ /// Use this method to get data when using [GetSamplesKind.wave].
+ /// The data is composed of 256 floats.
+ double getWave(SampleWave offset) {
+ if (_getSamplesKind != GetSamplesKind.wave) {
+ return 0;
+ }
+
+ if (_getSamplesFrom == GetSamplesFrom.player &&
+ !SoLoudController().soLoudFFI.getVisualizationEnabled()) {
+ throw const SoLoudVisualizationNotEnabledException();
+ }
+ return ctrl.getWave(offset);
+ }
+
+ /// Get the FFT audio data at offset [offset].
+ ///
+ /// Use this method to get FFT data when using [GetSamplesKind.linear].
+ /// The data is composed of 256 floats.
+ double getLinearFft(SampleLinear offset) {
+ if (_getSamplesKind != GetSamplesKind.linear) {
+ return 0;
+ }
+
+ if (_getSamplesFrom == GetSamplesFrom.player &&
+ !SoLoudController().soLoudFFI.getVisualizationEnabled()) {
+ throw const SoLoudVisualizationNotEnabledException();
+ }
+ return ctrl.getLinearFft(offset);
+ }
+
+ /// Get the wave audio data at offset [offset].
+ ///
+ /// Use this method to get wave data when using [GetSamplesKind.linear].
+ /// The data is composed of 256 floats.
+ double getLinearWave(SampleLinear offset) {
+ if (_getSamplesKind != GetSamplesKind.linear) {
+ return 0;
+ }
+
+ if (_getSamplesFrom == GetSamplesFrom.player &&
+ !SoLoudController().soLoudFFI.getVisualizationEnabled()) {
+ throw const SoLoudVisualizationNotEnabledException();
+ }
+ return ctrl.getLinearWave(offset);
+ }
+
+ /// Get the audio data at row [row] and column [column].
+ /// Use this method to get data when using [GetSamplesKind.texture].
+ /// The matrix is made of 256 rows. Each row is composed of 256 floats
+ /// of FFT data followed by 256 floats of wave data.
+ /// Each time the [AudioData.updateSamples] method is called,
+ /// the last row is discarded and the new one becomes the first.
+ double getTexture(SampleRow row, SampleColumn column) {
+ if (_getSamplesKind != GetSamplesKind.texture) {
+ return 0;
+ }
+
+ if (_getSamplesFrom == GetSamplesFrom.player &&
+ !SoLoudController().soLoudFFI.getVisualizationEnabled()) {
+ throw const SoLoudVisualizationNotEnabledException();
+ }
+ return ctrl.getTexture(_getSamplesFrom, row, column);
+ }
+}
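For orientation, a minimal sketch of how the new `AudioData` API is meant to be driven (illustrative only, not part of the patch; it assumes the engine is initialized and `SoLoud.instance.setVisualizationEnabled(true)` has been called):

```dart
import 'package:flutter_soloud/flutter_soloud.dart';

void readOneSnapshot() {
  final audioData = AudioData(GetSamplesFrom.player, GetSamplesKind.texture);

  // Acquire a fresh set of samples from the player.
  audioData.updateSamples();

  // Per the docs above, row 0 is the newest snapshot: columns 0..255
  // hold FFT magnitudes, columns 256..511 hold wave samples.
  final fft = audioData.getTexture(SampleRow(0), SampleColumn(8));
  final wave = audioData.getTexture(SampleRow(0), SampleColumn(256 + 8));
  print('fft[8]=$fft wave[8]=$wave');

  // Free the native/WASM buffer when done.
  audioData.dispose();
}
```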
diff --git a/lib/src/bindings/audio_data_extensions.dart b/lib/src/bindings/audio_data_extensions.dart
new file mode 100644
index 0000000..c30a835
--- /dev/null
+++ b/lib/src/bindings/audio_data_extensions.dart
@@ -0,0 +1,110 @@
+/// The extension type for the `AudioData.getTexture` method which accepts
+/// the [value] value in 0~255 range.
+extension type SampleRow._(int value) {
+ /// Constructs a valid row with [value].
+ SampleRow(this.value)
+ : assert(value >= 0 && value <= 255, 'row must be in the 0~255 inclusive range.');
+
+ /// Operator "*", clamp the resulting value.
+ SampleRow operator *(int other) {
+ final result = (other * value).clamp(0, 255);
+ return SampleRow(result);
+ }
+
+ /// Operator "+", clamp the resulting value.
+ SampleRow operator +(int other) {
+ final result = (other + value).clamp(0, 255);
+ return SampleRow(result);
+ }
+
+ /// Operator "-", clamp the resulting value.
+ SampleRow operator -(int other) {
+ final result = (other - value).clamp(0, 255);
+ return SampleRow(result);
+ }
+}
+
+/// The extension type for the `AudioData.getTexture` method which accepts
+/// the [value] value in 0~511 range.
+extension type SampleColumn._(int value) {
+ /// Constructs a valid column with [value].
+ SampleColumn(this.value)
+ : assert(value >= 0 && value <= 511, 'column must be in the 0~511 inclusive range.');
+
+ /// Operator "*", clamp the resulting value.
+ SampleColumn operator *(int other) {
+ final result = (other * value).clamp(0, 511);
+ return SampleColumn(result);
+ }
+
+ /// Operator "+", clamp the resulting value.
+ SampleColumn operator +(int other) {
+ final result = (other + value).clamp(0, 511);
+ return SampleColumn(result);
+ }
+
+ /// Operator "-", clamp the resulting value.
+ SampleColumn operator -(int other) {
+ final result = (other - value).clamp(0, 511);
+ return SampleColumn(result);
+ }
+}
+
+/// The extension type for the `AudioData.getLinearFft` and
+/// `AudioData.getLinearWave` methods, which accept
+/// the [value] value in 0~255 range.
+extension type SampleLinear._(int value) {
+ /// Constructs a valid offset with [value].
+ SampleLinear(this.value)
+ : assert(
+ value >= 0 && value <= 255,
+ 'offset must be in the 0~255 inclusive range.',
+ );
+
+ /// Operator "*", clamp the resulting value.
+ SampleLinear operator *(int other) {
+ final result = (other * value).clamp(0, 255);
+ return SampleLinear(result);
+ }
+
+ /// Operator "+", clamp the resulting value.
+ SampleLinear operator +(int other) {
+ final result = (other + value).clamp(0, 255);
+ return SampleLinear(result);
+ }
+
+ /// Operator "-", clamp the resulting value.
+ SampleLinear operator -(int other) {
+ final result = (other - value).clamp(0, 255);
+ return SampleLinear(result);
+ }
+}
+
+/// The extension type for the `AudioData.getWave`
+/// method which accepts the [value] value in 0~255 range.
+extension type SampleWave._(int value) {
+ /// Constructs a valid offset with [value].
+ SampleWave(this.value)
+ : assert(
+ value >= 0 && value <= 255,
+ 'offset must be in the 0~255 inclusive range.',
+ );
+
+ /// Operator "*", clamp the resulting value.
+ SampleWave operator *(int other) {
+ final result = (other * value).clamp(0, 255);
+ return SampleWave(result);
+ }
+
+ /// Operator "+", clamp the resulting value.
+ SampleWave operator +(int other) {
+ final result = (other + value).clamp(0, 255);
+ return SampleWave(result);
+ }
+
+ /// Operator "-", clamp the resulting value.
+ SampleWave operator -(int other) {
+ final result = (other - value).clamp(0, 255);
+ return SampleWave(result);
+ }
+}
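A small illustrative sketch (not part of the patch) of how the clamping operators of these extension types behave:

```dart
import 'package:flutter_soloud/flutter_soloud.dart';

void main() {
  // The constructors assert the range (checked in debug builds only).
  final row = SampleRow(250);

  // The operators clamp instead of overflowing: 250 + 10 saturates at 255.
  print((row + 10).value); // 255

  // Columns span the 512-wide FFT+wave row, so they clamp at 511.
  print((SampleColumn(500) + 100).value); // 511

  // Linear offsets clamp at 255 as well: 3 * 100 saturates.
  print((SampleLinear(3) * 100).value); // 255
}
```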
diff --git a/lib/src/bindings/audio_data_ffi.dart b/lib/src/bindings/audio_data_ffi.dart
new file mode 100644
index 0000000..5941469
--- /dev/null
+++ b/lib/src/bindings/audio_data_ffi.dart
@@ -0,0 +1,81 @@
+// ignore_for_file: public_member_api_docs
+
+import 'dart:ffi';
+
+import 'package:ffi/ffi.dart' show calloc;
+import 'package:flutter_soloud/src/bindings/audio_data.dart';
+import 'package:flutter_soloud/src/bindings/audio_data_extensions.dart';
+import 'package:flutter_soloud/src/bindings/soloud_controller.dart';
+import 'package:flutter_soloud/src/enums.dart';
+
+class AudioDataCtrl {
+ /// Mirrors the web implementation of [AudioDataCtrl]. Not used with `dart:ffi`.
+ final int _samplesPtr = 0;
+ int get samplesPtr => _samplesPtr;
+
+ /// Where the FFT or wave data is stored.
+ late Pointer<Pointer<Float>> samplesWave;
+
+ /// Where the audio 2D data is stored.
+ late Pointer<Pointer<Float>> samples2D;
+
+ /// Where the audio 1D data is stored.
+ late Pointer<Float> samples1D;
+
+ final void Function(AudioData) waveCallback =
+ SoLoudController().soLoudFFI.getWave;
+
+ final PlayerErrors Function(AudioData) texture2DCallback =
+ SoLoudController().soLoudFFI.getAudioTexture2D;
+
+ final void Function(AudioData) textureCallback =
+ SoLoudController().soLoudFFI.getAudioTexture;
+
+ final void Function(AudioData) captureWaveCallback =
+ SoLoudController().captureFFI.getCaptureWave;
+
+ final CaptureErrors Function(AudioData) captureTexture2DCallback =
+ SoLoudController().captureFFI.getCaptureAudioTexture2D;
+
+ final void Function(AudioData) captureAudioTextureCallback =
+ SoLoudController().captureFFI.getCaptureAudioTexture;
+
+ void allocSamples() {
+ samples2D = calloc();
+ samples1D = calloc(512 * 4);
+ samplesWave = calloc();
+ }
+
+ void dispose(
+ GetSamplesKind getSamplesKind,
+ ) {
+ if (samplesWave != nullptr) calloc.free(samplesWave);
+ if (samples1D != nullptr) calloc.free(samples1D);
+ if (samples2D != nullptr) calloc.free(samples2D);
+ }
+
+ double getWave(SampleWave offset) {
+ final val = Pointer<Float>.fromAddress(samplesWave.value.address);
+ if (val == nullptr) return 0;
+ return val[offset.value];
+ }
+
+ double getLinearFft(SampleLinear offset) {
+ return samples1D[offset.value];
+ }
+
+ double getLinearWave(SampleLinear offset) {
+ return samples1D[offset.value + 256];
+ }
+
+ double getTexture(
+ GetSamplesFrom getSamplesFrom,
+ SampleRow row,
+ SampleColumn column,
+ ) {
+ const stride = 512;
+ final val = samples2D.value;
+ if (val == nullptr) return 0;
+ return val[stride * row.value + column.value];
+ }
+}
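The `stride = 512` in `getTexture` above encodes the row-major layout of the 2D buffer: each of the 256 rows holds 256 FFT floats followed by 256 wave floats. An illustrative restatement of the index arithmetic in plain Dart (no FFI):

```dart
/// Illustrative only: read one value from a flat row-major buffer where
/// each row is 256 FFT values followed by 256 wave values.
double readTexture(List<double> flatBuffer, int row, int column) {
  const stride = 512; // floats per row (256 FFT + 256 wave)
  assert(row >= 0 && row < 256 && column >= 0 && column < stride);
  return flatBuffer[stride * row + column];
}
```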
diff --git a/lib/src/bindings/audio_data_web.dart b/lib/src/bindings/audio_data_web.dart
new file mode 100644
index 0000000..7342f67
--- /dev/null
+++ b/lib/src/bindings/audio_data_web.dart
@@ -0,0 +1,78 @@
+// ignore_for_file: public_member_api_docs
+
+import 'package:flutter_soloud/src/bindings/audio_data.dart';
+import 'package:flutter_soloud/src/bindings/audio_data_extensions.dart';
+import 'package:flutter_soloud/src/bindings/js_extension.dart';
+import 'package:flutter_soloud/src/bindings/soloud_controller.dart';
+import 'package:flutter_soloud/src/enums.dart';
+
+class AudioDataCtrl {
+ late final int _samplesPtr;
+ int get samplesPtr => _samplesPtr;
+
+ final void Function(AudioData) waveCallback =
+ SoLoudController().soLoudFFI.getWave;
+
+ final void Function(AudioData) texture2DCallback =
+ SoLoudController().soLoudFFI.getAudioTexture2D;
+
+ final void Function(AudioData) textureCallback =
+ SoLoudController().soLoudFFI.getAudioTexture;
+
+ final void Function(AudioData) captureWaveCallback =
+ SoLoudController().captureFFI.getCaptureWave;
+
+ final CaptureErrors Function(AudioData) captureTexture2DCallback =
+ SoLoudController().captureFFI.getCaptureAudioTexture2D;
+
+ final void Function(AudioData) captureAudioTextureCallback =
+ SoLoudController().captureFFI.getCaptureAudioTexture;
+
+ void allocSamples() {
+ /// This is the max amount of memory [_samplesPtr] may need. This amount
+ /// is needed when acquiring data with [getTexture], which is a matrix of
+ /// 256 rows and 512 columns of floats (4 bytes each).
+ _samplesPtr = wasmMalloc(512 * 256 * 4);
+ }
+
+ void dispose(
+ GetSamplesKind getSamplesKind,
+ ) {
+ if (_samplesPtr != 0) {
+ wasmFree(_samplesPtr);
+ }
+ }
+
+ double getWave(SampleWave offset) {
+ final samplePtr = wasmGetI32Value(_samplesPtr, '*');
+ final data = wasmGetF32Value(samplePtr + offset.value * 4, 'float');
+ return data;
+ }
+
+ double getLinearFft(SampleLinear offset) {
+ final data = wasmGetF32Value(_samplesPtr + offset.value * 4, 'float');
+ return data;
+ }
+
+ double getLinearWave(SampleLinear offset) {
+ final data =
+ wasmGetF32Value(_samplesPtr + offset.value * 4 + 256 * 4, 'float');
+ return data;
+ }
+
+ double getTexture(
+ GetSamplesFrom getSamplesFrom,
+ SampleRow row,
+ SampleColumn column,
+ ) {
+ // final offset = samplesPtr + ((row.value * 256 + column.value) * 4);
+ // final data = wasmGetF32Value(offset, 'float');
+ final double data;
+ if (getSamplesFrom == GetSamplesFrom.player) {
+ data = wasmGetTextureValue(row.value, column.value);
+ } else {
+ data = wasmGetCaptureTextureValue(row.value, column.value);
+ }
+ return data;
+ }
+}
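On the web the same samples live on the WASM heap, so all offsets are in bytes (4 bytes per float): `getLinearWave` skips the 256 leading FFT floats by adding `256 * 4`. A sketch of that offset computation factored out (the helper name is hypothetical, not part of the patch):

```dart
/// Hypothetical helper: byte offset of a linear sample on the WASM heap,
/// where the buffer starts with 256 FFT floats followed by 256 wave floats.
int linearByteOffset(int basePtr, int index, {required bool wave}) {
  const bytesPerFloat = 4;
  const fftFloats = 256;
  final skip = wave ? fftFloats * bytesPerFloat : 0;
  return basePtr + skip + index * bytesPerFloat;
}
```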
diff --git a/lib/src/bindings/bindings_capture.dart b/lib/src/bindings/bindings_capture.dart
new file mode 100644
index 0000000..3f2f2b3
--- /dev/null
+++ b/lib/src/bindings/bindings_capture.dart
@@ -0,0 +1,51 @@
+// ignore_for_file: public_member_api_docs
+
+import 'package:flutter_soloud/src/bindings/audio_data.dart';
+import 'package:flutter_soloud/src/enums.dart';
+import 'package:meta/meta.dart';
+
+export 'package:flutter_soloud/src/bindings/bindings_capture_ffi.dart'
+ if (dart.library.js_interop) 'package:flutter_soloud/src/bindings/bindings_capture_web.dart';
+
+/// The experimental functionality to use the microphone, used by "SoLoudCapture".
+@experimental
+abstract class FlutterCapture {
+ @mustBeOverridden
+ List listCaptureDevices();
+
+ @mustBeOverridden
+ CaptureErrors initCapture(int deviceID);
+
+ @mustBeOverridden
+ void disposeCapture();
+
+ @mustBeOverridden
+ bool isCaptureInited();
+
+ @mustBeOverridden
+ bool isCaptureStarted();
+
+ @mustBeOverridden
+ CaptureErrors startCapture();
+
+ @mustBeOverridden
+ CaptureErrors stopCapture();
+
+ @mustBeOverridden
+ void getCaptureFft(AudioData fft);
+
+ @mustBeOverridden
+ void getCaptureWave(AudioData wave);
+
+ @mustBeOverridden
+ void getCaptureAudioTexture(AudioData samples);
+
+ @mustBeOverridden
+ CaptureErrors getCaptureAudioTexture2D(AudioData samples);
+
+ @mustBeOverridden
+ double getCaptureTextureValue(int row, int column);
+
+ @mustBeOverridden
+ CaptureErrors setCaptureFftSmoothing(double smooth);
+}
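For context, a plausible call sequence against this interface (illustrative only; `capture` is whichever implementation the conditional export resolves to, and the device id 0 is just an assumption here):

```dart
// Illustrative sequence against the FlutterCapture interface.
void runCapture(FlutterCapture capture) {
  final devices = capture.listCaptureDevices();
  if (devices.isEmpty) return;

  // 0 is used here as an assumed default device id.
  if (capture.initCapture(0) != CaptureErrors.captureNoError) return;
  capture.startCapture();

  // ... read data via getCaptureWave() / getCaptureAudioTexture2D() ...

  capture.stopCapture();
  capture.disposeCapture();
}
```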
diff --git a/lib/src/bindings/bindings_capture_ffi.dart b/lib/src/bindings/bindings_capture_ffi.dart
new file mode 100644
index 0000000..8575202
--- /dev/null
+++ b/lib/src/bindings/bindings_capture_ffi.dart
@@ -0,0 +1,229 @@
+import 'dart:ffi' as ffi;
+
+import 'package:ffi/ffi.dart';
+import 'package:flutter_soloud/src/bindings/audio_data.dart';
+import 'package:flutter_soloud/src/bindings/bindings_capture.dart';
+import 'package:flutter_soloud/src/enums.dart';
+
+/// FFI bindings to capture with miniaudio.
+class FlutterCaptureFfi extends FlutterCapture {
+ /// The symbols are looked up in [dynamicLibrary].
+ FlutterCaptureFfi(ffi.DynamicLibrary dynamicLibrary)
+ : _lookup = dynamicLibrary.lookup;
+
+ /// The symbols are looked up with [lookup].
+ FlutterCaptureFfi.fromLookup(
+ ffi.Pointer<T> Function<T extends ffi.NativeType>(String symbolName) lookup,
+ ) : _lookup = lookup;
+
+ /// Holds the symbol lookup function.
+ final ffi.Pointer<T> Function<T extends ffi.NativeType>(String symbolName)
+ _lookup;
+
+ @override
+ List<CaptureDevice> listCaptureDevices() {
+ final ret = <CaptureDevice>[];
+ // ignore: omit_local_variable_types
+ final ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Char>>> deviceNames =
+ calloc(ffi.sizeOf<ffi.Pointer<ffi.Pointer<ffi.Char>>>() * 50);
+ // ignore: omit_local_variable_types
+ final ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Int>>> deviceIsDefault =
+ calloc(ffi.sizeOf<ffi.Pointer<ffi.Pointer<ffi.Int>>>() * 50);
+ // ignore: omit_local_variable_types
+ final ffi.Pointer<ffi.Int> nDevices = calloc();
+
+ _listCaptureDevices(
+ deviceNames,
+ deviceIsDefault,
+ nDevices,
+ );
+
+ final ndev = nDevices.value;
+ for (var i = 0; i < ndev; i++) {
+ final s1 = (deviceNames + i).value;
+ final s = s1.cast<Utf8>().toDartString();
+ final n1 = (deviceIsDefault + i).value;
+ final n = n1.value;
+ ret.add(CaptureDevice(s, n == 1));
+ }
+
+ /// Free the memory allocated on the C side.
+ /// This works on all platforms except Windows.
+ // for (int i = 0; i < ndev; i++) {
+ // calloc.free(devices.elementAt(i).value.ref.name);
+ // calloc.free(devices.elementAt(i).value);
+ // }
+ _freeListCaptureDevices(
+ deviceNames,
+ deviceIsDefault,
+ ndev,
+ );
+
+ calloc
+ ..free(deviceNames)
+ ..free(nDevices);
+ return ret;
+ }
+
+ late final _listCaptureDevicesPtr = _lookup<
+ ffi.NativeFunction<
+ ffi.Void Function(
+ ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Char>>>,
+ ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Int>>>,
+ ffi.Pointer<ffi.Int>,
+ )>>('listCaptureDevices');
+ late final _listCaptureDevices = _listCaptureDevicesPtr.asFunction<
+ void Function(
+ ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Char>>>,
+ ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Int>>>,
+ ffi.Pointer<ffi.Int>,
+ )>();
+
+ late final _freeListCaptureDevicesPtr = _lookup<
+ ffi.NativeFunction<
+ ffi.Void Function(
+ ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Char>>>,
+ ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Int>>>,
+ ffi.Int,
+ )>>('freeListCaptureDevices');
+ late final _freeListCaptureDevices = _freeListCaptureDevicesPtr.asFunction<
+ void Function(
+ ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Char>>>,
+ ffi.Pointer<ffi.Pointer<ffi.Pointer<ffi.Int>>>,
+ int,
+ )>();
+
+ @override
+ CaptureErrors initCapture(int deviceID) {
+ final e = _initCapture(deviceID);
+ return CaptureErrors.values[e];
+ }
+
+ late final _initCapturePtr =
+ _lookup<ffi.NativeFunction<ffi.Int Function(ffi.Int)>>('initCapture');
+ late final _initCapture = _initCapturePtr.asFunction<int Function(int)>();
+
+ @override
+ void disposeCapture() {
+ return _disposeCapture();
+ }
+
+ late final _disposeCapturePtr =
+ _lookup<ffi.NativeFunction<ffi.Void Function()>>('disposeCapture');
+ late final _disposeCapture = _disposeCapturePtr.asFunction<void Function()>();
+
+ @override
+ bool isCaptureInited() {
+ return _isCaptureInited() == 1;
+ }
+
+ late final _isCaptureInitedPtr =
+ _lookup<ffi.NativeFunction<ffi.Int Function()>>('isCaptureInited');
+ late final _isCaptureInited =
+ _isCaptureInitedPtr.asFunction<int Function()>();
+
+ @override
+ bool isCaptureStarted() {
+ return _isCaptureStarted() == 1;
+ }
+
+ late final _isCaptureStartedPtr =
+ _lookup<ffi.NativeFunction<ffi.Int Function()>>('isCaptureStarted');
+ late final _isCaptureStarted =
+ _isCaptureStartedPtr.asFunction<int Function()>();
+
+ @override
+ CaptureErrors startCapture() {
+ return CaptureErrors.values[_startCapture()];
+ }
+
+ late final _startCapturePtr =
+ _lookup<ffi.NativeFunction<ffi.Int Function()>>('startCapture');
+ late final _startCapture = _startCapturePtr.asFunction<int Function()>();
+
+ @override
+ CaptureErrors stopCapture() {
+ return CaptureErrors.values[_stopCapture()];
+ }
+
+ late final _stopCapturePtr =
+ _lookup<ffi.NativeFunction<ffi.Int Function()>>('stopCapture');
+ late final _stopCapture = _stopCapturePtr.asFunction<int Function()>();
+
+ @override
+ void getCaptureFft(AudioData fft) {
+ return _getCaptureFft(fft.ctrl.samplesWave);
+ }
+
+ late final _getCaptureFftPtr = _lookup<
+ ffi.NativeFunction<
+ ffi.Void Function(
+ ffi.Pointer<ffi.Pointer<ffi.Float>>,
+ )>>('getCaptureFft');
+ late final _getCaptureFft = _getCaptureFftPtr
+ .asFunction<void Function(ffi.Pointer<ffi.Pointer<ffi.Float>>)>();
+
+ @override
+ void getCaptureWave(AudioData wave) {
+ return _getCaptureWave(wave.ctrl.samplesWave);
+ }
+
+ late final _getCaptureWavePtr = _lookup<
+ ffi.NativeFunction<
+ ffi.Void Function(
+ ffi.Pointer<ffi.Pointer<ffi.Float>>,
+ )>>('getCaptureWave');
+ late final _getCaptureWave = _getCaptureWavePtr
+ .asFunction<void Function(ffi.Pointer<ffi.Pointer<ffi.Float>>)>();
+
+ @override
+ void getCaptureAudioTexture(AudioData samples) {
+ return _getCaptureTexture(samples.ctrl.samples1D);
+ }
+
+ late final _getCaptureTexturePtr =
+ _lookup<ffi.NativeFunction<ffi.Void Function(ffi.Pointer<ffi.Float>)>>(
+ 'getCaptureTexture',
+ );
+ late final _getCaptureTexture =
+ _getCaptureTexturePtr.asFunction<void Function(ffi.Pointer<ffi.Float>)>();
+
+ @override
+ CaptureErrors getCaptureAudioTexture2D(AudioData samples) {
+ final ret = _getCaptureAudioTexture2D(samples.ctrl.samples2D);
+ return CaptureErrors.values[ret];
+ }
+
+ late final _getCaptureAudioTexture2DPtr = _lookup<
+ ffi.NativeFunction<ffi.Int Function(ffi.Pointer<ffi.Pointer<ffi.Float>>)>>(
+ 'getCaptureAudioTexture2D',
+ );
+ late final _getCaptureAudioTexture2D = _getCaptureAudioTexture2DPtr
+ .asFunction<int Function(ffi.Pointer<ffi.Pointer<ffi.Float>>)>();
+
+ @override
+ double getCaptureTextureValue(int row, int column) {
+ return _getCaptureTextureValue(row, column);
+ }
+
+ late final _getCaptureTextureValuePtr =
+ _lookup<ffi.NativeFunction<ffi.Float Function(ffi.Int, ffi.Int)>>(
+ 'getCaptureTextureValue',
+ );
+ late final _getCaptureTextureValue =
+ _getCaptureTextureValuePtr.asFunction<double Function(int, int)>();
+
+ @override
+ CaptureErrors setCaptureFftSmoothing(double smooth) {
+ final ret = _setCaptureFftSmoothing(smooth);
+ return CaptureErrors.values[ret];
+ }
+
+ late final _setCaptureFftSmoothingPtr =
+ _lookup<ffi.NativeFunction<ffi.Int Function(ffi.Float)>>(
+ 'setCaptureFftSmoothing',
+ );
+ late final _setCaptureFftSmoothing =
+ _setCaptureFftSmoothingPtr.asFunction<int Function(double)>();
+}
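Every binding above follows the same two-step dart:ffi pattern: resolve the symbol as a typed `NativeFunction` pointer, then wrap it into a Dart callable with `asFunction`. A minimal standalone sketch of that pattern (the `add` symbol is hypothetical):

```dart
import 'dart:ffi' as ffi;

typedef AddNative = ffi.Int Function(ffi.Int, ffi.Int);
typedef AddDart = int Function(int, int);

int callAdd(ffi.DynamicLibrary lib) {
  // Step 1: look up the symbol as a typed native function pointer.
  final ptr = lib.lookup<ffi.NativeFunction<AddNative>>('add');
  // Step 2: convert it to a Dart-callable function.
  final add = ptr.asFunction<AddDart>();
  return add(1, 2);
}
```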
diff --git a/lib/src/bindings/bindings_capture_web.dart b/lib/src/bindings/bindings_capture_web.dart
new file mode 100644
index 0000000..95d6cb9
--- /dev/null
+++ b/lib/src/bindings/bindings_capture_web.dart
@@ -0,0 +1,105 @@
+// ignore_for_file: public_member_api_docs
+
+import 'package:flutter_soloud/src/bindings/audio_data.dart';
+import 'package:flutter_soloud/src/bindings/bindings_capture.dart';
+import 'package:flutter_soloud/src/bindings/js_extension.dart';
+import 'package:flutter_soloud/src/enums.dart';
+
+class FlutterCaptureWeb extends FlutterCapture {
+ @override
+ List<CaptureDevice> listCaptureDevices() {
+ /// allocate 50 device strings
+ final namesPtr = wasmMalloc(50 * 150);
+ final isDefaultPtr = wasmMalloc(50 * 4);
+ final nDevicesPtr = wasmMalloc(4); // 4 bytes for an int
+
+ wasmListCaptureDevices(
+ namesPtr,
+ isDefaultPtr,
+ nDevicesPtr,
+ );
+
+ final nDevices = wasmGetI32Value(nDevicesPtr, '*');
+ final devices = <CaptureDevice>[];
+ for (var i = 0; i < nDevices; i++) {
+ final namePtr = wasmGetI32Value(namesPtr + i * 4, '*');
+ final name = wasmUtf8ToString(namePtr);
+ final isDefault =
+ wasmGetI32Value(wasmGetI32Value(isDefaultPtr + i * 4, '*'), '*');
+
+ devices.add(CaptureDevice(name, isDefault == 1));
+ }
+
+ wasmFreeListCaptureDevices(namesPtr, isDefaultPtr, nDevices);
+
+ wasmFree(nDevicesPtr);
+ wasmFree(isDefaultPtr);
+ wasmFree(namesPtr);
+
+ return devices;
+ }
+
+ @override
+ CaptureErrors initCapture(int deviceID) {
+ final e = wasmInitCapture(deviceID);
+ return CaptureErrors.values[e];
+ }
+
+ @override
+ void disposeCapture() {
+ return wasmDisposeCapture();
+ }
+
+ @override
+ bool isCaptureInited() {
+ return wasmIsCaptureInited() == 1;
+ }
+
+ @override
+ bool isCaptureStarted() {
+ return wasmIsCaptureStarted() == 1;
+ }
+
+ @override
+ CaptureErrors startCapture() {
+ return CaptureErrors.values[wasmStartCapture()];
+ }
+
+ @override
+ CaptureErrors stopCapture() {
+ return CaptureErrors.values[wasmStopCapture()];
+ }
+
+ @override
+ void getCaptureFft(AudioData fft) {
+ return wasmGetCaptureFft(fft.ctrl.samplesPtr);
+ }
+
+ @override
+ void getCaptureWave(AudioData wave) {
+ return wasmGetCaptureWave(wave.ctrl.samplesPtr);
+ }
+
+ @override
+ void getCaptureAudioTexture(AudioData samples) {
+ wasmGetCaptureAudioTexture(samples.ctrl.samplesPtr);
+ }
+
+ @override
+ CaptureErrors getCaptureAudioTexture2D(AudioData samples) {
+ final e = wasmGetCaptureAudioTexture2D(samples.ctrl.samplesPtr);
+ return CaptureErrors.values[e];
+ }
+
+ @override
+ double getCaptureTextureValue(int row, int column) {
+ final value = wasmGetCaptureTextureValue(row, column);
+ return value;
+ }
+
+ @override
+ CaptureErrors setCaptureFftSmoothing(double smooth) {
+ final e = wasmSetCaptureFftSmoothing(smooth);
+ return CaptureErrors.values[e];
+ }
+}
diff --git a/lib/src/bindings/bindings_player.dart b/lib/src/bindings/bindings_player.dart
new file mode 100644
index 0000000..79e8b9a
--- /dev/null
+++ b/lib/src/bindings/bindings_player.dart
@@ -0,0 +1,752 @@
+import 'dart:async';
+import 'dart:typed_data';
+
+import 'package:flutter_soloud/src/bindings/audio_data.dart';
+import 'package:flutter_soloud/src/enums.dart';
+import 'package:flutter_soloud/src/filter_params.dart';
+import 'package:flutter_soloud/src/sound_handle.dart';
+import 'package:flutter_soloud/src/sound_hash.dart';
+import 'package:meta/meta.dart';
+
+export 'package:flutter_soloud/src/bindings/bindings_player_ffi.dart'
+ if (dart.library.js_interop) 'package:flutter_soloud/src/bindings/bindings_player_web.dart';
+
+/// Abstract class defining the interface for the platform-specific
+/// implementations.
+abstract class FlutterSoLoud {
+ /// Controller to listen to voice ended events.
+ late final StreamController<SoundHandle> voiceEndedEventController =
+ StreamController.broadcast();
+
+ /// Listener for voices ended.
+ Stream<SoundHandle> get voiceEndedEvents => voiceEndedEventController.stream;
+
+ /// Controller to listen to file loaded events.
+ /// Not used on the web.
+ late final StreamController