From 1ec860f7b89f00d9ed21565a315e3423015738a3 Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Mon, 5 Sep 2022 16:11:57 +0200 Subject: [PATCH 01/21] chore(deps): upgrade permission_handler dependency to 10.0.0 (#44) NOTE: also upgrades compileSdk to 33 accordingly --- CHANGELOG.md | 4 ++++ README.md | 2 +- android/build.gradle | 4 ++-- example/android/app/build.gradle | 2 +- pubspec.yaml | 4 ++-- 5 files changed, 10 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 532a499..9c87ff8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +### 0.6.2 + +* Upgrade `permission_handler` to version 10.0.0 and update compileSdk to 33 accordingly + ### 0.6.1 * Fix issues of the Audio Recorder not always being properly reinitialised on android diff --git a/README.md b/README.md index 4dea2ae..85d1cec 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# mic_stream: 0.6.1 +# mic_stream: 0.6.2 [Flutter Plugin] Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. diff --git a/android/build.gradle b/android/build.gradle index 0f8c32f..f234cf0 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -22,11 +22,11 @@ rootProject.allprojects { apply plugin: 'com.android.library' android { - compileSdkVersion 29 + compileSdkVersion 33 defaultConfig { minSdkVersion 16 - targetSdkVersion 29 + targetSdkVersion 33 testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" } diff --git a/example/android/app/build.gradle b/example/android/app/build.gradle index cb32f5f..5678889 100644 --- a/example/android/app/build.gradle +++ b/example/android/app/build.gradle @@ -40,7 +40,7 @@ android { // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html). 
applicationId "com.aaron.mic_stream" minSdkVersion 16 - targetSdkVersion 30 + targetSdkVersion 33 versionCode flutterVersionCode.toInteger() versionName flutterVersionName } diff --git a/pubspec.yaml b/pubspec.yaml index 27c44e3..f189494 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. -version: 0.6.1 +version: 0.6.2 homepage: https://github.com/anarchuser/mic_stream environment: @@ -13,7 +13,7 @@ module: dependencies: flutter: sdk: flutter - permission_handler: ^9.2.0 + permission_handler: ^10.0.0 # For information on the generic Dart part of this file, see the # following page: https://www.dartlang.org/tools/pub/pubspec From b0abbf073ed8b4f0a791b1914d8933351df7aa5e Mon Sep 17 00:00:00 2001 From: xloc Date: Tue, 13 Dec 2022 01:31:50 -0800 Subject: [PATCH 02/21] change macOS native implementation: AVAudioEngine (#50) Resolves #49 --- .../Flutter/GeneratedPluginRegistrant.swift | 2 - macos/Classes/MicStreamPlugin.swift | 259 +++++++----------- 2 files changed, 102 insertions(+), 159 deletions(-) diff --git a/example/macos/Flutter/GeneratedPluginRegistrant.swift b/example/macos/Flutter/GeneratedPluginRegistrant.swift index 1e25b10..1aa0366 100644 --- a/example/macos/Flutter/GeneratedPluginRegistrant.swift +++ b/example/macos/Flutter/GeneratedPluginRegistrant.swift @@ -6,9 +6,7 @@ import FlutterMacOS import Foundation import mic_stream -import path_provider_macos func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) { MicStreamPlugin.register(with: registry.registrar(forPlugin: "MicStreamPlugin")) - PathProviderPlugin.register(with: registry.registrar(forPlugin: "PathProviderPlugin")) } diff --git a/macos/Classes/MicStreamPlugin.swift b/macos/Classes/MicStreamPlugin.swift index 02e67ca..6b385c1 100644 --- a/macos/Classes/MicStreamPlugin.swift +++ b/macos/Classes/MicStreamPlugin.swift @@ -1,170 +1,115 @@ import Cocoa import 
FlutterMacOS -//import UIKit import AVFoundation -import Dispatch - -enum AudioFormat : Int { case ENCODING_PCM_8BIT=3, ENCODING_PCM_16BIT=2 } -enum ChannelConfig : Int { case CHANNEL_IN_MONO=16 , CHANNEL_IN_STEREO=12 } -enum AudioSource : Int { case DEFAULT } - -public class SwiftMicStreamPlugin: NSObject, FlutterStreamHandler, FlutterPlugin, AVCaptureAudioDataOutputSampleBufferDelegate { - public static func register(with registrar: FlutterPluginRegistrar) { - let channel = FlutterEventChannel(name:"aaron.code.com/mic_stream", binaryMessenger: registrar.messenger) - let methodChannel = FlutterMethodChannel(name: "aaron.code.com/mic_stream_method_channel", binaryMessenger: registrar.messenger) - let instance = SwiftMicStreamPlugin() - channel.setStreamHandler(instance); - registrar.addMethodCallDelegate(instance, channel: methodChannel) - } - let isRecording:Bool = false; - var CHANNEL_CONFIG:ChannelConfig = ChannelConfig.CHANNEL_IN_MONO; - var SAMPLE_RATE:Int = 44100; // this is the sample rate the user wants - var actualSampleRate:Float64?; // this is the actual hardware sample rate the device is using - var AUDIO_FORMAT:AudioFormat = AudioFormat.ENCODING_PCM_16BIT; // this is the encoding/bit-depth the user wants - var actualBitDepth:UInt32?; // this is the actual hardware bit-depth - var AUDIO_SOURCE:AudioSource = AudioSource.DEFAULT; - var BUFFER_SIZE = 4096; - var eventSink:FlutterEventSink?; - var session : AVCaptureSession! - - public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { - switch call.method { - case "getSampleRate": - result(self.actualSampleRate) - break; - case "getBitDepth": - result(self.actualBitDepth) - break; - case "getBufferSize": - result(self.BUFFER_SIZE) - break; - default: - result(FlutterMethodNotImplemented) - } +/// Notes: +/// 1. currently the only config supported is: +/// audioSource == DEFAULT +/// sampleRate == 48000 +/// channelConfig == MONO +/// audioFormat == 16BIT +/// 2. 
AVAudioEngine is used to acquire the audio. The previous version uses +/// AVCaptureAudioDataOutputSampleBufferDelegate, which records noise on +/// my machine +/// 3. The native audio sample is of float32 type. the samples are casted into +/// int16 to conform with the library definition + + +public class SwiftMicStreamPlugin: NSObject, FlutterPlugin, FlutterStreamHandler { + public static func register(with registrar: FlutterPluginRegistrar) { + let instance = SwiftMicStreamPlugin() + + let micChannel = FlutterEventChannel(name:"aaron.code.com/mic_stream", binaryMessenger: registrar.messenger) + micChannel.setStreamHandler(instance); + + let channel = FlutterMethodChannel(name: "aaron.code.com/mic_stream_method_channel", binaryMessenger: registrar.messenger) + registrar.addMethodCallDelegate(instance, channel: channel) + } + + var sampleRate: Float64? = 48000; + + public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { + switch call.method { + case "getSampleRate": + result(self.sampleRate) + break; + case "getBitDepth": + result(16) // always 16 + break; + case "getBufferSize": + result(-1) // not given, check received buffer length instead + default: + result(FlutterMethodNotImplemented) } - - public func onCancel(withArguments arguments:Any?) -> FlutterError? { - self.session?.stopRunning() - return nil + } + + var audioEngine = AVAudioEngine(); + var isRecording = false; + + public func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? { + if (isRecording) { + NSLog("onListen being called while recording") + return FlutterError() } + isRecording = true; - public func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? { - NSLog("ON LISTEN CALLED................... *"); - if (isRecording) { - return nil; - } - - let config = arguments as! 
[Int?]; - // Set parameters, if available - print(config); - switch config.count { - case 4: - AUDIO_FORMAT = AudioFormat(rawValue:config[3]!)!; - fallthrough - case 3: - CHANNEL_CONFIG = ChannelConfig(rawValue:config[2]!)!; - if(CHANNEL_CONFIG != ChannelConfig.CHANNEL_IN_MONO) { - events(FlutterError(code: "-3", - message: "Currently only ChannelConfig CHANNEL_IN_MONO is supported", details:nil)) - return nil - } - fallthrough - case 2: - SAMPLE_RATE = config[1]!; - fallthrough - case 1: - AUDIO_SOURCE = AudioSource(rawValue:config[0]!)!; - if(AUDIO_SOURCE != AudioSource.DEFAULT) { - events(FlutterError(code: "-3", - message: "Currently only default AUDIO_SOURCE (id: 0) is supported", details:nil)) - return nil - } - default: - events(FlutterError(code: "-3", - message: "At least one argument (AudioSource) must be provided ", details:nil)) - return nil - } - NSLog("Setting eventSinkn: \(config.count)"); - self.eventSink = events; - startCapture(); - return nil; + // argument check + let config = arguments as! [Int?]; + NSLog("received config \(config)") + if ( + config.count == 4 && + config[0] == 0 && // audio source must be DEFAULT + config[1] == 48000 && // sampleRate must be 48000 as tested on my machine + config[2] == 16 && // channel config must be MONO + config[3] == 2 // audio format must be ENCODING_PCM_16BIT + ) {} else { + NSLog("warning: configuration not supported. The only supported config is (DEFAULT, 48000, MONO, 16BIT) ") } - - func startCapture() { - if let audioCaptureDevice : AVCaptureDevice = AVCaptureDevice.default(for:AVMediaType.audio) { - - self.session = AVCaptureSession() - do { - try audioCaptureDevice.lockForConfiguration() - - let audioInput = try AVCaptureDeviceInput(device: audioCaptureDevice) - audioCaptureDevice.unlockForConfiguration() - - if(self.session.canAddInput(audioInput)){ - self.session.addInput(audioInput) - } - - - //let numChannels = CHANNEL_CONFIG == ChannelConfig.CHANNEL_IN_MONO ? 
1 : 2 - // setting the preferred sample rate on AVAudioSession doesn't magically change the sample rate for our AVCaptureSession - // try AVAudioSession.sharedInstance().setPreferredSampleRate(Double(SAMPLE_RATE)) - - // neither does setting AVLinearPCMBitDepthKey on audioOutput.audioSettings (unavailable on iOS) - // 99% sure it's not possible to set streaming sample rate/bitrate - // try AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(numChannels) - let audioOutput = AVCaptureAudioDataOutput() - audioOutput.setSampleBufferDelegate(self, queue: DispatchQueue.global()) - - if(self.session.canAddOutput(audioOutput)){ - self.session.addOutput(audioOutput) - } - - DispatchQueue.main.async { - self.session.startRunning() - } - } catch let e { - self.eventSink!(FlutterError(code: "-3", - message: "Error encountered starting audio capture, see details for more information.", details:e)) - } - } + + + let input = audioEngine.inputNode + let busID = 0 + let inputFormat = input.inputFormat(forBus: busID) + + sampleRate = inputFormat.sampleRate + + + input.installTap(onBus: busID, bufferSize: 512, format: inputFormat) { (buffer, time) in + guard let channelData = buffer.floatChannelData?[0] else { return } + + let floatArray = Array(UnsafeBufferPointer(start: channelData, count: Int(buffer.frameLength))) + //// used to findout the range of sample. it is even broader than -2 ... 2 + // NSLog("max \(floatArray.max()!) min \(floatArray.min()!)") + var intArray = floatArray.map { val in + // clamp the val to -2.0 ... 2.0 + let clamped = min(max(-2.0, val), 2.0) + return Int16(clamped * 16383) + } + //// use the following to get length information + // NSLog("\(intArray.count)") + // NSLog("\(buffer.frameLength)") + + intArray.withUnsafeMutableBytes { unsafeMutableRawBufferPointer in + let nBytes = Int(buffer.frameLength) * MemoryLayout.size + let unsafeMutableRawPointer = unsafeMutableRawBufferPointer.baseAddress! 
+ + let data = Data(bytesNoCopy: unsafeMutableRawPointer, count: nBytes, deallocator: .none) + events(FlutterStandardTypedData(bytes: data)) + } } + + try! audioEngine.start() + return nil + } + + public func onCancel(withArguments arguments: Any?) -> FlutterError? { + NSLog("audio engine canceled"); - public func captureOutput(_ output : AVCaptureOutput, - didOutput sampleBuffer: CMSampleBuffer, - from connection : AVCaptureConnection) { - - let format = CMSampleBufferGetFormatDescription(sampleBuffer)! - let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(format)!.pointee - - let nChannels = Int(asbd.mChannelsPerFrame) // probably 2 - let bufferlistSize = AudioBufferList.sizeInBytes(maximumBuffers: nChannels) - let audioBufferList = AudioBufferList.allocate(maximumBuffers: nChannels) - for i in 0..? = CMAudioFormatDescriptionGetStreamBasicDescription(fd!) - self.actualSampleRate = asbd.mSampleRate - self.actualBitDepth = asbd.mBitsPerChannel - } - - let data = Data(bytesNoCopy: audioBufferList.unsafePointer.pointee.mBuffers.mData!, count: Int(audioBufferList.unsafePointer.pointee.mBuffers.mDataByteSize), deallocator: .none) - self.eventSink!(FlutterStandardTypedData(bytes: data)) + audioEngine.stop() + audioEngine = AVAudioEngine() - } + isRecording = false; + return nil + } } From 61a5c35d0842e6be6b59e7ee80e6fc20135a3a8b Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Tue, 13 Dec 2022 10:41:36 +0100 Subject: [PATCH 03/21] chore(docs): update documentation for version 0.6.3-dev --- CHANGELOG.md | 4 ++++ README.md | 4 +++- pubspec.yaml | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9c87ff8..4f68908 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.6.3-dev + +* Switch to a different MacOS backend to resolve issues of white noise (#49) + ### 0.6.2 * Upgrade `permission_handler` to version 10.0.0 and update compileSdk to 33 accordingly diff --git a/README.md b/README.md index 
85d1cec..987f826 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,9 @@ Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. ## About mic_stream: -As Flutter still lacks some functionality, this plugin aims to provide the possibility to easily get an audio stream from the microphone, using a simple java and swift implementation. +As Flutter still lacks some functionality, this plugin aims to provide the possibility to easily get an audio stream from the microphone of mobile devices. + +**NOTE: If you receive white noise under MacOS, try version `0.6.3-dev` instead.** ## How to use: diff --git a/pubspec.yaml b/pubspec.yaml index f189494..e2fc0a3 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. -version: 0.6.2 +version: 0.6.3-dev homepage: https://github.com/anarchuser/mic_stream environment: From 0e7ccbd8c04c7568a6cf02a073484610a7c6ebd9 Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Mon, 30 Jan 2023 10:50:20 +0100 Subject: [PATCH 04/21] docs: officially release v0.6.3-dev as v0.6.3 --- CHANGELOG.md | 2 +- README.md | 4 +--- pubspec.yaml | 2 +- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4f68908..7a789a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## 0.6.3-dev +## 0.6.3 * Switch to a different MacOS backend to resolve issues of white noise (#49) diff --git a/README.md b/README.md index 987f826..550f0c1 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# mic_stream: 0.6.2 +# mic_stream: 0.6.3 [Flutter Plugin] Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. @@ -7,8 +7,6 @@ Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. As Flutter still lacks some functionality, this plugin aims to provide the possibility to easily get an audio stream from the microphone of mobile devices. 
-**NOTE: If you receive white noise under MacOS, try version `0.6.3-dev` instead.** - ## How to use: The plugin provides one method: diff --git a/pubspec.yaml b/pubspec.yaml index e2fc0a3..af673ca 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. -version: 0.6.3-dev +version: 0.6.3 homepage: https://github.com/anarchuser/mic_stream environment: From 75eee49f05ef7e53cc3cc63f88c1d4ec321a2e97 Mon Sep 17 00:00:00 2001 From: Andrew Brown Date: Mon, 30 Jan 2023 20:56:21 +1100 Subject: [PATCH 05/21] refactor: change defaultable args to nullable Resolves #54 --- lib/mic_stream.dart | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index e304e36..655416b 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -35,12 +35,12 @@ enum AudioFormat { ENCODING_PCM_8BIT, ENCODING_PCM_16BIT } class MicStream { static bool _requestPermission = true; - static const AudioSource _DEFAULT_AUDIO_SOURCE = AudioSource.DEFAULT; - static const ChannelConfig _DEFAULT_CHANNELS_CONFIG = + static const AudioSource DEFAULT_AUDIO_SOURCE = AudioSource.DEFAULT; + static const ChannelConfig DEFAULT_CHANNELS_CONFIG = ChannelConfig.CHANNEL_IN_MONO; - static const AudioFormat _DEFAULT_AUDIO_FORMAT = + static const AudioFormat DEFAULT_AUDIO_FORMAT = AudioFormat.ENCODING_PCM_8BIT; - static const int _DEFAULT_SAMPLE_RATE = 16000; + static const int DEFAULT_SAMPLE_RATE = 16000; static const int _MIN_SAMPLE_RATE = 1; static const int _MAX_SAMPLE_RATE = 100000; @@ -93,10 +93,16 @@ class MicStream { /// audioFormat: Switch between 8- and 16-bit PCM streams /// static Future?> microphone( - {AudioSource audioSource: _DEFAULT_AUDIO_SOURCE, - int sampleRate: _DEFAULT_SAMPLE_RATE, - ChannelConfig channelConfig: _DEFAULT_CHANNELS_CONFIG, - AudioFormat audioFormat: _DEFAULT_AUDIO_FORMAT}) async { 
+ {AudioSource? audioSource, + int? sampleRate, + ChannelConfig? channelConfig, + AudioFormat? audioFormat}) async { + + audioSource ??= DEFAULT_AUDIO_SOURCE; + sampleRate ??= DEFAULT_SAMPLE_RATE; + channelConfig ??= DEFAULT_CHANNELS_CONFIG; + audioFormat ??= DEFAULT_AUDIO_FORMAT; + if (sampleRate < _MIN_SAMPLE_RATE || sampleRate > _MAX_SAMPLE_RATE) throw (RangeError.range(sampleRate, _MIN_SAMPLE_RATE, _MAX_SAMPLE_RATE)); if (_requestPermission) if (!(await permissionStatus)) From 789d04a1e9ac90406d7ee672afab4300db95a0e9 Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Mon, 30 Jan 2023 10:59:07 +0100 Subject: [PATCH 06/21] docs: adjust docs to new version v0.6.4 --- CHANGELOG.md | 5 +++++ README.md | 2 +- pubspec.yaml | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a789a8..097e339 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.6.4 + +* Change interface from having const default values to taking nullable parameters (#54) +* Make default values publicly accessible + ## 0.6.3 * Switch to a different MacOS backend to resolve issues of white noise (#49) diff --git a/README.md b/README.md index 550f0c1..4f1f4cf 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# mic_stream: 0.6.3 +# mic_stream: 0.6.4 [Flutter Plugin] Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. diff --git a/pubspec.yaml b/pubspec.yaml index af673ca..1e7163c 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. 
-version: 0.6.3 +version: 0.6.4 homepage: https://github.com/anarchuser/mic_stream environment: From 0f8e6c852f3fe17bfc15b738cde5c4b1ad6bcb9f Mon Sep 17 00:00:00 2001 From: yama-yeah <82094614+yama-yeah@users.noreply.github.com> Date: Sat, 1 Apr 2023 18:07:52 +0900 Subject: [PATCH 07/21] fix(iOS): ensure sampleRate settings get applied appropriately (#61) --- .gitignore | 2 + CHANGELOG.md | 3 + README.md | 2 +- example/ios/Flutter/AppFrameworkInfo.plist | 2 +- example/ios/Podfile | 2 +- example/ios/Runner.xcodeproj/project.pbxproj | 24 +++---- .../xcshareddata/xcschemes/Runner.xcscheme | 10 +-- example/ios/Runner/Info.plist | 4 ++ example/lib/main.dart | 57 ++++++++--------- example/macos/Podfile | 2 +- .../macos/Runner.xcodeproj/project.pbxproj | 11 ++-- .../xcshareddata/xcschemes/Runner.xcscheme | 2 +- ios/Classes/SwiftMicStreamPlugin.swift | 62 ++++++++++++++++--- ios/mic_stream.podspec | 2 +- lib/mic_stream.dart | 9 +-- pubspec.yaml | 2 +- 16 files changed, 119 insertions(+), 77 deletions(-) diff --git a/.gitignore b/.gitignore index 398126e..521957d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ .DS_Store .dart_tool/ +.vscode + .idea .packages .pub/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 097e339..d07d43d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,6 @@ +## 0.6.5 +* Fixed sampleRate settings to be adapted to iOS + ## 0.6.4 * Change interface from having const default values to taking nullable parameters (#54) diff --git a/README.md b/README.md index 4f1f4cf..094e9e0 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# mic_stream: 0.6.4 +# mic_stream: 0.6.5 [Flutter Plugin] Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. 
diff --git a/example/ios/Flutter/AppFrameworkInfo.plist b/example/ios/Flutter/AppFrameworkInfo.plist index 6b4c0f7..4f8d4d2 100644 --- a/example/ios/Flutter/AppFrameworkInfo.plist +++ b/example/ios/Flutter/AppFrameworkInfo.plist @@ -21,6 +21,6 @@ CFBundleVersion 1.0 MinimumOSVersion - 8.0 + 11.0 diff --git a/example/ios/Podfile b/example/ios/Podfile index 1e8c3c9..88359b2 100644 --- a/example/ios/Podfile +++ b/example/ios/Podfile @@ -1,5 +1,5 @@ # Uncomment this line to define a global platform for your project -# platform :ios, '9.0' +# platform :ios, '11.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/example/ios/Runner.xcodeproj/project.pbxproj b/example/ios/Runner.xcodeproj/project.pbxproj index fdcb86b..ffd6fff 100644 --- a/example/ios/Runner.xcodeproj/project.pbxproj +++ b/example/ios/Runner.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 50; + objectVersion = 54; objects = { /* Begin PBXBuildFile section */ @@ -163,7 +163,7 @@ 97C146E61CF9000F007C117D /* Project object */ = { isa = PBXProject; attributes = { - LastUpgradeCheck = 1020; + LastUpgradeCheck = 1300; ORGANIZATIONNAME = ""; TargetAttributes = { 97C146ED1CF9000F007C117D = { @@ -207,6 +207,7 @@ /* Begin PBXShellScriptBuildPhase section */ 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); @@ -238,6 +239,7 @@ }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); @@ -347,7 +349,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -363,7 +365,7 @@ 
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; - DEVELOPMENT_TEAM = TM2B4SJXNJ; + DEVELOPMENT_TEAM = ""; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -378,7 +380,7 @@ "$(inherited)", "$(PROJECT_DIR)/Flutter", ); - PRODUCT_BUNDLE_IDENTIFIER = "com.aaron.mic-stream-example"; + PRODUCT_BUNDLE_IDENTIFIER = "com.example.mic-tester"; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; SWIFT_VERSION = 5.0; @@ -433,7 +435,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -482,7 +484,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 8.0; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -500,7 +502,7 @@ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; - DEVELOPMENT_TEAM = TM2B4SJXNJ; + DEVELOPMENT_TEAM = ""; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -515,7 +517,7 @@ "$(inherited)", "$(PROJECT_DIR)/Flutter", ); - PRODUCT_BUNDLE_IDENTIFIER = "com.aaron.mic-stream-example"; + PRODUCT_BUNDLE_IDENTIFIER = "com.example.mic-tester"; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; @@ -531,7 +533,7 @@ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; - DEVELOPMENT_TEAM = TM2B4SJXNJ; + DEVELOPMENT_TEAM = ""; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -546,7 +548,7 @@ 
"$(inherited)", "$(PROJECT_DIR)/Flutter", ); - PRODUCT_BUNDLE_IDENTIFIER = "com.aaron.mic-stream-example"; + PRODUCT_BUNDLE_IDENTIFIER = "com.example.mic-tester"; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; SWIFT_VERSION = 5.0; diff --git a/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index a28140c..c87d15a 100644 --- a/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -1,6 +1,6 @@ - - - - + + - - + CADisableMinimumFrameDurationOnPhone + CFBundleDevelopmentRegion $(DEVELOPMENT_LANGUAGE) CFBundleExecutable @@ -24,6 +26,8 @@ NSMicrophoneUsageDescription Microphone access required + UIApplicationSupportsIndirectInputEvents + UILaunchStoryboardName LaunchScreen UIMainStoryboardFile diff --git a/example/lib/main.dart b/example/lib/main.dart index c9f9ad0..fce72e1 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -2,10 +2,7 @@ import 'dart:async'; import 'dart:math'; import 'dart:core'; -import 'package:flutter/widgets.dart'; import 'package:flutter/material.dart'; -import 'package:flutter/animation.dart'; -import 'package:flutter/rendering.dart'; import 'package:mic_stream/mic_stream.dart'; @@ -35,7 +32,6 @@ class _MicStreamExampleAppState extends State Random rng = new Random(); - // Refreshes the Widget for every possible tick to force a rebuild of the sound wave late AnimationController controller; @@ -48,7 +44,6 @@ class _MicStreamExampleAppState extends State int page = 0; List state = ["SoundWavePage", "IntensityWavePage", "InformationPage"]; - @override void initState() { print("Init application"); @@ -79,7 +74,6 @@ class _MicStreamExampleAppState extends State Future _changeListening() async => !isRecording ? 
await _startListening() : _stopListening(); - late int bytesPerSample; late int samplesPerSecond; @@ -96,13 +90,15 @@ class _MicStreamExampleAppState extends State stream = await MicStream.microphone( audioSource: AudioSource.DEFAULT, - sampleRate: 1000 * (rng.nextInt(50) + 30), + // sampleRate: 1000 * (rng.nextInt(50) + 30), + sampleRate: 48000, channelConfig: ChannelConfig.CHANNEL_IN_MONO, audioFormat: AUDIO_FORMAT); // after invoking the method for the first time, though, these will be available; // It is not necessary to setup a listener first, the stream only needs to be returned first - print("Start Listening to the microphone, sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); - bytesPerSample = (await MicStream.bitDepth)! ~/ 8; + print( + "Start Listening to the microphone, sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); + bytesPerSample = (await MicStream.bitDepth)! 
~/ 8; samplesPerSecond = (await MicStream.sampleRate)!.toInt(); localMax = null; localMin = null; @@ -117,10 +113,8 @@ class _MicStreamExampleAppState extends State } void _calculateSamples(samples) { - if (page == 0) - _calculateWaveSamples(samples); - else if (page == 1) - _calculateIntensitySamples(samples); + if (page == 0) _calculateWaveSamples(samples); + else if (page == 1) _calculateIntensitySamples(samples); } void _calculateWaveSamples(samples) { @@ -144,7 +138,7 @@ class _MicStreamExampleAppState extends State } first = !first; } - print(visibleSamples); + print(visibleSamples.length); } void _calculateIntensitySamples(samples) { @@ -152,20 +146,21 @@ class _MicStreamExampleAppState extends State int currentSample = 0; eachWithIndex(samples, (i, int sample) { currentSample += sample; - if ((i % bytesPerSample) == bytesPerSample-1) { + if ((i % bytesPerSample) == bytesPerSample - 1) { currentSamples!.add(currentSample); currentSample = 0; } }); - if (currentSamples!.length >= samplesPerSecond/10) { - visibleSamples.add(currentSamples!.map((i) => i).toList().reduce((a, b) => a+b)); + if (currentSamples!.length >= samplesPerSecond / 10) { + visibleSamples + .add(currentSamples!.map((i) => i).toList().reduce((a, b) => a + b)); localMax ??= visibleSamples.last; localMin ??= visibleSamples.last; localMax = max(localMax!, visibleSamples.last); localMin = min(localMin!, visibleSamples.last); currentSamples = []; - setState(() {}); + setState(() {}); } } @@ -195,8 +190,7 @@ class _MicStreamExampleAppState extends State if (isRecording) setState(() {}); }) ..addStatusListener((status) { - if (status == AnimationStatus.completed) - controller.reverse(); + if (status == AnimationStatus.completed) controller.reverse(); else if (status == AnimationStatus.dismissed) controller.forward(); }) ..forward(); @@ -297,7 +291,8 @@ class WavePainter extends CustomPainter { // int absMax = 255*4; //(AUDIO_FORMAT == AudioFormat.ENCODING_PCM_8BIT) ? 
127 : 32767; // int absMin; //(AUDIO_FORMAT == AudioFormat.ENCODING_PCM_8BIT) ? 127 : 32767; - WavePainter({this.samples, this.color, this.context, this.localMax, this.localMin}); + WavePainter( + {this.samples, this.color, this.context, this.localMax, this.localMin}); @override void paint(Canvas canvas, Size? size) { @@ -309,9 +304,7 @@ class WavePainter extends CustomPainter { ..strokeWidth = 1.0 ..style = PaintingStyle.stroke; - if (samples!.length == 0) - return; - + if (samples!.length == 0) return; points = toPoints(samples); @@ -327,18 +320,22 @@ class WavePainter extends CustomPainter { // Maps a list of ints and their indices to a list of points on a cartesian grid List toPoints(List? samples) { List points = []; - if (samples == null) - samples = List.filled(size!.width.toInt(), (0.5).toInt()); - double pixelsPerSample = size!.width/samples.length; + if (samples == null) samples = List.filled(size!.width.toInt(), (0.5).toInt()); + double pixelsPerSample = size!.width / samples.length; for (int i = 0; i < samples.length; i++) { - var point = Offset(i * pixelsPerSample, 0.5 * size!.height * pow((samples[i] - localMin!)/(localMax! - localMin!), 5)); + var point = Offset( + i * pixelsPerSample, + 0.5 * + size!.height * + pow((samples[i] - localMin!) / (localMax! - localMin!), 5)); points.add(point); } return points; } double project(int val, int max, double height) { - double waveHeight = (max == 0) ? val.toDouble() : (val / max) * 0.5 * height; + double waveHeight = + (max == 0) ? 
val.toDouble() : (val / max) * 0.5 * height; return waveHeight + 0.5 * height; } } @@ -370,7 +367,6 @@ class Statistics extends StatelessWidget { } } - Iterable eachWithIndex( Iterable items, E Function(int index, T item) f) { var index = 0; @@ -382,4 +378,3 @@ Iterable eachWithIndex( return items; } - diff --git a/example/macos/Podfile b/example/macos/Podfile index dade8df..049abe2 100644 --- a/example/macos/Podfile +++ b/example/macos/Podfile @@ -1,4 +1,4 @@ -platform :osx, '10.11' +platform :osx, '10.14' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/example/macos/Runner.xcodeproj/project.pbxproj b/example/macos/Runner.xcodeproj/project.pbxproj index 0a5c946..a845323 100644 --- a/example/macos/Runner.xcodeproj/project.pbxproj +++ b/example/macos/Runner.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 51; + objectVersion = 54; objects = { /* Begin PBXAggregateTarget section */ @@ -203,7 +203,7 @@ isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0920; - LastUpgradeCheck = 0930; + LastUpgradeCheck = 1300; ORGANIZATIONNAME = ""; TargetAttributes = { 33CC10EC2044A3C60003C045 = { @@ -256,6 +256,7 @@ /* Begin PBXShellScriptBuildPhase section */ 3399D490228B24CF009A79C7 /* ShellScript */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); @@ -404,7 +405,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - MACOSX_DEPLOYMENT_TARGET = 10.11; + MACOSX_DEPLOYMENT_TARGET = 10.14; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = macosx; SWIFT_COMPILATION_MODE = wholemodule; @@ -483,7 +484,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - MACOSX_DEPLOYMENT_TARGET = 10.11; + MACOSX_DEPLOYMENT_TARGET = 10.14; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; 
SDKROOT = macosx; @@ -530,7 +531,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - MACOSX_DEPLOYMENT_TARGET = 10.11; + MACOSX_DEPLOYMENT_TARGET = 10.14; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = macosx; SWIFT_COMPILATION_MODE = wholemodule; diff --git a/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index 2f4543e..3f9170c 100644 --- a/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/example/macos/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -1,6 +1,6 @@ 48000) { + events(FlutterError(code: "-3", + message: "iPhone only sample rates between 8000 and 48000 are supported", details:nil)) + return nil + } fallthrough case 1: AUDIO_SOURCE = AudioSource(rawValue:config[0]!)!; @@ -94,24 +105,50 @@ public class SwiftMicStreamPlugin: NSObject, FlutterStreamHandler, FlutterPlugin if let audioCaptureDevice : AVCaptureDevice = AVCaptureDevice.default(for:AVMediaType.audio) { self.session = AVCaptureSession() + self.audioSession=AVAudioSession.sharedInstance() do { + //magic word + //This will allow developers to specify sample rates, etc. + try session.automaticallyConfiguresApplicationAudioSession = false + try audioCaptureDevice.lockForConfiguration() + + try audioSession.setCategory(AVAudioSession.Category.record,mode: .measurement) + + try audioSession.setPreferredSampleRate(Double(SAMPLE_RATE)) + + //Calculate the time required for BufferSize + let preferredIOBufferDuration: TimeInterval = 1.0 / audioSession.sampleRate * Double(self.BUFFER_SIZE) + try audioSession.setPreferredIOBufferDuration(Double(preferredIOBufferDuration)) + + //it does not seem like this is working + //let numChannels = CHANNEL_CONFIG == ChannelConfig.CHANNEL_IN_MONO ? 
1 : 2 + //try audioSession.setPreferredInputNumberOfChannels(1) + + + // print("this is the session sample rate: \(audioSession.sampleRate)") + // print("this is the session preferred sample rate: \(audioSession.preferredSampleRate)") + // print("this is the session preferred IOBufferDuration: \(audioSession.preferredIOBufferDuration)") + // print("this is the session IOBufferDuration: \(audioSession.ioBufferDuration)") + // print("this is the session preferred input number of channels: \(audioSession.preferredInputNumberOfChannels)") + // print("this is the session input number of channels: \(audioSession.inputNumberOfChannels)") + + try audioSession.setActive(true) + let audioInput = try AVCaptureDeviceInput(device: audioCaptureDevice) + + audioCaptureDevice.unlockForConfiguration() if(self.session.canAddInput(audioInput)){ self.session.addInput(audioInput) } - - //let numChannels = CHANNEL_CONFIG == ChannelConfig.CHANNEL_IN_MONO ? 1 : 2 - // setting the preferred sample rate on AVAudioSession doesn't magically change the sample rate for our AVCaptureSession - // try AVAudioSession.sharedInstance().setPreferredSampleRate(Double(SAMPLE_RATE)) - // neither does setting AVLinearPCMBitDepthKey on audioOutput.audioSettings (unavailable on iOS) // 99% sure it's not possible to set streaming sample rate/bitrate // try AVAudioSession.sharedInstance().setPreferredOutputNumberOfChannels(numChannels) + let audioOutput = AVCaptureAudioDataOutput() audioOutput.setSampleBufferDelegate(self, queue: DispatchQueue.global()) @@ -123,6 +160,9 @@ public class SwiftMicStreamPlugin: NSObject, FlutterStreamHandler, FlutterPlugin self.session.startRunning() } } catch let e { + // print("Error encountered starting audio capture, see details for more information.") + // print(e) + self.eventSink!(FlutterError(code: "-3", message: "Error encountered starting audio capture, see details for more information.", details:e)) } @@ -160,8 +200,10 @@ public class SwiftMicStreamPlugin: NSObject, 
FlutterStreamHandler, FlutterPlugin self.actualSampleRate = asbd?.pointee.mSampleRate self.actualBitDepth = asbd?.pointee.mBitsPerChannel } - + //print(actualSampleRate) + //print(audioSession.sampleRate) let data = Data(bytesNoCopy: audioBufferList.mBuffers.mData!, count: Int(audioBufferList.mBuffers.mDataByteSize), deallocator: .none) + self.eventSink!(FlutterStandardTypedData(bytes: data)) } diff --git a/ios/mic_stream.podspec b/ios/mic_stream.podspec index 38840d8..e797f98 100644 --- a/ios/mic_stream.podspec +++ b/ios/mic_stream.podspec @@ -16,6 +16,6 @@ Provides a tool to get the microphone input as Byte Stream s.public_header_files = 'Classes/**/*.h' s.dependency 'Flutter' - s.ios.deployment_target = '8.0' + s.ios.deployment_target = '11.0' end diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index 655416b..0ac3458 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -3,7 +3,6 @@ import 'dart:io'; import 'package:permission_handler/permission_handler.dart' as handler; import 'package:flutter/services.dart'; -import 'dart:typed_data'; // In reference to the implementation of the official sensors plugin // https://github.com/flutter/plugins/tree/master/packages/sensors @@ -38,8 +37,7 @@ class MicStream { static const AudioSource DEFAULT_AUDIO_SOURCE = AudioSource.DEFAULT; static const ChannelConfig DEFAULT_CHANNELS_CONFIG = ChannelConfig.CHANNEL_IN_MONO; - static const AudioFormat DEFAULT_AUDIO_FORMAT = - AudioFormat.ENCODING_PCM_8BIT; + static const AudioFormat DEFAULT_AUDIO_FORMAT = AudioFormat.ENCODING_PCM_8BIT; static const int DEFAULT_SAMPLE_RATE = 16000; static const int _MIN_SAMPLE_RATE = 1; @@ -97,7 +95,6 @@ class MicStream { int? sampleRate, ChannelConfig? channelConfig, AudioFormat? audioFormat}) async { - audioSource ??= DEFAULT_AUDIO_SOURCE; sampleRate ??= DEFAULT_SAMPLE_RATE; channelConfig ??= DEFAULT_CHANNELS_CONFIG; @@ -152,7 +149,7 @@ class MicStream { } /// Updates flag to determine whether to request audio recording permission. 
Set to false to disable dialogue, set to true (default) to request permission if necessary - static bool shouldRequestPermission(bool request_permission) { - return _requestPermission = request_permission; + static bool shouldRequestPermission(bool requestPermission) { + return _requestPermission = requestPermission; } } diff --git a/pubspec.yaml b/pubspec.yaml index 1e7163c..b34d64d 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. -version: 0.6.4 +version: 0.6.5 homepage: https://github.com/anarchuser/mic_stream environment: From 2e46664d128a3483a38a76936bc54db441b52569 Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Mon, 1 May 2023 13:07:29 +0200 Subject: [PATCH 08/21] docs: update docs to 0.7.0-dev --- CHANGELOG.md | 5 +++++ README.md | 15 ++++++++++++--- pubspec.yaml | 2 +- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d07d43d..a2818c6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +## 0.7.0-dev + +### !!! This version changes the API !!! +* Change return value of `microphone(...)` from `Future?>` to `Stream` + ## 0.6.5 * Fixed sampleRate settings to be adapted to iOS diff --git a/README.md b/README.md index 094e9e0..223b408 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# mic_stream: 0.6.5 +# mic_stream: 0.7.0-dev [Flutter Plugin] Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. @@ -11,14 +11,23 @@ As Flutter still lacks some functionality, this plugin aims to provide the possi The plugin provides one method: -`Future> MicStream.microphone({options})` +`Stream MicStream.microphone({options})` Listening to this stream starts the audio recorder while cancelling the subscription stops the stream. 
-The plugin also provides information about some properties: +Available options are as follows: +```dart +audioSource: AudioSource // The microphone you want to record from +sampleRate: int // The amount of data points to record per second +channelConfig: ChannelConfig // Mono or Stereo +audioFormat: AudioFormat // 8 bit PCM or 16 bit PCM. Other formats are not yet supported ``` + +The plugin also provides information about some properties: + +```dart Future sampleRate = await MicStream.sampleRate; Future bitDepth = await MicStream.bitDepth; Future bufferSize = await MicStream.bufferSize; diff --git a/pubspec.yaml b/pubspec.yaml index b34d64d..c6c78f3 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. -version: 0.6.5 +version: 0.7.0-dev homepage: https://github.com/anarchuser/mic_stream environment: From c56456d61a6f6880101c9bc1ed1c4083d8590b78 Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Mon, 1 May 2023 14:24:05 +0200 Subject: [PATCH 09/21] chore: update android config files --- android/build.gradle | 2 ++ android/gradle.properties | 1 - .../code/aaron/micstream/MicStreamPlugin.java | 19 +++++++-------- example/android/app/build.gradle | 4 +++- .../android/app/src/debug/AndroidManifest.xml | 3 +-- .../android/app/src/main/AndroidManifest.xml | 23 +++---------------- .../com/aaron/mic_stream/MainActivity.kt | 6 ----- .../com/example/example/MainActivity.kt | 6 ----- .../app/src/profile/AndroidManifest.xml | 3 +-- example/android/build.gradle | 6 ++--- example/android/gradle.properties | 4 +++- .../gradle/wrapper/gradle-wrapper.properties | 2 +- example/lib/main.dart | 4 ++-- 13 files changed, 29 insertions(+), 54 deletions(-) delete mode 100644 example/android/app/src/main/kotlin/com/aaron/mic_stream/MainActivity.kt delete mode 100644 example/android/app/src/main/kotlin/com/example/example/MainActivity.kt diff --git 
a/android/build.gradle b/android/build.gradle index f234cf0..543a6d8 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -22,6 +22,8 @@ rootProject.allprojects { apply plugin: 'com.android.library' android { + namespace 'com.code.aaron.micstream' + compileSdkVersion 33 defaultConfig { diff --git a/android/gradle.properties b/android/gradle.properties index d2032bc..08f2b5f 100644 --- a/android/gradle.properties +++ b/android/gradle.properties @@ -1,4 +1,3 @@ org.gradle.jvmargs=-Xmx1536M android.enableJetifier=true android.useAndroidX=true -android.enableR8=true diff --git a/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java b/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java index daeb37e..9323878 100644 --- a/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java +++ b/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java @@ -1,16 +1,17 @@ package com.code.aaron.micstream; -import java.lang.Math; import java.util.ArrayList; import java.util.Arrays; -import android.annotation.TargetApi; +import android.annotation.SuppressLint; import android.media.AudioFormat; import android.media.AudioRecord; import android.media.MediaRecorder; import android.os.Handler; import android.os.Looper; +import androidx.annotation.NonNull; + import io.flutter.embedding.engine.plugins.FlutterPlugin; import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; @@ -25,7 +26,6 @@ * and the example of the streams_channel (v0.2.2) plugin */ -@TargetApi(16) // Should be unnecessary, but isn't // fix build.gradle...? 
public class MicStreamPlugin implements FlutterPlugin, EventChannel.StreamHandler, MethodCallHandler { private static final String MICROPHONE_CHANNEL_NAME = "aaron.code.com/mic_stream"; private static final String MICROPHONE_METHOD_CHANNEL_NAME = "aaron.code.com/mic_stream_method_channel"; @@ -38,7 +38,7 @@ public void onAttachedToEngine(FlutterPluginBinding binding) { /// Cleanup after connection loss to flutter @Override - public void onDetachedFromEngine(FlutterPluginBinding binding) { + public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) { onCancel(null); } @@ -73,10 +73,10 @@ private volatile boolean record = false; // Method channel handlers to get sample rate / bit-depth @Override - public void onMethodCall(MethodCall call, Result result) { + public void onMethodCall(MethodCall call, @NonNull Result result) { switch (call.method) { case "getSampleRate": - result.success((double)this.actualSampleRate); // cast to double just for compatibility with the iOS version + result.success((double) this.actualSampleRate); // cast to double just for compatibility with the iOS version break; case "getBitDepth": result.success(this.actualBitDepth); @@ -90,7 +90,8 @@ public void onMethodCall(MethodCall call, Result result) { } } - private void initRecorder () { + @SuppressLint("MissingPermission") + private void initRecorder() { // Try to initialize and start the recorder recorder = new AudioRecord(AUDIO_SOURCE, SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT, BUFFER_SIZE); if (recorder.getState() != AudioRecord.STATE_INITIALIZED) { @@ -135,8 +136,8 @@ public void run() { /// Bug fix by https://github.com/Lokhozt /// following https://github.com/flutter/flutter/issues/34993 private static class MainThreadEventSink implements EventChannel.EventSink { - private EventChannel.EventSink eventSink; - private Handler handler; + private final EventChannel.EventSink eventSink; + private final Handler handler; MainThreadEventSink(EventChannel.EventSink eventSink) { 
this.eventSink = eventSink; diff --git a/example/android/app/build.gradle b/example/android/app/build.gradle index 5678889..6e3eb8f 100644 --- a/example/android/app/build.gradle +++ b/example/android/app/build.gradle @@ -26,7 +26,7 @@ apply plugin: 'kotlin-android' apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle" android { - compileSdkVersion 31 + namespace 'com.code.aaron.micstream' sourceSets { main.java.srcDirs += 'src/main/kotlin' @@ -41,6 +41,7 @@ android { applicationId "com.aaron.mic_stream" minSdkVersion 16 targetSdkVersion 33 + compileSdkVersion 33 versionCode flutterVersionCode.toInteger() versionName flutterVersionName } @@ -52,6 +53,7 @@ android { signingConfig signingConfigs.debug } } + namespace 'com.aaron.mic_stream' } flutter { diff --git a/example/android/app/src/debug/AndroidManifest.xml b/example/android/app/src/debug/AndroidManifest.xml index 0495c6c..f880684 100644 --- a/example/android/app/src/debug/AndroidManifest.xml +++ b/example/android/app/src/debug/AndroidManifest.xml @@ -1,5 +1,4 @@ - + diff --git a/example/android/app/src/main/AndroidManifest.xml b/example/android/app/src/main/AndroidManifest.xml index e4a6582..499834e 100644 --- a/example/android/app/src/main/AndroidManifest.xml +++ b/example/android/app/src/main/AndroidManifest.xml @@ -1,5 +1,4 @@ - + - - - diff --git a/example/android/app/src/main/kotlin/com/aaron/mic_stream/MainActivity.kt b/example/android/app/src/main/kotlin/com/aaron/mic_stream/MainActivity.kt deleted file mode 100644 index 59097fd..0000000 --- a/example/android/app/src/main/kotlin/com/aaron/mic_stream/MainActivity.kt +++ /dev/null @@ -1,6 +0,0 @@ -package com.aaron.mic_stream - -import io.flutter.embedding.android.FlutterActivity - -class MainActivity: FlutterActivity() { -} diff --git a/example/android/app/src/main/kotlin/com/example/example/MainActivity.kt b/example/android/app/src/main/kotlin/com/example/example/MainActivity.kt deleted file mode 100644 index e793a00..0000000 --- 
a/example/android/app/src/main/kotlin/com/example/example/MainActivity.kt +++ /dev/null @@ -1,6 +0,0 @@ -package com.example.example - -import io.flutter.embedding.android.FlutterActivity - -class MainActivity: FlutterActivity() { -} diff --git a/example/android/app/src/profile/AndroidManifest.xml b/example/android/app/src/profile/AndroidManifest.xml index c208884..f880684 100644 --- a/example/android/app/src/profile/AndroidManifest.xml +++ b/example/android/app/src/profile/AndroidManifest.xml @@ -1,5 +1,4 @@ - + diff --git a/example/android/build.gradle b/example/android/build.gradle index 0c8cd4f..0ab2b70 100644 --- a/example/android/build.gradle +++ b/example/android/build.gradle @@ -1,12 +1,12 @@ buildscript { - ext.kotlin_version = '1.5.10' + ext.kotlin_version = '1.6.21' repositories { google() jcenter() } dependencies { - classpath 'com.android.tools.build:gradle:4.0.1' + classpath 'com.android.tools.build:gradle:7.4.2' classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" } } @@ -28,4 +28,4 @@ subprojects { task clean(type: Delete) { delete rootProject.buildDir -} +} \ No newline at end of file diff --git a/example/android/gradle.properties b/example/android/gradle.properties index 38c8d45..b9a9a24 100644 --- a/example/android/gradle.properties +++ b/example/android/gradle.properties @@ -1,4 +1,6 @@ org.gradle.jvmargs=-Xmx1536M -android.enableR8=true android.useAndroidX=true android.enableJetifier=true +android.defaults.buildfeatures.buildconfig=true +android.nonTransitiveRClass=false +android.nonFinalResIds=false diff --git a/example/android/gradle/wrapper/gradle-wrapper.properties b/example/android/gradle/wrapper/gradle-wrapper.properties index e5e73da..89e56bd 100644 --- a/example/android/gradle/wrapper/gradle-wrapper.properties +++ b/example/android/gradle/wrapper/gradle-wrapper.properties @@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists 
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-all.zip diff --git a/example/lib/main.dart b/example/lib/main.dart index fce72e1..f608bdf 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -48,7 +48,7 @@ class _MicStreamExampleAppState extends State void initState() { print("Init application"); super.initState(); - WidgetsBinding.instance!.addObserver(this); + WidgetsBinding.instance.addObserver(this); setState(() { initPlatformState(); }); @@ -273,7 +273,7 @@ class _MicStreamExampleAppState extends State void dispose() { listener.cancel(); controller.dispose(); - WidgetsBinding.instance!.removeObserver(this); + WidgetsBinding.instance.removeObserver(this); super.dispose(); } } From c5795ddef87494676bc286d2ac16373bad619795 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mat=C3=ADas=20Irland?= Date: Mon, 1 May 2023 09:37:03 -0300 Subject: [PATCH 10/21] feat: change API return type to `Stream` (#65) --- example/lib/main.dart | 10 +++++----- lib/mic_stream.dart | 37 +++++++++++++++++++++++++++++-------- 2 files changed, 34 insertions(+), 13 deletions(-) diff --git a/example/lib/main.dart b/example/lib/main.dart index f608bdf..ca8eb26 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -88,7 +88,7 @@ class _MicStreamExampleAppState extends State // Default option. 
Set to false to disable request permission dialogue MicStream.shouldRequestPermission(true); - stream = await MicStream.microphone( + stream = MicStream.microphone( audioSource: AudioSource.DEFAULT, // sampleRate: 1000 * (rng.nextInt(50) + 30), sampleRate: 48000, @@ -98,17 +98,17 @@ class _MicStreamExampleAppState extends State // It is not necessary to setup a listener first, the stream only needs to be returned first print( "Start Listening to the microphone, sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); - bytesPerSample = (await MicStream.bitDepth)! ~/ 8; - samplesPerSecond = (await MicStream.sampleRate)!.toInt(); localMax = null; localMin = null; + visibleSamples = []; + listener = stream!.listen(_calculateSamples); + bytesPerSample = (await MicStream.bitDepth)! ~/ 8; + samplesPerSecond = (await MicStream.sampleRate)!.toInt(); setState(() { isRecording = true; startTime = DateTime.now(); }); - visibleSamples = []; - listener = stream!.listen(_calculateSamples); return true; } diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index 0ac3458..8f8e325 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -1,8 +1,8 @@ import 'dart:async'; import 'dart:io'; -import 'package:permission_handler/permission_handler.dart' as handler; import 'package:flutter/services.dart'; +import 'package:permission_handler/permission_handler.dart' as handler; // In reference to the implementation of the official sensors plugin // https://github.com/flutter/plugins/tree/master/packages/sensors @@ -90,21 +90,43 @@ class MicStream { /// channelConfig: States whether audio is mono or stereo /// audioFormat: Switch between 8- and 16-bit PCM streams /// - static Future?> microphone( + static Stream microphone( {AudioSource? audioSource, int? sampleRate, ChannelConfig? channelConfig, - AudioFormat? audioFormat}) async { + AudioFormat? 
audioFormat}) { audioSource ??= DEFAULT_AUDIO_SOURCE; sampleRate ??= DEFAULT_SAMPLE_RATE; channelConfig ??= DEFAULT_CHANNELS_CONFIG; audioFormat ??= DEFAULT_AUDIO_FORMAT; if (sampleRate < _MIN_SAMPLE_RATE || sampleRate > _MAX_SAMPLE_RATE) - throw (RangeError.range(sampleRate, _MIN_SAMPLE_RATE, _MAX_SAMPLE_RATE)); - if (_requestPermission) if (!(await permissionStatus)) - throw (PlatformException); + return Stream.error( + RangeError.range(sampleRate, _MIN_SAMPLE_RATE, _MAX_SAMPLE_RATE)); + + final initStream = _requestPermission + ? Stream.fromFuture(permissionStatus) + : Stream.value(true); + + return initStream.asyncExpand((grantedPermission) { + if (!grantedPermission) { + throw Exception('Microphone permission is not granted'); + } + return _setupMicStream( + audioSource!, + sampleRate!, + channelConfig!, + audioFormat!, + ); + }); + } + static Stream _setupMicStream( + AudioSource audioSource, + int sampleRate, + ChannelConfig channelConfig, + AudioFormat audioFormat, + ) { // If first time or configs have changed reinitialise audio recorder if (audioSource != __audioSource || sampleRate != __sampleRate || @@ -144,8 +166,7 @@ class MicStream { bufferSizeCompleter.complete( await _microphoneMethodChannel.invokeMethod("getBufferSize") as int?); }); - - return _microphone; + return _microphone!; } /// Updates flag to determine whether to request audio recording permission. 
Set to false to disable dialogue, set to true (default) to request permission if necessary From 528338d903c30110dc8170b86e62efdccded3731 Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Mon, 1 May 2023 15:42:09 +0200 Subject: [PATCH 11/21] fix: ensure completion of sampleRate/bitDepth/bufferSize completion --- example/lib/main.dart | 14 ++++++++------ lib/mic_stream.dart | 43 ++++++++++++++++++++----------------------- 2 files changed, 28 insertions(+), 29 deletions(-) diff --git a/example/lib/main.dart b/example/lib/main.dart index ca8eb26..b6fdb94 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -1,6 +1,7 @@ import 'dart:async'; import 'dart:math'; import 'dart:core'; +import 'dart:typed_data'; import 'package:flutter/material.dart'; @@ -23,7 +24,7 @@ class MicStreamExampleApp extends StatefulWidget { class _MicStreamExampleAppState extends State with SingleTickerProviderStateMixin, WidgetsBindingObserver { - Stream? stream; + Stream? stream; late StreamSubscription listener; List? currentSamples = []; List visibleSamples = []; @@ -94,15 +95,16 @@ class _MicStreamExampleAppState extends State sampleRate: 48000, channelConfig: ChannelConfig.CHANNEL_IN_MONO, audioFormat: AUDIO_FORMAT); + listener = stream!.listen(_calculateSamples); + // after invoking the method for the first time, though, these will be available; // It is not necessary to setup a listener first, the stream only needs to be returned first - print( - "Start Listening to the microphone, sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); + print("Start Listening to the microphone, sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); + localMax = null; localMin = null; visibleSamples = []; - listener = stream!.listen(_calculateSamples); bytesPerSample = (await MicStream.bitDepth)! 
~/ 8; samplesPerSecond = (await MicStream.sampleRate)!.toInt(); setState(() { @@ -112,7 +114,8 @@ class _MicStreamExampleAppState extends State return true; } - void _calculateSamples(samples) { + void _calculateSamples(samples) async { + // print("Sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); if (page == 0) _calculateWaveSamples(samples); else if (page == 1) _calculateIntensitySamples(samples); } @@ -138,7 +141,6 @@ class _MicStreamExampleAppState extends State } first = !first; } - print(visibleSamples.length); } void _calculateIntensitySamples(samples) { diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index 8f8e325..847eb2d 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -49,19 +49,16 @@ class MicStream { MethodChannel('aaron.code.com/mic_stream_method_channel'); /// The actual sample rate used for streaming. This may return zero if invoked without listening to the _microphone Stream - static Future? get sampleRate => _sampleRate; - - static Future? _sampleRate; + static Future get sampleRate => _sampleRateCompleter.future; + static Completer _sampleRateCompleter = new Completer(); /// The actual bit depth used for streaming. This may return zero if invoked without listening to the _microphone Stream first. - static Future? get bitDepth => _bitDepth; - - static Future? _bitDepth; + static Future get bitDepth => _bitDepthCompleter.future; + static Completer _bitDepthCompleter = new Completer(); /// The amount of recorded data, per sample, in bytes - static Future? get bufferSize => _bufferSize; - - static Future? _bufferSize; + static Future get bufferSize => _bufferSizeCompleter.future; + static Completer _bufferSizeCompleter = new Completer(); /// The configured microphone stream and its config static Stream? 
_microphone; @@ -132,7 +129,6 @@ class MicStream { sampleRate != __sampleRate || channelConfig != __channelConfig || audioFormat != __audioFormat) { - //TODO: figure out whether the old stream needs to be cancelled _microphone = _microphoneEventChannel.receiveBroadcastStream([ audioSource.index, sampleRate, @@ -145,27 +141,28 @@ class MicStream { __audioFormat = audioFormat; } + if (_microphone == null) { + return Stream.error(StateError); + } + // sampleRate/bitDepth should be populated before any attempt to consume the stream externally. // configure these as Completers and listen to the stream internally before returning // these will complete only when this internal listener is called + _sampleRateCompleter = new Completer(); + _bitDepthCompleter = new Completer(); + _bufferSizeCompleter = new Completer(); StreamSubscription? listener; - var sampleRateCompleter = new Completer(); - var bitDepthCompleter = new Completer(); - var bufferSizeCompleter = new Completer(); - _sampleRate = sampleRateCompleter.future; - _bitDepth = bitDepthCompleter.future; - _bufferSize = bufferSizeCompleter.future; - listener = _microphone!.listen((x) async { await listener!.cancel(); listener = null; - sampleRateCompleter.complete(await _microphoneMethodChannel - .invokeMethod("getSampleRate") as double?); - bitDepthCompleter.complete( - await _microphoneMethodChannel.invokeMethod("getBitDepth") as int?); - bufferSizeCompleter.complete( - await _microphoneMethodChannel.invokeMethod("getBufferSize") as int?); + _sampleRateCompleter.complete(await _microphoneMethodChannel + .invokeMethod("getSampleRate") as double); + _bitDepthCompleter.complete( + await _microphoneMethodChannel.invokeMethod("getBitDepth") as int); + _bufferSizeCompleter.complete( + await _microphoneMethodChannel.invokeMethod("getBufferSize") as int); }); + return _microphone!; } From 7cd6aa8e36620e3790d1d4ca1408e3e2251db3c3 Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Sat, 29 Jul 2023 16:46:44 +0200 Subject: [PATCH 
12/21] fix: upon stream recreation, uncompleted getters await the new values instead --- example/lib/main.dart | 14 +++----------- lib/mic_stream.dart | 29 ++++++++++++++++++++++------- 2 files changed, 25 insertions(+), 18 deletions(-) diff --git a/example/lib/main.dart b/example/lib/main.dart index b6fdb94..7fe1141 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -79,13 +79,7 @@ class _MicStreamExampleAppState extends State late int samplesPerSecond; Future _startListening() async { - print("START LISTENING"); if (isRecording) return false; - // if this is the first time invoking the microphone() - // method to get the stream, we don't yet have access - // to the sampleRate and bitDepth properties - print("wait for stream"); - // Default option. Set to false to disable request permission dialogue MicStream.shouldRequestPermission(true); @@ -96,17 +90,15 @@ class _MicStreamExampleAppState extends State channelConfig: ChannelConfig.CHANNEL_IN_MONO, audioFormat: AUDIO_FORMAT); listener = stream!.listen(_calculateSamples); - - // after invoking the method for the first time, though, these will be available; - // It is not necessary to setup a listener first, the stream only needs to be returned first + listener.onError(print); print("Start Listening to the microphone, sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); localMax = null; localMin = null; visibleSamples = []; - bytesPerSample = (await MicStream.bitDepth)! 
~/ 8; - samplesPerSecond = (await MicStream.sampleRate)!.toInt(); + bytesPerSample = (await MicStream.bitDepth) ~/ 8; + samplesPerSecond = (await MicStream.sampleRate).toInt(); setState(() { isRecording = true; startTime = DateTime.now(); diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index 847eb2d..e52a4c5 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -73,7 +73,8 @@ class MicStream { return true; } var micStatus = await handler.Permission.microphone.request(); - return !micStatus.isDenied; + print(micStatus); + return !micStatus.isDenied && !micStatus.isPermanentlyDenied; } /// This function initializes a connection to the native backend (if not already available). @@ -148,15 +149,29 @@ class MicStream { // sampleRate/bitDepth should be populated before any attempt to consume the stream externally. // configure these as Completers and listen to the stream internally before returning // these will complete only when this internal listener is called - _sampleRateCompleter = new Completer(); + var _tmpSampleRateCompleter = _sampleRateCompleter; + _sampleRateCompleter = new Completer(); + if (!_tmpSampleRateCompleter.isCompleted) { + _tmpSampleRateCompleter.complete(_sampleRateCompleter.future); + } + + var _tmpBitDepthCompleter = _bitDepthCompleter; _bitDepthCompleter = new Completer(); + if (!_tmpBitDepthCompleter.isCompleted) { + _tmpBitDepthCompleter.complete(_bitDepthCompleter.future); + } + + var _tmpBufferSizeCompleter = _bufferSizeCompleter; _bufferSizeCompleter = new Completer(); - StreamSubscription? 
listener; + if (!_tmpBufferSizeCompleter.isCompleted) { + _tmpBufferSizeCompleter.complete(_bufferSizeCompleter.future); + } + + late StreamSubscription listener; listener = _microphone!.listen((x) async { - await listener!.cancel(); - listener = null; - _sampleRateCompleter.complete(await _microphoneMethodChannel - .invokeMethod("getSampleRate") as double); + listener.cancel(); + _sampleRateCompleter.complete( + await _microphoneMethodChannel.invokeMethod("getSampleRate") as double); _bitDepthCompleter.complete( await _microphoneMethodChannel.invokeMethod("getBitDepth") as int); _bufferSizeCompleter.complete( From 958fe1a3cc0c534a6357d1d0316b935f8eb3607c Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Sat, 29 Jul 2023 17:15:43 +0200 Subject: [PATCH 13/21] feat: change type of sample rate from double to int --- CHANGELOG.md | 6 ++++++ README.md | 4 ++-- lib/mic_stream.dart | 14 +++++++------- pubspec.yaml | 2 +- 4 files changed, 16 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a2818c6..25ebe4f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,9 @@ +## 0.7.1-dev + +### !!! This version changes the API !!! +* Change type of get sampleRate from double to int +* Fix parameter getters potentially never returning + ## 0.7.0-dev ### !!! This version changes the API !!! diff --git a/README.md b/README.md index 223b408..b0a684e 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# mic_stream: 0.7.0-dev +# mic_stream: 0.7.1-dev [Flutter Plugin] Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. @@ -28,7 +28,7 @@ audioFormat: AudioFormat // 8 bit PCM or 16 bit PCM. 
Other formats are not The plugin also provides information about some properties: ```dart -Future sampleRate = await MicStream.sampleRate; +Future sampleRate = await MicStream.sampleRate; Future bitDepth = await MicStream.bitDepth; Future bufferSize = await MicStream.bufferSize; ``` diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index e52a4c5..a9b6a15 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -49,16 +49,16 @@ class MicStream { MethodChannel('aaron.code.com/mic_stream_method_channel'); /// The actual sample rate used for streaming. This may return zero if invoked without listening to the _microphone Stream - static Future get sampleRate => _sampleRateCompleter.future; - static Completer _sampleRateCompleter = new Completer(); + static Future get sampleRate => _sampleRateCompleter.future; + static Completer _sampleRateCompleter = new Completer(); /// The actual bit depth used for streaming. This may return zero if invoked without listening to the _microphone Stream first. static Future get bitDepth => _bitDepthCompleter.future; - static Completer _bitDepthCompleter = new Completer(); + static Completer _bitDepthCompleter = new Completer(); /// The amount of recorded data, per sample, in bytes static Future get bufferSize => _bufferSizeCompleter.future; - static Completer _bufferSizeCompleter = new Completer(); + static Completer _bufferSizeCompleter = new Completer(); /// The configured microphone stream and its config static Stream? 
_microphone; @@ -150,7 +150,7 @@ class MicStream { // configure these as Completers and listen to the stream internally before returning // these will complete only when this internal listener is called var _tmpSampleRateCompleter = _sampleRateCompleter; - _sampleRateCompleter = new Completer(); + _sampleRateCompleter = new Completer(); if (!_tmpSampleRateCompleter.isCompleted) { _tmpSampleRateCompleter.complete(_sampleRateCompleter.future); } @@ -170,8 +170,8 @@ class MicStream { late StreamSubscription listener; listener = _microphone!.listen((x) async { listener.cancel(); - _sampleRateCompleter.complete( - await _microphoneMethodChannel.invokeMethod("getSampleRate") as double); + _sampleRateCompleter.complete(( + await _microphoneMethodChannel.invokeMethod("getSampleRate") as double).toInt()); _bitDepthCompleter.complete( await _microphoneMethodChannel.invokeMethod("getBitDepth") as int); _bufferSizeCompleter.complete( diff --git a/pubspec.yaml b/pubspec.yaml index c6c78f3..a1436ce 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. -version: 0.7.0-dev +version: 0.7.1-dev homepage: https://github.com/anarchuser/mic_stream environment: From 0030fa5388b2b216b50e0caf17564e100b73eaaa Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Sat, 29 Jul 2023 18:28:14 +0200 Subject: [PATCH 14/21] fix: fix example app sound wave --- example/lib/main.dart | 51 ++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 27 deletions(-) diff --git a/example/lib/main.dart b/example/lib/main.dart index 7fe1141..b64ce9e 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -28,8 +28,8 @@ class _MicStreamExampleAppState extends State late StreamSubscription listener; List? currentSamples = []; List visibleSamples = []; - int? localMax; - int? 
localMin; + late int localMax; + late int localMin; Random rng = new Random(); @@ -93,8 +93,8 @@ class _MicStreamExampleAppState extends State listener.onError(print); print("Start Listening to the microphone, sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); - localMax = null; - localMin = null; + localMax = 0; + localMin = 0; visibleSamples = []; bytesPerSample = (await MicStream.bitDepth) ~/ 8; @@ -107,9 +107,17 @@ class _MicStreamExampleAppState extends State } void _calculateSamples(samples) async { - // print("Sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); - if (page == 0) _calculateWaveSamples(samples); - else if (page == 1) _calculateIntensitySamples(samples); + var _samples = normalise(samples); + if (page == 0) _calculateWaveSamples(_samples); + else if (page == 1) _calculateIntensitySamples(_samples); + } + + List normalise(samples) { + List newSamples = []; + for (int sample in samples) { + newSamples.add((sample + 128) % 256); + } + return newSamples; } void _calculateWaveSamples(samples) { @@ -117,19 +125,15 @@ class _MicStreamExampleAppState extends State visibleSamples = []; int tmp = 0; for (int sample in samples) { - if (sample > 128) sample -= 255; if (first) { - tmp = sample * 128; + tmp = sample; } else { - tmp += sample; + tmp += sample * 128; + tmp -= pow(2, 14).toInt(); visibleSamples.add(tmp); - localMax ??= visibleSamples.last; - localMin ??= visibleSamples.last; - localMax = max(localMax!, visibleSamples.last); - localMin = min(localMin!, visibleSamples.last); - - tmp = 0; + localMax = max(localMax, visibleSamples.last); + localMin = min(localMin, visibleSamples.last); } first = !first; } @@ -149,10 +153,8 @@ class _MicStreamExampleAppState extends State if (currentSamples!.length >= samplesPerSecond / 10) { visibleSamples .add(currentSamples!.map((i) => 
i).toList().reduce((a, b) => a + b)); - localMax ??= visibleSamples.last; - localMin ??= visibleSamples.last; - localMax = max(localMax!, visibleSamples.last); - localMin = min(localMin!, visibleSamples.last); + localMax = max(localMax, visibleSamples.last); + localMin = min(localMin, visibleSamples.last); currentSamples = []; setState(() {}); } @@ -281,10 +283,6 @@ class WavePainter extends CustomPainter { BuildContext? context; Size? size; - // Set max val possible in stream, depending on the config - // int absMax = 255*4; //(AUDIO_FORMAT == AudioFormat.ENCODING_PCM_8BIT) ? 127 : 32767; - // int absMin; //(AUDIO_FORMAT == AudioFormat.ENCODING_PCM_8BIT) ? 127 : 32767; - WavePainter( {this.samples, this.color, this.context, this.localMax, this.localMin}); @@ -319,9 +317,8 @@ class WavePainter extends CustomPainter { for (int i = 0; i < samples.length; i++) { var point = Offset( i * pixelsPerSample, - 0.5 * - size!.height * - pow((samples[i] - localMin!) / (localMax! - localMin!), 5)); + 0.5 * size!.height * + pow((samples[i] - localMin!) / (localMax! 
- localMin!), 1)); points.add(point); } return points; From a3c1ddb42db94ba17aef5ae32f7fb6addec1b12e Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Mon, 31 Jul 2023 20:22:15 +0200 Subject: [PATCH 15/21] fix(java): use ByteBuffer to ensure native endianness for >=16bit PCM --- .../code/aaron/micstream/MicStreamPlugin.java | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java b/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java index 9323878..8c89dc6 100644 --- a/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java +++ b/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java @@ -1,5 +1,7 @@ package com.code.aaron.micstream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.util.ArrayList; import java.util.Arrays; @@ -114,18 +116,24 @@ public void run() { // Wait until recorder is initialised while (recorder == null || recorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING); + // Allocate a new buffer to write data to + ByteBuffer data = ByteBuffer.allocateDirect(BUFFER_SIZE); + + // Set ByteOrder to native + ByteOrder nativeOrder = ByteOrder.nativeOrder(); + data.order(nativeOrder); + System.out.println("Using native byte order " + nativeOrder); + // Repeatedly push audio samples to stream while (record) { - - // Read audio data into new byte array - byte[] data = new byte[BUFFER_SIZE]; - recorder.read(data, 0, BUFFER_SIZE); + // Read audio data into buffer + recorder.read(data, BUFFER_SIZE, AudioRecord.READ_BLOCKING); // push data into stream try { - eventSink.success(data); + eventSink.success(data.array()); } catch (IllegalArgumentException e) { - System.out.println("mic_stream: " + Arrays.hashCode(data) + " is not valid!"); + System.out.println("mic_stream: " + data + " is not valid!"); eventSink.error("-1", "Invalid Data", e); } } From d24eb9860d833f1499a4b30a79e3c259f094e2ff Mon Sep 17 
00:00:00 2001 From: Aaron Alef Date: Mon, 31 Jul 2023 20:26:41 +0200 Subject: [PATCH 16/21] feat: add higher quality audio formats (float, 24bit, 32bit PCM) --- lib/mic_stream.dart | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index a9b6a15..74c7e78 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -24,12 +24,21 @@ enum AudioSource { /// Mono: Records using one microphone; /// Stereo: Records using two spatially distant microphones (if applicable) -enum ChannelConfig { CHANNEL_IN_MONO, CHANNEL_IN_STEREO } +enum ChannelConfig { + CHANNEL_IN_MONO, + CHANNEL_IN_STEREO, +} /// Bit depth. /// 8-bit means each sample consists of 1 byte /// 16-bit means each sample consists of 2 consecutive bytes, in little endian -enum AudioFormat { ENCODING_PCM_8BIT, ENCODING_PCM_16BIT } +enum AudioFormat { + ENCODING_PCM_8BIT, + ENCODING_PCM_16BIT, + ENCODING_PCM_FLOAT, + ENCODING_PCM_24BIT_PACKED, + ENCODING_PCM_32BIT +} class MicStream { static bool _requestPermission = true; From cdba4d587ada2f80229396bf187ffc69a102445b Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Sun, 1 Oct 2023 17:28:05 +0200 Subject: [PATCH 17/21] improve example app --- example/lib/main.dart | 26 +++++++++++++++----------- lib/mic_stream.dart | 1 - 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/example/lib/main.dart b/example/lib/main.dart index b64ce9e..62e0841 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -28,8 +28,8 @@ class _MicStreamExampleAppState extends State late StreamSubscription listener; List? 
currentSamples = []; List visibleSamples = []; - late int localMax; - late int localMin; + int localMax = 0; + int localMin = 0; Random rng = new Random(); @@ -129,7 +129,6 @@ class _MicStreamExampleAppState extends State tmp = sample; } else { tmp += sample * 128; - tmp -= pow(2, 14).toInt(); visibleSamples.add(tmp); localMax = max(localMax, visibleSamples.last); @@ -314,20 +313,25 @@ class WavePainter extends CustomPainter { List points = []; if (samples == null) samples = List.filled(size!.width.toInt(), (0.5).toInt()); double pixelsPerSample = size!.width / samples.length; + double max = (localMin!.abs() + localMax!) / 2; for (int i = 0; i < samples.length; i++) { - var point = Offset( - i * pixelsPerSample, - 0.5 * size!.height * - pow((samples[i] - localMin!) / (localMax! - localMin!), 1)); + var height = project( + (samples[i] - localMin!) / (localMax! - localMin!), + max, size!.height); + var point = Offset(i * pixelsPerSample, height); points.add(point); } + print(points); return points; } - double project(int val, int max, double height) { - double waveHeight = - (max == 0) ? 
val.toDouble() : (val / max) * 0.5 * height; - return waveHeight + 0.5 * height; + double project(double val, double max, double height) { + if (max == 0) { + return 0.5 * height; + } + var rv = val / max * 0.5 * height; + print("val $val / max $max = rv $rv"); + return rv; } } diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index 74c7e78..b599a5b 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -82,7 +82,6 @@ class MicStream { return true; } var micStatus = await handler.Permission.microphone.request(); - print(micStatus); return !micStatus.isDenied && !micStatus.isPermanentlyDenied; } From 1f8e42c7f9d97267956a68241b04a58323d718d1 Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Sun, 1 Oct 2023 19:01:03 +0200 Subject: [PATCH 18/21] feat: add StreamTransformer to convert raw stream to individual samples --- .../code/aaron/micstream/MicStreamPlugin.java | 50 ++--- example/android/build.gradle | 4 +- example/lib/main.dart | 177 ++++++++-------- example/pubspec.yaml | 2 +- lib/mic_stream.dart | 190 +++++++++++++----- pubspec.yaml | 8 +- 6 files changed, 258 insertions(+), 173 deletions(-) diff --git a/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java b/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java index 8c89dc6..07be9b8 100644 --- a/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java +++ b/android/src/main/java/com/code/aaron/micstream/MicStreamPlugin.java @@ -111,7 +111,20 @@ public void run() { isRecording = true; actualSampleRate = recorder.getSampleRate(); - actualBitDepth = (recorder.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT ? 
8 : 16); + switch (recorder.getAudioFormat()) { + case AudioFormat.ENCODING_PCM_8BIT: + actualBitDepth = 8; + break; + case AudioFormat.ENCODING_PCM_16BIT: + actualBitDepth = 16; + break; + case AudioFormat.ENCODING_PCM_32BIT: + actualBitDepth = 32; + break; + case AudioFormat.ENCODING_PCM_FLOAT: + actualBitDepth = 32; + break; + } // Wait until recorder is initialised while (recorder == null || recorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING); @@ -122,7 +135,7 @@ public void run() { // Set ByteOrder to native ByteOrder nativeOrder = ByteOrder.nativeOrder(); data.order(nativeOrder); - System.out.println("Using native byte order " + nativeOrder); + System.out.println("mic_stream: Using native byte order " + nativeOrder); // Repeatedly push audio samples to stream while (record) { @@ -188,29 +201,18 @@ public void run() { public void onListen(Object args, final EventChannel.EventSink eventSink) { if (isRecording) return; + // Read and validate AudioRecord parameters ArrayList config = (ArrayList) args; - - // Set parameters, if available - switch(config.size()) { - case 4: - AUDIO_FORMAT = config.get(3); - case 3: - CHANNEL_CONFIG = config.get(2); - case 2: - SAMPLE_RATE = config.get(1); - case 1: - AUDIO_SOURCE = config.get(0); - default: - try { - BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT); - } catch (Exception e) { - eventSink.error("-3", "Invalid AudioRecord parameters", e); - } - } - - if(AUDIO_FORMAT != AudioFormat.ENCODING_PCM_8BIT && AUDIO_FORMAT != AudioFormat.ENCODING_PCM_16BIT) { - eventSink.error("-3", "Invalid Audio Format specified", null); - return; + try { + AUDIO_SOURCE = config.get(0); + SAMPLE_RATE = config.get(1); + CHANNEL_CONFIG = config.get(2); + AUDIO_FORMAT = config.get(3); + BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT); + } catch (java.lang.IndexOutOfBoundsException e) { + eventSink.error("-4", "Invalid number of parameteres. 
Expected 4, got " + config.size(), e); + } catch (Exception e) { + eventSink.error("-3", "Invalid AudioRecord parameters", e); } this.eventSink = new MainThreadEventSink(eventSink); diff --git a/example/android/build.gradle b/example/android/build.gradle index 0ab2b70..f0f44d5 100644 --- a/example/android/build.gradle +++ b/example/android/build.gradle @@ -26,6 +26,6 @@ subprojects { project.evaluationDependsOn(':app') } -task clean(type: Delete) { +tasks.register("clean", Delete) { delete rootProject.buildDir -} \ No newline at end of file +} diff --git a/example/lib/main.dart b/example/lib/main.dart index 62e0841..a79de6f 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -13,7 +13,7 @@ enum Command { change, } -const AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; +int screenWidth = 0; void main() => runApp(MicStreamExampleApp()); @@ -26,12 +26,12 @@ class _MicStreamExampleAppState extends State with SingleTickerProviderStateMixin, WidgetsBindingObserver { Stream? stream; late StreamSubscription listener; - List? currentSamples = []; - List visibleSamples = []; - int localMax = 0; - int localMin = 0; - Random rng = new Random(); + List? waveSamples; + List? 
intensitySamples; + int sampleIndex = 0; + double localMax = 0; + double localMin = 0; // Refreshes the Widget for every possible tick to force a rebuild of the sound wave late AnimationController controller; @@ -58,7 +58,7 @@ class _MicStreamExampleAppState extends State void _controlPage(int index) => setState(() => page = index); // Responsible for switching between recording / idle state - void _controlMicStream({Command command: Command.change}) async { + void _controlMicStream({Command command = Command.change}) async { switch (command) { case Command.change: _changeListening(); @@ -85,20 +85,20 @@ class _MicStreamExampleAppState extends State stream = MicStream.microphone( audioSource: AudioSource.DEFAULT, - // sampleRate: 1000 * (rng.nextInt(50) + 30), sampleRate: 48000, channelConfig: ChannelConfig.CHANNEL_IN_MONO, - audioFormat: AUDIO_FORMAT); - listener = stream!.listen(_calculateSamples); + audioFormat: AudioFormat.ENCODING_PCM_16BIT); + listener = stream! + .transform(MicStream.toSampleStream) + .listen(_processSamples); listener.onError(print); - print("Start Listening to the microphone, sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); + print("Start listening to the microphone, sample rate is ${await MicStream.sampleRate}, bit depth is ${await MicStream.bitDepth}, bufferSize: ${await MicStream.bufferSize}"); localMax = 0; localMin = 0; - visibleSamples = []; - bytesPerSample = (await MicStream.bitDepth) ~/ 8; - samplesPerSecond = (await MicStream.sampleRate).toInt(); + bytesPerSample = await MicStream.bitDepth ~/ 8; + samplesPerSecond = await MicStream.sampleRate; setState(() { isRecording = true; startTime = DateTime.now(); @@ -106,67 +106,51 @@ class _MicStreamExampleAppState extends State return true; } - void _calculateSamples(samples) async { - var _samples = normalise(samples); - if (page == 0) _calculateWaveSamples(_samples); - else if (page == 1) 
_calculateIntensitySamples(_samples); - } + void _processSamples(_sample) async { + if (screenWidth == 0) return; - List normalise(samples) { - List newSamples = []; - for (int sample in samples) { - newSamples.add((sample + 128) % 256); + double sample = 0; + if ("${_sample.runtimeType}" == "(int, int)" || "${_sample.runtimeType}" == "(double, double)") { + sample = 0.5 * (_sample.$1 + _sample.$2); + } else { + sample = _sample.toDouble(); } - return newSamples; - } + waveSamples ??= List.filled(screenWidth, 0); - void _calculateWaveSamples(samples) { - bool first = true; - visibleSamples = []; - int tmp = 0; - for (int sample in samples) { - if (first) { - tmp = sample; - } else { - tmp += sample * 128; - visibleSamples.add(tmp); - - localMax = max(localMax, visibleSamples.last); - localMin = min(localMin, visibleSamples.last); + final overridden = waveSamples![sampleIndex]; + waveSamples![sampleIndex] = sample; + sampleIndex = (sampleIndex + 1) % screenWidth; + + if (overridden == localMax) { + localMax = 0; + for (final val in waveSamples!) { + localMax = max(localMax, val); + } + } else if (overridden == localMin) { + localMin = 0; + for (final val in waveSamples!) 
{ + localMin = min(localMin, val); } - first = !first; + } else { + if (sample > 0) localMax = max(localMax, sample); + else localMin = min(localMin, sample); } - } - void _calculateIntensitySamples(samples) { - currentSamples ??= []; - int currentSample = 0; - eachWithIndex(samples, (i, int sample) { - currentSample += sample; - if ((i % bytesPerSample) == bytesPerSample - 1) { - currentSamples!.add(currentSample); - currentSample = 0; - } - }); + _calculateIntensitySamples(); + } - if (currentSamples!.length >= samplesPerSecond / 10) { - visibleSamples - .add(currentSamples!.map((i) => i).toList().reduce((a, b) => a + b)); - localMax = max(localMax, visibleSamples.last); - localMin = min(localMin, visibleSamples.last); - currentSamples = []; - setState(() {}); - } + void _calculateIntensitySamples() { } bool _stopListening() { if (!isRecording) return false; - print("Stop Listening to the microphone"); + print("Stop listening to the microphone"); listener.cancel(); setState(() { isRecording = false; - currentSamples = null; + waveSamples = List.filled(screenWidth, 0); + intensitySamples = List.filled(screenWidth, 0); startTime = null; }); return true; @@ -232,13 +216,9 @@ class _MicStreamExampleAppState extends State ), body: (page == 0 || page == 1) ? CustomPaint( - painter: WavePainter( - samples: visibleSamples, - color: _getBgColor(), - localMax: localMax, - localMin: localMin, - context: context, - ), + painter: page == 0 + ? WavePainter(samples: waveSamples, color: _getBgColor(), index: sampleIndex, localMax: localMax, localMin: localMin, context: context,) + : IntensityPainter(samples: intensitySamples, color: _getBgColor(), index: sampleIndex, localMax: localMax, localMin: localMin, context: context,) ) : Statistics( isRecording, @@ -253,8 +233,7 @@ class _MicStreamExampleAppState extends State isActive = true; print("Resume app"); - _controlMicStream( - command: memRecordingState ? 
Command.start : Command.stop); + _controlMicStream(command: memRecordingState ? Command.start : Command.stop); } else if (isActive) { memRecordingState = isRecording; _controlMicStream(command: Command.stop); @@ -274,30 +253,32 @@ class _MicStreamExampleAppState extends State } class WavePainter extends CustomPainter { - int? localMax; - int? localMin; - List? samples; + int? index; + double? localMax; + double? localMin; + List? samples; late List points; Color? color; BuildContext? context; Size? size; - WavePainter( - {this.samples, this.color, this.context, this.localMax, this.localMin}); + WavePainter({this.samples, this.color, this.context, this.index, this.localMax, this.localMin}); @override void paint(Canvas canvas, Size? size) { this.size = context!.size; size = this.size; + if (size == null) return; + screenWidth = size.width.toInt(); Paint paint = new Paint() ..color = color! ..strokeWidth = 1.0 ..style = PaintingStyle.stroke; - if (samples!.length == 0) return; - - points = toPoints(samples); + samples ??= List.filled(screenWidth, 0); + index ??= 0; + points = toPoints(samples!, index!); Path path = new Path(); path.addPolygon(points, false); @@ -309,19 +290,40 @@ class WavePainter extends CustomPainter { bool shouldRepaint(CustomPainter oldPainting) => true; // Maps a list of ints and their indices to a list of points on a cartesian grid - List toPoints(List? samples) { + List toPoints(List samples, int index) { List points = []; - if (samples == null) samples = List.filled(size!.width.toInt(), (0.5).toInt()); - double pixelsPerSample = size!.width / samples.length; - double max = (localMin!.abs() + localMax!) / 2; - for (int i = 0; i < samples.length; i++) { - var height = project( - (samples[i] - localMin!) / (localMax! 
- localMin!), - max, size!.height); - var point = Offset(i * pixelsPerSample, height); + double totalMax = max(-1 * localMin!, localMax!); + double maxHeight = 0.5 * size!.height; + for (int i = 0; i < screenWidth; i++) { + double height = maxHeight + ((totalMax == 0 || index == 0) ? 0 : (samples[(i + index) % index] / totalMax * maxHeight)); + var point = Offset(i.toDouble(), height); points.add(point); } - print(points); + return points; + } +} + +class IntensityPainter extends CustomPainter { + int? index; + double? localMax; + double? localMin; + List? samples; + late List points; + Color? color; + BuildContext? context; + Size? size; + + IntensityPainter({this.samples, this.color, this.context, this.index, this.localMax, this.localMin}); + + @override + void paint(Canvas canvas, Size? size) { + } + + @override + bool shouldRepaint(CustomPainter oldPainting) => true; + + // Maps a list of ints and their indices to a list of points on a cartesian grid + List toPoints(List? samples) { return points; } @@ -330,7 +332,6 @@ class WavePainter extends CustomPainter { return 0.5 * height; } var rv = val / max * 0.5 * height; - print("val $val / max $max = rv $rv"); return rv; } } diff --git a/example/pubspec.yaml b/example/pubspec.yaml index e684439..7141cfe 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -3,7 +3,7 @@ description: Demonstrates how to use the mic_stream plugin. publish_to: 'none' environment: - sdk: '>=2.12.0 <3.0.0' + sdk: '>=3.0.0' dependencies: flutter: diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index b599a5b..b96d28a 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -30,13 +30,16 @@ enum ChannelConfig { } /// Bit depth. 
-/// 8-bit means each sample consists of 1 byte -/// 16-bit means each sample consists of 2 consecutive bytes, in little endian +/// 8 bit means each sample consists of 1 byte +/// 16 bit means each sample consists of 2 consecutive bytes, in little endian +/// 24 bit is currently not supported (cause nobody needs this) +/// 32 bit means each sample consists of 4 consecutive bytes, in little endian +/// float is the same as 32 bit, except it represents a floating point number enum AudioFormat { ENCODING_PCM_8BIT, ENCODING_PCM_16BIT, ENCODING_PCM_FLOAT, - ENCODING_PCM_24BIT_PACKED, +//ENCODING_PCM_24BIT_PACKED, ENCODING_PCM_32BIT } @@ -57,20 +60,46 @@ class MicStream { static const MethodChannel _microphoneMethodChannel = MethodChannel('aaron.code.com/mic_stream_method_channel'); - /// The actual sample rate used for streaming. This may return zero if invoked without listening to the _microphone Stream - static Future get sampleRate => _sampleRateCompleter.future; - static Completer _sampleRateCompleter = new Completer(); + /// The actual sample rate used for streaming. Only completes once a stream started. + static Future get sampleRate async { + _memoisedSampleRate ??= await _microphoneFuture.then((_) { + return _microphoneMethodChannel.invokeMethod("getSampleRate") + .then((value) => (value as double).toInt()); + }); + return _memoisedSampleRate!; + } + static int? _memoisedSampleRate; - /// The actual bit depth used for streaming. This may return zero if invoked without listening to the _microphone Stream first. - static Future get bitDepth => _bitDepthCompleter.future; - static Completer _bitDepthCompleter = new Completer(); + /// The actual bit depth used for streaming. Only completes once a stream started. + static Future get bitDepth async { + _memoisedBitDepth = await _microphoneFuture.then((_) { + return _microphoneMethodChannel.invokeMethod("getBitDepth") + .then((value) => value as int); + }); + return _memoisedBitDepth!; + } + static int? 
_memoisedBitDepth; - /// The amount of recorded data, per sample, in bytes - static Future get bufferSize => _bufferSizeCompleter.future; - static Completer _bufferSizeCompleter = new Completer(); + /// The amount of recorded data, per sample, in bytes. Only completes once a stream started. + static Future get bufferSize async { + _memoisedBufferSize ??= await _microphoneFuture.then((_) { + return _microphoneMethodChannel.invokeMethod("getBufferSize") + .then((value) => value as int); + }); + return _memoisedBufferSize!; + } + static int? _memoisedBufferSize; - /// The configured microphone stream and its config + /// The configured microphone stream static Stream? _microphone; + static Completer _microphoneCompleter = new Completer(); + static Future get _microphoneFuture async { + if (!_microphoneCompleter.isCompleted) { + await _microphoneCompleter.future; + } + } + + /// The configured stream config static AudioSource? __audioSource; static int? __sampleRate; static ChannelConfig? __channelConfig; @@ -89,12 +118,15 @@ class MicStream { /// Returns a Uint8List stream representing the captured audio. /// IMPORTANT - on iOS, there is no guarantee that captured audio will be encoded with the requested sampleRate/bitDepth. /// You must check the sampleRate and bitDepth properties of the MicStream object *after* invoking this method (though this does not need to be before listening to the returned stream). - /// This is why this method returns a Uint8List - if you request a 16-bit encoding, you will need to check that - /// the returned stream is actually returning 16-bit data, and if so, manually cast uint8List.buffer.asUint16List() + /// This is why this method returns a Uint8List - if you request a deeper encoding, + /// you will need to manually convert the returned stream to the appropriate type, + /// e.g., for 16 bit map each element using uint8List.buffer.asUint16List(). 
+ /// Alternatively, you can call `toSampleStream(Stream)` to transform the raw stream to a more easily usable stream. + /// /// audioSource: The device used to capture audio. The default let's the OS decide. /// sampleRate: The amount of samples per second. More samples give better quality at the cost of higher data transmission /// channelConfig: States whether audio is mono or stereo - /// audioFormat: Switch between 8- and 16-bit PCM streams + /// audioFormat: Switch between 8, 16, 32 bit, and floating point PCM streams /// static Stream microphone( {AudioSource? audioSource, @@ -110,11 +142,11 @@ class MicStream { return Stream.error( RangeError.range(sampleRate, _MIN_SAMPLE_RATE, _MAX_SAMPLE_RATE)); - final initStream = _requestPermission - ? Stream.fromFuture(permissionStatus) + final permissionStatus = _requestPermission + ? Stream.fromFuture(MicStream.permissionStatus) : Stream.value(true); - return initStream.asyncExpand((grantedPermission) { + return permissionStatus.asyncExpand((grantedPermission) { if (!grantedPermission) { throw Exception('Microphone permission is not granted'); } @@ -138,57 +170,107 @@ class MicStream { sampleRate != __sampleRate || channelConfig != __channelConfig || audioFormat != __audioFormat) { + + // Reset runtime values + if (_microphone != null) { + var _tmpCompleter = _microphoneCompleter; + _microphoneCompleter = new Completer(); + _tmpCompleter.complete(_microphoneCompleter.future); + } + _memoisedSampleRate = null; + _memoisedBitDepth = null; + _memoisedBufferSize = null; + + // Reset configuration + __audioSource = audioSource; + __sampleRate = sampleRate; + __channelConfig = channelConfig; + __audioFormat = audioFormat; + + // Reset audio stream _microphone = _microphoneEventChannel.receiveBroadcastStream([ audioSource.index, sampleRate, channelConfig == ChannelConfig.CHANNEL_IN_MONO ? 16 : 12, - audioFormat == AudioFormat.ENCODING_PCM_8BIT ? 
3 : 2 + switch (audioFormat) { + AudioFormat.ENCODING_PCM_8BIT => 3, + AudioFormat.ENCODING_PCM_16BIT => 2, +// AudioFormat.ENCODING_PCM_24BIT_PACKED => 21, + AudioFormat.ENCODING_PCM_32BIT => 22, + AudioFormat.ENCODING_PCM_FLOAT => 4 + } ]).cast(); - __audioSource = audioSource; - __sampleRate = sampleRate; - __channelConfig = channelConfig; - __audioFormat = audioFormat; } + // Check for errors if (_microphone == null) { + if (!_microphoneCompleter.isCompleted) { + _microphoneCompleter.completeError(StateError); + } return Stream.error(StateError); } - // sampleRate/bitDepth should be populated before any attempt to consume the stream externally. - // configure these as Completers and listen to the stream internally before returning - // these will complete only when this internal listener is called - var _tmpSampleRateCompleter = _sampleRateCompleter; - _sampleRateCompleter = new Completer(); - if (!_tmpSampleRateCompleter.isCompleted) { - _tmpSampleRateCompleter.complete(_sampleRateCompleter.future); - } - - var _tmpBitDepthCompleter = _bitDepthCompleter; - _bitDepthCompleter = new Completer(); - if (!_tmpBitDepthCompleter.isCompleted) { - _tmpBitDepthCompleter.complete(_bitDepthCompleter.future); - } - - var _tmpBufferSizeCompleter = _bufferSizeCompleter; - _bufferSizeCompleter = new Completer(); - if (!_tmpBufferSizeCompleter.isCompleted) { - _tmpBufferSizeCompleter.complete(_bufferSizeCompleter.future); - } - - late StreamSubscription listener; - listener = _microphone!.listen((x) async { - listener.cancel(); - _sampleRateCompleter.complete(( - await _microphoneMethodChannel.invokeMethod("getSampleRate") as double).toInt()); - _bitDepthCompleter.complete( - await _microphoneMethodChannel.invokeMethod("getBitDepth") as int); - _bufferSizeCompleter.complete( - await _microphoneMethodChannel.invokeMethod("getBufferSize") as int); + // Force evaluation of actual config values + _microphone!.first.then((value) { + if (!_microphoneCompleter.isCompleted) { + 
_microphoneCompleter.complete(); + } }); return _microphone!; } + /// StreamTransformer to convert a raw Stream to num streams, e.g.: + /// 8 bit PCM + mono => Stream, where each int is a *signed* byte, i.e., [-2^7; 2^7) + /// 16 bit PCM + stereo => Stream<(int, int)>, where each int is a *signed* byte, i.e., [-2^15; 2^15) + /// float bit PCM + stereo => Stream<(double, double)>, with double e [-1.0; 1.0), and 32 bit precision + static StreamTransformer get toSampleStream => + // TODO: check bitDepth here already and call different handlers for every possible combination + (__channelConfig == ChannelConfig.CHANNEL_IN_MONO) + ? new StreamTransformer.fromHandlers(handleData: _expandUint8ListMono) + : new StreamTransformer.fromHandlers(handleData: _expandUint8ListStereo); + + static void _expandUint8ListMono(Uint8List raw, EventSink sink) async { + switch (await bitDepth) { + case 8: raw.buffer.asInt8List().forEach(sink.add); break; + case 16: raw.buffer.asInt16List().forEach(sink.add); break; + case 24: sink.addError("24 bit PCM encoding is not supported"); break; + case 32: (__audioFormat == AudioFormat.ENCODING_PCM_32BIT) + ? raw.buffer.asInt32List().forEach(sink.add) + : raw.buffer.asFloat32List().forEach(sink.add); + break; + default: + sink.addError("No stream configured yet"); + } + } + static void _expandUint8ListStereo(Uint8List raw, EventSink sink) async { + switch (await bitDepth) { + case 8: _listToPairList(raw.buffer.asInt8List()).forEach(sink.add); break; + case 16: _listToPairList(raw.buffer.asInt16List()).forEach(sink.add); break; + case 24: sink.addError("24 bit PCM encoding is not supported"); break; + case 32: (__audioFormat == AudioFormat.ENCODING_PCM_32BIT) + ? 
_listToPairList(raw.buffer.asInt32List()).forEach(sink.add) + : _listToPairList(raw.buffer.asFloat32List()).forEach(sink.add); + break; + default: + sink.addError("No stream configured yet"); + } + } + static List<(num, num)> _listToPairList(List mono) { + List<(num, num)> stereo = List.empty(growable: true); + num? first; + for (num sample in mono) { + if (first == null) { + first = sample; + } + else { + stereo.add((first, sample)); + first = null; + } + } + return stereo; + } + /// Updates flag to determine whether to request audio recording permission. Set to false to disable dialogue, set to true (default) to request permission if necessary static bool shouldRequestPermission(bool requestPermission) { return _requestPermission = requestPermission; diff --git a/pubspec.yaml b/pubspec.yaml index a1436ce..a749418 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,11 +1,11 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. 
-version: 0.7.1-dev +version: 1.0.0-dev homepage: https://github.com/anarchuser/mic_stream environment: - sdk: '>=2.12.0 <3.0.0' - flutter: ">=2.8.0" + sdk: '>=3.0.0' + flutter: ">=3.13.5" module: androidX: true @@ -13,7 +13,7 @@ module: dependencies: flutter: sdk: flutter - permission_handler: ^10.0.0 + permission_handler: ^11.0.0 # For information on the generic Dart part of this file, see the # following page: https://www.dartlang.org/tools/pub/pubspec From ed557942d96596f551df3aff91eeb9824e02491b Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Mon, 2 Oct 2023 18:38:25 +0200 Subject: [PATCH 19/21] docs: update docs for version 0.7.1 --- CHANGELOG.md | 4 +++- README.md | 24 +++++++++++++++++++++--- example/pubspec.yaml | 2 +- pubspec.yaml | 8 ++++---- test/mic_stream_test.dart | 4 ++-- 5 files changed, 31 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 25ebe4f..03d852b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,8 @@ -## 0.7.1-dev +## 0.7.1 ### !!! This version changes the API !!! ++ Add StreamTransformer for easier processing +* Fix Big/Little endian issues * Change type of get sampleRate from double to int * Fix parameter getters potentially never returning diff --git a/README.md b/README.md index b0a684e..c0982ae 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ -# mic_stream: 0.7.1-dev +# mic_stream: 0.7.1 [Flutter Plugin] Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. +32 bit and floating point PCM are experimental WIP.
## About mic_stream: @@ -9,10 +10,18 @@ As Flutter still lacks some functionality, this plugin aims to provide the possi ## How to use: -The plugin provides one method: +The plugin mainly provides one method to provide a raw audio stream: `Stream MicStream.microphone({options})` +and a `StreamTransformer` to provide a Stream of individual samples (not lists of samples): + +`MicStream.toSampleStream` + +that you can use to transform your mic stream: + +`stream.transform(MicStream.toSampleStream)` + Listening to this stream starts the audio recorder while cancelling the subscription stops the stream. @@ -25,7 +34,8 @@ channelConfig: ChannelConfig // Mono or Stereo audioFormat: AudioFormat // 8 bit PCM or 16 bit PCM. Other formats are not yet supported ``` -The plugin also provides information about some properties: +Some configuration options are platform dependent and can differ from the originally configured ones. +You can check the real values using: ```dart Future sampleRate = await MicStream.sampleRate; @@ -47,6 +57,9 @@ In the Info.plist: Microphone access required ``` +You can disable the permission request dialogue by calling +`MicStream.shouldRequestPermission(false)` +This _will_ lead to an error if no permission to record audio has been requested, though. For MacOS: @@ -63,6 +76,11 @@ Stream> stream = await MicStream.microphone(sampleRate: 44100); StreamSubscription> listener = stream.listen((samples) => print(samples)); ``` +``` +// Transform the stream and print each sample individually +stream.transform(MicStream.toSampleStream).listen(print); +``` + ``` // Cancel the subscription listener.cancel() diff --git a/example/pubspec.yaml b/example/pubspec.yaml index 7141cfe..22a06b9 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -3,7 +3,7 @@ description: Demonstrates how to use the mic_stream plugin. 
publish_to: 'none' environment: - sdk: '>=3.0.0' + sdk: '>=3.0.0 <=3.1.3' dependencies: flutter: diff --git a/pubspec.yaml b/pubspec.yaml index a749418..5a1afaf 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,11 +1,11 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. -version: 1.0.0-dev +version: 0.7.1+2 homepage: https://github.com/anarchuser/mic_stream environment: - sdk: '>=3.0.0' - flutter: ">=3.13.5" + sdk: '^3.0.0' + flutter: '>=3.13.5' module: androidX: true @@ -13,7 +13,7 @@ module: dependencies: flutter: sdk: flutter - permission_handler: ^11.0.0 + permission_handler: '^11.0.0' # For information on the generic Dart part of this file, see the # following page: https://www.dartlang.org/tools/pub/pubspec diff --git a/test/mic_stream_test.dart b/test/mic_stream_test.dart index 4a37a08..7d89be3 100644 --- a/test/mic_stream_test.dart +++ b/test/mic_stream_test.dart @@ -1,5 +1,5 @@ -import 'package:flutter/services.dart'; -import 'package:mic_stream/mic_stream.dart'; +// import 'package:flutter/services.dart'; +// import 'package:mic_stream/mic_stream.dart'; void main() { } From dcc208a7b5888a2fb58da7aa79b9a6a9855bd49a Mon Sep 17 00:00:00 2001 From: CarmeloBeeapp <122460225+CarmeloBeeapp@users.noreply.github.com> Date: Fri, 28 Apr 2023 17:27:27 +0200 Subject: [PATCH 20/21] Reset previous audio session category on onCancel Into startCapture method, there is an override of audioSession category but the previous category wasn't set after stopping the session --- ios/Classes/SwiftMicStreamPlugin.swift | 37 +++++++++++++------------- lib/mic_stream.dart | 4 +++ 2 files changed, 22 insertions(+), 19 deletions(-) diff --git a/ios/Classes/SwiftMicStreamPlugin.swift b/ios/Classes/SwiftMicStreamPlugin.swift index fc8e75c..f930239 100644 --- a/ios/Classes/SwiftMicStreamPlugin.swift +++ b/ios/Classes/SwiftMicStreamPlugin.swift @@ -27,7 +27,8 @@ public class SwiftMicStreamPlugin: NSObject, 
FlutterStreamHandler, FlutterPlugin var eventSink:FlutterEventSink?; var session : AVCaptureSession! var audioSession: AVAudioSession! - + var oldAudioSessionCategory: AVAudioSession.Category? + public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { switch call.method { case "getSampleRate": @@ -39,6 +40,9 @@ public class SwiftMicStreamPlugin: NSObject, FlutterStreamHandler, FlutterPlugin case "getBufferSize": result(Int(self.audioSession.ioBufferDuration*self.audioSession.sampleRate))//calculate the true buffer size break; + case "clean": + onCancel(withArguments: nil) + break; default: result(FlutterMethodNotImplemented) } @@ -46,53 +50,48 @@ public class SwiftMicStreamPlugin: NSObject, FlutterStreamHandler, FlutterPlugin public func onCancel(withArguments arguments:Any?) -> FlutterError? { self.session?.stopRunning() + if let category = oldAudioSessionCategory { + try? audioSession.setCategory(category) + } return nil } public func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? { - if (isRecording) { return nil; } - let config = arguments as! [Int?]; - // Set parameters, if available - //print("this is config: \(config)") + let config = arguments as! [Int?] 
switch config.count { case 4: AUDIO_FORMAT = AudioFormat(rawValue:config[3]!)!; if(AUDIO_FORMAT != AudioFormat.ENCODING_PCM_16BIT) { - events(FlutterError(code: "-3", - message: "Currently only AudioFormat ENCODING_PCM_16BIT is supported", details:nil)) + events(FlutterError(code: "-3", message: "Currently only AudioFormat ENCODING_PCM_16BIT is supported", details:nil)) return nil } fallthrough case 3: CHANNEL_CONFIG = ChannelConfig(rawValue:config[2]!)!; if(CHANNEL_CONFIG != ChannelConfig.CHANNEL_IN_MONO) { - events(FlutterError(code: "-3", - message: "Currently only ChannelConfig CHANNEL_IN_MONO is supported", details:nil)) + events(FlutterError(code: "-3", message: "Currently only ChannelConfig CHANNEL_IN_MONO is supported", details:nil)) return nil } fallthrough case 2: SAMPLE_RATE = config[1]!; if(SAMPLE_RATE<8000 || SAMPLE_RATE>48000) { - events(FlutterError(code: "-3", - message: "iPhone only sample rates between 8000 and 48000 are supported", details:nil)) + events(FlutterError(code: "-3", message: "iPhone only sample rates between 8000 and 48000 are supported", details:nil)) return nil } fallthrough case 1: AUDIO_SOURCE = AudioSource(rawValue:config[0]!)!; if(AUDIO_SOURCE != AudioSource.DEFAULT) { - events(FlutterError(code: "-3", - message: "Currently only default AUDIO_SOURCE (id: 0) is supported", details:nil)) + events(FlutterError(code: "-3", message: "Currently only default AUDIO_SOURCE (id: 0) is supported", details:nil)) return nil } default: - events(FlutterError(code: "-3", - message: "At least one argument (AudioSource) must be provided ", details:nil)) + events(FlutterError(code: "-3", message: "At least one argument (AudioSource) must be provided ", details:nil)) return nil } self.eventSink = events; @@ -105,7 +104,7 @@ public class SwiftMicStreamPlugin: NSObject, FlutterStreamHandler, FlutterPlugin if let audioCaptureDevice : AVCaptureDevice = AVCaptureDevice.default(for:AVMediaType.audio) { self.session = AVCaptureSession() - 
self.audioSession=AVAudioSession.sharedInstance() + self.audioSession = AVAudioSession.sharedInstance() do { //magic word //This will allow developers to specify sample rates, etc. @@ -113,6 +112,8 @@ public class SwiftMicStreamPlugin: NSObject, FlutterStreamHandler, FlutterPlugin try audioCaptureDevice.lockForConfiguration() + oldAudioSessionCategory = audioSession.category + try audioSession.setCategory(AVAudioSession.Category.record,mode: .measurement) try audioSession.setPreferredSampleRate(Double(SAMPLE_RATE)) @@ -163,8 +164,7 @@ public class SwiftMicStreamPlugin: NSObject, FlutterStreamHandler, FlutterPlugin // print("Error encountered starting audio capture, see details for more information.") // print(e) - self.eventSink!(FlutterError(code: "-3", - message: "Error encountered starting audio capture, see details for more information.", details:e)) + self.eventSink!(FlutterError(code: "-3", message: "Error encountered starting audio capture, see details for more information.", details:e)) } } } @@ -205,6 +205,5 @@ public class SwiftMicStreamPlugin: NSObject, FlutterStreamHandler, FlutterPlugin let data = Data(bytesNoCopy: audioBufferList.mBuffers.mData!, count: Int(audioBufferList.mBuffers.mDataByteSize), deallocator: .none) self.eventSink!(FlutterStandardTypedData(bytes: data)) - } } diff --git a/lib/mic_stream.dart b/lib/mic_stream.dart index b96d28a..7849dd8 100644 --- a/lib/mic_stream.dart +++ b/lib/mic_stream.dart @@ -271,6 +271,10 @@ class MicStream { return stereo; } + static void clean() { + _microphoneMethodChannel.invokeMethod("clean"); + } + /// Updates flag to determine whether to request audio recording permission. 
Set to false to disable dialogue, set to true (default) to request permission if necessary static bool shouldRequestPermission(bool requestPermission) { return _requestPermission = requestPermission; From ed9e89105f8516afb8230f116a8ba60218594032 Mon Sep 17 00:00:00 2001 From: Aaron Alef Date: Thu, 14 Dec 2023 15:49:37 +0100 Subject: [PATCH 21/21] docs: adjust docs to new version 0.7.2 --- CHANGELOG.md | 4 ++++ README.md | 2 +- example/pubspec.yaml | 2 +- pubspec.yaml | 2 +- 4 files changed, 7 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 03d852b..fb052f2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.7.2 + +* iOS: Reset previous audio session category on onCancel (#67) + ## 0.7.1 ### !!! This version changes the API !!! diff --git a/README.md b/README.md index c0982ae..632226f 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# mic_stream: 0.7.1 +# mic_stream: 0.7.2 [Flutter Plugin] Provides a tool to get the microphone input as 8 or 16 bit PCM Stream. diff --git a/example/pubspec.yaml b/example/pubspec.yaml index 22a06b9..c823001 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -3,7 +3,7 @@ description: Demonstrates how to use the mic_stream plugin. publish_to: 'none' environment: - sdk: '>=3.0.0 <=3.1.3' + sdk: '>=3.0.0 <=3.13.7' dependencies: flutter: diff --git a/pubspec.yaml b/pubspec.yaml index 5a1afaf..331b467 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: mic_stream description: A plugin to receive raw byte streams from a device's microphone. Audio is returned as `Stream`. -version: 0.7.1+2 +version: 0.7.2 homepage: https://github.com/anarchuser/mic_stream environment: