Capture device audio

Create an AudioRecorder to capture the audio source REMOTE_SUBMIX. For now, the captured packets are just logged into the console.

PR #3757 <https://github.com/Genymobile/scrcpy/pull/3757>

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
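
The core of the change is simply to open an AudioRecord on the REMOTE_SUBMIX source and read fixed-size PCM chunks in a loop. A condensed sketch of that approach, using the same constants as the new class below (the capture thread, the FakeContext/app-info workarounds and the error handling of the real code are omitted; capturing REMOTE_SUBMIX also requires a privileged caller such as the shell user scrcpy runs as):

    AudioFormat format = new AudioFormat.Builder()
            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
            .setSampleRate(48000)
            .setChannelMask(AudioFormat.CHANNEL_IN_STEREO)
            .build();
    AudioRecord recorder = new AudioRecord.Builder()
            .setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX)
            .setAudioFormat(format)
            .build();
    recorder.startRecording();
    byte[] buf = new byte[960]; // 5 ms of 48 kHz stereo 16-bit PCM (READ_SIZE below)
    while (!Thread.currentThread().isInterrupted()) {
        int r = recorder.read(buf, 0, buf.length); // blocking read; r < 0 is an error code
        // for now, the commit only logs how many bytes were captured
    }
    recorder.stop();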

commit 11d32616a9 (parent e841241a8e)

2 changed files with 109 additions and 9 deletions
server/src/main/java/com/genymobile/scrcpy/AudioEncoder.java (new file)

@@ -0,0 +1,80 @@
package com.genymobile.scrcpy;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Build;

public final class AudioEncoder {

    private static final int SAMPLE_RATE = 48000;
    private static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_STEREO;
    private static final int CHANNELS = 2;
    private static final int FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    private static final int BYTES_PER_SAMPLE = 2;

    private static final int READ_MS = 5; // milliseconds
    private static final int READ_SIZE = SAMPLE_RATE * CHANNELS * BYTES_PER_SAMPLE * READ_MS / 1000;

    private Thread thread;

    private static AudioFormat createAudioFormat() {
        AudioFormat.Builder builder = new AudioFormat.Builder();
        builder.setEncoding(FORMAT);
        builder.setSampleRate(SAMPLE_RATE);
        builder.setChannelMask(CHANNEL_CONFIG);
        return builder.build();
    }

    @TargetApi(Build.VERSION_CODES.M)
    @SuppressLint({"WrongConstant", "MissingPermission"})
    private static AudioRecord createAudioRecord() {
        AudioRecord.Builder builder = new AudioRecord.Builder();
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
            // On older APIs, Workarounds.fillAppInfo() must be called beforehand
            builder.setContext(FakeContext.get());
        }
        builder.setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX);
        builder.setAudioFormat(createAudioFormat());
        int minBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, FORMAT);
        // This buffer size does not impact latency
        builder.setBufferSizeInBytes(8 * minBufferSize);
        return builder.build();
    }

    public void start() {
        AudioRecord recorder = createAudioRecord();

        thread = new Thread(() -> {
            recorder.startRecording();
            try {
                byte[] buf = new byte[READ_SIZE];
                while (!Thread.currentThread().isInterrupted()) {
                    int r = recorder.read(buf, 0, READ_SIZE);
                    if (r > 0) {
                        Ln.i("Audio captured: " + r + " bytes");
                    } else {
                        Ln.e("Audio capture error: " + r);
                    }
                }
            } finally {
                recorder.stop();
            }
        });
        thread.start();
    }

    public void stop() {
        if (thread != null) {
            thread.interrupt();
        }
    }

    public void join() throws InterruptedException {
        if (thread != null) {
            thread.join();
        }
    }
}
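
With the constants above, each read() call requests READ_SIZE = 48000 × 2 × 2 × 5 / 1000 = 960 bytes, i.e. exactly 5 ms of stereo 16-bit PCM, while the 8 × minBufferSize recording buffer only provides headroom against overruns (as the comment notes, it does not add latency). Also note that interrupting the capture thread does not abort a pending read(): stop() only sets the interrupt flag, which the loop observes once the current chunk returns, at most about 5 ms later, before the recorder is stopped in the finally block.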

server/src/main/java/com/genymobile/scrcpy/Server.java

@@ -72,19 +72,28 @@ public final class Server {
         boolean sendDummyByte = options.getSendDummyByte();

         Workarounds.prepareMainLooper();
-        if (Build.BRAND.equalsIgnoreCase("meizu")) {
-            // Workarounds must be applied for Meizu phones:
-            // - <https://github.com/Genymobile/scrcpy/issues/240>
-            // - <https://github.com/Genymobile/scrcpy/issues/365>
-            // - <https://github.com/Genymobile/scrcpy/issues/2656>
-            //
-            // But only apply when strictly necessary, since workarounds can cause other issues:
-            // - <https://github.com/Genymobile/scrcpy/issues/940>
-            // - <https://github.com/Genymobile/scrcpy/issues/994>
+
+        // Workarounds must be applied for Meizu phones:
+        // - <https://github.com/Genymobile/scrcpy/issues/240>
+        // - <https://github.com/Genymobile/scrcpy/issues/365>
+        // - <https://github.com/Genymobile/scrcpy/issues/2656>
+        //
+        // But only apply when strictly necessary, since workarounds can cause other issues:
+        // - <https://github.com/Genymobile/scrcpy/issues/940>
+        // - <https://github.com/Genymobile/scrcpy/issues/994>
+        boolean mustFillAppInfo = Build.BRAND.equalsIgnoreCase("meizu");
+
+        // Before Android 11, audio is not supported.
+        // Since Android 12, we can properly set a context on the AudioRecord.
+        // Only on Android 11 we must fill app info for the AudioRecord to work.
+        mustFillAppInfo |= audio && Build.VERSION.SDK_INT == Build.VERSION_CODES.R;
+
+        if (mustFillAppInfo) {
             Workarounds.fillAppInfo();
         }

         Controller controller = null;
+        AudioEncoder audioEncoder = null;

         try (DesktopConnection connection = DesktopConnection.open(scid, tunnelForward, audio, control, sendDummyByte)) {
             VideoCodec codec = options.getCodec();
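
For reference, Build.VERSION_CODES.R is Android 11 (API 30) and VERSION_CODES.S is Android 12 (API 31): audio is not supported before Android 11, on Android 11 the AudioRecord only works once Workarounds.fillAppInfo() has filled in fake app info, and from Android 12 on the FakeContext passed via AudioRecord.Builder.setContext() is enough, so the global workaround is no longer needed.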

@@ -101,6 +110,11 @@ public final class Server {
                 device.setClipboardListener(text -> controllerRef.getSender().pushClipboardText(text));
             }

+            if (audio) {
+                audioEncoder = new AudioEncoder();
+                audioEncoder.start();
+            }
+
             Streamer videoStreamer = new Streamer(connection.getVideoFd(), codec, options.getSendCodecId(), options.getSendFrameMeta());
             ScreenEncoder screenEncoder = new ScreenEncoder(device, videoStreamer, options.getBitRate(), options.getMaxFps(),
                     codecOptions, options.getEncoderName(), options.getDownsizeOnError());

@@ -116,12 +130,18 @@ public final class Server {
         } finally {
             Ln.d("Screen streaming stopped");
             initThread.interrupt();
+            if (audioEncoder != null) {
+                audioEncoder.stop();
+            }
             if (controller != null) {
                 controller.stop();
             }

             try {
                 initThread.join();
+                if (audioEncoder != null) {
+                    audioEncoder.join();
+                }
                 if (controller != null) {
                     controller.join();
                 }