New API AudioPlaybackCaptureConfiguration (Android 10) fails to record voice calls


I tried to use the new AudioPlaybackCapture API to record media playback on an Android 10 device.

I managed to record all media sounds such as YouTube videos, music, etc. But my goal is to check whether this can also be used to record incoming voice calls. I tried various values for the usage parameter, but with no luck: no voice from the call is ever recorded, not even with the speakerphone on. I used the service below to record calls.
I tried setting
AudioAttributes.USAGE_MEDIA
AudioAttributes.USAGE_VOICE_COMMUNICATION
and most of the other usage values, but could not record calls. Other media is recorded perfectly with this code.
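
For reference, this is roughly how I varied the capture configuration (a minimal sketch rather than my exact code; mediaProjection is the same object the service obtains below, and addMatchingUsage() can be chained to match several usages at once):

AudioPlaybackCaptureConfiguration config =
        new AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
                .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
                .addMatchingUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
                .build();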

Here's my service. Broadcast messages from an activity are used to invoke the start and stop recording functionality; a sketch of the activity side follows the service code below.

public class MediaCaptureService extends Service {
    public static final String ACTION_ALL = "ALL";
    public static final String ACTION_START = "ACTION_START";
    public static final String ACTION_STOP = "ACTION_STOP";
    public static final String EXTRA_RESULT_CODE = "EXTRA_RESULT_CODE";
    public static final String EXTRA_ACTION_NAME = "ACTION_NAME";

    private static final int RECORDER_SAMPLERATE = 8000;
    private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_MONO;
    private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;

    NotificationCompat.Builder _notificationBuilder;
    NotificationManager _notificationManager;
    private String NOTIFICATION_CHANNEL_ID = "ChannelId";
    private String NOTIFICATION_CHANNEL_NAME = "Channel";
    private String NOTIFICATION_CHANNEL_DESC = "ChannelDescription";
    private int NOTIFICATION_ID = 1000;
    private static final String ONGING_NOTIFICATION_TICKER = "RecorderApp";

    int BufferElements2Rec = 1024; // 1024 shorts per read (2048 bytes, since each sample is 2 bytes)
    int BytesPerElement = 2; // 2 bytes in 16bit format

    AudioRecord _recorder;
    private boolean _isRecording = false;

    private MediaProjectionManager _mediaProjectionManager;
    private MediaProjection _mediaProjection;
    private Thread _recordingThread = null;

    Intent _callingIntent;

    public MediaCaptureService() {
    }

    @Override
    public void onCreate() {
        super.onCreate();
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            //Call Start foreground with notification
            Intent notificationIntent = new Intent(this, MediaCaptureService.class);
            // The intent targets this service, so build the PendingIntent with getService()
            PendingIntent pendingIntent = PendingIntent.getService(this, 0, notificationIntent, 0);
            _notificationBuilder = new NotificationCompat.Builder(this, NOTIFICATION_CHANNEL_ID)
                    .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher_foreground))
                    .setSmallIcon(R.drawable.ic_launcher_foreground)
                    .setContentTitle("Starting Service")
                    .setContentText("Starting monitoring service")
                    .setTicker(ONGING_NOTIFICATION_TICKER)
                    .setContentIntent(pendingIntent);
            Notification notification = _notificationBuilder.build();
            NotificationChannel channel = new NotificationChannel(NOTIFICATION_CHANNEL_ID, NOTIFICATION_CHANNEL_NAME, NotificationManager.IMPORTANCE_DEFAULT);
            channel.setDescription(NOTIFICATION_CHANNEL_DESC);
            _notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
            _notificationManager.createNotificationChannel(channel);
            startForeground(NOTIFICATION_ID, notification);
        }

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            _mediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
        }

    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        _callingIntent = intent;

        IntentFilter filter = new IntentFilter();
        filter.addAction(ACTION_ALL);
        registerReceiver(_actionReceiver, filter);

        return START_STICKY;
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void startRecording(Intent intent) {
        // -1 is Activity.RESULT_OK; the intent carries the data returned by the screen-capture permission dialog
        _mediaProjection = _mediaProjectionManager.getMediaProjection(-1, intent);
        startRecording(_mediaProjection);
    }

    @TargetApi(29)
    private void startRecording(MediaProjection mediaProjection ) {
        AudioPlaybackCaptureConfiguration config =
                new AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
                        .addMatchingUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
                        .build();
        AudioFormat audioFormat = new AudioFormat.Builder()
                .setEncoding(RECORDER_AUDIO_ENCODING)
                .setSampleRate(RECORDER_SAMPLERATE)
                .setChannelMask(RECORDER_CHANNELS)
                .build();
        _recorder = new AudioRecord.Builder()
                //.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION)
                .setAudioFormat(audioFormat)
                .setBufferSizeInBytes(BufferElements2Rec * BytesPerElement)
                .setAudioPlaybackCaptureConfig(config)
                .build();

        _recorder.startRecording();

        _recordingThread = new Thread(new Runnable() {
            public void run() {
                writeAudioDataToFile();
            }
        }, "AudioRecorder Thread");
        _isRecording = true;
        _recordingThread.start();
    }

    private byte[] short2byte(short[] sData) {
        int shortArrsize = sData.length;
        byte[] bytes = new byte[shortArrsize * 2];
        for (int i = 0; i < shortArrsize; i++) {
            bytes[i * 2] = (byte) (sData[i] & 0x00FF);
            bytes[(i * 2) + 1] = (byte) (sData[i] >> 8);
            sData[i] = 0;
        }
        return bytes;

    }

    private void writeAudioDataToFile() {
        // Write the captured audio to a raw PCM file
        Log.i(MainActivity.LOG_PREFIX, "Recording started. Computing output file name");
        File sampleDir = new File(getExternalFilesDir(null), "/TestRecordingDasa1");
        if (!sampleDir.exists()) {
            sampleDir.mkdirs();
        }
        String fileName = "Record-" + new SimpleDateFormat("dd-MM-yyyy-hh-mm-ss").format(new Date()) + ".pcm";
        String filePath = sampleDir.getAbsolutePath() + "/" + fileName;
        //String filePath = "/sdcard/voice8K16bitmono.pcm";
        short sData[] = new short[BufferElements2Rec];

        FileOutputStream os = null;
        try {
            os = new FileOutputStream(filePath);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            return; // no output stream, nothing to record into
        }

        while (_isRecording) {
            // read captured playback audio (not the microphone) into the short buffer
            int shortsRead = _recorder.read(sData, 0, BufferElements2Rec);
            if (shortsRead <= 0) {
                continue;
            }
            try {
                // convert the samples to little-endian bytes and append them to the file
                byte bData[] = short2byte(sData);
                os.write(bData, 0, shortsRead * BytesPerElement);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        try {
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }

        Log.i(MainActivity.LOG_PREFIX, String.format("Recording finished. File saved to '%s'\nadb pull %s .", filePath, filePath));
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void stopRecording() {
        // stops the recording activity
        if (null != _recorder) {
            _isRecording = false;
            _recorder.stop();
            _recorder.release();
            _recorder = null;
        }

        _mediaProjection.stop();

        stopSelf();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        unregisterReceiver(_actionReceiver);
    }

    BroadcastReceiver _actionReceiver = new BroadcastReceiver() {
        @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
        @Override
        public void onReceive(Context context, Intent intent) {
            String action = intent.getAction();
            if (action.equalsIgnoreCase(ACTION_ALL)) {
                String actionName = intent.getStringExtra(EXTRA_ACTION_NAME);
                if (actionName != null && !actionName.isEmpty()) {
                    if (actionName.equalsIgnoreCase(ACTION_START)) {
                        startRecording(_callingIntent);
                    } else if (actionName.equalsIgnoreCase(ACTION_STOP)){
                        stopRecording();
                    }
                }

            }
        }
    };
}
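
For completeness, this is roughly the activity side (a minimal sketch, not my exact code: REQUEST_MEDIA_PROJECTION and the button wiring are illustrative, and imports are omitted as in the service above). It requests the capture permission, starts the service with the returned intent, and then broadcasts ACTION_START / ACTION_STOP:

public class MainActivity extends AppCompatActivity {
    public static final String LOG_PREFIX = "MediaCapture"; // tag used by the service's Log calls
    private static final int REQUEST_MEDIA_PROJECTION = 1;  // illustrative request code

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Ask the user for permission to capture playback
        MediaProjectionManager mpm =
                (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
        startActivityForResult(mpm.createScreenCaptureIntent(), REQUEST_MEDIA_PROJECTION);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_MEDIA_PROJECTION && resultCode == RESULT_OK && data != null) {
            // Copy the projection result onto the service intent; the service hands this
            // intent to getMediaProjection() in startRecording(Intent)
            Intent serviceIntent = new Intent(this, MediaCaptureService.class);
            serviceIntent.putExtras(data);
            ContextCompat.startForegroundService(this, serviceIntent);
        }
    }

    // Called from start/stop buttons; the service's receiver listens for ACTION_ALL
    private void sendRecordingAction(String actionName) {
        Intent broadcast = new Intent(MediaCaptureService.ACTION_ALL);
        broadcast.putExtra(MediaCaptureService.EXTRA_ACTION_NAME, actionName);
        sendBroadcast(broadcast);
    }
}

The output file is headerless PCM (8 kHz, 16-bit, mono), so it has to be imported as raw audio to listen to it.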

This was tested on a Google Pixel device running Android 10. I just want to know whether this new API can be used to record calls, or only other media. Here is the documentation for this new API: Playback capture.

What could be the possible mistake I'm making here?
