videosdk-live/videosdk-rtc-flutter-sdk-example

On iOS, the audio output changes from the main speaker to the earpiece speaker, and after muting, the other person on the call can still hear my audio; on Android it works fine

cg-sangam opened this issue · 8 comments

When I enable my mic on iOS, my audio output changes from the main speaker to the earpiece speaker.
Below is my code snippet:

Mute and unmute function:
// Toggles the local microphone: a non-null audioStream means the mic is
// currently live (streamEnabled fired for an 'audio' stream), so mute;
// otherwise unmute.
onMicButtonPressed: () {
if (audioStream != null) {
meeting.muteMic();
} else {
meeting.unmuteMic();
}
},

Full code sample

/// Conference screen widget: joins a VideoSDK room identified by
/// [customRoomId] and renders the in-call UI.
class ConferenceScreen extends StatefulWidget {
  /// Initial device states and feature flags for this participant.
  final bool micEnabled, camEnabled, chatEnabled, isHost;

  /// Custom room identifier used to look up the meeting.
  final String customRoomId;

  /// Identifier of the event this conference belongs to.
  final int eventId;

  const ConferenceScreen({
    Key? key,
    required this.isHost,
    required this.eventId,
    required this.customRoomId,
    this.micEnabled = true,
    this.camEnabled = true,
    this.chatEnabled = true,
  }) : super(key: key);

  // NOTE: the Dart override annotation is lowercase `@override`;
  // `@OverRide` does not resolve and fails analysis/compilation.
  @override
  State createState() => _ConferenceScreenState();
}

/// State for [ConferenceScreen]: creates and joins the room, wires up
/// meeting/participant/stream event handlers, and renders the call UI.
///
/// Must extend `State<ConferenceScreen>` (not raw `State`) so that
/// `widget.micEnabled`, `widget.isHost`, etc. resolve.
class _ConferenceScreenState extends State<ConferenceScreen> {
  /// The active VideoSDK room. Assigned in [joinAndCreateInitCall] *before*
  /// `join()` so [dispose] can always call `leave()` without risking a
  /// LateInitializationError when the user backs out before joining.
  late Room meeting;

  bool _joined = false;
  bool callLeave = false;

  // Local media streams; non-null while the corresponding track is live
  // (maintained by the streamEnabled / streamDisabled handlers below).
  Stream? videoStream;
  Stream? audioStream;

  // Available cameras, populated once the room is joined.
  List cameras = [];

  // One map per participant seen so far: {participantId: displayName}.
  List<Map<String, String>> participantList = [{}];

  /// Builds the room from the locally stored profile, registers the event
  /// handlers and joins the meeting.
  Future<void> joinAndCreateInitCall() async {
    String _userId =
        await SharedPreferencesHelper.getValue(SharedPreferencesHelper.userId);
    ProfileDTO _profile =
        await SharedPreferencesHelper.getValue(SharedPreferencesHelper.profile)
            .then((profile) {
      return ProfileDTO.fromJson(json.decode(profile), _userId);
    }).catchError((e) {});
    Room room = VideoSDK.createRoom(
      roomId: GroupSharingApi.instance.meetingId,
      token: GroupSharingApi.instance.videoSDKToken,
      displayName: '${_profile.firstName} ${_profile.lastName}',
      micEnabled: widget.micEnabled,
      camEnabled: widget.camEnabled,
      maxResolution: 'hd',
      multiStream: true,
      participantId: _userId,
      defaultCameraIndex: 1,
      notification: const NotificationInfo(
        title: "Video SDK",
        message: "Video SDK is sharing screen in the meeting",
        icon: "notification_share", // drawable icon name
      ),
    );

    // Assign before join(): the roomJoined handler reassigns it inside
    // setState, but dispose() may run before that event ever fires.
    meeting = room;

    // Register meeting events
    registerMeetingEvents(room);

    log('room join');
    // Join meeting
    room.join();
    Wakelock.enable();
    log('room join');
  }

  @override
  void initState() {
    // Fire-and-forget: the UI shows a waiting indicator until the
    // roomJoined event flips _joined.
    joinAndCreateInitCall();
    super.initState();
  }

  @override
  void dispose() {
    log('dispose called');
    meeting.leave();
    Wakelock.disable();
    super.dispose();
  }

  /// Renders the in-call UI once joined, otherwise a waiting indicator.
  @override
  Widget build(BuildContext context) {
    return _joined
        ? Container(
            height: MediaQuery.of(context).size.height * 0.4,
            color: Colors.black,
            alignment: Alignment.center,
            child: Column(
              mainAxisSize: MainAxisSize.min,
              children: [
                Flexible(child: ConferenceParticipantGrid(meeting: meeting)),
                const SizedBox(height: 10),
                AnimatedCrossFade(
                  duration: const Duration(milliseconds: 300),
                  crossFadeState: CrossFadeState.showFirst,
                  secondChild: const SizedBox.shrink(),
                  firstChild: MeetingActionBar(
                    isMicEnabled: audioStream != null,
                    isCamEnabled: videoStream != null,
                    // Called when Call End button is pressed
                    onCallEndButtonPressed: () {
                      callLeave = true;
                      meeting.leave();
                    },
                    // Called when mic button is pressed: a non-null
                    // audioStream means the mic is live, so mute it.
                    onMicButtonPressed: () {
                      if (audioStream != null) {
                        meeting.muteMic();
                      } else {
                        meeting.unmuteMic();
                      }
                    },
                    // Called when camera button is pressed
                    onCameraButtonPressed: () {
                      if (videoStream != null) {
                        meeting.disableCam();
                      } else {
                        meeting.enableCam();
                      }
                    },
                    // Shows a popup listing the available audio output
                    // devices, anchored near the tap position.
                    onSwitchMicButtonPressed: (details) async {
                      List outputDevices = meeting.getAudioOutputDevices();
                      double bottomMargin = (70.0 * outputDevices.length);
                      final screenSize = MediaQuery.of(context).size;
                      await showMenu(
                        context: context,
                        color: Colors.white,
                        shape: RoundedRectangleBorder(
                            borderRadius: BorderRadius.circular(12)),
                        position: RelativeRect.fromLTRB(
                          screenSize.width - details.globalPosition.dx,
                          details.globalPosition.dy - bottomMargin,
                          details.globalPosition.dx,
                          (bottomMargin),
                        ),
                        items: outputDevices.map((e) {
                          return PopupMenuItem(
                              value: e, child: Text(e.label));
                        }).toList(),
                        elevation: 8.0,
                      ).then((value) {
                        if (value != null) {
                          meeting.switchAudioDevice(value);
                        }
                      });
                    },
                    onParticipantPressed: () {
                      log('custom room id from conference screen ${widget.customRoomId}');
                      showModalBottomSheet(
                        context: context,
                        enableDrag: false,
                        builder: (context) => ParticipantList(
                            meeting: meeting,
                            isHost: widget.isHost,
                            eventId: widget.eventId,
                            customRoomId: widget.customRoomId),
                      );
                    },
                    // Switches to the first camera whose deviceId differs
                    // from the currently selected one.
                    onSwitchCamera: () {
                      MediaDeviceInfo newCam = cameras.firstWhere(
                          (camera) =>
                              camera.deviceId != meeting.selectedCamId);
                      meeting.changeCam(newCam.deviceId);
                    },
                  ),
                ),
              ],
            ),
          )
        : SizedBox(
            height: MediaQuery.of(context).size.height * 0.4,
            child: Center(
              child: Column(
                mainAxisAlignment: MainAxisAlignment.center,
                mainAxisSize: MainAxisSize.min,
                children: [
                  Text(
                      widget.isHost
                          ? "Creating a Room"
                          : "Please wait host will let you in soon!",
                      style: TextStyle(
                          fontSize: 20,
                          color: Colors.white,
                          fontWeight: FontWeight.w500)),
                  const SizedBox(height: 10),
                  CupertinoActivityIndicator(
                    color: Colors.white,
                  ),
                ],
              ),
            ),
          );
  }

  /// Registers all room, participant and local-stream event handlers
  /// on [_meeting].
  void registerMeetingEvents(Room _meeting) {
    // Called when joined in meeting
    _meeting.on(
      Events.roomJoined,
      () {
        log('on room joined ');
        setState(() {
          meeting = _meeting;
          cameras = meeting.getCameras();
          _joined = true;
        });
      },
    );

    // Called when meeting is ended
    _meeting.on(Events.roomLeft, (String? errorMsg) {
      if (errorMsg != null) {
        showSnackBarMessage(
            message: "Meeting left due to $errorMsg !!", context: context);
      }
      GroupSharingApi.instance.showCallScreen.value = false;
    });

    _meeting.on(Events.participantJoined, (Participant participant) {
      participantList
          .add({'${participant.id}': '${participant.displayName}'});
      showSnackBarMessage(
          message: "${participant.displayName} joined the room!",
          context: context);
    });

    _meeting.on(Events.participantLeft, (String participantId) {
      // orElse guards against a participant this client never recorded
      // (e.g. one who joined first), so firstWhere cannot throw StateError.
      showSnackBarMessage(
          message:
              "${participantList.firstWhere((element) => element.containsKey(participantId), orElse: () => {participantId: 'A participant'})[participantId]} left the room!",
          context: context);
      participantList
          .removeWhere((element) => element.containsKey(participantId));
    });

    // Called when stream is enabled
    _meeting.localParticipant.on(Events.streamEnabled, (Stream _stream) {
      if (_stream.kind == 'video') {
        setState(() {
          videoStream = _stream;
        });
      } else if (_stream.kind == 'audio') {
        setState(() {
          audioStream = _stream;
        });
      }
    });

    // Called when stream is disabled
    _meeting.localParticipant.on(Events.streamDisabled, (Stream _stream) {
      if (_stream.kind == 'video' && videoStream?.id == _stream.id) {
        setState(() {
          videoStream = null;
        });
      } else if (_stream.kind == 'audio' && audioStream?.id == _stream.id) {
        setState(() {
          audioStream = null;
        });
      }
    });

    // Called when someone requests to join the meeting
    _meeting.on(Events.entryRequested, (data) {
      log('user entry request');
      var name = data["name"];
      var allow = data["allow"];
      var deny = data["deny"];
      showRequestDialog(context, "Join Request",
          "Do you want to allow $name to join room?", allow, deny);
    });

    // Handle camera Requested
    _meeting.on(Events.cameraRequested, (data) {
      log('requested for camera');
      var allow = data["accept"];
      var deny = data["reject"];
      showRequestDialog(context, "Enable Camera Request",
          "Host requested to enable camera!", allow, deny);
    });

    // Handle Mic Requested
    _meeting.on(Events.micRequested, (data) {
      log('requested for mic');
      var allow = data["accept"];
      var deny = data["reject"];
      showRequestDialog(context, "Enable Mic Request",
          "Host requested to enable mic!", allow, deny);
    });

    _meeting.on(
        Events.error,
        (error) => {
              showSnackBarMessage(
                  message: error['name'].toString() +
                      " :: " +
                      error['message'].toString(),
                  context: context)
            });
  }
}

/// Shows a floating snack bar containing [message], replacing any snack
/// bar that is currently visible.
void showSnackBarMessage(
    {required String message,
    Widget? icon,
    Color messageColor = Colors.white,
    required BuildContext context}) {
  final messenger = ScaffoldMessenger.of(context);

  // Drop whatever is currently showing so messages never queue up.
  messenger.removeCurrentSnackBar();

  final body = Flexible(
    child: Text(
      message,
      overflow: TextOverflow.fade,
      style: TextStyle(
        color: messageColor,
        fontSize: 14,
        fontWeight: FontWeight.w500,
      ),
    ),
  );

  messenger.showSnackBar(
    SnackBar(
      behavior: SnackBarBehavior.floating,
      margin: const EdgeInsets.symmetric(horizontal: 16, vertical: 4),
      shape: RoundedRectangleBorder(borderRadius: BorderRadius.circular(8)),
      // Optional leading icon followed by the (fade-truncated) message.
      content: Row(children: [if (icon != null) icon, body]),
    ),
  );
}

/// Presents a confirmation dialog for a host request (join / camera / mic).
///
/// Runs [allow] or [deny] depending on the button pressed, awaits the
/// callback, then dismisses the dialog.
void showRequestDialog(BuildContext context, String title, String content,
        Function allow, Function deny) =>
    showDialog(
      context: context,
      builder: (context) {
        // Builds one dialog action: invoke [callback], then close.
        TextButton action(Widget label, Function callback) => TextButton(
              child: label,
              onPressed: () async {
                await callback();
                Navigator.of(context).pop();
              },
            );

        return AlertDialog(
          title: Text(title),
          content: Text(content),
          actions: [
            action(const Text("Deny"), deny),
            action(const Text("Allow"), allow),
          ],
        );
      },
    );

My flutter Doctor:
Doctor summary (to see all details, run flutter doctor -v):
[!] Flutter (Channel stable, 3.7.3, on macOS 13.0.1 22A400 darwin-arm64, locale
en-IN)
! Warning: dart on your path resolves to
/opt/homebrew/Cellar/dart/2.18.5/libexec/bin/dart, which is not inside
your current Flutter SDK checkout at
/Users/macintosh/Desktop/Sangam/flutter_sdk/flutter. Consider adding
/Users/macintosh/Desktop/Sangam/flutter_sdk/flutter/bin to the front of
your path.
[!] Android toolchain - develop for Android devices (Android SDK version 33.0.0)
✗ cmdline-tools component is missing
Run path/to/sdkmanager --install "cmdline-tools;latest"
See https://developer.android.com/studio/command-line for more details.
✗ Android license status unknown.
Run flutter doctor --android-licenses to accept the SDK licenses.
See https://flutter.dev/docs/get-started/install/macos#android-setup for
more details.
[✓] Xcode - develop for iOS and macOS (Xcode 14.2)
[✓] Chrome - develop for the web
[✓] Android Studio (version 2021.3)
[✓] VS Code (version 1.75.1)
[✓] Connected device (4 available)
[✓] HTTP Host Availability

! Doctor found issues in 2 categories.

My video sdk version : 1.0.9

And after mute still another person on call able to hear my audio and in android it works fine

Video Sample:

WhatsApp.Video.2023-02-26.at.8.01.33.PM.mp4

@cg-sangam Are both the device place in the same room?

Let us check the ios output device switch.

@rajansurani yes both device are placed in same room

Now the main issue is: on an iPhone 11, if the user is muted, that user's audio still comes through

And i am running release build

The audio being played, as seen in the video, is coming from the other device you have in the room after muting the mic. You can check the speaker indication for that.

Now i implemented speaker as given in your example once i click on speaker it working fine.

but if the user mutes himself, his audio still comes through. Basically, the muting functionality is not working on the iPhone 11

can you try the same scenario of audio coming after mute, by keeping the device in separate rooms?

I tried, and the problem still occurred.
The test case is:
- When the user joins with the mic initially muted: no audio comes through, which is OK.
- When the user unmutes: audio comes through, which is again OK.
- When the user mutes himself: audio still comes through, which is the issue.

@cg-sangam We are not able to reproduce this issue of audio coming through after muting the mic. Can you share the test app with us?