@@ -114,7 +114,7 @@ public class Nexilis: NSObject {
         Nexilis.dispatch?.wait()
         Nexilis.dispatch = nil
 
-        Nexilis.initiateAudio()
+//        Nexilis.initiateAudio()
 
         if(!id.isEmpty && (UserDefaults.standard.string(forKey: "me") == nil)){
             if let response = Nexilis.writeSync(message: CoreMessage_TMessageBank.getSignUpApi(api: apiKey, p_pin: id), timeout: 30 * 1000){
@@ -425,7 +425,7 @@ public class Nexilis: NSObject {
 
     public static func setSpeaker(_ isEnabled: Bool) {
         do {
-
+            API.adjustVolume(fValue: isEnabled ? 10.0: 3.0)
         } catch {
         }
     }
@@ -438,25 +438,25 @@ public class Nexilis: NSObject {
         }
     }
 
-    public static func startAudio(isVideo: Bool = true) {
-        do {
-            try AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
-            try AVAudioSession.sharedInstance().setMode(.voiceChat)
-            try AVAudioSession.sharedInstance().overrideOutputAudioPort(isVideo ? .speaker : .none)
-            try AVAudioSession.sharedInstance().setActive(true)
-        } catch {
-        }
-    }
-
-    public static func stopAudio() {
-        do {
-            try AVAudioSession.sharedInstance().setCategory(.soloAmbient)
-            try AVAudioSession.sharedInstance().setMode(.default)
-            try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
-            try AVAudioSession.sharedInstance().setActive(true)
-        } catch {
-        }
-    }
+//    public static func startAudio(isVideo: Bool = true) {
+//        do {
+//            try AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
+//            try AVAudioSession.sharedInstance().setMode(.voiceChat)
+//            try AVAudioSession.sharedInstance().overrideOutputAudioPort(isVideo ? .speaker : .none)
+//            try AVAudioSession.sharedInstance().setActive(true)
+//        } catch {
+//        }
+//    }
+//
+//    public static func stopAudio() {
+//        do {
+//            try AVAudioSession.sharedInstance().setCategory(.soloAmbient)
+//            try AVAudioSession.sharedInstance().setMode(.default)
+//            try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
+//            try AVAudioSession.sharedInstance().setActive(true)
+//        } catch {
+//        }
+//    }
 
     public static func muteMicrophone(isMute: Bool!){
         do {
@@ -1663,7 +1663,6 @@ extension Nexilis: CallDelegate {
         }
         if (state == 21 && message.split(separator: ",")[1] != "joining Ac.room on channel 0") {
             if onGoingCC.isEmpty {
-                let backgroundTaskIdentifier = UIApplication.shared.beginBackgroundTask(expirationHandler: nil)
                 let data = User.getDataCanNil(pin: String(deviceId))
                 if data == nil {
                     DispatchQueue.main.asyncAfter(deadline: .now() + 0.5, execute: {
@@ -1671,10 +1670,21 @@ extension Nexilis: CallDelegate {
                     })
                     return
                 }
-                uuidOngoing = UUID()
-                self.displayIncomingCall(uuid: uuidOngoing, handle: String(deviceId), hasVideo: false) { error in
-                    UIApplication.shared.endBackgroundTask(backgroundTaskIdentifier)
+//                let backgroundTaskIdentifier = UIApplication.shared.beginBackgroundTask(expirationHandler: nil)
+//                uuidOngoing = UUID()
+//                self.displayIncomingCall(uuid: uuidOngoing, handle: String(deviceId), hasVideo: false) { error in
+//                    UIApplication.shared.endBackgroundTask(backgroundTaskIdentifier)
+//                }
+                let controller = QmeraAudioViewController()
+                controller.user = User.getData(pin: String(deviceId))
+                controller.isOutgoing = false
+                controller.modalPresentationStyle = .overCurrentContext
+                if UIApplication.shared.visibleViewController?.navigationController != nil {
+                    UIApplication.shared.visibleViewController?.navigationController?.present(controller, animated: true, completion: nil)
+                } else {
+                    UIApplication.shared.visibleViewController?.present(controller, animated: true, completion: nil)
                 }
+//                API.receiveCCall(sParty: String(deviceId))
             } else {
                 DispatchQueue.main.asyncAfter(deadline: .now() + isShowAlert!, execute: {
                     let controller = QmeraAudioViewController()
@@ -1780,12 +1790,12 @@ extension Nexilis: CallDelegate {
                 }
             }
         } else if state == 22 {
-            if let call = callManager.call(with: String(r[1])) {
-                print("onStatusCall:answerCall")
-                DispatchQueue.main.async {
-                    call.answerCall()
-                }
-            }
+//            if let call = callManager.call(with: String(r[1])) {
+//                print("onStatusCall:answerCall")
+//                DispatchQueue.main.async {
+//                    call.answerCall()
+//                }
+//            }
         } else if state == 28 {
             DispatchQueue.main.async {
                 if QmeraAudioViewController().viewIfLoaded?.window == nil {