|
@@ -29,6 +29,9 @@ class SecondTabViewController: UIViewController, UIScrollViewDelegate, UIGesture
|
|
|
var childrenMenu = [UIAction]()
|
|
|
|
|
|
var groupMap: [String:Int] = [:]
|
|
|
+
|
|
|
+ var isAllowSpeech = UserDefaults.standard.bool(forKey: "allowSpeech") // restore the flag persisted in setupSpeech(); otherwise that UserDefaults write is dead code
|
|
|
+ var alertController = UIAlertController()
|
|
|
|
|
|
lazy var searchController: UISearchController = {
|
|
|
var searchController = UISearchController(searchResultsController: nil)
|
|
@@ -39,6 +42,7 @@ class SecondTabViewController: UIViewController, UIScrollViewDelegate, UIGesture
|
|
|
searchController.searchBar.barTintColor = .secondaryColor
|
|
|
searchController.searchBar.searchTextField.backgroundColor = .secondaryColor
|
|
|
searchController.obscuresBackgroundDuringPresentation = false
|
|
|
+ searchController.searchBar.placeholder = "Search chats & messages".localized()
|
|
|
|
|
|
return searchController
|
|
|
}()
|
|
@@ -62,7 +66,7 @@ class SecondTabViewController: UIViewController, UIScrollViewDelegate, UIGesture
|
|
|
}
|
|
|
@IBOutlet var tableView: UITableView!
|
|
|
|
|
|
- let speechRecognizer = SFSpeechRecognizer()
|
|
|
+ var speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: UserDefaults.standard.string(forKey: "i18n_language") ?? "en")) // match viewWillAppear: use saved app language, not hard-coded "id"
|
|
|
|
|
|
var recognitionRequest : SFSpeechAudioBufferRecognitionRequest?
|
|
|
var recognitionTask : SFSpeechRecognitionTask?
|
|
@@ -80,31 +84,6 @@ class SecondTabViewController: UIViewController, UIScrollViewDelegate, UIGesture
|
|
|
|
|
|
override func viewDidLoad() {
|
|
|
super.viewDidLoad()
|
|
|
- SFSpeechRecognizer.requestAuthorization({ authStatus in
|
|
|
- var isButtonEnabled = false
|
|
|
-
|
|
|
- OperationQueue.main.addOperation {
|
|
|
- switch authStatus {
|
|
|
- case .authorized:
|
|
|
- isButtonEnabled = true
|
|
|
- print("User allowed access to speech recognition")
|
|
|
-
|
|
|
- case .denied:
|
|
|
- isButtonEnabled = false
|
|
|
- print("User denied access to speech recognition")
|
|
|
-
|
|
|
- case .restricted:
|
|
|
- isButtonEnabled = false
|
|
|
- print("Speech recognition restricted on this device")
|
|
|
-
|
|
|
- case .notDetermined:
|
|
|
- isButtonEnabled = false
|
|
|
- print("Speech recognition not yet authorized")
|
|
|
- @unknown default:
|
|
|
- print("Speech recognition not yet authorized")
|
|
|
- }
|
|
|
- }
|
|
|
- })
|
|
|
let me = UserDefaults.standard.string(forKey: "me")!
|
|
|
Database.shared.database?.inTransaction({ fmdb, rollback in
|
|
|
if let cursor = Database.shared.getRecords(fmdb: fmdb, query: "select FIRST_NAME, LAST_NAME, IMAGE_ID, USER_TYPE from BUDDY where F_PIN = '\(me)'"), cursor.next() {
|
|
@@ -113,8 +92,6 @@ class SecondTabViewController: UIViewController, UIScrollViewDelegate, UIGesture
|
|
|
}
|
|
|
})
|
|
|
|
|
|
- cancelSearchButton = UIBarButtonItem(title: "Cancel".localized(), style: .plain, target: self, action: #selector(cancel(sender:)))
|
|
|
-
|
|
|
var childrenMenu : [UIAction] = []
|
|
|
|
|
|
if(isAdmin){
|
|
@@ -171,93 +148,129 @@ class SecondTabViewController: UIViewController, UIScrollViewDelegate, UIGesture
|
|
|
}
|
|
|
|
|
|
@objc func recordAudio(){
|
|
|
- if audioEngine.isRunning {
|
|
|
- self.audioEngine.stop()
|
|
|
- self.recognitionRequest?.endAudio()
|
|
|
- voiceItem.image = UIImage(systemName: "mic.fill")
|
|
|
- searchController.searchBar.isUserInteractionEnabled = true
|
|
|
- searchController.automaticallyShowsCancelButton = true
|
|
|
- searchController.hidesNavigationBarDuringPresentation = true
|
|
|
+ if !isAllowSpeech {
|
|
|
+ setupSpeech()
|
|
|
} else {
|
|
|
- print("start recording")
|
|
|
- self.startRecording()
|
|
|
- voiceItem.image = UIImage(systemName: "mic")
|
|
|
- searchController.searchBar.isUserInteractionEnabled = false
|
|
|
+ runVoice()
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- func startRecording() {
|
|
|
+ func setupSpeech() {
|
|
|
|
|
|
- // Clear all previous session data and cancel task
|
|
|
- if recognitionTask != nil {
|
|
|
- recognitionTask?.cancel()
|
|
|
- recognitionTask = nil
|
|
|
- }
|
|
|
+ self.speechRecognizer?.delegate = self
|
|
|
|
|
|
- // Create instance of audio session to record voice
|
|
|
- let audioSession = AVAudioSession.sharedInstance()
|
|
|
- do {
|
|
|
- try audioSession.setCategory(AVAudioSession.Category.record, mode: AVAudioSession.Mode.measurement, options: AVAudioSession.CategoryOptions.defaultToSpeaker)
|
|
|
- try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
|
|
|
- } catch {
|
|
|
- print("audioSession properties weren't set because of an error.")
|
|
|
- }
|
|
|
+ SFSpeechRecognizer.requestAuthorization { (authStatus) in
|
|
|
+
|
|
|
+ var isButtonEnabled = false
|
|
|
|
|
|
- self.recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
|
|
|
+ switch authStatus {
|
|
|
+ case .authorized:
|
|
|
+ isButtonEnabled = true
|
|
|
|
|
|
- let inputNode = audioEngine.inputNode
|
|
|
+ case .denied:
|
|
|
+ isButtonEnabled = false
|
|
|
+ print("User denied access to speech recognition")
|
|
|
|
|
|
- guard let recognitionRequest = recognitionRequest else {
|
|
|
- fatalError("Unable to create an SFSpeechAudioBufferRecognitionRequest object")
|
|
|
+ case .restricted:
|
|
|
+ isButtonEnabled = false
|
|
|
+ print("Speech recognition restricted on this device")
|
|
|
+
|
|
|
+ case .notDetermined:
|
|
|
+ isButtonEnabled = false
|
|
|
+ print("Speech recognition not yet authorized")
|
|
|
+ @unknown default:
|
|
|
+ isButtonEnabled = false
|
|
|
+ }
|
|
|
+
|
|
|
+ OperationQueue.main.addOperation() {
|
|
|
+ self.isAllowSpeech = isButtonEnabled
|
|
|
+ if isButtonEnabled {
|
|
|
+ UserDefaults.standard.set(isButtonEnabled, forKey: "allowSpeech")
|
|
|
+ self.runVoice()
|
|
|
+ }
|
|
|
}
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ func startRecording() {
|
|
|
|
|
|
- recognitionRequest.shouldReportPartialResults = true
|
|
|
+ // Clear all previous session data and cancel task
|
|
|
+ if recognitionTask != nil {
|
|
|
+ recognitionTask?.cancel()
|
|
|
+ recognitionTask = nil
|
|
|
+ }
|
|
|
|
|
|
- self.recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest, resultHandler: { (result, error) in
|
|
|
+ // Create instance of audio session to record voice
|
|
|
+ let audioSession = AVAudioSession.sharedInstance()
|
|
|
+ do {
|
|
|
+ try audioSession.setCategory(AVAudioSession.Category.record, mode: AVAudioSession.Mode.measurement, options: AVAudioSession.CategoryOptions.defaultToSpeaker)
|
|
|
+ try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
|
|
|
+ } catch {
|
|
|
+ print("audioSession properties weren't set because of an error.")
|
|
|
+ }
|
|
|
|
|
|
- var isFinal = false
|
|
|
+ self.recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
|
|
|
|
|
|
- if result != nil {
|
|
|
+ let inputNode = audioEngine.inputNode
|
|
|
|
|
|
- self.searchController.searchBar.searchTextField.text = result?.bestTranscription.formattedString
|
|
|
- self.updateSearchResults(for: self.searchController)
|
|
|
- isFinal = (result?.isFinal)!
|
|
|
- }
|
|
|
+ guard let recognitionRequest = recognitionRequest else {
|
|
|
+ fatalError("Unable to create an SFSpeechAudioBufferRecognitionRequest object")
|
|
|
+ }
|
|
|
|
|
|
- if error != nil || isFinal {
|
|
|
+ recognitionRequest.shouldReportPartialResults = true
|
|
|
|
|
|
- self.audioEngine.stop()
|
|
|
- inputNode.removeTap(onBus: 0)
|
|
|
+ self.recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest, resultHandler: { (result, error) in
|
|
|
|
|
|
- self.recognitionRequest = nil
|
|
|
- self.recognitionTask = nil
|
|
|
+ var isFinal = false
|
|
|
|
|
|
-// self.btnStart.isEnabled = true
|
|
|
- }
|
|
|
- })
|
|
|
+ if result != nil {
|
|
|
|
|
|
- let recordingFormat = inputNode.outputFormat(forBus: 0)
|
|
|
- inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer, when) in
|
|
|
- self.recognitionRequest?.append(buffer)
|
|
|
+ self.searchController.searchBar.searchTextField.text = result?.bestTranscription.formattedString
|
|
|
+ self.updateSearchResults(for: self.searchController)
|
|
|
+ self.alertController.dismiss(animated: true)
|
|
|
+ self.audioEngine.stop()
|
|
|
+ self.recognitionRequest?.endAudio()
|
|
|
+ self.voiceItem.image = UIImage(systemName: "mic.fill")
|
|
|
+ isFinal = (result?.isFinal)!
|
|
|
}
|
|
|
|
|
|
- self.audioEngine.prepare()
|
|
|
+ if error != nil || isFinal {
|
|
|
+
|
|
|
+ self.audioEngine.stop()
|
|
|
+ inputNode.removeTap(onBus: 0)
|
|
|
|
|
|
- do {
|
|
|
- try self.audioEngine.start()
|
|
|
- } catch {
|
|
|
- print("audioEngine couldn't start because of an error.")
|
|
|
+ self.recognitionRequest = nil
|
|
|
+ self.recognitionTask = nil
|
|
|
}
|
|
|
-//
|
|
|
-// self.lblText.text = "Say something, I'm listening!"
|
|
|
+ })
|
|
|
+
|
|
|
+ let recordingFormat = inputNode.outputFormat(forBus: 0)
|
|
|
+ inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer, when) in
|
|
|
+ self.recognitionRequest?.append(buffer)
|
|
|
+ }
|
|
|
+
|
|
|
+ self.audioEngine.prepare()
|
|
|
+
|
|
|
+ do {
|
|
|
+ try self.audioEngine.start()
|
|
|
+ } catch {
|
|
|
+ print("audioEngine couldn't start because of an error.")
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ func runVoice() {
|
|
|
+ if !audioEngine.isRunning {
|
|
|
+ self.voiceItem.image = UIImage(systemName: "mic")
|
|
|
+ alertController = UIAlertController(title: "Start Recording".localized(), message: "Say something, I'm listening!".localized(), preferredStyle: .alert)
|
|
|
+ self.present(alertController, animated: true)
|
|
|
+ self.startRecording()
|
|
|
}
|
|
|
+ }
|
|
|
|
|
|
override func viewWillAppear(_ animated: Bool) {
|
|
|
// tabBarController?.navigationItem.leftBarButtonItem = cancelSearchButton
|
|
|
self.navigationController?.navigationBar.topItem?.title = Bundle.main.displayName
|
|
|
self.navigationController?.navigationBar.titleTextAttributes = [NSAttributedString.Key.foregroundColor: UIColor.black]
|
|
|
- let cpaasMode = PrefsUtil.getCpaasMode()
|
|
|
- let isBurger = cpaasMode == PrefsUtil.CPAAS_MODE_BURGER
|
|
|
navigationController?.navigationBar.backgroundColor = .clear
|
|
|
navigationController?.navigationBar.setBackgroundImage(UIImage(), for: .default)
|
|
|
navigationController?.navigationBar.shadowImage = UIImage()
|
|
@@ -267,15 +280,16 @@ class SecondTabViewController: UIViewController, UIScrollViewDelegate, UIGesture
|
|
|
tabBarController?.navigationItem.leftBarButtonItem = voiceItem
|
|
|
tabBarController?.navigationItem.searchController = searchController
|
|
|
tabBarController?.navigationItem.rightBarButtonItem = menuItem
|
|
|
+ let lang = UserDefaults.standard.string(forKey: "i18n_language")
|
|
|
+ speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: lang ?? "en"))
|
|
|
let randomInt = Int.random(in: 1..<10)
|
|
|
backgroundImage.image = UIImage(named: "pb_lbackground_\(randomInt)")
|
|
|
getData()
|
|
|
- searchController.searchBar.placeholder = "Search chats & messages".localized()
|
|
|
- searchController.searchBar.setValue("Cancel".localized(), forKey: "cancelButtonText")
|
|
|
if segment.numberOfSegments == 2 {
|
|
|
segment.setTitle("Chats".localized(), forSegmentAt: 0)
|
|
|
segment.setTitle("Groups".localized(), forSegmentAt: 1)
|
|
|
}
|
|
|
+ searchController.searchBar.placeholder = "Search chats & messages".localized()
|
|
|
}
|
|
|
|
|
|
override func viewWillDisappear(_ animated: Bool) {
|
|
@@ -297,55 +311,13 @@ class SecondTabViewController: UIViewController, UIScrollViewDelegate, UIGesture
|
|
|
}
|
|
|
|
|
|
@objc func onReceiveMessage(notification: NSNotification) {
|
|
|
- let data:[AnyHashable : Any] = notification.userInfo!
|
|
|
- guard let dataMessage = data["message"] as? TMessage else {
|
|
|
- return
|
|
|
- }
|
|
|
- let isUser = User.getData(pin: dataMessage.getBody(key: CoreMessage_TMessageKey.L_PIN)) != nil
|
|
|
- let chatId = dataMessage.getBody(key: CoreMessage_TMessageKey.CHAT_ID, default_value: "").isEmpty ? dataMessage.getBody(key: CoreMessage_TMessageKey.L_PIN) : dataMessage.getBody(key: CoreMessage_TMessageKey.CHAT_ID, default_value: "")
|
|
|
- let pin = isUser ? dataMessage.getBody(key: CoreMessage_TMessageKey.F_PIN) : chatId
|
|
|
- let messageId = dataMessage.getBody(key: CoreMessage_TMessageKey.MESSAGE_ID)
|
|
|
- if let index = chats.firstIndex(of: Chat(pin: pin)) {
|
|
|
- guard let chat = Chat.getData(messageId: messageId).first else {
|
|
|
- return
|
|
|
- }
|
|
|
- DispatchQueue.main.async {
|
|
|
- if self.segment.selectedSegmentIndex == 0 {
|
|
|
- self.tableView.beginUpdates()
|
|
|
- self.chats.remove(at: index)
|
|
|
- self.tableView.deleteRows(at: [IndexPath(row: index, section: 0)], with: .none)
|
|
|
- }
|
|
|
- self.chats.insert(chat, at: 0)
|
|
|
- if self.segment.selectedSegmentIndex == 0 {
|
|
|
- self.tableView.insertRows(at: [IndexPath(row: 0, section: 0)], with: .none)
|
|
|
- self.tableView.endUpdates()
|
|
|
- }
|
|
|
- }
|
|
|
- } else {
|
|
|
- guard let chat = Chat.getData(messageId: messageId).first else {
|
|
|
- return
|
|
|
- }
|
|
|
- DispatchQueue.main.async {
|
|
|
- if self.segment.selectedSegmentIndex == 0 {
|
|
|
- self.tableView.beginUpdates()
|
|
|
- }
|
|
|
- self.chats.insert(chat, at: 0)
|
|
|
- if self.segment.selectedSegmentIndex == 0 {
|
|
|
- self.tableView.insertRows(at: [IndexPath(row: 0, section: 0)], with: .none)
|
|
|
- self.tableView.endUpdates()
|
|
|
- }
|
|
|
+ DispatchQueue.main.async {
|
|
|
+ self.getChats {
|
|
|
+ self.tableView.reloadData()
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- @objc func add(sender: Any) {
|
|
|
-
|
|
|
- }
|
|
|
-
|
|
|
- @objc func cancel(sender: Any) {
|
|
|
- navigationController?.dismiss(animated: true, completion: nil)
|
|
|
- }
|
|
|
-
|
|
|
@objc func segmentChanged(sender: Any) {
|
|
|
filterContentForSearchText(searchController.searchBar.text!)
|
|
|
}
|
|
@@ -609,7 +581,7 @@ extension SecondTabViewController: UITableViewDelegate, UITableViewDataSource {
|
|
|
var sects = 0
|
|
|
var sect = indexPath.section
|
|
|
var id = group.id
|
|
|
- if let e = groupMap[id] {
|
|
|
+ if let _ = groupMap[id] {
|
|
|
var loooop = true
|
|
|
repeat {
|
|
|
let c = sect + 1
|
|
@@ -737,7 +709,7 @@ extension SecondTabViewController: UITableViewDelegate, UITableViewDataSource {
|
|
|
if segment.selectedSegmentIndex == 2, let groups = fillteredData as? [Group] {
|
|
|
let group = groups[section]
|
|
|
if group.isSelected {
|
|
|
- if let g = groupMap[group.id] {
|
|
|
+ if let _ = groupMap[group.id] {
|
|
|
value = 1
|
|
|
}
|
|
|
else {
|
|
@@ -755,7 +727,7 @@ extension SecondTabViewController: UITableViewDelegate, UITableViewDataSource {
|
|
|
case 1:
|
|
|
let group = groups[section]
|
|
|
if group.isSelected {
|
|
|
- if let g = groupMap[group.id] {
|
|
|
+ if let _ = groupMap[group.id] {
|
|
|
value = 1
|
|
|
}
|
|
|
else {
|
|
@@ -963,5 +935,25 @@ extension SecondTabViewController: UISearchControllerDelegate, UISearchBarDelega
|
|
|
return mutableAttributedString
|
|
|
}
|
|
|
|
|
|
+ func searchBarTextDidBeginEditing(_ searchBar: UISearchBar) {
|
|
|
+ searchBar.showsCancelButton = true
|
|
|
+ let cBtn = searchBar.value(forKey: "cancelButton") as? UIButton // private-API lookup: conditional cast so a UIKit-internal rename degrades gracefully instead of crashing
|
|
|
+ cBtn?.setTitle("Cancel".localized(), for: .normal)
|
|
|
+ }
|
|
|
+
|
|
|
+ func searchBarTextDidEndEditing(_ searchBar: UISearchBar) {
|
|
|
+ searchBar.showsCancelButton = false
|
|
|
+ }
|
|
|
+
|
|
|
}
|
|
|
|
|
|
+extension SecondTabViewController: SFSpeechRecognizerDelegate {
|
|
|
+
|
|
|
+ func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
|
|
|
+ if available {
|
|
|
+ self.isAllowSpeech = true
|
|
|
+ } else {
|
|
|
+ self.isAllowSpeech = false
|
|
|
+ }
|
|
|
+ }
|
|
|
+}
|