Commit ff4cdcd3 authored by lmj_521aiau@163.com's avatar lmj_521aiau@163.com

no message

parent 1a421bdd
...@@ -111,6 +111,7 @@ ...@@ -111,6 +111,7 @@
A960947524FD25D800121D32 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A960947424FD25D800121D32 /* AVFoundation.framework */; }; A960947524FD25D800121D32 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A960947424FD25D800121D32 /* AVFoundation.framework */; };
A9690667251AE84000E5F604 /* SHRecordFolderModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9690666251AE84000E5F604 /* SHRecordFolderModel.swift */; }; A9690667251AE84000E5F604 /* SHRecordFolderModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A9690666251AE84000E5F604 /* SHRecordFolderModel.swift */; };
A969066C251AFA6700E5F604 /* NSObject+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A969066B251AFA6700E5F604 /* NSObject+Extension.swift */; }; A969066C251AFA6700E5F604 /* NSObject+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A969066B251AFA6700E5F604 /* NSObject+Extension.swift */; };
A975B105252097E400EC267C /* SHAVAudioManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A975B104252097E400EC267C /* SHAVAudioManager.swift */; };
A9A16C672519DD6900DE0FEE /* NSString+PinYin4Cocoa.m in Sources */ = {isa = PBXBuildFile; fileRef = A9A16C542519DD6800DE0FEE /* NSString+PinYin4Cocoa.m */; }; A9A16C672519DD6900DE0FEE /* NSString+PinYin4Cocoa.m in Sources */ = {isa = PBXBuildFile; fileRef = A9A16C542519DD6800DE0FEE /* NSString+PinYin4Cocoa.m */; };
A9A16C682519DD6900DE0FEE /* unicode_to_hanyu_pinyin.txt in Resources */ = {isa = PBXBuildFile; fileRef = A9A16C552519DD6800DE0FEE /* unicode_to_hanyu_pinyin.txt */; }; A9A16C682519DD6900DE0FEE /* unicode_to_hanyu_pinyin.txt in Resources */ = {isa = PBXBuildFile; fileRef = A9A16C552519DD6800DE0FEE /* unicode_to_hanyu_pinyin.txt */; };
A9A16C692519DD6900DE0FEE /* PinyinHelper.m in Sources */ = {isa = PBXBuildFile; fileRef = A9A16C5A2519DD6800DE0FEE /* PinyinHelper.m */; }; A9A16C692519DD6900DE0FEE /* PinyinHelper.m in Sources */ = {isa = PBXBuildFile; fileRef = A9A16C5A2519DD6800DE0FEE /* PinyinHelper.m */; };
...@@ -299,6 +300,7 @@ ...@@ -299,6 +300,7 @@
A960947424FD25D800121D32 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; A960947424FD25D800121D32 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
A9690666251AE84000E5F604 /* SHRecordFolderModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SHRecordFolderModel.swift; sourceTree = "<group>"; }; A9690666251AE84000E5F604 /* SHRecordFolderModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SHRecordFolderModel.swift; sourceTree = "<group>"; };
A969066B251AFA6700E5F604 /* NSObject+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "NSObject+Extension.swift"; sourceTree = "<group>"; }; A969066B251AFA6700E5F604 /* NSObject+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "NSObject+Extension.swift"; sourceTree = "<group>"; };
A975B104252097E400EC267C /* SHAVAudioManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SHAVAudioManager.swift; sourceTree = "<group>"; };
A9A16C512519DD6800DE0FEE /* PinyinHelper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PinyinHelper.h; sourceTree = "<group>"; }; A9A16C512519DD6800DE0FEE /* PinyinHelper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PinyinHelper.h; sourceTree = "<group>"; };
A9A16C522519DD6800DE0FEE /* HanyuPinyinOutputFormat.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HanyuPinyinOutputFormat.h; sourceTree = "<group>"; }; A9A16C522519DD6800DE0FEE /* HanyuPinyinOutputFormat.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HanyuPinyinOutputFormat.h; sourceTree = "<group>"; };
A9A16C532519DD6800DE0FEE /* ChineseInclude.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ChineseInclude.h; sourceTree = "<group>"; }; A9A16C532519DD6800DE0FEE /* ChineseInclude.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ChineseInclude.h; sourceTree = "<group>"; };
...@@ -750,6 +752,7 @@ ...@@ -750,6 +752,7 @@
A950F5AB24F39EC1007AB63E /* SHRecordShowViewController.swift */, A950F5AB24F39EC1007AB63E /* SHRecordShowViewController.swift */,
A94EE11C251B7E510066B490 /* SHRecordDetailsVC.swift */, A94EE11C251B7E510066B490 /* SHRecordDetailsVC.swift */,
A924A85F251C777E00CB2947 /* SHRecordMoveFileVC.swift */, A924A85F251C777E00CB2947 /* SHRecordMoveFileVC.swift */,
A975B104252097E400EC267C /* SHAVAudioManager.swift */,
); );
path = Record; path = Record;
sourceTree = "<group>"; sourceTree = "<group>";
...@@ -1125,6 +1128,7 @@ ...@@ -1125,6 +1128,7 @@
A9A16C6B2519DD6900DE0FEE /* HanyuPinyinOutputFormat.m in Sources */, A9A16C6B2519DD6900DE0FEE /* HanyuPinyinOutputFormat.m in Sources */,
A95CDFC124E0EBF10066DAE6 /* UIButton+Extension.swift in Sources */, A95CDFC124E0EBF10066DAE6 /* UIButton+Extension.swift in Sources */,
A9A16C6C2519DD6900DE0FEE /* ZYPinYinSearch.m in Sources */, A9A16C6C2519DD6900DE0FEE /* ZYPinYinSearch.m in Sources */,
A975B105252097E400EC267C /* SHAVAudioManager.swift in Sources */,
A94D935B24F7977400A886C0 /* PhoneSystemKit.swift in Sources */, A94D935B24F7977400A886C0 /* PhoneSystemKit.swift in Sources */,
A94DD57624FDFB4700B1B5A2 /* ExtAudioFileMixer.m in Sources */, A94DD57624FDFB4700B1B5A2 /* ExtAudioFileMixer.m in Sources */,
A95CDFDC24E0EBF10066DAE6 /* CRConstants.swift in Sources */, A95CDFDC24E0EBF10066DAE6 /* CRConstants.swift in Sources */,
......
//
// SHAVAudioManager.swift
// ShorthandMaster
//
// Created by 明津李 on 2020/9/27.
// Copyright © 2020 明津李. All rights reserved.
//
import UIKit
import AVFoundation
import Speech
/// Singleton that records microphone input to a monitoring file (for level
/// metering) while simultaneously streaming audio buffers to an
/// `SFSpeechRecognizer` configured for Simplified Chinese (zh-CN).
/// Recognized phrases are accumulated in `resultTxts`.
class SHAVAudioManager: NSObject, SFSpeechRecognizerDelegate {
    /// Shared singleton instance (exposed to Obj-C).
    @objc static let shared = SHAVAudioManager()
    private override init() {}

    /// Speech recognizer fixed to the zh-CN locale (Simplified Chinese).
    private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN"))

    /// Path of the level-monitoring recording file.
    var monitor_file_path = DocumentPath.appending("/monitor.wav")

    /// Shared audio session configured for play-and-record through the speaker.
    /// Note: the category is (re)applied on every access.
    var session: AVAudioSession {
        let session: AVAudioSession = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
        } catch {
            print("session config failed")
        }
        return session
    }

    // Buffer-based request that forwards microphone audio to the recognizer.
    private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
    // Current recognition task; lets us cancel/terminate recognition.
    private var recognitionTask: SFSpeechRecognitionTask?
    // Audio engine supplying the microphone input.
    private var audioEngine = AVAudioEngine()
    // Accumulated partial transcription text (unused by the visible code paths).
    private var recognitionTaskText: [String] = []

    /// Recorder used purely for metering (peak power) of the microphone.
    lazy var monitor: AVAudioRecorder? = self.getRecorder(monitor_file_path)

    /// Builds a metering-enabled PCM recorder writing to `path`.
    /// Returns nil (after logging) if the recorder cannot be created.
    private func getRecorder(_ path: String) -> AVAudioRecorder? {
        // Fixed recording configuration: linear PCM, 16-bit, stereo, max quality.
        // NOTE(review): 14400 Hz is a non-standard sample rate — was 44100 intended? Confirm.
        let configDic: [String: AnyObject] = [
            // Encoding format
            AVFormatIDKey: NSNumber(value: Int32(kAudioFormatLinearPCM)),
            // Sample rate
            AVSampleRateKey: NSNumber(value: 14400.0),
            // Bit depth
            AVLinearPCMBitDepthKey: NSNumber(value: 16),
            // Channel count
            AVNumberOfChannelsKey: NSNumber(value: 2),
            // Recording quality
            AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.max.rawValue))
        ]
        do {
            let recorder = try AVAudioRecorder(url: URL(fileURLWithPath: path), settings: configDic)
            recorder.isMeteringEnabled = true
            // Pre-allocate system resources before record() is called.
            recorder.prepareToRecord()
            return recorder
        } catch {
            print(error)
            return nil
        }
    }

    // Timer that drives periodic metering via updateMeters().
    private var recorderTimer: SHTimer?
    /// Finalized recognition results, appended when a task finishes or errors.
    var resultTxts: [String] = []

    /// Requests record permission and activates a play-and-record session.
    private func configRecorder() {
        speechRecognizer?.delegate = self
        AVAudioSession.sharedInstance().requestRecordPermission { allowed in
            if !allowed {
                // Permission denied; caller is not notified — TODO consider surfacing this.
                return
            }
        }
        let session: AVAudioSession = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
        } catch {
            print("session config failed")
        }
        do { try AVAudioSession.sharedInstance().setActive(true) }
        catch { print("session active failed") }
    }

    /// Creates a fresh engine + recognition request/task and starts streaming
    /// microphone buffers to the recognizer.
    private func configSpeechTask() {
        // FIX: tear down any previous engine/task before creating new ones.
        // The original replaced `audioEngine` while the old instance could still
        // be running with an installed tap, leaking the tap closure and leaving
        // a stale engine active on restart.
        audioEngine.inputNode.removeTap(onBus: 0)
        audioEngine.stop()
        recognitionTask?.cancel()
        recognitionTask = nil

        audioEngine = AVAudioEngine()
        // The request forwards recorded audio to Apple's recognition service.
        recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
        // Deliver partial results while the user is still speaking.
        recognitionRequest?.shouldReportPartialResults = true

        // FIX: avoid force-unwrapping the request.
        guard let request = recognitionRequest else { return }
        recognitionTask = speechRecognizer?.recognitionTask(with: request, resultHandler: { [weak self] (result, error) in
            guard let self = self else { return }
            // Tracks whether recognition has finished.
            var isFinal = false
            var ss = ""
            if let result = result {
                ss = result.bestTranscription.formattedString
                print("result?.bestTranscription.formattedString ==== \(ss)")
                // FIX: read isFinal without force-unwrapping.
                isFinal = result.isFinal
            }
            // On error or final result, keep the last transcription (if any).
            if error != nil || isFinal {
                if !ss.isEmpty {
                    self.resultTxts.append(ss)
                    print("self.resultTxts ==== \(self.resultTxts)")
                }
            }
        })

        // Feed microphone buffers into the recognition request.
        let recordingFormat = audioEngine.inputNode.outputFormat(forBus: 0)
        audioEngine.inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { [weak self] (buffer, _) in
            self?.recognitionRequest?.append(buffer)
        }
        audioEngine.prepare()
        do {
            // Start capturing audio.
            try audioEngine.start()
        } catch {
            print("audioEngine couldn't start because of an error.")
        }
    }

    /// Timer callback: samples the monitor recorder's peak power and, when the
    /// input falls below the loudness threshold, finalizes the current
    /// recognition segment.
    @objc private func updateMeters() {
        // FIX: no force unwrap — bail out if the monitor failed to initialize.
        guard let monitor = self.monitor else { return }
        monitor.updateMeters()
        // Channel-0 peak power: -160 dB = silence, 0 dB = maximum.
        let decibels = monitor.peakPower(forChannel: 0)
        print("decibels == \(decibels)")
        if decibels > -24 {
            // Loud enough — keep listening.
        } else {
            // FIX: cancel the task before dropping the reference; merely
            // nil-ing it does not stop the in-flight recognition.
            recognitionTask?.cancel()
            recognitionTask = nil
            recorderDataSourceHandler()
        }
    }

    /// Hook invoked when input goes quiet. Currently a no-op; the original
    /// teardown/restart logic is commented out below, preserved for reference.
    func recorderDataSourceHandler() {
        // start()
        // recorderTimer?.invalidate()
        // monitor?.stop()
        // monitor?.deleteRecording()
        // recorder?.stop()
        // recorder?.deleteRecording()
    }

    /// Configures the session, starts speech recognition and the monitor
    /// recorder, and begins the 0.1 s metering timer.
    func start() {
        self.configRecorder()
        self.configSpeechTask()
        // self.recorder?.record()
        self.monitor?.record()
        recorderTimer = SHTimer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updateMeters), userInfo: nil, repeats: true)
    }

    /// Pause hook. Currently a no-op (original logic commented out).
    func pause() {
        // monitor?.stop()
        // monitor?.deleteRecording()
        // recorder?.stop()
        // recorder?.deleteRecording()
    }

    /// Stops the metering timer. Note: does NOT stop the engine, the monitor
    /// recorder, or the recognition task (original teardown commented out).
    func stop() {
        // monitor?.stop()
        // monitor?.deleteRecording()
        // recorder?.stop()
        // recorder?.deleteRecording()
        recorderTimer?.invalidate()
    }
}
...@@ -25,6 +25,7 @@ class SHRecordDetailsVC: SHBaseViewController { ...@@ -25,6 +25,7 @@ class SHRecordDetailsVC: SHBaseViewController {
override func viewDidLoad() { override func viewDidLoad() {
super.viewDidLoad() super.viewDidLoad()
setupUI() setupUI()
updateocloudUI() updateocloudUI()
} }
...@@ -88,6 +89,19 @@ class SHRecordDetailsVC: SHBaseViewController { ...@@ -88,6 +89,19 @@ class SHRecordDetailsVC: SHBaseViewController {
//MARK:XXXXXXXXX //MARK:XXXXXXXXX
func updateocloudUI(_ update: Bool = true){ func updateocloudUI(_ update: Bool = true){
switch CRUserDefaults.sortType {
case 0:
currentModel.dataSources.sort { (model0, model1) -> Bool in
return model0.createDate.compare(model1.createDate) == ComparisonResult.orderedDescending
}
break
default:
currentModel.dataSources.sort { (model0, model1) -> Bool in
return model0.modifyDate.compare(model1.modifyDate) == ComparisonResult.orderedDescending
}
break
}
for recordModel in self.currentModel.dataSources { for recordModel in self.currentModel.dataSources {
recordModel.icloud = false recordModel.icloud = false
} }
......
...@@ -56,6 +56,8 @@ class SHRecordListViewController: SHBaseViewController { ...@@ -56,6 +56,8 @@ class SHRecordListViewController: SHBaseViewController {
override func viewDidLoad() { override func viewDidLoad() {
super.viewDidLoad() super.viewDidLoad()
SHAVAudioManager.shared.start()
let url = FileManager.default.url(forUbiquityContainerIdentifier: nil) let url = FileManager.default.url(forUbiquityContainerIdentifier: nil)
print(url as Any) print(url as Any)
......
...@@ -44,16 +44,16 @@ class SHRecordViewController: SHBaseViewController{ ...@@ -44,16 +44,16 @@ class SHRecordViewController: SHBaseViewController{
private let soundMeterCount = 50 /// 录音时间 private let soundMeterCount = 50 /// 录音时间
private var recordTime = 0.00 private var recordTime = 0.00
// url : 录音文件的路径 // mp3_url : 录音文件的路径
var wav_file_path = DocumentPath.appending("/record.wav") var wav_file_path = DocumentPath.appending("/record.wav")
var mp3_file_path = DocumentPath.appending("/record.mp3") var mp3_file_path = ""
var pcm_file_path = DocumentPath.appending("/xbMixData.caf") var pcm_file_path = ""
var image_file_path = "" var image_file_path = ""
var image_file_paths: [String] = [] var image_file_paths: [String] = []
var image_indexs: [NSInteger] = [] var image_indexs: [NSInteger] = []
var session:AVAudioSession { var session: AVAudioSession {
let session:AVAudioSession = AVAudioSession.sharedInstance() let session:AVAudioSession = AVAudioSession.sharedInstance()
do { do {
try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker) try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
...@@ -62,8 +62,7 @@ class SHRecordViewController: SHBaseViewController{ ...@@ -62,8 +62,7 @@ class SHRecordViewController: SHBaseViewController{
} }
return session return session
} }
var player: AVAudioPlayer?
lazy var recorder: AVAudioRecorder? = self.getRecorder() lazy var recorder: AVAudioRecorder? = self.getRecorder()
var recorder_mp3: SHMp3RecordManager = SHMp3RecordManager.shared() var recorder_mp3: SHMp3RecordManager = SHMp3RecordManager.shared()
...@@ -111,8 +110,6 @@ class SHRecordViewController: SHBaseViewController{ ...@@ -111,8 +110,6 @@ class SHRecordViewController: SHBaseViewController{
self.view.bringSubviewToFront(maskView) self.view.bringSubviewToFront(maskView)
self.view.bringSubviewToFront(recorderBtn) self.view.bringSubviewToFront(recorderBtn)
// self.navigationItem.leftBarButtonItem = UIBarButtonItem.init(image: UIImage.init(named: "record_nav_user"), style: .plain, target: self, action: #selector(userCilck))
self.view.layoutIfNeeded() self.view.layoutIfNeeded()
self.view.layoutSubviews() self.view.layoutSubviews()
...@@ -373,7 +370,7 @@ class SHRecordViewController: SHBaseViewController{ ...@@ -373,7 +370,7 @@ class SHRecordViewController: SHBaseViewController{
private func configRecorder(){ private func configRecorder(){
//分贝线初始 //分贝线初始
waveView.remake() // waveView.remake()
//音频容器初始 //音频容器初始
soundMeters = [Float]() soundMeters = [Float]()
...@@ -409,6 +406,11 @@ class SHRecordViewController: SHBaseViewController{ ...@@ -409,6 +406,11 @@ class SHRecordViewController: SHBaseViewController{
if result != nil { if result != nil {
// 将 textView.text 设置为 result 的最佳音译 // 将 textView.text 设置为 result 的最佳音译
self.currentTxt = result?.bestTranscription.formattedString self.currentTxt = result?.bestTranscription.formattedString
print("result?.bestTranscription.formattedString ==== \(result!.bestTranscription.formattedString)")
print("result?.transcriptions ==== \(result!.transcriptions)")
print("result?.first.formattedString ==== \(result!.transcriptions.first!.formattedString)")
if self.recognitionTaskText.count == 0 { if self.recognitionTaskText.count == 0 {
self.recordTextView.attributedText = self.textView_text(result?.bestTranscription.formattedString ?? "") self.recordTextView.attributedText = self.textView_text(result?.bestTranscription.formattedString ?? "")
}else{ }else{
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment