Commit 5b87fdb9 authored by lmj_521aiau@163.com

ui recorder

parent cbe2f15d
......@@ -52,6 +52,8 @@
A950F5AF24F4E06E007AB63E /* SHMineViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A950F5AE24F4E06E007AB63E /* SHMineViewController.swift */; };
A950F5B124F4E080007AB63E /* Mine.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A950F5B024F4E080007AB63E /* Mine.storyboard */; };
A950F5B624F4E64A007AB63E /* UIView+CornerRadii.m in Sources */ = {isa = PBXBuildFile; fileRef = A950F5B524F4E64A007AB63E /* UIView+CornerRadii.m */; };
A95A76872521D70F003B4E1C /* UIView+LayoutMethods.m in Sources */ = {isa = PBXBuildFile; fileRef = A95A76842521D70F003B4E1C /* UIView+LayoutMethods.m */; };
A95A76882521D70F003B4E1C /* DDSoundWaveView.m in Sources */ = {isa = PBXBuildFile; fileRef = A95A76852521D70F003B4E1C /* DDSoundWaveView.m */; };
A95B3FCB24F507A900FABDD1 /* SHDeleteDetailsViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A95B3FCA24F507A900FABDD1 /* SHDeleteDetailsViewController.swift */; };
A95B3FCD24F50B2F00FABDD1 /* SHRecentDeleteCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = A95B3FCC24F50B2F00FABDD1 /* SHRecentDeleteCell.swift */; };
A95B3FD024F525AD00FABDD1 /* SwiftHoledView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A95B3FCF24F525AD00FABDD1 /* SwiftHoledView.swift */; };
......@@ -105,7 +107,6 @@
A95CE02F24E151340066DAE6 /* UIButton+Category.m in Sources */ = {isa = PBXBuildFile; fileRef = A95CE02D24E151340066DAE6 /* UIButton+Category.m */; };
A95CE03224E1521F0066DAE6 /* SHRecordViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A95CE03124E1521F0066DAE6 /* SHRecordViewController.swift */; };
A95CE03424E157CF0066DAE6 /* Record.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A95CE03324E157CF0066DAE6 /* Record.storyboard */; };
A95CE03624E1729B0066DAE6 /* SHRecordWaveView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A95CE03524E1729B0066DAE6 /* SHRecordWaveView.swift */; };
A95CE03824E17BAF0066DAE6 /* SHTimer.swift in Sources */ = {isa = PBXBuildFile; fileRef = A95CE03724E17BAF0066DAE6 /* SHTimer.swift */; };
A95CE03A24E2AE6A0066DAE6 /* SHRecordModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A95CE03924E2AE6A0066DAE6 /* SHRecordModel.swift */; };
A960947524FD25D800121D32 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A960947424FD25D800121D32 /* AVFoundation.framework */; };
......@@ -229,6 +230,10 @@
A950F5B024F4E080007AB63E /* Mine.storyboard */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; path = Mine.storyboard; sourceTree = "<group>"; };
A950F5B424F4E64A007AB63E /* UIView+CornerRadii.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIView+CornerRadii.h"; sourceTree = "<group>"; };
A950F5B524F4E64A007AB63E /* UIView+CornerRadii.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIView+CornerRadii.m"; sourceTree = "<group>"; };
A95A76832521D70F003B4E1C /* DDSoundWaveView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DDSoundWaveView.h; sourceTree = "<group>"; };
A95A76842521D70F003B4E1C /* UIView+LayoutMethods.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIView+LayoutMethods.m"; sourceTree = "<group>"; };
A95A76852521D70F003B4E1C /* DDSoundWaveView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DDSoundWaveView.m; sourceTree = "<group>"; };
A95A76862521D70F003B4E1C /* UIView+LayoutMethods.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIView+LayoutMethods.h"; sourceTree = "<group>"; };
A95B3FCA24F507A900FABDD1 /* SHDeleteDetailsViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SHDeleteDetailsViewController.swift; sourceTree = "<group>"; };
A95B3FCC24F50B2F00FABDD1 /* SHRecentDeleteCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SHRecentDeleteCell.swift; sourceTree = "<group>"; };
A95B3FCF24F525AD00FABDD1 /* SwiftHoledView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SwiftHoledView.swift; sourceTree = "<group>"; };
......@@ -294,7 +299,6 @@
A95CE02E24E151340066DAE6 /* UIButton+Category.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIButton+Category.h"; sourceTree = "<group>"; };
A95CE03124E1521F0066DAE6 /* SHRecordViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SHRecordViewController.swift; sourceTree = "<group>"; };
A95CE03324E157CF0066DAE6 /* Record.storyboard */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; path = Record.storyboard; sourceTree = "<group>"; };
A95CE03524E1729B0066DAE6 /* SHRecordWaveView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SHRecordWaveView.swift; sourceTree = "<group>"; };
A95CE03724E17BAF0066DAE6 /* SHTimer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SHTimer.swift; sourceTree = "<group>"; };
A95CE03924E2AE6A0066DAE6 /* SHRecordModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SHRecordModel.swift; sourceTree = "<group>"; wrapsLines = 1; };
A960947424FD25D800121D32 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
......@@ -467,10 +471,10 @@
A94EE121251B7E5A0066B490 /* View */ = {
isa = PBXGroup;
children = (
A95A76822521D70F003B4E1C /* DDSoundWave */,
A924A857251B89D000CB2947 /* SHRecordDetailsCell.swift */,
A950F5A924F3727A007AB63E /* SHRecordListCell.swift */,
A95B3FD124F5261100FABDD1 /* SHRecordGuideView.swift */,
A95CE03524E1729B0066DAE6 /* SHRecordWaveView.swift */,
A94D935124F7502700A886C0 /* SHRecordExportAlertView.swift */,
A94D935324F7503E00A886C0 /* SHRecordExportAlertView.xib */,
A94DD57724FE295300B1B5A2 /* SHMemberUpgradeAlertView.h */,
......@@ -519,6 +523,17 @@
path = Other;
sourceTree = "<group>";
};
A95A76822521D70F003B4E1C /* DDSoundWave */ = {
isa = PBXGroup;
children = (
A95A76832521D70F003B4E1C /* DDSoundWaveView.h */,
A95A76852521D70F003B4E1C /* DDSoundWaveView.m */,
A95A76862521D70F003B4E1C /* UIView+LayoutMethods.h */,
A95A76842521D70F003B4E1C /* UIView+LayoutMethods.m */,
);
path = DDSoundWave;
sourceTree = "<group>";
};
A95B3FCE24F525AD00FABDD1 /* SwiftHoledView */ = {
isa = PBXGroup;
children = (
......@@ -668,6 +683,7 @@
A95CDFED24E0F42E0066DAE6 /* Managers */ = {
isa = PBXGroup;
children = (
A975B104252097E400EC267C /* SHAVAudioManager.swift */,
A94D935D24F7AF2300A886C0 /* SHLocationManager.swift */,
A94DD57124FDF9D200B1B5A2 /* SHMp3RecordManager.h */,
A94DD57224FDF9D200B1B5A2 /* SHMp3RecordManager.m */,
......@@ -752,7 +768,6 @@
A950F5AB24F39EC1007AB63E /* SHRecordShowViewController.swift */,
A94EE11C251B7E510066B490 /* SHRecordDetailsVC.swift */,
A924A85F251C777E00CB2947 /* SHRecordMoveFileVC.swift */,
A975B104252097E400EC267C /* SHAVAudioManager.swift */,
);
path = Record;
sourceTree = "<group>";
......@@ -1130,6 +1145,7 @@
A9A16C6C2519DD6900DE0FEE /* ZYPinYinSearch.m in Sources */,
A975B105252097E400EC267C /* SHAVAudioManager.swift in Sources */,
A94D935B24F7977400A886C0 /* PhoneSystemKit.swift in Sources */,
A95A76872521D70F003B4E1C /* UIView+LayoutMethods.m in Sources */,
A94DD57624FDFB4700B1B5A2 /* ExtAudioFileMixer.m in Sources */,
A95CDFDC24E0EBF10066DAE6 /* CRConstants.swift in Sources */,
A95CE00F24E0F42F0066DAE6 /* AESCipher.m in Sources */,
......@@ -1138,8 +1154,8 @@
A94D935924F7969600A886C0 /* CMNetworkManager.swift in Sources */,
A94DD56724FDF29700B1B5A2 /* XBAudioUnitPlayer.m in Sources */,
A94DD56F24FDF29700B1B5A2 /* XBAACEncoder_system.m in Sources */,
A95CE03624E1729B0066DAE6 /* SHRecordWaveView.swift in Sources */,
A950F5AA24F3727A007AB63E /* SHRecordListCell.swift in Sources */,
A95A76882521D70F003B4E1C /* DDSoundWaveView.m in Sources */,
A9A16C812519DDC400DE0FEE /* PopContainerView.swift in Sources */,
CC13DB79251CA62800835654 /* SHImputAudioButton.swift in Sources */,
A94DD56B24FDF29700B1B5A2 /* XBAudioConverterPlayer.m in Sources */,
......
......@@ -44,6 +44,7 @@ class SHMineViewController: SHBaseViewController {
tableView.backgroundColor = UIColor.groupTableViewBackground
tableView.layoutIfNeeded()
tableView.layoutSubviews()
self.lockBtn.addTarget(self, action: #selector(memberBtnClick(_:)), for: .touchUpInside)
}
func getUserInfo(){
......@@ -60,6 +61,12 @@ class SHMineViewController: SHBaseViewController {
}
}
@objc func memberBtnClick(_ sender: UIButton){
if SHUserAccountManager.shared.isMember == false {
self.goGoods("home_pop")
}
}
@objc func setting(){
let setting = UIStoryboard.init(name: "Mine", bundle: nil).instantiateViewController(withIdentifier: "SHSettingViewController") as! SHSettingViewController
self.navigationController?.pushViewController(setting, animated: true)
......@@ -67,7 +74,9 @@ class SHMineViewController: SHBaseViewController {
func sortType(){
UIAlertController.showActionSheet(withTitle: "", message: "", cancelBtnTitle: "取消", otherBtnTitles: sortTypes) { (index) in
if index == 0{
return
}
CRUserDefaults.sortType = index-1
self.currentSortType = self.sortTypes[index-1]
self.tableView.reloadData()
......
......@@ -10,7 +10,7 @@ import UIKit
class SHTimer: NSObject {
private(set) var _timer: Timer!
var _timer: Timer!
fileprivate weak var _aTarget: AnyObject!
fileprivate var _aSelector: Selector!
var fireDate: Date {
......
......@@ -372,7 +372,7 @@
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Label" textAlignment="natural" lineBreakMode="tailTruncation" numberOfLines="3" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="FVv-OG-DwV">
<rect key="frame" x="18" y="80.5" width="35.5" height="17"/>
<rect key="frame" x="18" y="80.5" width="309" height="17"/>
<fontDescription key="fontDescription" type="system" pointSize="14"/>
<nil key="textColor"/>
<nil key="highlightedColor"/>
......@@ -408,7 +408,7 @@
<constraint firstItem="6je-jR-opG" firstAttribute="leading" secondItem="6bV-3V-R81" secondAttribute="leading" id="3MN-xi-qGf"/>
<constraint firstItem="6je-jR-opG" firstAttribute="top" secondItem="FVv-OG-DwV" secondAttribute="bottom" constant="10" id="4KH-Iy-ZhP"/>
<constraint firstItem="XAw-V9-smq" firstAttribute="centerY" secondItem="6je-jR-opG" secondAttribute="centerY" id="50B-aE-1WZ"/>
<constraint firstAttribute="trailing" relation="greaterThanOrEqual" secondItem="FVv-OG-DwV" secondAttribute="trailing" constant="17" id="8HX-P1-g1a"/>
<constraint firstAttribute="trailing" secondItem="FVv-OG-DwV" secondAttribute="trailing" constant="17" id="8HX-P1-g1a"/>
<constraint firstItem="6bV-3V-R81" firstAttribute="leading" secondItem="Ace-iU-keq" secondAttribute="leading" constant="18" id="IIf-qf-lrN"/>
<constraint firstItem="FVv-OG-DwV" firstAttribute="top" secondItem="cAa-wI-k0B" secondAttribute="bottom" constant="14" id="Lpt-nL-sQY"/>
<constraint firstAttribute="trailing" secondItem="SzJ-Gk-Tag" secondAttribute="trailing" constant="18" id="NMg-Q5-D9g"/>
......@@ -418,7 +418,7 @@
<constraint firstItem="6bV-3V-R81" firstAttribute="top" secondItem="Ace-iU-keq" secondAttribute="top" constant="18" id="Sj5-yA-Hdn"/>
<constraint firstItem="FVv-OG-DwV" firstAttribute="leading" secondItem="6bV-3V-R81" secondAttribute="leading" id="k4e-UV-52H"/>
<constraint firstItem="cAa-wI-k0B" firstAttribute="top" secondItem="6bV-3V-R81" secondAttribute="bottom" constant="10" id="mde-3T-2mf"/>
<constraint firstAttribute="trailing" relation="greaterThanOrEqual" secondItem="6bV-3V-R81" secondAttribute="trailing" constant="18" id="xug-bT-eC2"/>
<constraint firstAttribute="trailing" relation="greaterThanOrEqual" secondItem="6bV-3V-R81" secondAttribute="trailing" constant="17" id="xug-bT-eC2"/>
</constraints>
</view>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="adA-MF-vWU">
......
......@@ -261,11 +261,13 @@ extension SHRecordDetailsVC {
return
}
AlertControllerTool.alertController(withTitle: "提示", message: "确认删除所选\(selectRecordModels.count)条记录吗?", cancelTitle: "取消", cancel: nil, confirmTitle: "确认", confirmTitleColor: UIColor.red, confirmBlock: { (some) in
let nowDate = Date()
for model in selectRecordModels {
if currentModel.dataSources.contains(model) {
if let index = currentModel.dataSources.firstIndex(where: { $0.pathFile == model.pathFile}) {
currentModel.dataSources.remove(at: index)
for model in self.selectRecordModels {
if self.currentModel.dataSources.contains(model) {
if let index = self.currentModel.dataSources.firstIndex(where: { $0.pathFile == model.pathFile}) {
self.currentModel.dataSources.remove(at: index)
}
}
}
......@@ -274,9 +276,9 @@ extension SHRecordDetailsVC {
if var recordList = list{
for (index, subDict) in recordList.enumerated(){
let subFolderModel:SHRecordFolderModel = getDataDictWith(dict: subDict)
if subFolderModel.id == currentModel.id {
if subFolderModel.id == self.currentModel.id {
subFolderModel.modifyDate = nowDate
subFolderModel.dataSources = currentModel.dataSources
subFolderModel.dataSources = self.currentModel.dataSources
let dic = getDictWith(obj: subFolderModel)
recordList[index] = dic
break
......@@ -285,7 +287,7 @@ extension SHRecordDetailsVC {
CRUserDefaults.recordList = recordList
}
for model in selectRecordModels {
for model in self.selectRecordModels {
SHCloudManager.shared.deleteRecord(model) { (result, models) in
if result {
......@@ -298,14 +300,18 @@ extension SHRecordDetailsVC {
}
}
if currentModel.dataSources.count == 0 {
if self.currentModel.dataSources.count == 0 {
self.editClick(true)
self.markAlertViewShow(false)
bottomView?.cancel = true
self.bottomView?.cancel = true
}
selectRecordModels = []
// edit = false
tableView?.reloadData()
self.selectRecordModels = []
// edit = false
DispatchQueue.main.async {
self.tableView?.reloadData()
}
}, finish: nil)
}
//MARK: Note alert view
func markAlertViewShow(_ selected:Bool){
......
......@@ -56,8 +56,6 @@ class SHRecordListViewController: SHBaseViewController {
override func viewDidLoad() {
super.viewDidLoad()
SHAVAudioManager.shared.start()
let url = FileManager.default.url(forUbiquityContainerIdentifier: nil)
print(url as Any)
......@@ -271,10 +269,6 @@ class SHRecordListViewController: SHBaseViewController {
}
@objc func userCilck(){
// if SHUserAccountManager.shared.isMember == false {
// self.goGoods("home_pop")
// }
addModel()
}
......
......@@ -151,6 +151,7 @@ class SHRecordMoveFileVC: SHBaseViewController {
}
}
}
MBProgressHUD.show("正在处理中,请稍后...", view: nil)
let list = CRUserDefaults.recordList
if var recordList = list{
//增
......@@ -195,6 +196,7 @@ class SHRecordMoveFileVC: SHBaseViewController {
if result {
self.saveSuccessCallBack?(self.currentModel)
DispatchQueue.main.async {
MBProgressHUD.hide()
self.navigationController?.dismiss(animated: true, completion: nil)
}
}
......@@ -204,6 +206,7 @@ class SHRecordMoveFileVC: SHBaseViewController {
if result {
self.saveSuccessCallBack?(self.currentModel)
DispatchQueue.main.async {
MBProgressHUD.hide()
self.navigationController?.dismiss(animated: true, completion: nil)
}
}
......
......@@ -13,7 +13,7 @@ import PDFGenerator
class SHRecordViewController: SHBaseViewController{
private var waveView: SHRecordWaveView!
private var waveView = DDSoundWaveView()
var folderModel: SHRecordFolderModel = SHRecordFolderModel()
var saveSuccessCallBack:((SHRecordFolderModel)->Void)?
......@@ -34,15 +34,6 @@ class SHRecordViewController: SHBaseViewController{
private var secondTimer: SHTimer?
private var seconds: NSInteger = 0
private let non_member_limit_seconds: NSInteger = 60
/// Recording timer
private var waveTimer: SHTimer?
/// Waveform update interval
private let updateFequency = 0.05
/// Sound level samples
private var soundMeters: [Float]!
/// Capacity of the sound level array
private let soundMeterCount = 50 /// Recording time
private var recordTime = 0.00
// mp3_url : path of the recording file
var wav_file_path = DocumentPath.appending("/record.wav")
......@@ -53,38 +44,23 @@ class SHRecordViewController: SHBaseViewController{
var image_file_paths: [String] = []
var image_indexs: [NSInteger] = []
// var session: AVAudioSession {
// let session:AVAudioSession = AVAudioSession.sharedInstance()
// do {
// try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
// }catch{
// print("session config failed")
// }
// return session
// }
// lazy var recorder: AVAudioRecorder? = self.getRecorder()
var recorder_mp3: SHMp3RecordManager = SHMp3RecordManager.shared()
// // Create the speech recognizer and specify the recognition locale, i.e. which language the speech will be transcribed into; here the current region is used, which means Simplified Chinese
//// private let speechRecognizer = SFSpeechRecognizer(locale: Locale.autoupdatingCurrent)
// private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN"))
//
// // Create the speech recognition request, which gives the recognizer an audio input source; here the speech to recognize comes from the audio buffer.
// private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
// // The speech recognition task; it reports recognition progress and can be used to cancel or terminate the current recognition
// private var recognitionTask: SFSpeechRecognitionTask?
// // The audio engine, which supplies the recording input
// private var audioEngine = AVAudioEngine()
// Text data
private var recognitionTaskText: [String] = []
var recorderManager = SHAVAudioManager.shared
override func viewDidLoad() {
super.viewDidLoad()
setupUI()
configPathFile()
configRecorder()
recorderManager.decibelsCallBack = { decibels in
self.waveView.displayWave(Double(decibels))
}
recorderManager.recoderResiltCallBack = { result in
self.currentTxt = result
self.recordTextView.attributedText = self.textView_text(self.currentTxt!)
}
NotificationCenter.default.addObserver(self, selector: #selector(backGroundAlive), name: NSNotification.Name(rawValue: "recordAlive"), object: nil)
}
......@@ -94,9 +70,7 @@ class SHRecordViewController: SHBaseViewController{
}
@objc func backGroundAlive(){
if recorder?.isRecording == true && recognitionTask?.isFinishing == true{
configSpeechTask()
}
recorderManager.start()
}
override func goback() {
......@@ -117,9 +91,9 @@ class SHRecordViewController: SHBaseViewController{
recordTextView?.textContainerInset = UIEdgeInsets.init(top: 20, left: 20, bottom: 20, right: 30)
recordTextView?.isEditable = false
waveView = SHRecordWaveView(frame: waveBgView.bounds, type: .line, capacity: soundMeterCount)
waveView.frame = waveBgView.bounds
waveBgView.addSubview(waveView)
self.waveView.displayWave(Double(0))
setNavTitleAndConfirmBtn()
}
......@@ -149,19 +123,10 @@ class SHRecordViewController: SHBaseViewController{
return
}
if recorder?.isRecording == true {
if var s = self.recognitionTaskText.first{
s = s + "\n" + (currentTxt ?? "") + "\n"
self.recognitionTaskText[0] = s
}
}
stopRecord()
save = true
saveContent()
saveSuccessCallBack?(folderModel)
self.recorderManager.stop()
self.save = true
self.saveContent()
self.saveSuccessCallBack?(folderModel)
let export = SHRecordExportAlertView.loadFromNibAndClass(SHRecordExportAlertView.self)!
export.frame = UIApplication.shared.keyWindow!.bounds
......@@ -177,16 +142,14 @@ class SHRecordViewController: SHBaseViewController{
} else if index == 404{
if self.save == true {
self.currentTxt = ""
self.recognitionTaskText = []
self.recorderBtn.setImage(UIImage.init(named: "record_start"), for: .normal)
self.recordTextView.attributedText = self.textView_text("")
self.configPathFile()
self.configRecorder()
self.recorderBtn.isSelected = false
self.secondsLabel.text = "00:00:00"
self.seconds = 0
self.save = false
self.recorder = self.getRecorder()
self.recorderManager.stop()
}
DispatchQueue.main.asyncAfter(deadline: .now()+0.5) {
self.navigationController?.popViewController(animated: true)
......@@ -197,29 +160,27 @@ class SHRecordViewController: SHBaseViewController{
@objc func userCilck(){
if let txt = recognitionTaskText.first, txt.length == 0 {
if currentTxt?.length == 0{
return
}
stopRecord()
save = true
self.recorderManager.stop()
self.save = true
if let model = currentModel {
self.removeCurrentRecored(model)
}
saveContent()
if save == true {
currentTxt = ""
recognitionTaskText = []
recorderBtn.setImage(UIImage.init(named: "record_start"), for: .normal)
recordTextView.attributedText = self.textView_text("")
configPathFile()
configRecorder()
recorderBtn.isSelected = false
secondsLabel.text = "00:00:00"
seconds = 0
save = false
recorder = self.getRecorder()
self.currentTxt = ""
self.recorderBtn.setImage(UIImage.init(named: "record_start"), for: .normal)
self.recordTextView.attributedText = self.textView_text("")
self.configPathFile()
self.recorderBtn.isSelected = false
self.secondsLabel.text = "00:00:00"
self.seconds = 0
self.save = false
self.recorderManager.stop()
}
let mine = UIStoryboard.init(name: "Mine", bundle: nil).instantiateViewController(withIdentifier: "SHMineViewController") as! SHMineViewController
......@@ -241,33 +202,20 @@ class SHRecordViewController: SHBaseViewController{
SHLocationManager.shared.locationStatusService { (address) in
self.currentAddress = address
if self.recognitionTaskText.count == 0 {
self.recordTextView.attributedText = self.textView_text(self.currentTxt ?? "")
}else{
self.recordTextView.attributedText = self.textView_text(self.recognitionTaskText.first!)
}
}
self.view.sendSubviewToBack(maskView)
recorder?.record()
recorder_mp3.start()
start = true
configSpeechTask()
waveTimer = SHTimer.scheduledTimer(timeInterval: updateFequency, target: self, selector: #selector(updateMeters), userInfo: nil, repeats: true)
secondTimer = SHTimer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(countSecond), userInfo: nil, repeats: true)
self.recorderManager.start()
self.recorder_mp3.start()
self.start = true
self.secondTimer = SHTimer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(countSecond), userInfo: nil, repeats: true)
}else{
recorder?.pause()
recorder_mp3.pause()
start = false
recognitionTask?.cancel()
speechStop()
timerInvalidate()
self.recorderManager.manualPause()
self.recorder_mp3.pause()
self.start = false
self.timerInvalidate()
}
}
......@@ -276,16 +224,10 @@ class SHRecordViewController: SHBaseViewController{
recorderBtn.isSelected = false
recorder?.pause()
recorder_mp3.pause()
start = false
recognitionTask?.cancel()
speechStop()
timerInvalidate()
self.recorderManager.manualPause()
self.recorder_mp3.pause()
self.start = false
self.timerInvalidate()
self.showMemberAlert()
}
return SHUserAccountManager.shared.isMember
......@@ -299,33 +241,6 @@ class SHRecordViewController: SHBaseViewController{
}
}
private func getRecorder() -> AVAudioRecorder?{
// setting : the recording configuration
// Recording parameters (fixed boilerplate settings)
let configDic: [String: AnyObject] = [
// Encoding format
AVFormatIDKey: NSNumber(value: Int32(kAudioFormatLinearPCM)),
// Sample rate
AVSampleRateKey: NSNumber(value: 16000),
// Bit depth per sample
AVLinearPCMBitDepthKey: NSNumber(value: 16),
// Number of channels
AVNumberOfChannelsKey: NSNumber(value: 2),
// Recording quality
AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.min.rawValue))
]
do {
let recorder = try AVAudioRecorder(url: URL(fileURLWithPath: wav_file_path), settings: configDic)
recorder.isMeteringEnabled = true
// Prepare to record (the system allocates some resources for us)
recorder.prepareToRecord()
return recorder
}catch {
print(error)
return nil
}
}
private func configPathFile(){
let fileManager = FileManager.default
......@@ -368,122 +283,9 @@ class SHRecordViewController: SHBaseViewController{
NSLog("mp3_file_path=======\(String(describing: mp3_file_path)) pcm_file_path=======\(String(describing: pcm_file_path))")
}
private func configRecorder(){
// Initialize the decibel wave line
// waveView.remake()
// Initialize the sound sample buffer
soundMeters = [Float]()
speechRecognizer?.delegate = self
AVAudioSession.sharedInstance().requestRecordPermission { (allowed) in
if !allowed {
return
}
}
let session:AVAudioSession = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker)
}catch{
print("session config failed")
}
do { try AVAudioSession.sharedInstance().setActive(true) }
catch { print("session active failed") }
}
private func configSpeechTask(){
audioEngine = AVAudioEngine()
// Initialize the recognition request; it is used later to forward the recorded audio to Apple's servers
recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
// Return recognition results in batches while the user is still speaking
recognitionRequest?.shouldReportPartialResults = true
// Start recognition with recognitionTask.
recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest!, resultHandler: { (result, error) in
// Used to check whether recognition has finished
var isFinal = false
// If result is not nil,
if result != nil {
// set the text view to the best transcription of result
self.currentTxt = result?.bestTranscription.formattedString
print("result?.bestTranscription.formattedString ==== \(result!.bestTranscription.formattedString)")
print("result?.transcriptions ==== \(result!.transcriptions)")
print("result?.first.formattedString ==== \(result!.transcriptions.first!.formattedString)")
if self.recognitionTaskText.count == 0 {
self.recordTextView.attributedText = self.textView_text(result?.bestTranscription.formattedString ?? "")
}else{
self.recordTextView.attributedText = self.textView_text(self.recognitionTaskText.first! + "\n" + (result?.bestTranscription.formattedString)!)
}
// If result is final, set isFinal to true
isFinal = (result?.isFinal)!
}
// If no error occurred, or result is final, stop the audioEngine recording and terminate recognitionRequest and recognitionTask
if error != nil || isFinal {
if self.recognitionTaskText.count == 0 {
self.recognitionTaskText.append((self.currentTxt ?? "") + "\n")
}else{
var s = ""
if let text = self.currentTxt, text.length > 0 {
s = self.recognitionTaskText.first! + "\n" + text + "\n"
} else{
s = self.recognitionTaskText.first!
}
self.recognitionTaskText[0] = s
}
if self.recorderBtn.isSelected == true {
if self.start == true {
self.speechStop()
self.configSpeechTask()
self.start = false
}else{
// self.start = false
}
}
}
})
// Attach an audio input to the recognitionRequest
let recordingFormat = audioEngine.inputNode.outputFormat(forBus: 0)
audioEngine.inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer, when) in
self.recognitionRequest?.append(buffer)
}
self.audioEngine.prepare()
do {
// Start recording
try audioEngine.start()
} catch {
print("audioEngine couldn't start because of an error.")
}
}
// Stop recording
func stopRecord() {
recorder_mp3.stop()
if let recorder = self.recorder {
if recorder.isRecording {
print("正在录音,马上结束它,文件保存到了:\(wav_file_path)")
}else {
print("没有录音,但是依然结束它")
}
recorder.stop()
// self.recorder = nil
speechStop()
timerInvalidate()
}else {
print("没有初始化")
}
}
func saveContent(){
if let txt = recognitionTaskText.first, txt.length == 0 {
if currentTxt?.length == 0 {
return
}
......@@ -494,7 +296,7 @@ class SHRecordViewController: SHBaseViewController{
model.modifyDate = nowDate
model.address = currentAddress ?? ""
model.rename = currentAddress ?? ""
model.txt = recognitionTaskText.first ?? (self.currentTxt ?? "")
model.txt = currentTxt ?? "";
model.pathFile = mp3_file_path
model.pcmPathFile = pcm_file_path
model.during = seconds
......@@ -510,8 +312,8 @@ class SHRecordViewController: SHBaseViewController{
image_file_paths.append(image_file_path+"\(index).jpeg")
image_indexs.append(index)
}
model.imagesPath = image_file_paths
model.imagesIndex = image_indexs
model.imagesPath = []
model.imagesIndex = []
var contains = false
if folderModel.dataSources.count != 0 {
......@@ -563,19 +365,10 @@ class SHRecordViewController: SHBaseViewController{
}
func timerInvalidate(){
waveTimer?.invalidate()
secondTimer?.invalidate()
}
func speechStop(){
self.audioEngine.stop()
self.audioEngine.inputNode.removeTap(onBus: 0)
self.recognitionRequest = nil
self.recognitionTask = nil
}
deinit {
waveTimer?.invalidate()
secondTimer?.invalidate()
}
}
......@@ -599,30 +392,6 @@ extension SHRecordViewController{
}
}
@objc private func updateMeters() {
self.recorder?.updateMeters()
recordTime += updateFequency
let decibels = (self.recorder?.averagePower(forChannel: 0))!
// decibels = decibels + 160.0
// NSLog("-----------\(decibels)")
addSoundMeter(item: decibels)
}
private func addSoundMeter(item: Float) {
if soundMeters.count < soundMeterCount {
soundMeters.append(item)
} else {
for (index, _) in soundMeters.enumerated() {
if index < soundMeterCount - 1 {
soundMeters[index] = soundMeters[index + 1]
}
}
// Insert the new sample
soundMeters[soundMeterCount - 1] = item
NotificationCenter.default.post(name: NSNotification.Name.init("updateMeters"), object: soundMeters)
}
}
static func getAllFilePath(_ dirPath: String) -> [String]? {
var filePaths = [String]()
......@@ -741,14 +510,3 @@ extension SHRecordViewController{
}
}
}
extension SHRecordViewController: SFSpeechRecognizerDelegate {
func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
if available {
}else {
}
}
}
//
// DDSoundWaveView.h
// DDSoundWave
//
// Created by Teaker on 2018/4/6.
// Copyright © 2018年 Liuzhida. All rights reserved.
//
#import <UIKit/UIKit.h>
/**
A view that draws the sound wave effect
*/
@interface DDSoundWaveView : UIView
- (void)displayWave:(double)value;
@end
//
// DDSoundWaveView.m
// DDSoundWave
//
// Created by Teaker on 2018/4/6.
// Copyright © 2018年 Liuzhida. All rights reserved.
//
#import "DDSoundWaveView.h"
#import "UIView+LayoutMethods.h"
#define kNumberOfWaves 5
@interface DDSoundWaveView ()
{
CGFloat _phase; // phase
CGFloat _phaseShift; // phase shift
CGFloat _amplitude; // amplitude
CGFloat _maxAmplitude; // peak amplitude
CGFloat _idleAmplitude; // idle (minimum) amplitude
CGFloat _waveHeight; // wave height
CGFloat _waveWidth; // wave length
CGFloat _waveMid; // wave midpoint
CGFloat _density; // sampling density along the wave
CGFloat _frequency; // wave frequency
CGFloat _mainWaveWidth; // main wave line width
CGFloat _decorativeWavesWidth; // decorative wave line width
}
@property (nonatomic, strong) NSMutableArray<CAShapeLayer *> * waves;
@property (nonatomic, strong) CALayer *waveLayer; // container layer for the waves
@end
@implementation DDSoundWaveView
#pragma mark - overwrite init
- (instancetype)init {
self = [super init];
if (self) {
[self setup];
}
return self;
}
- (void)setup {
self.clipsToBounds = YES;
_phaseShift = -0.25f;
_idleAmplitude = 0.01f;
_density = 1.f;
_frequency = 1.2f;
_mainWaveWidth = 2.0f;
_decorativeWavesWidth = 1.0f;
self.backgroundColor = [UIColor clearColor];
[self.layer addSublayer:self.waveLayer];
}
- (void)layoutSubviews {
[super layoutSubviews];
self.waveLayer.frame = self.bounds;
}
// Refresh the layer layout
- (void)displayWave:(double)value{
double lowPassResults = value;
_waveHeight = CGRectGetHeight(self.bounds)/2;
_waveWidth = CGRectGetWidth(self.bounds);
_waveMid = _waveWidth / 2.0f;
_maxAmplitude = _waveHeight/2 - 4.0f;
_phase += _phaseShift;
_amplitude = fmax(lowPassResults, _idleAmplitude);
UIGraphicsBeginImageContext(self.frame.size);
for(int i = 0; i < self.waves.count; i++) {
UIBezierPath *wavelinePath = [UIBezierPath bezierPath];
CGFloat progress = 1.0f - (CGFloat)i / kNumberOfWaves;
CGFloat normedAmplitude = (1.5f * progress - 0.5f) * _amplitude;
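// Each stroke is one sine curve: normedAmplitude shrinks for the decorative waves,
// and the parabolic scaling term below tapers the curve to zero at both edges of the view.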
for(CGFloat x = 0; x < _waveWidth + _density; x += _density) {
CGFloat scaling = -pow(x / _waveMid - 1, 2) + 1;
CGFloat y = scaling * _maxAmplitude * normedAmplitude * sinf(2 * M_PI *(x / _waveWidth) * _frequency + _phase) + (_waveHeight * 0.8);
// if (y > CGRectGetHeight(self.bounds)*1.5){
// y = CGRectGetHeight(self.bounds)*1.5;
// }
if (x==0) {
[wavelinePath moveToPoint:CGPointMake(x, y)];
}
else {
[wavelinePath addLineToPoint:CGPointMake(x, y)];
}
}
CAShapeLayer *waveline = [self.waves objectAtIndex:i];
waveline.path = [wavelinePath CGPath];
}
UIGraphicsEndImageContext();
}
- (NSMutableArray<CAShapeLayer *> *)waves {
if (!_waves) {
_waves = [NSMutableArray arrayWithCapacity:kNumberOfWaves];
for (int i = 0; i < kNumberOfWaves; ++i) {
CAShapeLayer *waveline = [CAShapeLayer layer];
waveline.lineCap = kCALineCapButt;
waveline.lineJoin = kCALineJoinRound;
waveline.strokeColor = [[UIColor clearColor] CGColor];
waveline.fillColor = [[UIColor clearColor] CGColor];
[waveline setLineWidth:(i==0 ? _mainWaveWidth : _decorativeWavesWidth)];
CGFloat progress = 1.0f - (CGFloat)i / kNumberOfWaves;
CGFloat multiplier = MIN(1.0, (progress / 3.0f * 2.0f) + (1.0f / 3.0f));
UIColor *color = [[UIColor blueColor] colorWithAlphaComponent:(i == 0 ? 1.0 : 1.0 * multiplier * 0.6)];
waveline.strokeColor = color.CGColor;
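// A horizontal cyan -> blue gradient is created for every wave; the shape layer acts as its
// mask, so only the stroked sine path shows the gradient.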
CAGradientLayer *gradientLayer = [CAGradientLayer layer];
gradientLayer.frame = CGRectMake(0, 0, self.bounds.size.width, 200);
// Set the gradient colors
[gradientLayer setColors:[NSArray arrayWithObjects:(id)[[UIColor cyanColor] CGColor],(id)[[UIColor blueColor] CGColor], nil]];
// Position where each color is strongest
[gradientLayer setLocations:@[@0,@1]];
// Gradient direction: StartPoint -> EndPoint
[gradientLayer setStartPoint:CGPointMake(0, 0.5)];
[gradientLayer setEndPoint:CGPointMake(1, 0.5)];
gradientLayer.mask = waveline;
[self.waveLayer addSublayer:gradientLayer];
[_waves addObject:waveline];
}
}
return _waves;
}
- (CALayer *)waveLayer {
if (!_waveLayer) {
_waveLayer = [CALayer layer];
}
return _waveLayer;
}
@end
//
// UIView+LayoutMethods.h
// TmallClient4iOS-Prime
//
// Created by casa on 14/12/8.
// Copyright (c) 2014年 casa. All rights reserved.
//
#import <UIKit/UIKit.h>
#define SYSTEM_VERSION_EQUAL_TO(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] == NSOrderedSame)
#define SYSTEM_VERSION_GREATER_THAN(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] == NSOrderedDescending)
#define SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] != NSOrderedAscending)
#define SYSTEM_VERSION_LESS_THAN(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] == NSOrderedAscending)
#define SYSTEM_VERSION_LESS_THAN_OR_EQUAL_TO(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] != NSOrderedDescending)
#define SCREEN_WIDTH ([[UIScreen mainScreen]bounds].size.width)
#define SCREEN_HEIGHT ([[UIScreen mainScreen]bounds].size.height)
#define kHPercentage(a) (SCREEN_HEIGHT*((a)/667.00))
#define kWPercentage(a) (SCREEN_WIDTH *((a)/375.00))
#define SCREEN_WITHOUT_STATUS_HEIGHT (SCREEN_HEIGHT - [[UIApplication sharedApplication] statusBarFrame].size.height)
typedef CGFloat UIScreenType;
static UIScreenType UIScreenType_iPhone5 = 320.0f;
static UIScreenType UIScreenType_iPhone6 = 375.0f;
static UIScreenType UIScreenType_iPhone6P = 414.0f;
@interface UIView (LayoutMethods)
// coordinator getters
- (CGFloat)ct_height;
- (CGFloat)ct_width;
- (CGFloat)ct_x;
- (CGFloat)ct_y;
- (CGSize)ct_size;
- (CGPoint)ct_origin;
- (CGFloat)ct_centerX;
- (CGFloat)ct_centerY;
- (CGFloat)ct_left;
- (CGFloat)ct_top;
- (CGFloat)ct_bottom;
- (CGFloat)ct_right;
- (void)setCt_x:(CGFloat)x;
- (void)setCt_left:(CGFloat)left;
- (void)setCt_y:(CGFloat)y;
- (void)setCt_top:(CGFloat)top;
// height
- (void)setCt_height:(CGFloat)height;
- (void)heightEqualToView:(UIView *)view;
// width
- (void)setCt_width:(CGFloat)width;
- (void)widthEqualToView:(UIView *)view;
// size
- (void)setCt_size:(CGSize)size;
- (void)setSize:(CGSize)size screenType:(UIScreenType)screenType;
- (void)sizeEqualToView:(UIView *)view;
// center
- (void)setCt_centerX:(CGFloat)centerX;
- (void)setCt_centerY:(CGFloat)centerY;
- (void)centerXEqualToView:(UIView *)view;
- (void)centerYEqualToView:(UIView *)view;
- (void)centerEqualToView:(UIView *)view;
// top, bottom, left, right -- Version 1.1.0
- (void)fromTheTop:(CGFloat)distance ofView:(UIView *)view;
- (void)fromTheBottom:(CGFloat)distance ofView:(UIView *)view;
- (void)fromTheLeft:(CGFloat)distance ofView:(UIView *)view;
- (void)fromTheRight:(CGFloat)distance ofView:(UIView *)view;
- (void)fromTheRelativeTop:(CGFloat)distance ofView:(UIView *)view screenType:(UIScreenType)screenType;
- (void)fromTheRelativeBottom:(CGFloat)distance ofView:(UIView *)view screenType:(UIScreenType)screenType;
- (void)fromTheRelativeLeft:(CGFloat)distance ofView:(UIView *)view screenType:(UIScreenType)screenType;
- (void)fromTheRelativeRight:(CGFloat)distance ofView:(UIView *)view screenType:(UIScreenType)screenType;
- (void)relativeTopInContainer:(CGFloat)top shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType;
- (void)relativeBottomInContainer:(CGFloat)bottom shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType;
- (void)relativeLeftInContainer:(CGFloat)left shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType;
- (void)relativeRightInContainer:(CGFloat)right shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType;
// top, bottom, left, right -- Old Version
- (void)top:(CGFloat)top FromView:(UIView *)view;
- (void)bottom:(CGFloat)bottom FromView:(UIView *)view;
- (void)left:(CGFloat)left FromView:(UIView *)view;
- (void)right:(CGFloat)right FromView:(UIView *)view;
- (void)topRatio:(CGFloat)top FromView:(UIView *)view screenType:(UIScreenType)screenType;
- (void)bottomRatio:(CGFloat)bottom FromView:(UIView *)view screenType:(UIScreenType)screenType;
- (void)leftRatio:(CGFloat)left FromView:(UIView *)view screenType:(UIScreenType)screenType;
- (void)rightRatio:(CGFloat)right FromView:(UIView *)view screenType:(UIScreenType)screenType;
- (void)topInContainer:(CGFloat)top shouldResize:(BOOL)shouldResize;
- (void)bottomInContainer:(CGFloat)bottom shouldResize:(BOOL)shouldResize;
- (void)leftInContainer:(CGFloat)left shouldResize:(BOOL)shouldResize;
- (void)rightInContainer:(CGFloat)right shouldResize:(BOOL)shouldResize;
- (void)topRatioInContainer:(CGFloat)top shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType;
- (void)bottomRatioInContainer:(CGFloat)bottom shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType;
- (void)leftRatioInContainer:(CGFloat)left shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType;
- (void)rightRatioInContainer:(CGFloat)right shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType;
- (void)topEqualToView:(UIView *)view;
- (void)bottomEqualToView:(UIView *)view;
- (void)leftEqualToView:(UIView *)view;
- (void)rightEqualToView:(UIView *)view;
// imbueset
- (void)fillWidth;
- (void)fillHeight;
- (void)fill;
- (UIView *)topSuperView;
// iPhoneX adapt
- (CGFloat)safeAreaBottomGap;
- (CGFloat)safeAreaTopGap;
- (CGFloat)safeAreaLeftGap;
- (CGFloat)safeAreaRightGap;
@end
//
// UIView+LayoutMethods.m
// TmallClient4iOS-Prime
//
// Created by casa on 14/12/8.
// Copyright (c) 2014年 casa. All rights reserved.
//
#import "UIView+LayoutMethods.h"
#import <objc/runtime.h>
static void *kUIViewLayoutMethodPropertyBottomGap = &kUIViewLayoutMethodPropertyBottomGap;
static void *kUIViewLayoutMethodPropertyTopGap = &kUIViewLayoutMethodPropertyTopGap;
static void *kUIViewLayoutMethodPropertyLeftGap = &kUIViewLayoutMethodPropertyLeftGap;
static void *kUIViewLayoutMethodPropertyRightGap = &kUIViewLayoutMethodPropertyRightGap;
@implementation UIView (LayoutMethods)
// coordinator getters
- (CGFloat)ct_height
{
return self.frame.size.height;
}
- (CGFloat)ct_width
{
return self.frame.size.width;
}
- (CGFloat)ct_x
{
return self.frame.origin.x;
}
- (CGFloat)ct_y
{
return self.frame.origin.y;
}
- (CGSize)ct_size
{
return self.frame.size;
}
- (CGPoint)ct_origin
{
return self.frame.origin;
}
- (CGFloat)ct_centerX
{
return self.center.x;
}
- (CGFloat)ct_centerY
{
return self.center.y;
}
- (CGFloat)ct_left
{
return self.frame.origin.x;
}
- (CGFloat)ct_top
{
return self.frame.origin.y;
}
- (CGFloat)ct_bottom
{
return self.frame.size.height + self.frame.origin.y;
}
- (CGFloat)ct_right
{
return self.frame.size.width + self.frame.origin.x;
}
- (void)setCt_x:(CGFloat)x
{
self.frame = CGRectMake(x, self.frame.origin.y, self.frame.size.width, self.frame.size.height);
}
- (void)setCt_y:(CGFloat)y
{
self.frame = CGRectMake(self.frame.origin.x, y, self.frame.size.width, self.frame.size.height);
}
- (void)setCt_left:(CGFloat)left
{
self.ct_x = left;
}
- (void)setCt_top:(CGFloat)top
{
self.ct_y = top;
}
- (void)setCt_height:(CGFloat)height
{
CGRect newFrame = CGRectMake(self.ct_x, self.ct_y, self.ct_width, height);
self.frame = newFrame;
}
- (void)heightEqualToView:(UIView *)view
{
self.ct_height = view.ct_height;
}
// width
- (void)setCt_width:(CGFloat)width
{
CGRect newFrame = CGRectMake(self.ct_x, self.ct_y, width, self.ct_height);
self.frame = newFrame;
}
- (void)widthEqualToView:(UIView *)view
{
self.ct_width = view.ct_width;
}
// center
- (void)setCt_centerX:(CGFloat)centerX
{
CGPoint center = CGPointMake(self.ct_centerX, self.ct_centerY);
center.x = centerX;
self.center = center;
}
- (void)setCt_centerY:(CGFloat)centerY
{
CGPoint center = CGPointMake(self.ct_centerX, self.ct_centerY);
center.y = centerY;
self.center = center;
}
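// The *EqualToView: helpers below convert the reference view's geometry into the receiver's
// superview coordinate space via the shared top-level view, so the two views do not need to
// share a direct superview.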
- (void)centerXEqualToView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewCenterPoint = [superView convertPoint:view.center toView:self.topSuperView];
CGPoint centerPoint = [self.topSuperView convertPoint:viewCenterPoint toView:self.superview];
self.ct_centerX = centerPoint.x;
}
- (void)centerYEqualToView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewCenterPoint = [superView convertPoint:view.center toView:self.topSuperView];
CGPoint centerPoint = [self.topSuperView convertPoint:viewCenterPoint toView:self.superview];
self.ct_centerY = centerPoint.y;
}
- (void)centerEqualToView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewCenterPoint = [superView convertPoint:view.center toView:self.topSuperView];
CGPoint centerPoint = [self.topSuperView convertPoint:viewCenterPoint toView:self.superview];
self.ct_centerX = centerPoint.x;
self.ct_centerY = centerPoint.y;
}
// top, bottom, left, right -- Version 1.1.0
- (void)fromTheTop:(CGFloat)distance ofView:(UIView *)view
{
[self bottom:distance FromView:view];
}
- (void)fromTheBottom:(CGFloat)distance ofView:(UIView *)view
{
[self top:distance FromView:view];
}
- (void)fromTheLeft:(CGFloat)distance ofView:(UIView *)view
{
[self left:distance FromView:view];
}
- (void)fromTheRight:(CGFloat)distance ofView:(UIView *)view
{
[self right:distance FromView:view];
}
- (void)fromTheRelativeTop:(CGFloat)distance ofView:(UIView *)view screenType:(UIScreenType)screenType
{
[self bottomRatio:distance FromView:view screenType:screenType];
}
- (void)fromTheRelativeBottom:(CGFloat)distance ofView:(UIView *)view screenType:(UIScreenType)screenType
{
[self topRatio:distance FromView:view screenType:screenType];
}
- (void)fromTheRelativeLeft:(CGFloat)distance ofView:(UIView *)view screenType:(UIScreenType)screenType
{
[self leftRatio:distance FromView:view screenType:screenType];
}
- (void)fromTheRelativeRight:(CGFloat)distance ofView:(UIView *)view screenType:(UIScreenType)screenType
{
[self rightRatio:distance FromView:view screenType:screenType];
}
- (void)relativeTopInContainer:(CGFloat)top shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType
{
[self topRatioInContainer:top shouldResize:shouldResize screenType:screenType];
}
- (void)relativeBottomInContainer:(CGFloat)bottom shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType
{
[self bottomRatioInContainer:bottom shouldResize:shouldResize screenType:screenType];
}
- (void)relativeLeftInContainer:(CGFloat)left shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType
{
[self leftRatioInContainer:left shouldResize:shouldResize screenType:screenType];
}
- (void)relativeRightInContainer:(CGFloat)right shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType
{
[self rightRatioInContainer:right shouldResize:shouldResize screenType:screenType];
}
// top, bottom, left, right -- Old Version
- (void)top:(CGFloat)top FromView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewOrigin = [superView convertPoint:view.ct_origin toView:self.topSuperView];
CGPoint newOrigin = [self.topSuperView convertPoint:viewOrigin toView:self.superview];
self.ct_y = floorf(newOrigin.y + top + view.ct_height);
}
- (void)bottom:(CGFloat)bottom FromView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewOrigin = [superView convertPoint:view.ct_origin toView:self.topSuperView];
CGPoint newOrigin = [self.topSuperView convertPoint:viewOrigin toView:self.superview];
self.ct_y = newOrigin.y - bottom - self.ct_height;
}
- (void)left:(CGFloat)left FromView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewOrigin = [superView convertPoint:view.ct_origin toView:self.topSuperView];
CGPoint newOrigin = [self.topSuperView convertPoint:viewOrigin toView:self.superview];
self.ct_x = newOrigin.x - left - self.ct_width;
}
- (void)right:(CGFloat)right FromView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewOrigin = [superView convertPoint:view.ct_origin toView:self.topSuperView];
CGPoint newOrigin = [self.topSuperView convertPoint:viewOrigin toView:self.superview];
self.ct_x = newOrigin.x + right + view.ct_width;
}
- (void)topRatio:(CGFloat)top FromView:(UIView *)view screenType:(UIScreenType)screenType
{
CGFloat topRatio = top / screenType;
CGFloat topValue = topRatio * self.superview.ct_width;
[self top:topValue FromView:view];
}
- (void)bottomRatio:(CGFloat)bottom FromView:(UIView *)view screenType:(UIScreenType)screenType
{
CGFloat bottomRatio = bottom / screenType;
CGFloat bottomValue = bottomRatio * self.superview.ct_width;
[self bottom:bottomValue FromView:view];
}
- (void)leftRatio:(CGFloat)left FromView:(UIView *)view screenType:(UIScreenType)screenType
{
CGFloat leftRatio = left / screenType;
CGFloat leftValue = leftRatio * self.superview.ct_width;
[self left:leftValue FromView:view];
}
- (void)rightRatio:(CGFloat)right FromView:(UIView *)view screenType:(UIScreenType)screenType
{
CGFloat rightRatio = right / screenType;
CGFloat rightValue = rightRatio * self.superview.ct_width;
[self right:rightValue FromView:view];
}
- (void)topInContainer:(CGFloat)top shouldResize:(BOOL)shouldResize
{
if (shouldResize) {
self.ct_height = self.ct_y - top + self.ct_height;
}
self.ct_y = top;
}
- (void)bottomInContainer:(CGFloat)bottom shouldResize:(BOOL)shouldResize
{
if (shouldResize) {
self.ct_height = self.superview.ct_height - bottom - self.ct_y - self.safeAreaBottomGap;
} else {
self.ct_y = self.superview.ct_height - self.ct_height - bottom - self.safeAreaBottomGap;
}
}
- (void)leftInContainer:(CGFloat)left shouldResize:(BOOL)shouldResize
{
if (shouldResize) {
self.ct_width = self.ct_x - left + self.ct_width;
}
self.ct_x = left;
}
- (void)rightInContainer:(CGFloat)right shouldResize:(BOOL)shouldResize
{
if (shouldResize) {
self.ct_width = self.superview.ct_width - right - self.ct_x;
} else {
self.ct_x = self.superview.ct_width - self.ct_width - right;
}
}
- (void)topRatioInContainer:(CGFloat)top shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType
{
CGFloat topRatio = top / screenType;
CGFloat topValue = topRatio * self.superview.ct_width;
[self topInContainer:topValue shouldResize:shouldResize];
}
- (void)bottomRatioInContainer:(CGFloat)bottom shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType
{
CGFloat bottomRatio = bottom / screenType;
CGFloat bottomValue = bottomRatio * self.superview.ct_width;
[self bottomInContainer:bottomValue shouldResize:shouldResize];
}
- (void)leftRatioInContainer:(CGFloat)left shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType
{
CGFloat leftRatio = left / screenType;
CGFloat leftValue = leftRatio * self.superview.ct_width;
[self leftInContainer:leftValue shouldResize:shouldResize];
}
- (void)rightRatioInContainer:(CGFloat)right shouldResize:(BOOL)shouldResize screenType:(UIScreenType)screenType
{
CGFloat rightRatio = right / screenType;
CGFloat rightValue = rightRatio * self.superview.ct_width;
[self rightInContainer:rightValue shouldResize:shouldResize];
}
- (void)topEqualToView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewOrigin = [superView convertPoint:view.ct_origin toView:self.topSuperView];
CGPoint newOrigin = [self.topSuperView convertPoint:viewOrigin toView:self.superview];
self.ct_y = newOrigin.y;
}
- (void)bottomEqualToView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewOrigin = [superView convertPoint:view.ct_origin toView:self.topSuperView];
CGPoint newOrigin = [self.topSuperView convertPoint:viewOrigin toView:self.superview];
self.ct_y = newOrigin.y + view.ct_height - self.ct_height;
}
- (void)leftEqualToView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewOrigin = [superView convertPoint:view.ct_origin toView:self.topSuperView];
CGPoint newOrigin = [self.topSuperView convertPoint:viewOrigin toView:self.superview];
self.ct_x = newOrigin.x;
}
- (void)rightEqualToView:(UIView *)view
{
UIView *superView = view.superview ? view.superview : view;
CGPoint viewOrigin = [superView convertPoint:view.ct_origin toView:self.topSuperView];
CGPoint newOrigin = [self.topSuperView convertPoint:viewOrigin toView:self.superview];
self.ct_x = newOrigin.x + view.ct_width - self.ct_width;
}
// size
- (void)setCt_size:(CGSize)size
{
self.frame = CGRectMake(self.ct_x, self.ct_y, size.width, size.height);
}
- (void)setSize:(CGSize)size screenType:(UIScreenType)screenType
{
CGFloat ratio = SCREEN_WIDTH / screenType;
self.frame = CGRectMake(self.ct_x, self.ct_y, size.width * ratio, size.height * ratio);
}
- (void)sizeEqualToView:(UIView *)view
{
self.frame = CGRectMake(self.ct_x, self.ct_y, view.ct_width, view.ct_height);
}
// imbueset
- (void)fillWidth
{
self.ct_width = self.superview.ct_width;
self.ct_x = 0;
}
- (void)fillHeight
{
self.ct_height = self.superview.ct_height;
self.ct_y = 0;
}
- (void)fill
{
self.frame = CGRectMake(0, 0, self.superview.ct_width, self.superview.ct_height);
}
- (UIView *)topSuperView
{
UIView *topSuperView = self.superview;
if (topSuperView == nil) {
topSuperView = self;
} else {
while (topSuperView.superview) {
topSuperView = topSuperView.superview;
}
}
return topSuperView;
}
// iPhoneX adapt
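// The safeArea*Gap getters below read the superview's safeAreaLayoutGuide (iOS 11+), fall back
// to 0 on earlier systems, and cache the value on the view via an associated object.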
- (CGFloat)safeAreaBottomGap
{
NSNumber *gap = objc_getAssociatedObject(self, kUIViewLayoutMethodPropertyBottomGap);
if (gap == nil) {
if (@available(iOS 11, *)) {
if (self.superview.safeAreaLayoutGuide.layoutFrame.size.height > 0) {
gap = @((self.superview.ct_height - self.superview.safeAreaLayoutGuide.layoutFrame.origin.y - self.superview.safeAreaLayoutGuide.layoutFrame.size.height));
} else {
gap = nil;
}
} else {
gap = @(0);
}
objc_setAssociatedObject(self, kUIViewLayoutMethodPropertyBottomGap, gap, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
return gap.floatValue;
}
- (CGFloat)safeAreaTopGap
{
NSNumber *gap = objc_getAssociatedObject(self, kUIViewLayoutMethodPropertyTopGap);
if (gap == nil) {
if (@available(iOS 11, *)) {
gap = @(self.superview.safeAreaLayoutGuide.layoutFrame.origin.y);
} else {
gap = @(0);
}
objc_setAssociatedObject(self, kUIViewLayoutMethodPropertyTopGap, gap, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
return gap.floatValue;
}
- (CGFloat)safeAreaLeftGap
{
NSNumber *gap = objc_getAssociatedObject(self, kUIViewLayoutMethodPropertyLeftGap);
if (gap == nil) {
if (@available(iOS 11, *)) {
gap = @(self.superview.safeAreaLayoutGuide.layoutFrame.origin.x);
} else {
gap = @(0);
}
objc_setAssociatedObject(self, kUIViewLayoutMethodPropertyLeftGap, gap, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
return gap.floatValue;
}
- (CGFloat)safeAreaRightGap
{
NSNumber *gap = objc_getAssociatedObject(self, kUIViewLayoutMethodPropertyRightGap);
if (gap == nil) {
if (@available(iOS 11, *)) {
gap = @(self.superview.safeAreaLayoutGuide.layoutFrame.origin.x);
} else {
gap = @(0);
}
objc_setAssociatedObject(self, kUIViewLayoutMethodPropertyRightGap, gap, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
return gap.floatValue;
}
@end
//
// SHRecordWaveView.swift
// ShorthandMaster
//
// Created by 明津李 on 2020/8/10.
// Copyright © 2020 明津李. All rights reserved.
//
import UIKit
enum SHWaveType: Int {
case bar = 0
case line
}
class SHRecordWaveView: UIView {
//MARK: Private Properties
/// Sound level samples
private var soundMeters: [Float]!
private var type: SHWaveType = .line
private var capacity: NSInteger = 0
//MARK: Init
convenience init(frame: CGRect, type: SHWaveType, capacity:NSInteger) {
self.init(frame: frame)
self.type = type
self.capacity = capacity
}
override init(frame: CGRect) {
super.init(frame: frame)
backgroundColor = UIColor.white
contentMode = .redraw // Redraw on bounds change, because the level meter is drawn over and over
NotificationCenter.default.addObserver(self, selector: #selector(updateView(notice:)), name: NSNotification.Name.init("updateMeters"), object: nil)
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
public func remake(){
soundMeters = []
for _ in 0...capacity-1 {
soundMeters.append(-46.0)
}
setNeedsDisplay()
}
override func draw(_ rect: CGRect) {
if soundMeters != nil && soundMeters.count > 0 {
let context = UIGraphicsGetCurrentContext()
context?.setLineCap(.round)
context?.setLineJoin(.round)
context?.setStrokeColor(UIColor.red.cgColor)
let noVoice = -46.0 // Anything below -46.0 is treated as no sound
let maxVolume = 55.0 // The loudest level is taken to be 55.0
let positionX = Float(self.frame.size.width)/Float(soundMeters.count-1)
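// .bar draws one vertical bar per sample from a fixed baseline; .line strings the samples into
// a polyline. In both modes a louder sample yields a smaller y value, i.e. a taller bar or a
// higher point.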
switch type {
case .bar:
context?.setLineWidth(3)
for (index,item) in soundMeters.enumerated() {
let barHeight = maxVolume - (Double(item) - noVoice) // Compute the bar height to display from the current sample
context?.move(to: CGPoint(x: index * 6 + 3, y: 40))
context?.addLine(to: CGPoint(x: index * 6 + 3, y: Int(barHeight)))
}
case .line:
context?.setLineWidth(1.5)
for (index, item) in soundMeters.enumerated() {
let position = maxVolume - (Double(item) - noVoice)
// Compute the height of the corresponding line segment
context?.addLine(to: CGPoint(x: Double(Float(index) * positionX), y: position))
context?.move(to: CGPoint(x: Double(Float(index) * positionX), y: position))
}
}
context?.strokePath()
}
}
@objc private func updateView(notice: Notification) {
soundMeters = notice.object as? [Float]
setNeedsDisplay()
}
}
......@@ -10,7 +10,13 @@ import UIKit
import AVFoundation
import Speech
class SHAVAudioManager: NSObject, SFSpeechRecognizerDelegate {
enum SHRecordState {
case start
case pause
case stop
}
class SHAVAudioManager: NSObject {
@objc static let shared = SHAVAudioManager()
......@@ -38,8 +44,6 @@ class SHAVAudioManager: NSObject, SFSpeechRecognizerDelegate {
private var recognitionTask: SFSpeechRecognitionTask?
// The audio engine, which supplies the recording input
private var audioEngine = AVAudioEngine()
// Text data
private var recognitionTaskText: [String] = []
lazy var monitor: AVAudioRecorder? = self.getRecorder(monitor_file_path)
......@@ -75,6 +79,8 @@ class SHAVAudioManager: NSObject, SFSpeechRecognizerDelegate {
var bestRestlt: String = ""
var startTime: CFTimeInterval = CACurrentMediaTime()
var recoderResiltCallBack:((String)->Void)?
var decibelsCallBack:((Float)->Void)?
var state = SHRecordState.start
private func configRecorder(){
......@@ -119,12 +125,29 @@ class SHAVAudioManager: NSObject, SFSpeechRecognizerDelegate {
// If no error occurred, or result is final, stop the audioEngine recording and terminate recognitionRequest and recognitionTask
if error != nil || isFinal {
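// State machine on completion: .pause flushes the partial result and appends a "\n" separator,
// .stop flushes and clears everything, and while still recording the partial result is flushed
// and a fresh recognition task is started right away.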
if self.state == SHRecordState.pause {
if self.bestRestlt.length > 0 {
self.resultTxts.append(self.bestRestlt)
self.bestRestlt = ""
// print("self.resultTxts ==== \(self.resultTxts)")
}
if self.resultTxts.count > 0 && !self.resultTxts.last!.isEqualTo("\n") {
self.resultTxts.append("\n")
}
self.resultRecoderTxt(self.resultTxts)
}else if self.state == SHRecordState.stop{
if self.bestRestlt.length > 0 {
self.resultTxts.append(self.bestRestlt)
self.bestRestlt = ""
}
self.resultRecoderTxt(self.resultTxts)
self.resultTxts = []
}else{
if self.bestRestlt.length > 0 {
self.resultTxts.append(self.bestRestlt)
self.bestRestlt = ""
}
self.resultRecoderTxt(self.resultTxts)
self.configSpeechTask()
}
}
})
......@@ -149,64 +172,117 @@ class SHAVAudioManager: NSObject, SFSpeechRecognizerDelegate {
self.monitor?.updateMeters()
// Read the level of channel 0: -160.0 means complete silence, 0 is the loudest
let decibels = (self.monitor?.peakPower(forChannel: 0))!
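// pow(10, 0.05 * dB) converts the dB reading (0 dB = full scale, -160 dB = silence) into a
// linear amplitude for the wave view.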
let lowPassResults = pow(10, (0.05 * (self.monitor?.peakPower(forChannel: 0))!));
decibelsCallBack?(lowPassResults)
// print("decibels == \(decibels)")
if decibels > -44 {
if recognitionTask?.isCancelled == true {
if recognitionTask?.isCancelled == true && self.state == SHRecordState.start{
start()
}
}else{
let endTime: CFTimeInterval = CACurrentMediaTime()
if endTime - startTime > 3 {
if self.state == SHRecordState.start{
pause()
go_on()
}
}
}
}
func start(){
// if recognitionTask?.state{
//
// }
if audioEngine.isRunning {
return
}
self.state = SHRecordState.start
self.configRecorder()
self.configSpeechTask()
self.monitor?.record()
startTime = CACurrentMediaTime()
if recorderTimer == nil {
recorderTimer = SHTimer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updateMeters), userInfo: nil, repeats: true)
}
}
func go_on(){
monitor?.record()
self.audioEngine.prepare()
do {
// Start recording
try audioEngine.start()
} catch {
print("audioEngine couldn't start because of an error.")
}
startTime = CACurrentMediaTime()
if recorderTimer == nil {
recorderTimer = SHTimer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updateMeters), userInfo: nil, repeats: true)
}
}
private func pause(){
monitor?.stop()
monitor?.deleteRecording()
recorderTimer?.invalidate()
recorderTimer = nil
recognitionTask?.cancel()
audioEngine.stop()
recognitionRequest = nil
audioEngine.inputNode.removeTap(onBus: 0)
}
func manualPause(){
recorderTimer?.invalidate()
recognitionTask?.cancel()
state = SHRecordState.pause
pause()
if !audioEngine.isRunning && self.resultTxts.count > 0 && !self.resultTxts.last!.isEqualTo("\n") {
self.resultTxts.append("\n")
}
}
func stop(){
pause()
state = SHRecordState.stop
monitor?.stop()
monitor?.deleteRecording()
recorderTimer?.invalidate()
recorderTimer = nil
recognitionTask = nil
audioEngine.stop()
recognitionRequest = nil
audioEngine.inputNode.removeTap(onBus: 0)
}
func resultRecoderTxt(_ results:[String], _ processStr:String = ""){
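// Join the recognised segments with a single space, except right after a newline, then append
// the in-progress text (processStr) the same way.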
let recorderResult = results.reduce("", {
if $0.length == 0{
var recorderResult = results.reduce("", {
if $0.length == 0 || $0.hasSuffix("\n"){
return $0 + $1
}else{
return $0 + " " + $1
}}) + " " + processStr
}}
)
if recorderResult.hasSuffix("\n") {
recorderResult = recorderResult + processStr
}else{
recorderResult = recorderResult + " " + processStr
}
print("recorderResult ===== \(recorderResult)")
recoderResiltCallBack?(recorderResult)
}
deinit {
recorderTimer?.invalidate()
recorderTimer = nil
}
}
extension SHAVAudioManager: SFSpeechRecognizerDelegate {
func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
if available {
}else {
}
}
}
......@@ -16,3 +16,6 @@
#import "ZYPinYinSearch.h"
#import "UIAlertController+HCAdd.h"
#import "DDSoundWaveView.h"
#import "MBProgressHUD+MJ.h"