<!--
  urbanLifeline_YanAn / src / views / 全局文件公共说明.vue
  first commit — zhangqy, 3 Oct (11 KB)
-->
<template>
  <div id="as">
    <div>融云</div>
    <el-divider />
    <div>语音通讯</div>
    <div class="Separatebox">
      <div class="titlename">4.示例参考样式【通过webSocket方法传输音频给服务器】</div>
      <div class="record-page">
        <img
          v-if="!data.showModal"
          src="@/assets/images/voice/jqr.png"
          class="img"
          @click="STARTVOICE()"
        />
        <img
          v-else
          src="@/assets/images/voice/iflytek.gif"
          class="img"
          @click="STARTVOICE()"
        />
        <div class="duihuak" :class="{ show: data.showModal }">
          <div class="tip">提示:点击左侧小机器人开始说话!</div>
          <div class="msg">
            <div class="item"><span>机器人:</span>{{ nowword }}</div>
          </div>
          <div class="close" @click="closedia">
            <el-icon :size="20"><Close /></el-icon>
          </div>
        </div>
      </div>
    </div>
  </div>

  <el-dialog title="泵站弹框" v-model="pumpdia" width="600px" append-to-body>
    <div class="bzbox">模拟收到指令后打开的泵站弹框内容.....</div>
  </el-dialog>
</template>

<script setup name="as">
// BUG FIX: `getCurrentInstance` and `onBeforeUnmount` were used below without
// being imported (presumably relying on an auto-import plugin); import them
// explicitly so the component also works without that plugin. The duplicate
// `import ... from "vue"` (nextTick) is merged into the single vue import.
import {
  ref,
  reactive,
  toRefs,
  onMounted,
  onBeforeUnmount,
  getCurrentInstance,
  nextTick,
} from "vue";
import lamejs from "lamejs";
import Recorder from "js-audio-recorder";
import { videoupload } from "@/api/voice/recordpage.js";
import VoiceFileEscape from "@/views/voice/VoiceFileEscape/index.vue"; // audio upload component
import useUserStore from "@/store/modules/user";

const { proxy } = getCurrentInstance();
const userStore = useUserStore();

// Reconnect bookkeeping for the recognition WebSocket.
const lockReconnect = ref(null); // truthy while a reconnect attempt is pending
const timeoutnum = ref(null); // pending reconnect timer id

// Shared recorder instance used by all record/stop handlers below.
var recorder = new Recorder({
  sampleBits: 16, // sample size: 8 or 16 (default 16)
  sampleRate: 48000, // supported: 11025, 16000, 22050, 24000, 44100, 48000; Chrome default is 48000
  numChannels: 1, // channels: 1 or 2 (default 1)
  // compiling: false, // (effective in 0.x, planned for 1.x) transcode while recording
});

// Feature 3
const uploadbusinessSourceCode = ref("text"); // business-type code sent with uploads; used to match returned commands

const shibieword = ref(""); // last recognized text
const nowword = ref("你好,请点击【开始录制】,进行语音录制!"); // current status/progress message shown in the bubble
const process = ref("1"); // recording state: "1"/1 = idle, 2 = recording
const inputword = ref(""); // input text for the text-broadcast feature
const wordaudioFilePath = ref(null); // audio file path returned after a text broadcast
const wordbusinessSourceCode = ref("testyyhc"); // business type for text-broadcast voice matching ("yyhc" must be included)

const pumpdia = ref(false); // mock pump-station dialog
const data = reactive({
  showModal: false, // whether the chat bubble is shown
  recordStatus: null, // current position in the recording flow
  miao: 0,
  recognizeWs: null, // recognition WebSocket instance
});
onMounted(() => {
  // Open the voice-recognition WebSocket as soon as the component mounts.
  initRecognizeWs();
});
onBeforeUnmount(() => {
  // Close the socket so it does not outlive the component.
  data.recognizeWs && data.recognizeWs.close();
});
// Initialize the voice-dispatch WebSocket connection and wire up its handlers.
function initRecognizeWs() {
  if (data.recognizeWs) {
    // BUG FIX: the original invoked the onclose *handler* (`onclose()`)
    // instead of actually closing the previous socket.
    data.recognizeWs.close();
  }
  let wsuri;
  if (window.location.protocol.includes("https")) {
    // production environment
    wsuri = `wss://${window.location.host}/websocket/voicesWebocket`;
  } else {
    // local environment
    // wsuri = `ws://192.168.20.145:13002/voiceWebsocket`;
    wsuri = `wss://server2.wh-nf.cn:8088/websocket/voiceWebsocket`;

    // wsuri = `wss://jingkai.wh-nf.cn:8986/voicesWebocket`;
  }
  data.recognizeWs = new WebSocket(wsuri);

  // Connection established.
  data.recognizeWs.onopen = function (e) {
    console.log("连接成功", e);
  };

  // Connection failed — schedule a reconnect.
  data.recognizeWs.onerror = function (evt) {
    console.log("连接失败", evt);
    reconnect();
  };

  data.recognizeWs.onmessage = function (e) {
    if (e.data != "客户端连接成功") {
      // Renamed from `data` to `msg` to stop shadowing the reactive `data` above.
      let msg = JSON.parse(e.data);
      let params = msg.data;
      console.log("Websocket接收值", msg);
      console.log("接收的data内部的data", params);

      // // Text-broadcast transcoding path (not the command path) — kept for reference:
      // if (wordbusinessSourceCode.value == msg.type) {
      //   // Only treat the message as ours when `type` matches our business code.
      //   if (params.audioFilePath) {
      //     wordaudioFilePath.value = pathToUrl(params.audioFilePath);
      //   }
      // }
      if (params.recognitionState == 1) {
        // Recognition finished for our command; surface the result.
        shibieword.value = params.recognitionResult;
        if (params.recognitionActionCode == "error") {
          nowword.value = `指令未识别,请您再说一遍`;
        } else {
          nowword.value = `成功识别语音,返回的指令为:${params.recognitionResult}`;

          if (params.recognitionActionCode == "open") {
            // "open" command
            if (params.recognitionDataSourceCode == "pump" && params.recognitionDataId) {
              // Pump-station example; recognitionDataId is the target's unique id
              // (pump: pump id, station: station id).
              pumpdia.value = true;
            }
            // Other data sources: add handling here. (The original had an
            // always-truthy `else if ("其它业务")` with an empty body — a no-op.)
          } else if (params.recognitionActionCode == "close") {
            // "close" command
          } else if (params.recognitionActionCode == "detail") {
            // "detail"/view command
          } else {
            nowword.value = `指令未识别,请您再说一遍`;
          }
        }
      }
    }
  };
  // Connection closed.
  data.recognizeWs.onclose = function (e) {
    console.log("断开连接");
  };
  // Reconnect with a 5 s throttle so failures don't spam the server.
  function reconnect() {
    if (lockReconnect.value) {
      return;
    }

    lockReconnect.value = true;
    timeoutnum.value && clearTimeout(timeoutnum.value);
    timeoutnum.value = setTimeout(() => {
      // BUG FIX: the original timer only cleared the lock and never
      // re-established the connection, so a failed socket was never retried.
      initRecognizeWs();
      lockReconnect.value = false;
    }, 5000);
  }
}
// Stop the current recording and immediately push the audio to the server
// over the WebSocket channel.
function stopRecorderAndupload(val) {
  recorder.stop();
  data.recordStatus = val;
  nowword.value = "录音结束";
  process.value = 1;
  uploadaudioformwebSocket();
}
// Click handler for the robot icon: toggles between starting a recording
// and stopping it (with automatic upload), based on `process`.
function STARTVOICE() {
  console.log("process.value", process.value);
  if (process.value != 1) {
    stopRecorderAndupload("stop");
    return;
  }
  startRecorder("begin");
  process.value = 2;
}
/**
 * Recording operations.
 */
// Begin a recording session: show the chat bubble, then request microphone
// permission and start the recorder once it is granted.
function startRecorder(val) {
  data.showModal = true;
  data.recordStatus = val;
  nowword.value = "开始录音,正在录音...";
  // Two-argument `.then` keeps the rejection handler scoped to the
  // permission request only (a `.catch` would also swallow start errors).
  Recorder.getPermission().then(
    () => {
      proxy.$modal.msgSuccess("获取权限成功,开始录音");
      recorder.start();
    },
    () => {
      proxy.$modal.msgError("请先允许该网页使用麦克风");
    }
  );
}
// Encode the captured WAV as MP3, base64 it, and ship it to the server
// through the recognition WebSocket.
function uploadaudioformwebSocket() {
  const mp3Blob = convertToMp3(recorder.getWAV());
  // recorder.download(mp3Blob, 'recorder', 'mp3');
  mp3ToBase64(mp3Blob).then((base64Audio) => {
    data.recognizeWs.send(
      JSON.stringify({
        createBy: userStore.userInfo.userName,
        voiceType: "mp3",
        data: base64Audio,
        businessSourceCode: "dpyysb",
      })
    );
  });
}
// Convert the recorder's WAV output to an MP3 Blob using lamejs.
function convertToMp3(wavDataView) {
  // Read channel count and sample rate from the WAV header.
  const wav = lamejs.WavHeader.readHeader(wavDataView); // could instead reuse the recorder config and skip header parsing
  const { channels, sampleRate } = wav;
  const mp3enc = new lamejs.Mp3Encoder(channels, sampleRate, 128);
  // Raw per-channel PCM from the recorder; reinterpret the byte buffers
  // as 16-bit samples (byteLength / 2 elements).
  const result = recorder.getChannelData();
  const buffer = [];
  const leftData =
    result.left && new Int16Array(result.left.buffer, 0, result.left.byteLength / 2);
  const rightData =
    result.right && new Int16Array(result.right.buffer, 0, result.right.byteLength / 2);
  // NOTE(review): `remaining` sums both channels, so for stereo the loop runs
  // past the end of each channel; the extra iterations yield empty subarrays
  // and are harmless, but `leftData.length` would be the tighter bound.
  const remaining = leftData.length + (rightData ? rightData.length : 0);
  const maxSamples = 1152; // samples per MP3 frame
  for (let i = 0; i < remaining; i += maxSamples) {
    const left = leftData.subarray(i, i + maxSamples);
    let right = null;
    let mp3buf = null;
    if (channels === 2) {
      right = rightData.subarray(i, i + maxSamples);
      mp3buf = mp3enc.encodeBuffer(left, right);
    } else {
      mp3buf = mp3enc.encodeBuffer(left);
    }
    if (mp3buf.length > 0) {
      buffer.push(mp3buf);
    }
  }
  // Flush whatever the encoder still holds internally.
  const enc = mp3enc.flush();
  if (enc.length > 0) {
    buffer.push(enc);
  }
  return new Blob(buffer, { type: "audio/mp3" });
}
// Read an MP3 Blob into a base64 data URL.
// @param {Blob} blob - the encoded MP3 audio
// @returns {Promise<string>} resolves with a `data:...;base64,...` string
function mp3ToBase64(blob) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onerror = () => reject(new Error("blobToBase64 error"));
    reader.onload = (event) => resolve(event.target.result);
    reader.readAsDataURL(blob);
  });
}
// Close the chat bubble, stop any in-progress recording, and reset the
// greeting message.
// BUG FIX: the original assigned the greeting to `nowword` *before* calling
// stopRecorder(), which immediately overwrote it with "录音结束" — the reset
// message was dead code. Stop first, then reset, so the greeting persists.
function closedia() {
  data.showModal = false;
  stopRecorder();
  nowword.value = "你好,请点击【开始录制】,进行语音录制!";
}
// Stop recording without uploading.
// @param {string} [val] - new recordStatus (undefined when called with no args)
function stopRecorder(val) {
  recorder.stop();
  data.recordStatus = val;
  nowword.value = "录音结束";
  process.value = 1;
}
</script>

<style lang="scss" scoped>
$text-color: #fff;
$form-item-margin-bottom: 20px;
$border-color-base: fade($text-color, 50%);
$primary-color: #1890ff;
$font-size-base: 15px;
$error-color: #f5222d;
$highlight-color: #f5222d;
$form-item-margin-bottom: 18px;
$popover-bg: #1d1f4a;
$select-item-selected-bg: $primary-color;

$theme-color-1: #9ec3de;
$theme-color-2: #36e2f7;
$theme-color-3: #072e7a;

$zindex-modal: 1009;
$zindex-modal-mask: 1009;

$tooltip-max-width: 300px;
$tooltip-bg: #1d1f4a;
#as {
  width: 100%;
  height: 100%;
}
.Separatebox {
  margin: 20px;
  padding: 10px;
  border: 1px solid #bec0e0;
  border-radius: 10px;
}

.titlename {
  position: relative;
  margin: 10px 0;
  font-size: 16px;
  font-weight: bold;
}

.wordtitle {
  color: green;
  font-size: 20px;
}

.record-page {
  position: relative;
  z-index: 1000;
  top: 120px;
  left: 140px;
  width: 40px;
  height: 250px;
  animation: hideLeftMenu 0.75s ease-in-out;
  animation-fill-mode: forwards;
  &.show {
    animation-fill-mode: forwards;
    animation: showLeftMenu 0.75s ease-in-out;
  }
  .img {
    width: 60px;
    cursor: pointer;
  }
  .duihuak {
    position: absolute;
    width: 382px;
    height: 166px;
    background: url("@/assets/images/voice/duihuak.png") no-repeat;
    top: -100px;
    left: 40px;
    padding: 50px 0 20px 0;
    font-size: 14px;
    transform: scale(0);
    transform-origin: left bottom;
    transition: all 0.25s ease-in-out;
    &.show {
      transform: scale(1);
    }
    .close {
      position: absolute;
      top: 0;
      right: 0;
      cursor: pointer;
    }
    .tip {
      position: absolute;
      top: 25px;
      left: 75px;
      font-size: 12px;
      color: #ccc;
    }
    .msg {
      position: absolute;
      top: 50px;
      left: 75px;
      font-size: 14px;
      color: #ffffff;
      span {
        color: $primary-color;
        font-weight: 600;
      }
      .message {
        margin-top: 5px;
        padding-right: 10px;
        padding-left: 20px;
        font-size: 15px;
        font-weight: 600;
        overflow: hidden;
        // text-align: right;
        text-overflow: ellipsis;
        cursor: pointer;
        display: -webkit-box;
        -webkit-box-orient: vertical;
        color: $primary-color;
        span {
          color: #ccc;
        }
      }
    }
  }
}

.bzbox {
  height: 400px;
}
</style>