// index.js
const dasha = require("@dasha.ai/sdk");
const AudioResources = require("./customTts.js");
const fs = require("fs");
const sys = require('child_process');
const shared = require("./shared");

// Local PBX extension used as the outgoing line; also stripped from the
// destination number when it appears as a 4-char prefix (see main()).
const local_num = "4029";
// Owner id sent with back-office API requests ("store_conversations" etc.).
const events_owner = "105";
// Name of the SIP configuration used for audio calls.
const sip_config = "huntersales";
// Fallback template id when the scenario output carries no template_id.
const preset_template_id = 105;
// Directory where the .txt / .log / .debug files are written.
const logs_path = "./logs/";
// MIME type used when uploading those files to Google Disk.
const file_mime_type = "text/plain";

/**
 * Progressive reconnect back-off: wait iterationNum² seconds,
 * capped at 10 minutes.
 *
 * @param {number} iterationNum - reconnect attempt number (0-based).
 * @returns {number} delay in milliseconds before the next attempt.
 */
function nextInterval(iterationNum) {
  // Maximum waiting period: 10 minutes
  const msMax = 600000;
  // Quadratic growth of the waiting period
  const msWait = iterationNum * iterationNum * 1000;
  // Return the smaller of the two values
  return Math.min(msMax, msWait);
}

async function main() {
  // Drives one outbound conversation end-to-end: deploys a per-process
  // copy of the Dasha app, registers external functions and a custom
  // file-based TTS provider, runs the call (or a console chat), writes
  // transcript / log / debug files, and reports the results to the
  // back-office API and Google Disk.

  const app_suffix = shared.pid_name();
  const app_work_dir = shared.make_unique_app(app_suffix);
  const app = await dasha.deploy(app_work_dir, {
    // Working group name
    groupName: "Default",
    // How many reconnect attempts to the Dasha.AI servers are made on a
    // lost connection before the application stops
    maxReconnectCount: 100,
    // Function computing the wait before the next reconnect attempt
    reconnectInterval: x => nextInterval(x)
  });
  // The unique working copy is only needed for deploy(); drop it now
  if (app_work_dir !== './app') shared.drop_unique_app(app_suffix);

  // Pre-recorded phrases served by the custom TTS provider below
  const audio = new AudioResources();
  audio.addFolder("audio");

  app.ttsDispatcher = (conv) => "custom";
  app.customTtsProvider = async (text, voice) => {
    console.log(`Tts asking for phrase with text ${text} and voice ${JSON.stringify(voice)}`);
    const fname = audio.GetPath(text, voice);

    console.log(`Found in file ${fname}`);
    return dasha.audio.fromFile(fname);
  };

  // ---- External functions callable from the dialogue scenario ----

  app.setExternal("numbers_from_text", async (args) => {
    return await shared.cct_numbers_from_text(args.text);
  });

  app.setExternal("dates_from_text", async (args) => {
    return await shared.cct_dates_from_text(args.text);
  });

  app.setExternal("part_of_the_day", (args, conv) => {
    return shared.get_part_of_the_day();
  });

  app.setExternal("array_size", (args, conv) => {
      const arr = args.arr;
      return arr.length;
  });

  app.setExternal("string_trim", (args, conv) => {
      const str = args.one_line;
      return str.trim();
  });

  app.setExternal("is_empty", (args, conv) => {
    return shared.empty(args.check);
  });

  // Appends a stage marker and returns the (mutated) stages array
  app.setExternal("performed_stage", (args, conv) => {
    const arr = args.stages;
    const val = args.stage;
    arr.push(val);
    return arr;
  });

  app.setExternal("sleep_ms", async (args, conv) => {
    return new Promise((resolve) => {
      setTimeout(resolve, args.duration);
    });
  });

  app.setExternal("json_encode", async (args, conv) => {
    return JSON.stringify(args.object, undefined, 2);
  });

  app.setExternal("time_stamp", async (args, conv) => {
    return Date.now();
  });

  // FIXME: Math.floor/round/ceil take a single argument — args.presision
  // (sic) is silently ignored in all three externals below, so no
  // precision handling actually happens.
  app.setExternal("math_floor", async (args, conv) => {
    return Math.floor(args.value, args.presision);
  });

  app.setExternal("math_round", async (args, conv) => {
    return Math.round(args.value, args.presision);
  });

  app.setExternal("math_ceil", async (args, conv) => {
    return Math.ceil(args.value, args.presision);
  });

  app.setExternal("phone_human", async (args, conv) => {
    return shared.human_format(args.phone);
  });

  app.setExternal("last_four", async (args, conv) => {
    return shared.last_four_digits(args.phone);
  });

  app.setExternal("check_mobile_code", async (args, conv) => {
    return shared.is_mobile_code(args.phone);
  });

  app.setExternal("hours_now", async (args, conv) => {
    // FIXME: `ts` is the script-start timestamp captured further down in
    // this function (via closure), so this returns the hour at startup,
    // not the hour at the moment the external is called; `new Date()`
    // was probably intended.
    let date_time_now = new Date(ts);
    return date_time_now.getHours();
  });

  // NOTE(review): splits on a single space — consecutive spaces or other
  // whitespace inflate the word count; splitting on /\s+/ would be safer.
  app.setExternal("countWords", (args) => {
    return args.message.split(' ').length;
  });

  await app.start();

  // Destination number from argv[2]; caller ID from argv[3], defaulting
  // to the destination number
  let abonent_phone = process.argv[2] ?? "";
  let caller_phone = process.argv[3] ?? abonent_phone;
  const conv = app.createConversation({ phone: abonent_phone,
                                        caller: caller_phone });

  // Passing the literal "chat" as the phone selects a text console chat
  // instead of a SIP audio call
  const audioChannel = conv.input.phone !== "chat";
  if (audioChannel) {
    conv.sip.config = sip_config;
    // conv.audio.tts = 'custom';
    conv.on("transcription", console.log);
  } else {
    await dasha.chat.createConsoleChat(conv);
  }

  // Set the minimal background noise level
  conv.audio.noiseVolume = 0.1;

  let debugItem = 0;

  // current timestamp in milliseconds
  let ts = Date.now();

  // Get now date info
  let date_ob = new Date(ts);
  let date_day = date_ob.getDate();
  let month = date_ob.getMonth() + 1;
  let year = date_ob.getFullYear();
  let hours = date_ob.getHours();
  let minutes = date_ob.getMinutes();
  let seconds = date_ob.getSeconds();

  // Seconds-resolution start time, used to make the file names unique
  let unique = ts / 1000;

  // Zero-pad all components to two digits (numbers become strings here)
  if (month    < 10) month    = '0' + month;
  if (date_day < 10) date_day = '0' + date_day;
  if (hours    < 10) hours    = '0' + hours;
  if (minutes  < 10) minutes  = '0' + minutes;
  if (seconds  < 10) seconds  = '0' + seconds;

  // Make date and time string for log & debug file
  let solid_date = year + '' + month + '' + date_day;
  let solid_time = hours + '' + minutes + '' + seconds;
  let time_mark = year + '-' + month + '-' + date_day
        + ' ' + hours + ':' + minutes + ':' + seconds;

  let toUser = conv.input.phone;
  // Check PBX phone prefix and cut it
  let check_prefix = toUser.substring(0, 4);
  if (local_num == check_prefix) toUser = toUser.substring(4);

  // Make log & debug files names
  let txt_file_name = logs_path + 'out-' + toUser + '-'
                      + local_num + '-' + solid_date + '-'
                     + solid_time + '-' + unique + '.txt';
  let log_file_name = logs_path + 'out-' + toUser + '-'
                      + local_num + '-' + solid_date + '-'
                     + solid_time + '-' + unique + '.log';
  let debug_file_name = logs_path + 'out-' + toUser + '-'
                        + local_num + '-' + solid_date + '-'
                     + solid_time + '-' + unique + '.debug';

  // Open text, log and debug files for write
  // NOTE(review): no try/finally around these handles — an exception
  // before the close() calls below leaks the file descriptors.
  const txtFile = await fs.promises.open(txt_file_name, "w");
  const logFile = await fs.promises.open(log_file_name, "w");
  const debugFile = await fs.promises.open(debug_file_name, "w");

  // Transcript header: caller id and human-readable start time
  caller_phone = shared.human_format(conv.input.caller);
  let txtData = "\t" + caller_phone + "\t-=#=-\t" + time_mark + "\n\n";
  await txtFile.appendFile(txtData);

  // FIXME: debugString / logString (and rts, dts, dts_m, dts_s, speaker,
  // replic, conversation_status_id below) are assigned without let/const
  // — implicit globals that would throw in strict mode or an ES module.
  debugString = 'Initialize call to: ' + toUser + "\n";
  await debugFile.appendFile(debugString);

  if (audioChannel) console.log(debugString);

  logString = 'Conversation started to: ' + toUser
            + "\n\nConversation replics:\n";
  await logFile.appendFile(logString);

  // Append each transcription entry to the transcript and the log file,
  // prefixed with the wall-clock time and a mm:ss.s offset from start
  conv.on("transcription", async (entry) => {
    rts = entry.startTime.getTime();
    dts = rts - ts;
    dts_m = Math.floor(dts / 60000);
    dts_s = ((dts - (dts_m * 60000)) / 1000).toFixed(1);
    if (dts_m < 10) dts_m = '0' + dts_m;
    if (dts_s < 10) dts_s = '0' + dts_s;

    hours   = entry.startTime.getHours();
    minutes = entry.startTime.getMinutes();
    seconds = entry.startTime.getSeconds();
    if (hours < 10)    hours    = '0' + hours;
    if (minutes < 10)  minutes  = '0' + minutes;
    if (seconds < 10)  seconds  = '0' + seconds;

    // Pad the "ai" speaker tag so both columns line up in the transcript
    speaker = entry.speaker;
    if (speaker === "ai") speaker = "   ai";
    replic = speaker + ': [' + hours + ':' + minutes + ':' + seconds + ' '
                        + dts_m + ':' + dts_s + '] - ' + entry.text + "\n";

    txtData = txtData + replic;
    await txtFile.appendFile(replic);
    await logFile.appendFile(replic);

    debugItem ++;
    let debugString = '"Debug Info #' + debugItem + ' dialogue": '
             + JSON.stringify(entry, undefined, 2) + "\n";
    await debugFile.appendFile(debugString);
  });

  // Mirror low-level debug events to the debug file; recognized-speech
  // facts additionally go to the log file
  conv.on("debugLog", async (event) => {
    if (event?.msg?.msgId === "RecognizedSpeechMessage") {
      const logEntry = event?.msg?.results[0]?.facts;
      await logFile.appendFile(JSON.stringify(logEntry, undefined, 2) + "\n");
      debugItem ++;
      let debugString = '"Debug Info #' + debugItem + ' entry": '
               + JSON.stringify(logEntry, undefined, 2) + "\n";
      await debugFile.appendFile(debugString);
    }
    debugItem ++;
    let debugString = '"Debug Info #' + debugItem + ' event": '
             + JSON.stringify(event, undefined, 2) + "\n";
    await debugFile.appendFile(debugString);
  });

  // Run the conversation to completion
  const result = await conv.execute();

  // Compute the wall-clock time and mm:ss.s offset of the call end
  let close_time = new Date();
  rts = close_time.getTime();
  dts = rts - ts;
  dts_m = Math.floor(dts / 60000);
  dts_s = ((dts - (dts_m * 60000)) / 1000).toFixed(1);
  if (dts_m < 10) dts_m = '0' + dts_m;
  if (dts_s < 10) dts_s = '0' + dts_s;

  hours   = close_time.getHours();
  minutes = close_time.getMinutes();
  seconds = close_time.getSeconds();
  if (hours   < 10)  hours    = '0' + hours;
  if (minutes < 10)  minutes  = '0' + minutes;
  if (seconds < 10)  seconds  = '0' + seconds;

  // Human-readable (Russian) labels for scenario completion statuses
  const conversations_statuses = {
    normal:  "разговор завершён",
    broken:  "абонент повесил трубку",
    forward: "разговор переведён на другой номер"
  };
  try {
    conversation_status_id = result.output.conversation_status;
    conversation_status = conversations_statuses[conversation_status_id];
  } catch(err) {
    conversation_status_id = "unknown";
    conversation_status = "Не определённое в сценанрии завершение разговора";
  }
  // Closing system line for the transcript
  speaker = "  sys";
  replic = speaker + ': [' + hours + ':' + minutes + ':' + seconds + ' '
         + dts_m + ':' + dts_s + "] - / " + conversation_status + " /\n";

  txtData = txtData + replic;
  await txtFile.appendFile(replic);
  await logFile.appendFile(replic);

  logString = "\nConversation results:" +
          JSON.stringify(result.output, undefined, 2) + "\n";
  await logFile.appendFile(logString);

  console.log(result.output);

  // Recording availability note (Russian, user-facing)
  let audio_url = "";
  if (shared.empty(result.recordingUrl))
    audio_url = "\nАудио запись разговора не доступна\n";
  else
    audio_url = "\nЗапись разговора доступна по ссылке:\n"
                               + result.recordingUrl  + "\n";
  // txtData = txtData + audio_url;
  await txtFile.appendFile(audio_url);
  await logFile.appendFile(audio_url);

  const conv_info = shared.get_conversation_info(app, conv);

  // Close text, log and debug files
  await txtFile.close();
  await logFile.close();
  await debugFile.close();

  // Store conversation text and data.
  // Fallbacks step down gracefully: missing start -> script start time,
  // missing begin -> start, missing stop -> now
  let conv_start = result.output.conversation_start;
  let conv_begin = result.output.conversation_begin;
  let conv_stop = result.output.conversation_stop;
  if (shared.empty(conv_start)) conv_start = 0;
  if (shared.empty(conv_begin)) conv_begin = 0;
  if (shared.empty(conv_stop)) conv_stop = 0;
  if (conv_start == 0) conv_start = ts;
  if (conv_begin == 0) conv_begin = conv_start;
  if (conv_stop == 0) conv_stop = Date.now();

  let output_data = JSON.stringify(result.output, undefined, 2);

  let template_id = result.output.template_id??preset_template_id;
  let action = "store_conversations";
  let request = {
    owner: events_owner,
    datetime_event: Math.ceil(conv_start/1000),
 // src: caller_phone,
 // dst: local_num,
    dst: caller_phone,
    src: local_num,
    duration: Math.ceil((conv_stop - conv_start)/1000),

    direction: "output",
    billsec: Math.ceil((conv_stop - conv_begin)/1000),
    app_name: conv_info.app_name,
    app_name_full: conv_info.app_name_full,
    app_id: conv_info.app_id,
    job_id: conv_info.job_id,

    report_data: output_data,
    conversation: txtData,
    template_id: template_id
  };
  console.log(txtData);
  let response = await shared.ai_api_hook(action, request);
  console.log(response);

  // A forwarded call additionally triggers an immediate mail report
  if (conversation_status_id == "forward") {
    var conversations_id = response.conversations_id ?? 0;
    action = "force_conversations_mail";
    request = {
      owner: events_owner,
      time: Math.ceil(conv_start/1000),
      id: conversations_id,
      swap: 1
    };
    console.log(request);
    response = await shared.ai_api_hook(action, request);
    console.log(response);
  }

  // Send text file to Google Disk
  if (audioChannel)
  {
    shared.renew_last_log_file(txt_file_name);
    shared.send_to_google_disk(txt_file_name, file_mime_type);
  }
  // Send log file to Google Disk
  if (audioChannel)
    shared.send_to_google_disk(log_file_name, file_mime_type);
  // Send debug file to Google Disk
  if (audioChannel)
    shared.send_to_google_disk(debug_file_name, file_mime_type);

  await app.stop();
  app.dispose();
}

// Entry point: log any unhandled failure and signal it via a non-zero
// exit code (previously the process exited 0 even on error, hiding
// failures from cron / shell callers).
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});