Fangjun Kuang
Committed by GitHub

Fix C# to support Chinese tts models using jieba (#815)

@@ -24,6 +24,7 @@ cd ../offline-decode-files
 cd ../offline-tts
 ./run-aishell3.sh
 ./run-piper.sh
+./run-hf-fanchen.sh
 ls -lh
 
 cd ../..
@@ -47,6 +47,6 @@ jobs:
 env:
   API_KEY: ${{ secrets.NUGET_API_KEY }}
 run: |
-  # API_KEY is valid until 2024.05.02
+  # API_KEY is valid until 2025.04.26
   cd /tmp/packages
   dotnet nuget push ./org.k2fsa.sherpa.onnx.*.nupkg --skip-duplicate --api-key $API_KEY --source https://api.nuget.org/v3/index.json
@@ -28,6 +28,9 @@ class OfflineTtsPlayDemo
     [Option("tts-rule-fsts", Required = false, Default = "", HelpText = "path to rule.fst")]
     public string RuleFsts { get; set; }
 
+    [Option("vits-dict-dir", Required = false, Default = "", HelpText = "Path to the directory containing dict for jieba.")]
+    public string DictDir { get; set; }
+
     [Option("vits-data-dir", Required = false, Default = "", HelpText = "Path to the directory containing dict for espeak-ng.")]
     public string DataDir { get; set; }
 
@@ -129,6 +132,7 @@ to download more models.
     config.Model.Vits.Lexicon = options.Lexicon;
     config.Model.Vits.Tokens = options.Tokens;
     config.Model.Vits.DataDir = options.DataDir;
+    config.Model.Vits.DictDir = options.DictDir;
     config.Model.Vits.NoiseScale = options.NoiseScale;
     config.Model.Vits.NoiseScaleW = options.NoiseScaleW;
     config.Model.Vits.LengthScale = options.LengthScale;
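For context, here is a minimal C# sketch of how the new jieba dictionary directory is meant to be wired into the TTS configuration. Only Model.Vits.DictDir comes from this diff; the other members (OfflineTtsConfig, Model.Vits.Model/Lexicon/Tokens, RuleFsts) are assumed to match the existing SherpaOnnx dotnet bindings used by these demos, and the paths mirror run-hf-fanchen.sh below.

    // Sketch: configure a jieba-based Chinese VITS model (paths as in run-hf-fanchen.sh).
    using SherpaOnnx;

    var config = new OfflineTtsConfig();
    config.Model.Vits.Model = "./vits-zh-hf-fanchen-C/vits-zh-hf-fanchen-C.onnx";
    config.Model.Vits.Lexicon = "./vits-zh-hf-fanchen-C/lexicon.txt";
    config.Model.Vits.Tokens = "./vits-zh-hf-fanchen-C/tokens.txt";
    // New in this change: the directory holding jieba's dictionary files.
    config.Model.Vits.DictDir = "./vits-zh-hf-fanchen-C/dict";
    // Rule FSTs normalize phone numbers, dates, and numbers before synthesis.
    config.RuleFsts = "./vits-zh-hf-fanchen-C/phone.fst," +
                      "./vits-zh-hf-fanchen-C/date.fst," +
                      "./vits-zh-hf-fanchen-C/number.fst";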
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+set -ex
+if [ ! -f ./vits-zh-hf-fanchen-C/vits-zh-hf-fanchen-C.onnx ]; then
+  curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-zh-hf-fanchen-C.tar.bz2
+  tar xf vits-zh-hf-fanchen-C.tar.bz2
+  rm vits-zh-hf-fanchen-C.tar.bz2
+fi
+
+dotnet run \
+  --vits-model=./vits-zh-hf-fanchen-C/vits-zh-hf-fanchen-C.onnx \
+  --vits-tokens=./vits-zh-hf-fanchen-C/tokens.txt \
+  --vits-lexicon=./vits-zh-hf-fanchen-C/lexicon.txt \
+  --tts-rule-fsts=./vits-zh-hf-fanchen-C/phone.fst,./vits-zh-hf-fanchen-C/date.fst,./vits-zh-hf-fanchen-C/number.fst \
+  --vits-dict-dir=./vits-zh-hf-fanchen-C/dict \
+  --sid=100 \
+  --debug=1 \
+  --output-filename=./fanchen-100.wav \
+  --text="这是一个语音合成测试, 写于公元2024年4月26号, 11点05分,星期5。小米的使命是,始终坚持做'感动人心、价格厚道'的好产品,让全球每个人都能享受科技带来的美好生活。"
@@ -2,7 +2,6 @@
 
 set -ex
 if [ ! -f ./vits-piper-en_US-amy-low/en_US-amy-low.onnx ]; then
-  # wget -qq https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-en_US-amy-low.tar.bz2
   curl -OL https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-en_US-amy-low.tar.bz2
   tar xf vits-piper-en_US-amy-low.tar.bz2
   rm vits-piper-en_US-amy-low.tar.bz2
@@ -23,6 +23,9 @@ class OfflineTtsDemo
     [Option("tts-rule-fars", Required = false, Default = "", HelpText = "path to rule.far")]
     public string RuleFars { get; set; }
 
+    [Option("vits-dict-dir", Required = false, Default = "", HelpText = "Path to the directory containing dict for jieba.")]
+    public string DictDir { get; set; }
+
     [Option("vits-data-dir", Required = false, Default = "", HelpText = "Path to the directory containing dict for espeak-ng.")]
     public string DataDir { get; set; }
 
@@ -124,6 +127,7 @@ to download more models.
     config.Model.Vits.Lexicon = options.Lexicon;
     config.Model.Vits.Tokens = options.Tokens;
     config.Model.Vits.DataDir = options.DataDir;
+    config.Model.Vits.DictDir = options.DictDir;
     config.Model.Vits.NoiseScale = options.NoiseScale;
     config.Model.Vits.NoiseScaleW = options.NoiseScaleW;
     config.Model.Vits.LengthScale = options.LengthScale;
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+set -ex
+if [ ! -f ./vits-zh-hf-fanchen-C/vits-zh-hf-fanchen-C.onnx ]; then
+  curl -SL -O https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-zh-hf-fanchen-C.tar.bz2
+  tar xf vits-zh-hf-fanchen-C.tar.bz2
+  rm vits-zh-hf-fanchen-C.tar.bz2
+fi
+
+dotnet run \
+  --vits-model=./vits-zh-hf-fanchen-C/vits-zh-hf-fanchen-C.onnx \
+  --vits-tokens=./vits-zh-hf-fanchen-C/tokens.txt \
+  --vits-lexicon=./vits-zh-hf-fanchen-C/lexicon.txt \
+  --tts-rule-fsts=./vits-zh-hf-fanchen-C/phone.fst,./vits-zh-hf-fanchen-C/date.fst,./vits-zh-hf-fanchen-C/number.fst \
+  --vits-dict-dir=./vits-zh-hf-fanchen-C/dict \
+  --sid=100 \
+  --debug=1 \
+  --output-filename=./fanchen-100.wav \
+  --text="这是一个语音合成测试, 写于公元2024年4月26号, 11点05分,星期5。小米的使命是,始终坚持做'感动人心、价格厚道'的好产品,让全球每个人都能享受科技带来的美好生活。"
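As a usage note, both run-hf-fanchen.sh scripts drive the same synthesis path in the C# demos; that step looks roughly like the sketch below. The OfflineTts constructor, Generate, and SaveToWaveFile calls are assumed from the existing SherpaOnnx dotnet bindings, `config` is the configuration sketched earlier, and the speaker id and output file follow the script.

    // Sketch: synthesize with speaker 100 and save the result, as run-hf-fanchen.sh does.
    var tts = new OfflineTts(config);  // config: the OfflineTtsConfig sketched earlier
    float speed = 1.0f;                // the demos derive this from the length-scale option
    int speakerId = 100;               // --sid=100
    var audio = tts.Generate("这是一个语音合成测试", speed, speakerId);
    audio.SaveToWaveFile("./fanchen-100.wav");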
@@ -2,6 +2,7 @@
 # Copyright (c) 2023 Xiaomi Corporation
 
 import argparse
+import os
 import re
 from pathlib import Path
 
@@ -9,6 +10,8 @@ import jinja2
 
 SHERPA_ONNX_DIR = Path(__file__).resolve().parent.parent.parent
 
+src_dir = os.environ.get("src_dir", "/tmp")
+
 
 def get_version():
     cmake_file = SHERPA_ONNX_DIR / "CMakeLists.txt"
@@ -45,7 +48,7 @@ def process_linux(s):
         "libsherpa-onnx-kaldifst-core.so",
         "libucd.so",
     ]
-    prefix = "/tmp/linux/"
+    prefix = f"{src_dir}/linux/"
    libs = [prefix + lib for lib in libs]
    libs = "\n ;".join(libs)
 
@@ -74,7 +77,7 @@ def process_macos(s):
         "libsherpa-onnx-kaldifst-core.dylib",
         "libucd.dylib",
     ]
-    prefix = f"/tmp/macos/"
+    prefix = f"{src_dir}/macos/"
    libs = [prefix + lib for lib in libs]
    libs = "\n ;".join(libs)
 
@@ -106,7 +109,7 @@ def process_windows(s, rid):
 
    version = get_version()
 
-    prefix = f"/tmp/windows-{rid}/"
+    prefix = f"{src_dir}/windows-{rid}/"
    libs = [prefix + lib for lib in libs]
    libs = "\n ;".join(libs)
 
@@ -10,23 +10,36 @@ echo "SHERPA_ONNX_DIR: $SHERPA_ONNX_DIR"
 
 SHERPA_ONNX_VERSION=$(grep "SHERPA_ONNX_VERSION" $SHERPA_ONNX_DIR/CMakeLists.txt | cut -d " " -f 2 | cut -d '"' -f 2)
 
-# HF_MIRROR=hf-mirror.com
-HF_MIRROR=hf.co
+# You can pre-download the required wheels to $src_dir
+
+if [ $(hostname) == fangjuns-MacBook-Pro.local ]; then
+  HF_MIRROR=hf-mirror.com
+  src_dir=/Users/fangjun/open-source/sherpa-onnx/scripts/dotnet/tmp
+else
+  src_dir=/tmp
+  HF_MIRROR=hf.co
+fi
+export src_dir
 
-mkdir -p /tmp/
-pushd /tmp
+mkdir -p $src_dir
+pushd $src_dir
 
 mkdir -p linux macos windows-x64 windows-x86
 
-# You can pre-download the required wheels to /tmp
-src_dir=/tmp
 
-linux_wheel=$src_dir/sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
-macos_wheel=$src_dir/sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-macosx_11_0_x86_64.whl
-windows_x64_wheel=$src_dir/sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-win_amd64.whl
-windows_x86_wheel=$src_dir/sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-win32.whl
+linux_wheel_filename=sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+linux_wheel=$src_dir/$linux_wheel_filename
+
+macos_wheel_filename=sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-macosx_11_0_x86_64.whl
+macos_wheel=$src_dir/$macos_wheel_filename
+
+windows_x64_wheel_filename=sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-win_amd64.whl
+windows_x64_wheel=$src_dir/$windows_x64_wheel_filename
+
+windows_x86_wheel_filename=sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-win32.whl
+windows_x86_wheel=$src_dir/$windows_x86_wheel_filename
 
-if [ ! -f /tmp/linux/libsherpa-onnx-core.so ]; then
+if [ ! -f $src_dir/linux/libsherpa-onnx-core.so ]; then
   echo "---linux x86_64---"
   cd linux
   mkdir -p wheel
@@ -34,9 +47,9 @@ if [ ! -f /tmp/linux/libsherpa-onnx-core.so ]; then
   if [ -f $linux_wheel ]; then
     cp -v $linux_wheel .
   else
-    curl -OL https://$HF_MIRROR/csukuangfj/sherpa-onnx-wheels/resolve/main/sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+    curl -OL https://$HF_MIRROR/csukuangfj/sherpa-onnx-wheels/resolve/main/$linux_wheel_filename
   fi
-  unzip sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+  unzip $linux_wheel_filename
   cp -v sherpa_onnx/lib/*.so* ../
   cd ..
   rm -v libpiper_phonemize.so libpiper_phonemize.so.1.2.0
@@ -49,7 +62,7 @@ if [ ! -f /tmp/linux/libsherpa-onnx-core.so ]; then
   cd ..
 fi
 
-if [ ! -f /tmp/macos/libsherpa-onnx-core.dylib ]; then
+if [ ! -f $src_dir/macos/libsherpa-onnx-core.dylib ]; then
   echo "---macOS x86_64---"
   cd macos
   mkdir -p wheel
@@ -57,9 +70,9 @@ if [ ! -f /tmp/macos/libsherpa-onnx-core.dylib ]; then
   if [ -f $macos_wheel ]; then
     cp -v $macos_wheel .
   else
-    curl -OL https://$HF_MIRROR/csukuangfj/sherpa-onnx-wheels/resolve/main/sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-macosx_11_0_x86_64.whl
+    curl -OL https://$HF_MIRROR/csukuangfj/sherpa-onnx-wheels/resolve/main/$macos_wheel_filename
   fi
-  unzip sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-macosx_11_0_x86_64.whl
+  unzip $macos_wheel_filename
   cp -v sherpa_onnx/lib/*.dylib ../
 
   cd ..
@@ -75,7 +88,7 @@ if [ ! -f /tmp/macos/libsherpa-onnx-core.dylib ]; then
 fi
 
 
-if [ ! -f /tmp/windows-x64/sherpa-onnx-core.dll ]; then
+if [ ! -f $src_dir/windows-x64/sherpa-onnx-core.dll ]; then
   echo "---windows x64---"
   cd windows-x64
   mkdir -p wheel
@@ -83,9 +96,9 @@ if [ ! -f /tmp/windows-x64/sherpa-onnx-core.dll ]; then
   if [ -f $windows_x64_wheel ]; then
     cp -v $windows_x64_wheel .
   else
-    curl -OL https://$HF_MIRROR/csukuangfj/sherpa-onnx-wheels/resolve/main/sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-win_amd64.whl
+    curl -OL https://$HF_MIRROR/csukuangfj/sherpa-onnx-wheels/resolve/main/$windows_x64_wheel_filename
   fi
-  unzip sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-win_amd64.whl
+  unzip $windows_x64_wheel_filename
   cp -v sherpa_onnx-${SHERPA_ONNX_VERSION}.data/data/bin/*.dll ../
   cp -v sherpa_onnx-${SHERPA_ONNX_VERSION}.data/data/bin/*.lib ../
   cd ..
@@ -95,7 +108,7 @@ if [ ! -f /tmp/windows-x64/sherpa-onnx-core.dll ]; then
   cd ..
 fi
 
-if [ ! -f /tmp/windows-x86/sherpa-onnx-core.dll ]; then
+if [ ! -f $src_dir/windows-x86/sherpa-onnx-core.dll ]; then
   echo "---windows x86---"
   cd windows-x86
   mkdir -p wheel
@@ -103,9 +116,9 @@ if [ ! -f /tmp/windows-x86/sherpa-onnx-core.dll ]; then
   if [ -f $windows_x86_wheel ]; then
     cp -v $windows_x86_wheel .
   else
-    curl -OL https://$HF_MIRROR/csukuangfj/sherpa-onnx-wheels/resolve/main/sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-win32.whl
+    curl -OL https://$HF_MIRROR/csukuangfj/sherpa-onnx-wheels/resolve/main/$windows_x86_wheel_filename
   fi
-  unzip sherpa_onnx-${SHERPA_ONNX_VERSION}-cp38-cp38-win32.whl
+  unzip $windows_x86_wheel_filename
   cp -v sherpa_onnx-${SHERPA_ONNX_VERSION}.data/data/bin/*.dll ../
   cp -v sherpa_onnx-${SHERPA_ONNX_VERSION}.data/data/bin/*.lib ../
   cd ..
@@ -38,29 +38,29 @@ static void Handler(int32_t sig) {
   fprintf(stderr, "\nCaught Ctrl + C. Exiting...\n");
 }
 
-static std::string tolowerUnicode(const std::string& input_str) {
-
-    // Use system locale
-    std::setlocale(LC_ALL, "");
-
-    // From char string to wchar string
-    std::wstring input_wstr(input_str.size()+1, '\0');
-    std::mbstowcs(&input_wstr[0], input_str.c_str(), input_str.size());
-    std::wstring lowercase_wstr;
-
-    for (wchar_t wc : input_wstr) {
-        if (std::iswupper(wc)) {
-            lowercase_wstr += std::towlower(wc);
-        } else {
-            lowercase_wstr += wc;
-        }
+static std::string tolowerUnicode(const std::string &input_str) {
+  // Use system locale
+  std::setlocale(LC_ALL, "");
+
+  // From char string to wchar string
+  std::wstring input_wstr(input_str.size() + 1, '\0');
+  std::mbstowcs(&input_wstr[0], input_str.c_str(), input_str.size());
+  std::wstring lowercase_wstr;
+
+  for (wchar_t wc : input_wstr) {
+    if (std::iswupper(wc)) {
+      lowercase_wstr += std::towlower(wc);
+    } else {
+      lowercase_wstr += wc;
     }
-
-    // Back to char string
-    std::string lowercase_str(input_str.size()+1, '\0');
-    std:wcstombs(&lowercase_str[0], lowercase_wstr.c_str(), lowercase_wstr.size());
+  }
+
+  // Back to char string
+  std::string lowercase_str(input_str.size() + 1, '\0');
+  std::wcstombs(&lowercase_str[0], lowercase_wstr.c_str(),
+                lowercase_wstr.size());
 
-    return lowercase_str;
+  return lowercase_str;
 }
 
 int32_t main(int32_t argc, char *argv[]) {