{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import pickle\n",
    "import numpy as np\n",
    "import torch\n",
    "import torch.nn.functional as F\n",
    "from collections import OrderedDict\n",
    "from onmt_modules.misc import sequence_mask\n",
    "from model_autopst import Generator_2 as Predictor\n",
    "from hparams_autopst import hparams\n",
    "\n",
    "device = 'cuda:0'\n",
    "\n",
    "# Build the AutoPST predictor (Generator_2) in eval mode on the GPU\n",
    "P = Predictor(hparams).eval().to(device)\n",
    "\n",
    "# Load the pretrained predictor checkpoint (weights are mapped to CPU storage before loading)\n",
    "checkpoint = torch.load('./assets/580000-P.ckpt', map_location=lambda storage, loc: storage)\n",
    "P.load_state_dict(checkpoint['model'], strict=True)\n",
    "print('Loaded predictor .....................................................')\n",
    "\n",
    "# Test metadata: dict_test[speaker][utterance_id] -> (spectral features, speaker embedding)\n",
    "dict_test = pickle.load(open('./assets/test_vctk.meta', 'rb'))"
   ]
  },
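  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional sanity check (a minimal sketch): peek at the test metadata.\n",
    "# Assumes dict_test maps speaker -> utterance id -> (spectral features, speaker embedding)\n",
    "# stored as numpy arrays, which is how the conversion loop below consumes it.\n",
    "for spk in list(dict_test.keys())[:3]:\n",
    "    for utt_id, (cep, emb) in list(dict_test[spk].items())[:1]:\n",
    "        print(spk, utt_id, cep.shape, emb.shape)"
   ]
  },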
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "spect_vc = OrderedDict()\n",
    "\n",
    "# (source speaker, target speaker, utterance id) triples to convert\n",
    "uttrs = [('p231', 'p270', '001'),\n",
    "         ('p270', 'p231', '001'),\n",
    "         ('p231', 'p245', '003001'),\n",
    "         ('p245', 'p231', '003001'),\n",
    "         ('p239', 'p270', '024002'),\n",
    "         ('p270', 'p239', '024002')]\n",
    "\n",
    "\n",
    "for uttr in uttrs:\n",
    "\n",
    "    # Source speaker: spectral features of the utterance to convert (its own embedding is not used)\n",
    "    cep_real, _ = dict_test[uttr[0]][uttr[2]]\n",
    "    cep_real_A = torch.from_numpy(cep_real).unsqueeze(0).to(device)\n",
    "    len_real_A = torch.tensor(cep_real_A.size(1)).unsqueeze(0).to(device)\n",
    "    real_mask_A = sequence_mask(len_real_A, cep_real_A.size(1)).float()\n",
    "\n",
    "    # Target speaker: only the speaker embedding is needed\n",
    "    _, spk_emb = dict_test[uttr[1]][uttr[2]]\n",
    "    spk_emb_B = torch.from_numpy(spk_emb).unsqueeze(0).to(device)\n",
    "\n",
    "    # Sequence-to-sequence inference; only the first 14 spectral channels are fed to the model\n",
    "    with torch.no_grad():\n",
    "        spect_output, len_spect = P.infer_onmt(cep_real_A.transpose(2,1)[:,:14,:],\n",
    "                                               real_mask_A,\n",
    "                                               len_real_A,\n",
    "                                               spk_emb_B)\n",
    "\n",
    "    # Trim the padded output to its valid length and store it under 'source_target_utterance'\n",
    "    uttr_tgt = spect_output[:len_spect[0],0,:].cpu().numpy()\n",
    "\n",
    "    spect_vc[f'{uttr[0]}_{uttr[1]}_{uttr[2]}'] = uttr_tgt"
   ]
  },
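  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional: save the converted spectrograms, e.g. to feed them to a different vocoder later.\n",
    "# A minimal sketch; the output path 'results.pkl' is an assumed name used for illustration.\n",
    "with open('./assets/results.pkl', 'wb') as f:\n",
    "    pickle.dump(spect_vc, f)"
   ]
  },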
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Spectrogram to waveform\n",
    "# Feel free to use other vocoders\n",
    "# This cell requires some preparation to work: the synthesis module and the pretrained\n",
    "# WaveNet vocoder checkpoint below come from the corresponding part of AutoVC\n",
    "import torch\n",
    "import librosa\n",
    "import pickle\n",
    "import os\n",
    "from synthesis import build_model\n",
    "from synthesis import wavegen\n",
    "\n",
    "model = build_model().to(device)\n",
    "checkpoint = torch.load(\"./assets/checkpoint_step001000000_ema.pth\")\n",
    "model.load_state_dict(checkpoint[\"state_dict\"])\n",
    "\n",
    "for name, sp in spect_vc.items():\n",
    "    print(name)\n",
    "    waveform = wavegen(model, c=sp)\n",
    "    # librosa.output.write_wav was removed in librosa 0.8; with newer versions use soundfile.write instead\n",
    "    librosa.output.write_wav('./assets/'+name+'.wav', waveform, sr=16000)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}