{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "%load_ext autoreload\n", "%autoreload 2\n", "import os\n", "\n", "os.environ['TORCH_LOGS'] = '+dynamic'\n", "import pylab as pl" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/utils/weight_norm.py:143: FutureWarning: `torch.nn.utils.weight_norm` is deprecated in favor of `torch.nn.utils.parametrizations.weight_norm`.\n", " WeightNorm.apply(module, name, dim)\n", "/rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:123: UserWarning: dropout option adds dropout after all but last recurrent layer, so non-zero dropout expects num_layers greater than 1, but got dropout=0.2 and num_layers=1\n", " warnings.warn(\n" ] }, { "ename": "TypeError", "evalue": "CustomAlbert.forward() got an unexpected keyword argument 'attention_mask'", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[3], line 10\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mkokoro\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m generate\n\u001b[1;32m 9\u001b[0m text \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mHow could I know? It\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124ms an unanswerable question. Like asking an unborn child if they\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mll lead a good life. 
They haven\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mt even been born.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m---> 10\u001b[0m audio, out_ps \u001b[38;5;241m=\u001b[39m \u001b[43mgenerate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtext\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvoicepack\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 12\u001b[0m \u001b[38;5;66;03m# 4️⃣ Display the 24khz audio and print the output phonemes\u001b[39;00m\n\u001b[1;32m 13\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mIPython\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mdisplay\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m display, Audio\n", "File \u001b[0;32m~/Projects/DeepLearning/TTS/Kokoro-82M/kokoro.py:147\u001b[0m, in \u001b[0;36mgenerate\u001b[0;34m(model, text, voicepack, lang, speed)\u001b[0m\n\u001b[1;32m 145\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mTruncated to 510 tokens\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m 146\u001b[0m ref_s \u001b[38;5;241m=\u001b[39m voicepack[\u001b[38;5;28mlen\u001b[39m(tokens)]\n\u001b[0;32m--> 147\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtokens\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mref_s\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mspeed\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 148\u001b[0m ps \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(\u001b[38;5;28mnext\u001b[39m(k \u001b[38;5;28;01mfor\u001b[39;00m k, v \u001b[38;5;129;01min\u001b[39;00m VOCAB\u001b[38;5;241m.\u001b[39mitems() \u001b[38;5;28;01mif\u001b[39;00m i \u001b[38;5;241m==\u001b[39m v) \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m 
tokens)\n\u001b[1;32m 149\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out, ps\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/utils/_contextlib.py:116\u001b[0m, in \u001b[0;36mcontext_decorator..decorate_context\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 113\u001b[0m \u001b[38;5;129m@functools\u001b[39m\u001b[38;5;241m.\u001b[39mwraps(func)\n\u001b[1;32m 114\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mdecorate_context\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 115\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m ctx_factory():\n\u001b[0;32m--> 116\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/Projects/DeepLearning/TTS/Kokoro-82M/kokoro.py:119\u001b[0m, in \u001b[0;36mforward\u001b[0;34m(model, tokens, ref_s, speed)\u001b[0m\n\u001b[1;32m 117\u001b[0m input_lengths \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mLongTensor([tokens\u001b[38;5;241m.\u001b[39mshape[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m]])\u001b[38;5;241m.\u001b[39mto(device)\n\u001b[1;32m 118\u001b[0m text_mask \u001b[38;5;241m=\u001b[39m length_to_mask(input_lengths)\u001b[38;5;241m.\u001b[39mto(device)\n\u001b[0;32m--> 119\u001b[0m bert_dur \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbert\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtokens\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mattention_mask\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m~\u001b[39;49m\u001b[43mtext_mask\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mint\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 120\u001b[0m d_en \u001b[38;5;241m=\u001b[39m model\u001b[38;5;241m.\u001b[39mbert_encoder(bert_dur)\u001b[38;5;241m.\u001b[39mtranspose(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m2\u001b[39m)\n\u001b[1;32m 121\u001b[0m s \u001b[38;5;241m=\u001b[39m ref_s[:, \u001b[38;5;241m128\u001b[39m:]\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/module.py:1736\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1734\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1735\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1736\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/module.py:1747\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1742\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1743\u001b[0m \u001b[38;5;66;03m# 
this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1744\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1747\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1749\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1750\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", "\u001b[0;31mTypeError\u001b[0m: CustomAlbert.forward() got an unexpected keyword argument 'attention_mask'" ] } ], "source": [ "from models import build_model\n", "import torch\n", "device = \"cpu\" #'cuda' if torch.cuda.is_available() else 'cpu'\n", "model = build_model('kokoro-v0_19.pth', device)\n", "voicepack = torch.load('voices/af.pt', weights_only=True).to(device)\n", "\n", "# 3️⃣ Call generate, which returns a 24khz audio waveform and a string of output phonemes\n", "from kokoro import generate\n", "text = \"How could I know? It's an unanswerable question. 
Like asking an unborn child if they'll lead a good life. They haven't even been born.\"\n", "audio, out_ps = generate(model, text, voicepack)\n", "\n", "# 4️⃣ Display the 24khz audio and print the output phonemes\n", "from IPython.display import display, Audio\n", "display(Audio(data=audio, rate=24000, autoplay=True))\n", "print(out_ps)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "torch.Size([1, 640, 348])\n" ] }, { "data": { "text/html": [ "\n", " \n", " " ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ "hˌaʊ kʊd aɪ nˈoʊ? ɪts ɐn ʌnˈænsɚɹəbəl kwˈɛstʃən. lˈaɪk ˈæskɪŋ ɐn ʌnbˈɔːɹn tʃˈaɪld ɪf ðeɪl lˈiːd ɐ ɡˈʊd lˈaɪf. ðeɪ hˈævənt ˈiːvən bˌɪn bˈɔːɹn.\n" ] } ], "source": [ "from kokoro import phonemize, tokenize, length_to_mask\n", "import torch.nn.functional as F\n", "model = model\n", "speed = 1.\n", "\n", "ps = phonemize(text, \"a\")\n", "tokens = tokenize(ps)\n", "\n", "tokens = torch.LongTensor([[0, *tokens, 0]]).to(device)\n", "\n", "# tokens = torch.nn.functional.pad(tokens, (0, 510 - tokens.shape[-1]))\n", "input_lengths = torch.LongTensor([tokens.shape[-1]]).to(device)\n", "\n", "text_mask = length_to_mask(input_lengths).to(device)\n", "bert_dur = model.bert(tokens, attention_mask=(~text_mask).int())\n", "\n", "\n", "d_en = model.bert_encoder(bert_dur).transpose(-1, -2)\n", "\n", "ref_s =voicepack[tokens.shape[1]]\n", "s = ref_s[:, 128:]\n", "\n", "d = model.predictor.text_encoder.inference(d_en, s)\n", "x, _ = model.predictor.lstm(d)\n", "\n", "duration = model.predictor.duration_proj(x)\n", "duration = torch.sigmoid(duration).sum(axis=-1) / speed\n", "pred_dur = torch.round(duration).clamp(min=1).long()\n", "max_mels = pred_dur.sum().item()\n", "pred_aln_trg = torch.zeros(input_lengths, max_mels)\n", "\n", "c_start = F.pad(pred_dur,(1,0), \"constant\").cumsum(dim=1)[0,0:-1]\n", "c_end = 
c_start + pred_dur[0,:]\n", "\n", "for row, cs, ce in zip(pred_aln_trg, c_start, c_end):\n", " row[cs:ce] = 1\n", " \n", "en = d.transpose(-1, -2) @ pred_aln_trg.unsqueeze(0).to(device)\n", "print(en.shape)\n", "F0_pred, N_pred = model.predictor.F0Ntrain(en, s)\n", "t_en = model.text_encoder.inference(tokens)\n", "asr = t_en @ pred_aln_trg.unsqueeze(0).to(device)\n", "output = model.decoder(asr, F0_pred, N_pred, ref_s[:, :128]).squeeze().detach().cpu().numpy()\n", "\n", "from IPython.display import display, Audio\n", "display(Audio(data=output, rate=24000, autoplay=True))\n", "print(out_ps)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "ename": "Error", "evalue": "Unable to infer type of dictionary: Cannot infer concrete type of torch.nn.Module", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[4], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m scrpt \u001b[38;5;241m=\u001b[39m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mjit\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mscript\u001b[49m\u001b[43m(\u001b[49m\u001b[43mMODEL\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/jit/_script.py:1429\u001b[0m, in \u001b[0;36mscript\u001b[0;34m(obj, optimize, _frames_up, _rcb, example_inputs)\u001b[0m\n\u001b[1;32m 1427\u001b[0m prev \u001b[38;5;241m=\u001b[39m _TOPLEVEL\n\u001b[1;32m 1428\u001b[0m _TOPLEVEL \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[0;32m-> 1429\u001b[0m ret \u001b[38;5;241m=\u001b[39m \u001b[43m_script_impl\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1430\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mobj\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mobj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1431\u001b[0m \u001b[43m \u001b[49m\u001b[43moptimize\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moptimize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1432\u001b[0m \u001b[43m \u001b[49m\u001b[43m_frames_up\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_frames_up\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1433\u001b[0m \u001b[43m \u001b[49m\u001b[43m_rcb\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_rcb\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1434\u001b[0m \u001b[43m \u001b[49m\u001b[43mexample_inputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mexample_inputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1435\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1437\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m prev:\n\u001b[1;32m 1438\u001b[0m log_torchscript_usage(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mscript\u001b[39m\u001b[38;5;124m\"\u001b[39m, model_id\u001b[38;5;241m=\u001b[39m_get_model_id(ret))\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/jit/_script.py:1154\u001b[0m, in \u001b[0;36m_script_impl\u001b[0;34m(obj, optimize, _frames_up, _rcb, example_inputs)\u001b[0m\n\u001b[1;32m 1151\u001b[0m obj \u001b[38;5;241m=\u001b[39m obj\u001b[38;5;241m.\u001b[39m__prepare_scriptable__() \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mhasattr\u001b[39m(obj, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m__prepare_scriptable__\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;28;01melse\u001b[39;00m obj \u001b[38;5;66;03m# type: ignore[operator]\u001b[39;00m\n\u001b[1;32m 1153\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(obj, \u001b[38;5;28mdict\u001b[39m):\n\u001b[0;32m-> 1154\u001b[0m 
\u001b[38;5;28;01mreturn\u001b[39;00m create_script_dict(obj)\n\u001b[1;32m 1155\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(obj, \u001b[38;5;28mlist\u001b[39m):\n\u001b[1;32m 1156\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m create_script_list(obj)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/jit/_script.py:1066\u001b[0m, in \u001b[0;36mcreate_script_dict\u001b[0;34m(obj)\u001b[0m\n\u001b[1;32m 1053\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcreate_script_dict\u001b[39m(obj):\n\u001b[1;32m 1054\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 1055\u001b[0m \u001b[38;5;124;03m Create a ``torch._C.ScriptDict`` instance with the data from ``obj``.\u001b[39;00m\n\u001b[1;32m 1056\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1064\u001b[0m \u001b[38;5;124;03m zero copy overhead.\u001b[39;00m\n\u001b[1;32m 1065\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m-> 1066\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_C\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mScriptDict\u001b[49m\u001b[43m(\u001b[49m\u001b[43mobj\u001b[49m\u001b[43m)\u001b[49m\n", "\u001b[0;31mError\u001b[0m: Unable to infer type of dictionary: Cannot infer concrete type of torch.nn.Module" ] } ], "source": [ "scrpt = torch.jit.script(model)" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "torch.Size([1, 143, 640])\n", "torch.Size([1, 640, 143])\n", "torch.Size([1, 512, 143])\n", "torch.Size([1, 143, 348])\n", "en.shape=torch.Size([1, 640, 348])\n", "s.shape=torch.Size([1, 128])\n", "en.dtype=torch.float32\n", "s.dtype=torch.float32\n", "torch.Size([1, 512, 143])\n", "torch.Size([1, 512, 348])\n" ] }, { "data": { "text/plain": [ "" ] }, "execution_count": 11, "metadata": {}, "output_type": 
"execute_result" }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAigAAAD8CAYAAABdPV+VAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAyOUlEQVR4nO3deXwTdf4/8NckadODNqVnGmih3CA3QikqgnS5XOQSAUEBERYEFFBEXLn87S4sHqso6qoruF9BRZdjRWHlagFbChQKchWKhbbQtFBo0oOmOT6/P6rB0HInnSR9PR+PPB6Zz3xm8s7HVF/OfGZGEkIIEBEREbkRhdwFEBEREV2PAYWIiIjcDgMKERERuR0GFCIiInI7DChERETkdhhQiIiIyO0woBAREZHbYUAhIiIit8OAQkRERG6HAYWIiIjcjqwBZcWKFWjcuDH8/PwQHx+Pffv2yVkOERERuQnZAsrXX3+N2bNnY+HChTh48CA6dOiAfv36obCwUK6SiIiIyE1Icj0sMD4+Hl27dsX7778PALDZbIiJicGMGTPwyiuv3HRbm82GCxcuICgoCJIk1Ua5REREdI+EECgpKYFOp4NCcfNjJKpaqslBZWUl0tPTMW/ePHubQqFAYmIiUlNTq/U3mUwwmUz25fPnz6NNmza1UisRERE5V25uLho2bHjTPrIElEuXLsFqtSIqKsqhPSoqCidPnqzWf8mSJVi8eHG19iYzFkCp9nNZnTdj9ROwxFag+bISWLOyZamBiIjIk1hgxh78gKCgoFv2lSWg3Kl58+Zh9uzZ9mWj0YiYmBjEvncYKslHlppsD3bEiE+SsTL5MYQKFSy/nJWlDiIiIo/x66SS25meIUtACQ8Ph1KpREFBgUN7QUEBtFpttf5qtRpqtbq2yrstij0Z+E+bKPTP2IVvznRCg2FyV0REROQ9ZLmKx9fXF126dMH27dvtbTabDdu3b0dCQoIcJd0dIZD8YgKshzVQJ2uhjIiQuyIiIiKvINtlxrNnz8Ynn3yCzz//HCdOnMDUqVNRVlaGCRMmyFXSXfHZlo6wY1aYLCoUPtYMivat5C6JiIjI48k2B2XkyJG4ePEiFixYAL1ej44dO2LLli3VJs56gsBv06D4IQDvH92Ip9ZOR5OjyqoVNqu8hREREXko2e6Dci+MRiM0Gg16YbBsk2SrkSQoWzfHyan18be+awEA780fiXpr98pcGBERkXuwCDOSsBEGgwHBwcE37esRV/F4BCFgPX4KkWndMS9gOABA1UVCRUgCwj+ufm8XIiIiujEGFCfTfLEXmi+q3us3tEZcj4so3xgJ68UinvIhIiK6TXyasQvpnjwH/TtN8fH+dUC3++Quh4iIyGMwoLiQrbwcmoN6DHj/ZZz6kw/0L/SQuyQiIiKPwFM8LmbJPgfdsnOo3NoI56RwaLu3BwCozhXCkq+XuToiIiL3xCMotcT3D+cQeEyN/637N/637t84M6WJ3CURERG5LR5BqUWx355Hz9OTAQCmvhbo9lY9LOnsolbw3bJfztKIiIjcCgNKLbJkn4N/9jkAQL1mPXChpQYAUNjZB/UiEiAJgdBNJ2AtNshZJhERkewYUGSiezMFeLPqfcPtwH9brYdVCAw9NRHSwRMQNsHLkomIqM5iQHEDqmckDK03FkKSEPuvX9C//inkmkPx44NxsF65Ind5REREtY4BxQ1YzuXa3+/aloBtYVX3TPGZo4QkAN8rUtURFyIiojqCAcXNNP5z1W3xlfXrY/K+/Wjicwn/MXTBgS8bATabvZ+l4CJPARERkddiQHFT1itX8M/Onaret2uCz1JXwFeSAACVQmDSwGdhO3pSzhKJiIhchgHFjdlKSgAAqlN5GPjOy4B0bV3Z3KvwUbdz6F+pD0Dz59Nqs0QiIiKXYEDxANZLRYh++3dzUCQJyh3R6B2R6dBvd1RzVD
zQ0WmfqyyrhC3juNP2R0REdLsYUDyRELD2voBtCHJs7x6JH9etctrHzC3oiIxOTtsdERHRbWNA8SKKY9noNWmS0/ZnjFGh+/5D9uVSiy+KhvrDoi9w2mcQERHVhAHFi9hKSqD+3nm3zI/q2AbHh2nty2abAleHRkFZce05QhG79bBmZTvtM4mIiAAGFLoJW8ZxqPteW/b388Nfj21Agp/J3vbAoucRfrbqPi7CYqntEomIyEsxoNBts1VUYFm/wYBSaW8zLTVgxtyjAIDlT44A9v0sV3lERORFGFDojlx/Oke1vQem5z9d9f5xJaRhCZDMQJOlR2ArK5OjRCIi8gIMKHRPIt9PQeSv73V7gzA9ajsuWoPw7n+GQ3Em134vFyIiojuhkLsA8h76R6x4rW1vvPdgL7z0n6+RPaet3CUREZGHYkAhp7GVl8NWVgbrpSK8vGwyKjUCpz69H5CkW29MRET0Owwo5HTCYkH4P1OhvqxAfOtfYH24E5ThYXKXRUREHsTpAWXJkiXo2rUrgoKCEBkZiSFDhiAz0/GW7L169YIkSQ6vKVOmOLsUklns4hQYJ4Xj+9X/RNHAFnKXQ0REHsTpASU5ORnTpk3D3r17sXXrVpjNZvTt2xdl113RMWnSJOTn59tfy5Ytc3Yp5AbELznoP/E5FD9ahtbpKvsr+28JcpdGRERuzOlX8WzZssVhedWqVYiMjER6ejp69uxpbw8ICIBWq71+c/IytooK+G7ZD/FwAtKCG9nbzfWtKJpUPaREbT0Py9mc2iyRiIjckMsvMzYYDACA0NBQh/bVq1fjiy++gFarxaBBgzB//nwEBATUuA+TyQST6drdS41Go+sKJpeIm5fqsFz6chSSFr5drV9i+UyE5Fc960f87p85ERHVLZIQQrhq5zabDY899hiKi4uxZ88ee/vHH3+MRo0aQafT4ciRI5g7dy66deuGdevW1bifRYsWYfHixdXae2EwVJKPq8onF1IGBwNR4dXaDe8BC5t9BxsUeHfIUNiOnpShOiIicgWLMCMJG2EwGBAcHHzTvi4NKFOnTsXmzZuxZ88eNGzY8Ib9duzYgT59+iArKwtNmzattr6mIygxMTEMKF5I/0IPlDS1AgAUlRIgJCgrgSb/7xBsFRUyV0dERPfiTgKKy07xTJ8+HZs2bcKuXbtuGk4AID4+HgBuGFDUajXUarVL6iT3on03BVoAkCS0PqDEqPppOGsOx+df9oOq5NpEa6u+kKeAiIi8mNMDihACM2bMwPr165GUlIS4uLhbbpORkQEAiI6OdnY55KmEwMkeSiyWHoAiPAxLdn+OOB+bffWg6S/Af8M+GQskIiJXcnpAmTZtGtasWYONGzciKCgIer0eAKDRaODv748zZ85gzZo1GDhwIMLCwnDkyBHMmjULPXv2RPv27Z1dDnkwYTJBABAXL2Hi0pmwqa7dkdYwxATNhFawCgkxM0pgyc2Tr1AiInI6p89BkW5wW/OVK1di/PjxyM3NxdixY3H06FGUlZUhJiYGQ4cOxWuvvXbL81G/MRqN0Gg0nINSh51Z0xGj2qTDLJTY/ffu8C8029f5ZebDcv6CjNUREVFN3GaSrKswoJCdQomJJ7PwRD2Dvan9W88h+q0UGYsiIqKauMUkWaJaYbPik2eG4iM/pb2pZIwJAw5VvT/4fCcodh+SqTgiIrpbDCjk8RR7Mhye2RB4fw/siKh69k/5Q/4I13SD3yZOqCUi8iQMKOR1Giy9dnqndaoev/QMh7RVzcuSiYg8iNMfFkjkToqGBaDsv1rMPJYBVZPGcpdDRES3iUdQyKtZ8vWI2h+Bad9NAGYAgBaqcglxi9IhzJVyl0dERDfAgEJeT+z/Gc0PqRCffhW9653AzxUx2Lw6HlJFJVBphiXvvNwlEhHRdRhQqE4QFgvSutVDGrpCEaPDiu2fIUKpwsKCHjjaRe7qiIjoegwoVGf8NklWXCjAiP83B0IJmEIkBHx/EQBQsj
8CsYt4/xQiInfAgEJ1jq28HGGfpgIApPvbot3ILADA+ub1YE7sAt/dR3nFDxGRzHgVD9Vp4sBRHOkscKSzgGaXH9aveh9KnVbusoiI6jweQSH6lXbjLxiaPQO61VloGmjGzwYdyvsYeLUPEZEMGFCIfmXRF8D3UhFSxrbHkeBomEw+8HuuOSQb4H/JhuAv98pdIhFRncGAQvQ7wmJB86cPAgBUcY3wftIHCFEo8Gf9I8jeGADb1auA5z1fk4jI4zCgEN2A5Vwepj84ClBIKO6mw1tHP8CCsc9ASjksd2lERF6PAYXoRmxWWHLzAAAaHxWe+nY6rCMFMDoekkVCy4XHYTUaZS6SiMg7MaAQ3QZrVjaavJwN/+QoPNdgB4ptAfh07RCoLpVAslhhyT4nd4lERF6FAYXoDlT0KcLbUico6gXitfTPcb9vJTaVR+CzNs0hLBa5yyMi8hoMKER34LcQYjVYMWfhVFjVgCVAgv93BVBIAvmnItD8+TSZqyQi8nwMKER3w2ZFyP9V3Y1W1SgGujGXoVZYUBqrhmlgVwCA//lS2A6fkLNKIiKPxYBCdI8s53KRE1/1PuSPQUj69GMAQMvdT6PxSBkLIyLyYAwoRE4UuCcTf3hiPABA8UgAHvm5zL5ud1EzmHvreR8VIqLbwIBC5ETWYgMUezIAABFh3fDvDt3s60wmHwTOjAN+yycCaLA6E9ZLRbVfKBGRm2NAIXIR/4370HDjtWXlfS3xr83vw0eSAAA2AGMPTIPyasU9f5awWPgEZiLyKgwoRLXEeiILzyY8ca1BktDpuwyMDrn3q37GHR4P7RBOyCUi7+H0gLJo0SIsXrzYoa1ly5Y4efIkAKCiogIvvvgivvrqK5hMJvTr1w8ffPABoqKinF0KkXuxWWE5f8Gh6Ycve2B9aMK971oJnPqg6nSSdreCDzYkIo/nkiMo9913H7Zt23btQ1TXPmbWrFn4/vvv8c0330Cj0WD69OkYNmwYfvrpJ1eUQuTWdMtSnLKf0hHxmPu3LwAAL2As6mc0r1qRXwhrscEpn0FEVJtcElBUKhW0Wm21doPBgH/9619Ys2YNHnnkEQDAypUr0bp1a+zduxfdu3d3RTlEXq/et/vwwfo2AACf15VYv20NACD+7y8garlzQhARUW1SuGKnp0+fhk6nQ5MmTTBmzBjk5OQAANLT02E2m5GYmGjv26pVK8TGxiI1NfWG+zOZTDAajQ4vIvodIaomylosaLrWgIf+/Dwe+vPzMLQ149S/7pe7OiKiO+b0gBIfH49Vq1Zhy5Yt+PDDD5GdnY2HHnoIJSUl0Ov18PX1RUhIiMM2UVFR0Ov1N9znkiVLoNFo7K+YmBhnl03kNWwZx1F/VSrqr0qF6ooKEVoDKv7YDcqwULlLIyK6bU4PKAMGDMCIESPQvn179OvXDz/88AOKi4uxdu3au97nvHnzYDAY7K/c3FwnVkzkvZrMTYVmaT0kf/wxyrs1lbscIqLb5pJTPL8XEhKCFi1aICsrC1qtFpWVlSguLnboU1BQUOOcld+o1WoEBwc7vIjo9qgOnkLfx8fBNvMSTn3K0z1E5BlcHlBKS0tx5swZREdHo0uXLvDx8cH27dvt6zMzM5GTk4OEhHu/1JKIqrOVlUFKOYz8Q1rAokD+7B5QMuQTkZtzekB56aWXkJycjLNnzyIlJQVDhw6FUqnE6NGjodFoMHHiRMyePRs7d+5Eeno6JkyYgISEBF7BQ+RiTV5JRXiaEt++8AZEYx2UwcFQBAXJXRYRUY2cfplxXl4eRo8ejaKiIkRERODBBx/E3r17ERERAQD4xz/+AYVCgeHDhzvcqI2IXC9szUHM2jEKgzfvRo+AM9h7tQnWd4qFreLeb7dPRORMkhCe92hVo9EIjUaDXhgMleQjdzlEnkWhxPmX42EOFIACqAy3ANK11a3fvAzrqTPy1UdEXssizEjCRhgMhlvOJ+WzeIjqGpsVDZZW3bxN1bABhm1NR4
iy3L76re1PIsTnun81mC0MLURUqxhQiOowS955rL2vgUPbQ+l78ZfIdIe2H68G4r3W7SHMlbVZHhHVYQwoRHWdzeqwuHd+NzxY3/GqOnMgUH/zeagkG06d1qHFlH21WSER1UEMKETkwG/TPvhd16Zq2ACBYyvgpzTjQlQwyofG29dJNoGAHzJ4dIWInIoBhYhuyZJ3HoYHAQOAsIGBSPr0n/Z1hdYyTOg+ApbzF+QrkIi8DgMKEd0R/90n0X/wU/Zlq58KnTcdQpz6InZdaYGih4zVThsREd0pBhQiuiO2khJg/8/2ZaVajbUHukIZaIatUomAl9UAgMALAiH/vvFTyomIboYBhYjuiTCZ0GLSfgCAsmUzvPPj5/CTBGafG4Ly7+rDauARFSK6cy5/Fg8R1R3WU2cw8/4hmNJlKC6saIa3Dn0PqXNrucsiIg/EgEJEziMErBcvwnrxIkLSCzFs1UvIfDYAlybzYaBEdGcYUIjIJaynf0HsohR0aH0OV3pUQtG2FaBQyl0WEXkIBhQicqmrvQoRcFKND77/FMqwULnLISIPwUmyRORaQiB2XQFGnZ+Del+eR5BPFDILIxH7xFHA855VSkS1hAGFiFzOeuoMQs/rUfF0OPxUZoTUK0fp490gXZdP6mWXQqQfk6dIInIrDChEVCtsZWXw/UMZrgAI6RCFDT+8DxUc56Q03zoJzSdIjhvyKAtRncSAQkS1Tpz8BYMfG1+t3XewL544nm9f3ljYEaaH9bVYGRG5CwYUIqp1wmQCajiVExnTDX9tPPBav0oF/P/cpHq/DDPU3+93aY1EJC8GFCJyG/4b9qH5hmvLUqf7sGLDO9UuN3xkyyy03lt1RZCttKwq8BCRV2FAISK3JQ6fxIwug6u1+85U4b2D/wUADH/zZUS9l1LbpRGRizGgEJH7sllhvVRUrTl2awUGXX0ZAFDRwgrjvzsDAFq+WQ7bkZO1WiIRuQYDChF5HEXyIcQkV70/vSIeC7ptAgB80Hk4Ikobw/LLWfmKIyKnYEAhIo/WfFoavpQaAAA6pWXgxGgt/PvJXBQR3TPe6p6IPJ8QgBA4/WobFCVHI2h3OIJ2hyP/xR5yV0ZEd4kBhYi8hs+2dEQcMsNiU8JiU6I01oaSUd1RMqo7VE0ay10eEd0BBhQi8irqzftx9eECXH24AKqrEna/9QF2v/UBzj+qAySp6kVEbs/pAaVx48aQJKnaa9q0aQCAXr16VVs3ZcoUZ5dBRIRm/ziDPw4cgz8OHIOyhHI8fTIHT5/Mge3BjnKXRkS34PRJsvv374fVarUvHz16FH/4wx8wYsQIe9ukSZPw+uuv25cDAgKcXQYREawFhUBBIQAgMLUHXjMNAQD4DPSF8pEekGxA4+VHYTUaZaySiGri9IASERHhsLx06VI0bdoUDz/8sL0tICAAWq3W2R9NRHRDUe+lIOrX92E/1ceihptQbPPFgu/HQ5lXCAhbjfdcISJ5uPQy48rKSnzxxReYPXs2pN+d9129ejW++OILaLVaDBo0CPPnz7/pURSTyQTT725lbeT/7RDRPbjS34KZqj9CUvtifPIP6OGfi6TyxljToSlvm0/kJlwaUDZs2IDi4mKMHz/e3vbkk0+iUaNG0Ol0OHLkCObOnYvMzEysW7fuhvtZsmQJFi9e7MpSiagOsZWUVL1RKLHkg9GwBABCBZj+dRWSBCjO+SHu1VR5iySq4yQhhHDVzvv16wdfX1989913N+yzY8cO9OnTB1lZWWjatGmNfWo6ghITE4NeGAyV5OP0uomo7lHFNESvzScQpKjANxe6wGee5tpKIYCMkxAWi3wFEnkBizAjCRthMBgQHBx8074uO4Jy7tw5bNu27aZHRgAgPj4eAG4aUNRqNdRqtdNrJCL6jSU3D9vaBgEIgq1/FLZs/MS+7pK1DOMSnoAl77x8BRLVMS4LKCtXrkRkZCQeffTRm/bLyMgAAERHR7uqFCKiO+J/4Bc8NO1P9m
WbCgj6dx6i/K/9H9+5v7aC36Z9cpRHVCe4JKDYbDasXLkS48aNg0p17SPOnDmDNWvWYODAgQgLC8ORI0cwa9Ys9OzZE+3bt3dFKUREd8x6qQgB669d0SOp1Sh8qjEU0rUz4hc7qhCs6W5fDj5bAemnjNosk8iruSSgbNu2DTk5OXjmmWcc2n19fbFt2za88847KCsrQ0xMDIYPH47XXnvNFWUQETmFMJkQ8VgmrL9rC93ih51Tv7Ev37d7AuJSlb9uYKuat0JEd82lk2RdxWg0QqPRcJIsEclG1SgGtqBA+/K5waFYNG41AOD/fTwGujdT5CqNyG25xSRZIiJvZjmX67Cs1d2PuY2eAAAoo224MKcHdG+m8kgK0V3iwwKJiJzA58cDaDF5P1pM3g+br8DYp7dCpYuGxCsQie4KAwoRkZO1nHMYOyZ2x/spa2Ec2knucog8Ek/xEBE5ma2iAsrTeXjsvZdR2tuMoqHtIQTQfJ4BluxzcpdH5BF4BIWIyAWsV65A92YKFIEWTG27C8+124XirtEQCR2Abu0AhVLuEoncGo+gEBG5ULOxh7AZIYAk4alj32FKyHlkm0sxrctgPj2Z6CYYUIiIaoMQ+GZGf6ypp4TVV0L0+jPQ+NTDwYKGiBySyat9iK7DgEJEVEtU29OhAqAIDET20w0Q7GeCxabAlXFVd6QN1Fvgu2W/vEUSuQkGFCKiWmYrK0P4oFMAgIYd2+A/m96FUpIw/PRjsGxT8anJROAkWSIiWYljWXi8z5MYmvgk8tc0xpQTJ6Fs00LusohkxyMoREQyEuZKWDOzAAARAb6Ytf1JqJ5WQrIlQFkhodHf0yFMJpmrJKp9DChERG5CpB9Di4MSuhy04omQ/ThljsSqr/8AqbQcwmKBtaBQ7hKJag0DChGROxEChx6oh0PKRCjCQ/G3HauhU1nw4eWuSOngK3d1RLWGAYWIyM3Yysur3pjNGP/2LAgVYA4AxDclkCQBcSQYsa/zacnk3RhQiIjclK2iAlHvVQURRftWeGjkISglGz5HPGwPdoRy/wnOTyGvxat4iIg8gO3ISSS398eOdoEI+m8QNn39CRRNG8ldFpHLMKAQEXmY8B9/Qd8/TYPt/TLkvdpD7nKIXIIBhYjIw1j0BfD7IR1Z+ghc1VphGNudDx8kr8OAQkTkiWxWNH0yAz5GBd56/QMo62sASZK7KiKnYUAhIvJgTf+RicVjJmBMymEUPdtd7nKInIYBhYjIg1mLLkN1LBsLvh+B4lYCF+ZwTgp5BwYUIiIPZzUa0Wz2XtjCK9H98cNQxTSE5MObupFnY0AhIvISLZ49il9ebYUvUtbC9Eh7ucshuicMKEREXkKYK+F3LA+9l72Ec0/akLOAp3vIczGgEBF5EYu+AFHvpcC/ngnWNqWw9uoMa6/OUDVpLHdpRHfkjgPKrl27MGjQIOh0OkiShA0bNjisF0JgwYIFiI6Ohr+/PxITE3H69GmHPpcvX8aYMWMQHByMkJAQTJw4EaWlpff0RYiI6JqGw48heFsgtq35DNvWfIYTM6PkLonojtxxQCkrK0OHDh2wYsWKGtcvW7YMy5cvx0cffYS0tDQEBgaiX79+qKiosPcZM2YMjh07hq1bt2LTpk3YtWsXJk+efPffgoiIqon6Phu9JzyL3hOehcIswScpGpJaLXdZRLdFEkKIu95YkrB+/XoMGTIEQNXRE51OhxdffBEvvfQSAMBgMCAqKgqrVq3CqFGjcOLECbRp0wb79+/H/fffDwDYsmULBg4ciLy8POh0ult+rtFohEajQS8MhkryudvyiYjqjKKJCTD90QCfHzXQJl2C9cTpW29E5GQWYUYSNsJgMCA4OPimfZ06ByU7Oxt6vR6JiYn2No1Gg/j4eKSmpgIAUlNTERISYg8nAJCYmAiFQoG0tLQa92symWA0Gh1eRER0+8L+lYrYmSXYMO8N5PeJgKRWVx1N4d1nyU05NaDo9XoAQFSU47nOqK
go+zq9Xo/IyEiH9SqVCqGhofY+11uyZAk0Go39FRMT48yyiYjqBEvuBUx9eAxMvYx45fh+vHJ8P2wPdJC7LKIaqeQu4HbMmzcPs2fPti8bjUaGFCKiO2WzwpJ9DgH/S8DEvF/n/Q0DpKHdIVkkNF9yHNZig7w1Ev3KqQFFq9UCAAoKChAdHW1vLygoQMeOHe19CgsLHbazWCy4fPmyffvrqdVqqDmxi4jIKcI+SUXYr+/9k6MwO+Z/KLH5Yfn6J6AqMAAWKyy5ebLWSOTUUzxxcXHQarXYvn27vc1oNCItLQ0JCQkAgISEBBQXFyM9Pd3eZ8eOHbDZbIiPj3dmOUREdAsVfYuxpE13rIjvgRe/+BJrdn+FZ7bv4q3ySXZ3fASltLQUWVlZ9uXs7GxkZGQgNDQUsbGxmDlzJv7yl7+gefPmiIuLw/z586HT6exX+rRu3Rr9+/fHpEmT8NFHH8FsNmP69OkYNWrUbV3BQ0REziNMJggAqDTjz397FhY/CVY/QPntZUiSgOFMfTSbtVfuMqkOuuOAcuDAAfTu3du+/NvckHHjxmHVqlV4+eWXUVZWhsmTJ6O4uBgPPvggtmzZAj8/P/s2q1evxvTp09GnTx8oFAoMHz4cy5cvd8LXISKiu2KzIvSzqqstVTEN0XyMHmqFBTsULWBO7GLvJtkAn12HISwWuSqlOuKe7oMiF94HhYiodpgGdkXSp5/YlwutZZjQfQQs5y/IWBV5qju5D4pHXMVDRETyCEg5hT5jJ9qXrWoF4v5zEg38rPa23fMT4PfdPjnKIy/GgEJERDdkLTZAtePaRQ0+ajV+eroV6gVee3zJ1S4q+MU4PjlZYRGI+PwghMlUa7WSd2FAISKi2yZMJsSNPuzQVralCb5vu9qhLc8CvLx5BGwXLzn1822VZsBmvXVH8ngMKEREdE+Cx5XiSf+RDm0i0B+jtm5HU9/CG2x1d6a/OR2RH6Q4dZ/knhhQiIjonlgLqocQSa3GX/8zAlYn305FNLfBsPzaPbM0J5UMLF6KAYWIiJxOmExo/OdUp+/31Cdd8ekjn9mXX2o0Asof4+zLksUKy9kcp38u1T4GFCIi8hgtpx7CG8r77cvKsfWxNukf9uU0UyDebBcPW3m5HOWREzGgEBGRxxAWC/C7m8RFJuvRa/Es+7LFT4LP2ksI+GcIL332cAwoRETksaxZ2QjLyrYvK6Mi0WBsGX5qH46GRR0gpRy+ydbkzpz6sEAiIiI5WQsKkRNfBlXXK0j48IDc5dA9YEAhIiKv0/BVK354pyd6HK6ESOggdzl0F3iKh4iIvI71+ClEoAW+/aUjrH0C4du1ByQboF15GLayMrnLo9vAgEJERF7JevwUdEOBxvv88dfobSixCUzbPgFS1jkIc6Xc5dEtMKAQEZFXyx0UhHE+TwC+Pnh0014sX/9Hl9yjhZyLc1CIiMirWQsKYck7D8vZXLz/1SBYAgR+WZoAScX/R3dnDChERFQ32KyIfT0FPkYFxg3cCalVMyhbNoOqUYzclVENGB+JiKhOabQ4DSmr78Nn2z6FRuGLl/MfwumucldF12NAISKiusVmhTivx2ML5wASUBEuof7/8gEAF3+KRuzrfPigO2BAISKiOsdWVobQlb9OlO3eHk1HVAWULU1CYRrQFX7bDvNKH5lxDgoREdVte4/gTNcKnOlageCDanz78TtQRkXIXVWdx4BCRET0K93XWRj19AzErb+EC3N6yF1OncaAQkRE9CtrQSFUe47ih2NtcVVrw6XJCYAkyV1WncSAQkRE9DvCXInm49MhlMALs7+BMrQ+75kiAwYUIiKiGrSYfwxfTHgUC/ZvRfHI++Uup86544Cya9cuDBo0CDqdDpIkYcOGDfZ1ZrMZc+fORbt27RAYGAidToenn34aFy5ccNhH48aNIUmSw2vp0qX3/GWIiIicxVZSAtWpXIxbPR2F3QXy5nFOSm2644BSVlaGDh06YMWKFdXWlZeX4+
DBg5g/fz4OHjyIdevWITMzE4899li1vq+//jry8/PtrxkzZtzdNyAiInIRa9FlNJ6fClV4BXSJuVC2aQFJrZa7rDrhjk+qDRgwAAMGDKhxnUajwdatWx3a3n//fXTr1g05OTmIjY21twcFBUGr1d7pxxMREdW6uDFHcXVQF2zaugKDHn8WUuphuUvyei6fg2IwGCBJEkJCQhzaly5dirCwMHTq1AlvvPEGLBbLDfdhMplgNBodXkRERLXGZkXQvhz0nPc8zr9kQe5rPN3jai6dllxRUYG5c+di9OjRCA4Otrc///zz6Ny5M0JDQ5GSkoJ58+YhPz8fb7/9do37WbJkCRYvXuzKUomIiG7Kkq9HyL/1KB18H642rsTVwd0AAPWOX4L19C8yV+d9JCGEuOuNJQnr16/HkCFDqq0zm80YPnw48vLykJSU5BBQrvfZZ5/hT3/6E0pLS6Gu4dyeyWSCyWSyLxuNRsTExKAXBkMl+dxt+URERHflwks98PPsDwAArT55Do0W8vk9t8MizEjCRhgMhpvmAsBFR1DMZjOeeOIJnDt3Djt27LhlEfHx8bBYLDh79ixatmxZbb1ara4xuBAREckh5v+y0PencQAA0zgLAndFoKzXZcBmlbky7+H0gPJbODl9+jR27tyJsLCwW26TkZEBhUKByMhIZ5dDRETkdNaCQkgFhQCA0HYJyPCPQcCc5ojdUAhrZpbM1XmHOw4opaWlyMq6NvjZ2dnIyMhAaGgooqOj8fjjj+PgwYPYtGkTrFYr9Ho9ACA0NBS+vr5ITU1FWloaevfujaCgIKSmpmLWrFkYO3Ys6tev77xvRkREVAvCP05F1J4WWLF5OUYWzEFYXj5sZWVyl+Xx7ngOSlJSEnr37l2tfdy4cVi0aBHi4uJq3G7nzp3o1asXDh48iOeeew4nT56EyWRCXFwcnnrqKcyePfu2T+MYjUZoNBrOQSEiIvegUEIZEYbSfwfCR2mFKjFH7orc0p3MQbmnSbJyYUAhIiJ3dHlCAopbApYwC9r8RQ/LuVy5S3IrdxJQ+CweIiIiJwldmYoGyRb8pec6lHSKhqphA7lL8lgMKERERE6k3rwf/3dfHEb+bTNO/C1K7nI8FgMKERGRkwmLBWvnDoAqTw2fpGj4JEWj+KkEucvyKAwoRERELuD33T6EZALBPhUI9qnAldZA2fB4lA2Ph0rLIyu34tJb3RMREdVl9T9PRdHnVe8tn5uwZ/xnAIAHZk5BvbUFMlbm/hhQiIiIakHrhZfQ/72xAIDKRZcxfHEhzEKJ74fG8+ZuNWBAISIiqgWWsznA2ar3pft6YJmhHwDAZ5Q/FJWRUJoA3XsHIMyV8hXpRhhQiIiIalns678+XFChxCOHjRitOYSTlfXx7roBEIYSez9bSQmExSJTlfJiQCEiIpKLzYrkXjFIVjQGQjV4efs66FTXAsrkqTOh/mG/fPXJiAGFiIhIRtaiywAARVkZpn06BeJ3/2W++pgF0hNdIGwS2ryaC4u+7kysZUAhIiJyA7bycjRckuLQdvG/LTGzxXaYhQr/13kQ/C+E2ddJuXp7uPFGDChERERuKmLwKaxGDCRfX8z4+WsMCSy2r+u8bDq076bceGMPx4BCRETkrn59nq8wmbD8xVF4K+Da/VWNf6hE010R9uUj+5qi6Ut7a71EV2FAISIi8gB+3+2D3++Wi9onQNFS2JdtkZUofaJ7rddV72wZsO9np++XAYWIiMgDNf5zKkp+txw6sTl2/WNFrdfRMvkZNB0j3bqjELfu8zsMKERERF4gct1JPHrwqVr/XJ9BARh94vwt+/1l0zA0enH3be+XAYWIiMgLWK9cAa5cqfXPjdJ1xetNBt26ow+QNzce+PvG29qvRwYU8ethIgvMwJ0dMSIiIiInUm5KQdymW/fLmxuP7SM/RqO/X/vv+M1I4nZ6uZm8vDzExMTIXQYRERHdhdzcXDRs2PCmfTwyoNhsNmRmZqJNmzbIzc
1FcHCw3CW5FaPRiJiYGI5NDTg2N8fxuTGOzY1xbG6MY+NICIGSkhLodDooFIqb9vXIUzwKhQINGjQAAAQHB/Mf+g1wbG6MY3NzHJ8b49jcGMfmxjg212g0mtvqd/P4QkRERCQDBhQiIiJyOx4bUNRqNRYuXAi1Wi13KW6HY3NjHJub4/jcGMfmxjg2N8axuXseOUmWiIiIvJvHHkEhIiIi78WAQkRERG6HAYWIiIjcDgMKERERuR2PDCgrVqxA48aN4efnh/j4eOzbt0/ukmrdokWLIEmSw6tVq1b29RUVFZg2bRrCwsJQr149DB8+HAUFBTJW7Fq7du3CoEGDoNPpIEkSNmzY4LBeCIEFCxYgOjoa/v7+SExMxOnTpx36XL58GWPGjEFwcDBCQkIwceJElJaW1uK3cI1bjc348eOr/Zb69+/v0Mdbx2bJkiXo2rUrgoKCEBkZiSFDhiAzM9Ohz+38LeXk5ODRRx9FQEAAIiMjMWfOHFgsltr8Kk53O2PTq1evar+dKVOmOPTxxrH58MMP0b59e/vN1xISErB582b7+rr6m3E2jwsoX3/9NWbPno2FCxfi4MGD6NChA/r164fCwkK5S6t19913H/Lz8+2vPXv22NfNmjUL3333Hb755hskJyfjwoULGDZsmIzVulZZWRk6dOiAFStW1Lh+2bJlWL58OT766COkpaUhMDAQ/fr1Q0VFhb3PmDFjcOzYMWzduhWbNm3Crl27MHny5Nr6Ci5zq7EBgP79+zv8lr788kuH9d46NsnJyZg2bRr27t2LrVu3wmw2o2/fvigrK7P3udXfktVqxaOPPorKykqkpKTg888/x6pVq7BgwQI5vpLT3M7YAMCkSZMcfjvLli2zr/PWsWnYsCGWLl2K9PR0HDhwAI888ggGDx6MY8eOAai7vxmnEx6mW7duYtq0afZlq9UqdDqdWLJkiYxV1b6FCxeKDh061LiuuLhY+Pj4iG+++cbeduLECQFApKam1lKF8gEg1q9fb1+22WxCq9WKN954w95WXFws1Gq1+PLLL4UQQhw/flwAEPv377f32bx5s5AkSZw/f77Wane168dGCCHGjRsnBg8efMNt6srYCCFEYWGhACCSk5OFELf3t/TDDz8IhUIh9Hq9vc+HH34ogoODhclkqt0v4ELXj40QQjz88MPihRdeuOE2dWVshBCifv364tNPP+Vvxok86ghKZWUl0tPTkZiYaG9TKBRITExEamqqjJXJ4/Tp09DpdGjSpAnGjBmDnJwcAEB6ejrMZrPDOLVq1QqxsbF1cpyys7Oh1+sdxkOj0SA+Pt4+HqmpqQgJCcH9999v75OYmAiFQoG0tLRar7m2JSUlITIyEi1btsTUqVNRVFRkX1eXxsZgMAAAQkNDAdze31JqairatWuHqKgoe59+/frBaDTa/4/aG1w/Nr9ZvXo1wsPD0bZtW8ybNw/l5eX2dXVhbKxWK7766iuUlZUhISGBvxkn8qiHBV66dAlWq9XhHyoAREVF4eTJkzJVJY/4+HisWrUKLVu2RH5+PhYvXoyHHnoIR48ehV6vh6+vL0JCQhy2iYqKgl6vl6dgGf32nWv63fy2Tq/XIzIy0mG9SqVCaGio149Z//79MWzYMMTFxeHMmTN49dVXMWDAAKSmpkKpVNaZsbHZbJg5cyYeeOABtG3bFgBu629Jr9fX+Nv6bZ03qGlsAODJJ59Eo0aNoNPpcOTIEcydOxeZmZlYt24dAO8em59//hkJCQmoqKhAvXr1sH79erRp0wYZGRn8zTiJRwUUumbAgAH29+3bt0d8fDwaNWqEtWvXwt/fX8bKyNOMGjXK/r5du3Zo3749mjZtiqSkJPTp00fGymrXtGnTcPToUYe5XFTlRmPz+3lI7dq1Q3R0NPr06YMzZ86gadOmtV1mrWrZsiUyMjJgMBjw7bffYty4cUhOTpa7LK/iUad4wsPDoVQqq82GLigogFarlakq9xASEoIWLVogKy
sLWq0WlZWVKC4uduhTV8fpt+98s9+NVqutNtHaYrHg8uXLdW7MmjRpgvDwcGRlZQGoG2Mzffp0bNq0CTt37kTDhg3t7bfzt6TVamv8bf22ztPdaGxqEh8fDwAOvx1vHRtfX180a9YMXbp0wZIlS9ChQwe8++67/M04kUcFFF9fX3Tp0gXbt2+3t9lsNmzfvh0JCQkyVia/0tJSnDlzBtHR0ejSpQt8fHwcxikzMxM5OTl1cpzi4uKg1WodxsNoNCItLc0+HgkJCSguLkZ6erq9z44dO2Cz2ez/0q0r8vLyUFRUhOjoaADePTZCCEyfPh3r16/Hjh07EBcX57D+dv6WEhIS8PPPPzuEuK1btyI4OBht2rSpnS/iArcam5pkZGQAgMNvxxvHpiY2mw0mk6lO/2acTu5Zunfqq6++Emq1WqxatUocP35cTJ48WYSEhDjMhq4LXnzxRZGUlCSys7PFTz/9JBITE0V4eLgoLCwUQggxZcoUERsbK3bs2CEOHDggEhISREJCgsxVu05JSYk4dOiQOHTokAAg3n77bXHo0CFx7tw5IYQQS5cuFSEhIWLjxo3iyJEjYvDgwSIuLk5cvXrVvo/+/fuLTp06ibS0NLFnzx7RvHlzMXr0aLm+ktPcbGxKSkrESy+9JFJTU0V2drbYtm2b6Ny5s2jevLmoqKiw78Nbx2bq1KlCo9GIpKQkkZ+fb3+Vl5fb+9zqb8lisYi2bduKvn37ioyMDLFlyxYREREh5s2bJ8dXcppbjU1WVpZ4/fXXxYEDB0R2drbYuHGjaNKkiejZs6d9H946Nq+88opITk4W2dnZ4siRI+KVV14RkiSJH3/8UQhRd38zzuZxAUUIId577z0RGxsrfH19Rbdu3cTevXvlLqnWjRw5UkRHRwtfX1/RoEEDMXLkSJGVlWVff/XqVfHcc8+J+vXri4CAADF06FCRn58vY8WutXPnTgGg2mvcuHFCiKpLjefPny+ioqKEWq0Wffr0EZmZmQ77KCoqEqNHjxb16tUTwcHBYsKECaKkpESGb+NcNxub8vJy0bdvXxERESF8fHxEo0aNxKRJk6oFfm8dm5rGBYBYuXKlvc/t/C2dPXtWDBgwQPj7+4vw8HDx4osvCrPZXMvfxrluNTY5OTmiZ8+eIjQ0VKjVatGsWTMxZ84cYTAYHPbjjWPzzDPPiEaNGglfX18REREh+vTpYw8nQtTd34yzSUIIUXvHa4iIiIhuzaPmoBAREVHdwIBCREREbocBhYiIiNwOAwoRERG5HQYUIiIicjsMKEREROR2GFCIiIjI7TCgEBERkdthQCEiIiK3w4BCREREbocBhYiIiNwOAwoRERG5nf8PgBiqWsr6jrYAAAAASUVORK5CYII=", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "print(d.shape)\n", "print(d.transpose(-1,-2).shape)\n", "print(d_en.shape)\n", "print(pred_aln_trg.unsqueeze(0).shape)\n", "print(f\"{en.shape=}\")\n", "print(f\"{s.shape=}\")\n", "print(f\"{en.dtype=}\")\n", "print(f\"{s.dtype=}\")\n", "\n", "print(t_en.shape)\n", "print(asr.shape)\n", "pl.imshow(pred_aln_trg[:,:])\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Export StyleTTS2 model" ] }, { "cell_type": "code", "execution_count": 39, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "I0101 18:56:11.998000 2488298 site-packages/torch/fx/experimental/symbolic_shapes.py:3557] create_symbol s0 = 143 for L['args'][0][0].size()[1] [2, 510] (_export/non_strict_utils.py:109 in fakify), for more info run with TORCHDYNAMO_EXTENDED_DEBUG_CREATE_SYMBOL=\"s0\"\n", "I0101 18:56:12.007000 2488298 site-packages/torch/fx/experimental/symbolic_shapes.py:4857] set_replacement s0 = 143 (range_refined_to_singleton) VR[143, 143]\n", "I0101 18:56:12.008000 2488298 site-packages/torch/fx/experimental/symbolic_shapes.py:5106] eval Eq(s0, 143) [guard added] (mp/ipykernel_2488298/2554868606.py:17 in forward), for more info run with TORCHDYNAMO_EXTENDED_DEBUG_GUARD_ADDED=\"Eq(s0, 143)\"\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "I0101 18:56:27.383000 2488298 site-packages/torch/fx/experimental/symbolic_shapes.py:3317] create_unbacked_symint u0 [-int_oo, int_oo] (_subclasses/fake_impls.py:390 in local_scalar_dense)\n", "I0101 18:56:27.387000 2488298 site-packages/torch/fx/experimental/symbolic_shapes.py:5106] runtime_assert u0 >= 0 [guard added] (_refs/__init__.py:4957 in arange), for more info run with TORCHDYNAMO_EXTENDED_DEBUG_GUARD_ADDED=\"u0 >= 0\"\n", "W0101 18:56:28.575000 2488298 site-packages/torch/fx/experimental/symbolic_shapes.py:5124] failed during evaluate_expr(u0, hint=None, size_oblivious=False, forcing_spec=False\n", "E0101 
18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] failed while running evaluate_expr(*(u0, None), **{'fx_node': False})\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] Traceback (most recent call last):\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] File \"/rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/fx/experimental/recording.py\", line 262, in wrapper\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] return retlog(fn(*args, **kwargs))\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] File \"/rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/fx/experimental/symbolic_shapes.py\", line 5122, in evaluate_expr\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] return self._evaluate_expr(orig_expr, hint, fx_node, size_oblivious, forcing_spec=forcing_spec)\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] File \"/rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/fx/experimental/symbolic_shapes.py\", line 5238, in _evaluate_expr\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] raise self._make_data_dependent_error(\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] torch.fx.experimental.symbolic_shapes.GuardOnDataDependentSymNode: Could not extract specialized integer from data-dependent expression u0 (unhinted: u0). 
(Size-like symbols: u0)\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] \n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] Potential framework code culprit (scroll up for full backtrace):\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] File \"/rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_ops.py\", line 759, in decompose\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] return self._op_dk(dk, *args, **kwargs)\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] \n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] For more information, run with TORCH_LOGS=\"dynamic\"\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] For extended logs when we create symbols, also add TORCHDYNAMO_EXTENDED_DEBUG_CREATE_SYMBOL=\"u0\"\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] If you suspect the guard was triggered from C++, add TORCHDYNAMO_EXTENDED_DEBUG_CPP=1\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] For more debugging help, see https://docs.google.com/document/d/1HSuTTVvYH1pTew89Rtpeu84Ht3nQEFTYhAX3Ypa_xJs/edit?usp=sharing\n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] \n", "E0101 18:56:28.576000 2488298 site-packages/torch/fx/experimental/recording.py:298] For C++ stack trace, run with TORCHDYNAMO_EXTENDED_DEBUG_CPP=1\n" ] }, { "ename": "GuardOnDataDependentSymNode", "evalue": "Could not extract specialized integer from data-dependent expression u0 (unhinted: u0). 
(Size-like symbols: u0)\n\nPotential framework code culprit (scroll up for full backtrace):\n File \"/rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_ops.py\", line 759, in decompose\n return self._op_dk(dk, *args, **kwargs)\n\nFor more information, run with TORCH_LOGS=\"dynamic\"\nFor extended logs when we create symbols, also add TORCHDYNAMO_EXTENDED_DEBUG_CREATE_SYMBOL=\"u0\"\nIf you suspect the guard was triggered from C++, add TORCHDYNAMO_EXTENDED_DEBUG_CPP=1\nFor more debugging help, see https://docs.google.com/document/d/1HSuTTVvYH1pTew89Rtpeu84Ht3nQEFTYhAX3Ypa_xJs/edit?usp=sharing\n\nFor C++ stack trace, run with TORCHDYNAMO_EXTENDED_DEBUG_CPP=1\n\nThe following call raised this error:\n File \"/rhome/eingerman/Projects/DeepLearning/TTS/Kokoro-82M/models.py\", line 471, in F0Ntrain\n x2, _temp = self.shared(x1)\n\nTo fix the error, insert one of the following checks before this call:\n 1. torch._check(x.shape[2])\n 2. torch._check(~x.shape[2])\n\n(These suggested fixes were derived by replacing `u0` with x.shape[2] or x1.shape[1] in u0 and its negation.)", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mGuardOnDataDependentSymNode\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[39], line 61\u001b[0m\n\u001b[1;32m 58\u001b[0m dynamic_shapes \u001b[38;5;241m=\u001b[39m {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtokens\u001b[39m\u001b[38;5;124m\"\u001b[39m:{\u001b[38;5;241m0\u001b[39m:batch, \u001b[38;5;241m1\u001b[39m:token_len}}\n\u001b[1;32m 60\u001b[0m \u001b[38;5;66;03m# with torch.no_grad():\u001b[39;00m\n\u001b[0;32m---> 61\u001b[0m export_mod \u001b[38;5;241m=\u001b[39m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexport\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexport\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstyle_model\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43margs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[43mtokens\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdynamic_shapes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdynamic_shapes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstrict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m)\u001b[49m\n\u001b[1;32m 62\u001b[0m \u001b[38;5;66;03m# export_mod = torch.export.export(style_model, args=( tokens, ), strict=False)\u001b[39;00m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/__init__.py:270\u001b[0m, in \u001b[0;36mexport\u001b[0;34m(mod, args, kwargs, dynamic_shapes, strict, preserve_module_call_signature)\u001b[0m\n\u001b[1;32m 264\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(mod, torch\u001b[38;5;241m.\u001b[39mjit\u001b[38;5;241m.\u001b[39mScriptModule):\n\u001b[1;32m 265\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m 266\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mExporting a ScriptModule is not supported. 
\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 267\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMaybe try converting your ScriptModule to an ExportedProgram \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 268\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124musing `TS2EPConverter(mod, args, kwargs).convert()` instead.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 269\u001b[0m )\n\u001b[0;32m--> 270\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_export\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 271\u001b[0m \u001b[43m \u001b[49m\u001b[43mmod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 272\u001b[0m \u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 273\u001b[0m \u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 274\u001b[0m \u001b[43m \u001b[49m\u001b[43mdynamic_shapes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 275\u001b[0m \u001b[43m \u001b[49m\u001b[43mstrict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstrict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 276\u001b[0m \u001b[43m \u001b[49m\u001b[43mpreserve_module_call_signature\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpreserve_module_call_signature\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 277\u001b[0m \u001b[43m \u001b[49m\u001b[43mpre_dispatch\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 278\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:1017\u001b[0m, in \u001b[0;36m_log_export_wrapper..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 1010\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1011\u001b[0m log_export_usage(\n\u001b[1;32m 1012\u001b[0m 
event\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mexport.error.unclassified\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 1013\u001b[0m \u001b[38;5;28mtype\u001b[39m\u001b[38;5;241m=\u001b[39merror_type,\n\u001b[1;32m 1014\u001b[0m message\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mstr\u001b[39m(e),\n\u001b[1;32m 1015\u001b[0m flags\u001b[38;5;241m=\u001b[39m_EXPORT_FLAGS,\n\u001b[1;32m 1016\u001b[0m )\n\u001b[0;32m-> 1017\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[1;32m 1018\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[1;32m 1019\u001b[0m _EXPORT_FLAGS \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:990\u001b[0m, in \u001b[0;36m_log_export_wrapper..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 988\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 989\u001b[0m start \u001b[38;5;241m=\u001b[39m time\u001b[38;5;241m.\u001b[39mtime()\n\u001b[0;32m--> 990\u001b[0m ep \u001b[38;5;241m=\u001b[39m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 991\u001b[0m end \u001b[38;5;241m=\u001b[39m time\u001b[38;5;241m.\u001b[39mtime()\n\u001b[1;32m 992\u001b[0m log_export_usage(\n\u001b[1;32m 993\u001b[0m event\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mexport.time\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 994\u001b[0m metrics\u001b[38;5;241m=\u001b[39mend \u001b[38;5;241m-\u001b[39m start,\n\u001b[1;32m 995\u001b[0m flags\u001b[38;5;241m=\u001b[39m_EXPORT_FLAGS,\n\u001b[1;32m 996\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mget_ep_stats(ep),\n\u001b[1;32m 997\u001b[0m )\n", "File 
\u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/exported_program.py:114\u001b[0m, in \u001b[0;36m_disable_prexisiting_fake_mode..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 111\u001b[0m \u001b[38;5;129m@functools\u001b[39m\u001b[38;5;241m.\u001b[39mwraps(fn)\n\u001b[1;32m 112\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrapper\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 113\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m unset_fake_temporarily():\n\u001b[0;32m--> 114\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:1880\u001b[0m, in \u001b[0;36m_export\u001b[0;34m(mod, args, kwargs, dynamic_shapes, strict, preserve_module_call_signature, pre_dispatch, allow_complex_guards_as_runtime_asserts, _is_torch_jit_trace)\u001b[0m\n\u001b[1;32m 1877\u001b[0m \u001b[38;5;66;03m# Call the appropriate export function based on the strictness of tracing.\u001b[39;00m\n\u001b[1;32m 1878\u001b[0m export_func \u001b[38;5;241m=\u001b[39m _strict_export \u001b[38;5;28;01mif\u001b[39;00m strict \u001b[38;5;28;01melse\u001b[39;00m _non_strict_export\n\u001b[0;32m-> 1880\u001b[0m export_artifact \u001b[38;5;241m=\u001b[39m \u001b[43mexport_func\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore[operator]\u001b[39;49;00m\n\u001b[1;32m 1881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1882\u001b[0m \u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1883\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1884\u001b[0m \u001b[43m \u001b[49m\u001b[43mdynamic_shapes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1885\u001b[0m \u001b[43m \u001b[49m\u001b[43mpreserve_module_call_signature\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1886\u001b[0m \u001b[43m \u001b[49m\u001b[43mpre_dispatch\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1887\u001b[0m \u001b[43m \u001b[49m\u001b[43moriginal_state_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1888\u001b[0m \u001b[43m \u001b[49m\u001b[43moriginal_in_spec\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1889\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_complex_guards_as_runtime_asserts\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1890\u001b[0m \u001b[43m \u001b[49m\u001b[43m_is_torch_jit_trace\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1891\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1892\u001b[0m export_graph_signature: ExportGraphSignature \u001b[38;5;241m=\u001b[39m export_artifact\u001b[38;5;241m.\u001b[39maten\u001b[38;5;241m.\u001b[39msig\n\u001b[1;32m 1894\u001b[0m forward_arg_names \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 1895\u001b[0m _get_forward_arg_names(mod, args, kwargs) \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m _is_torch_jit_trace \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1896\u001b[0m )\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:1683\u001b[0m, in \u001b[0;36m_non_strict_export\u001b[0;34m(mod, args, kwargs, dynamic_shapes, preserve_module_call_signature, pre_dispatch, original_state_dict, orig_in_spec, allow_complex_guards_as_runtime_asserts, _is_torch_jit_trace, dispatch_tracing_mode)\u001b[0m\n\u001b[1;32m 1667\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m _fakify_script_objects(mod, fake_args, fake_kwargs, fake_mode) \u001b[38;5;28;01mas\u001b[39;00m (\n\u001b[1;32m 1668\u001b[0m 
patched_mod,\n\u001b[1;32m 1669\u001b[0m new_fake_args,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1672\u001b[0m map_fake_to_real,\n\u001b[1;32m 1673\u001b[0m ):\n\u001b[1;32m 1674\u001b[0m _to_aten_func \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 1675\u001b[0m _export_to_aten_ir_make_fx\n\u001b[1;32m 1676\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m dispatch_tracing_mode \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmake_fx\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1681\u001b[0m )\n\u001b[1;32m 1682\u001b[0m )\n\u001b[0;32m-> 1683\u001b[0m aten_export_artifact \u001b[38;5;241m=\u001b[39m \u001b[43m_to_aten_func\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore[operator]\u001b[39;49;00m\n\u001b[1;32m 1684\u001b[0m \u001b[43m \u001b[49m\u001b[43mpatched_mod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1685\u001b[0m \u001b[43m \u001b[49m\u001b[43mnew_fake_args\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1686\u001b[0m \u001b[43m \u001b[49m\u001b[43mnew_fake_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1687\u001b[0m \u001b[43m \u001b[49m\u001b[43mfake_params_buffers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1688\u001b[0m \u001b[43m \u001b[49m\u001b[43mnew_fake_constant_attrs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1689\u001b[0m \u001b[43m \u001b[49m\u001b[43mproduce_guards_callback\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_produce_guards_callback\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1690\u001b[0m \u001b[43m \u001b[49m\u001b[43mtransform\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_tuplify_outputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1691\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1692\u001b[0m \u001b[38;5;66;03m# aten_export_artifact.constants contains only fake script objects, we need to map them back\u001b[39;00m\n\u001b[1;32m 1693\u001b[0m 
aten_export_artifact\u001b[38;5;241m.\u001b[39mconstants \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 1694\u001b[0m fqn: map_fake_to_real[obj] \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(obj, FakeScriptObject) \u001b[38;5;28;01melse\u001b[39;00m obj\n\u001b[1;32m 1695\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m fqn, obj \u001b[38;5;129;01min\u001b[39;00m aten_export_artifact\u001b[38;5;241m.\u001b[39mconstants\u001b[38;5;241m.\u001b[39mitems()\n\u001b[1;32m 1696\u001b[0m }\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:637\u001b[0m, in \u001b[0;36m_export_to_aten_ir\u001b[0;34m(mod, fake_args, fake_kwargs, fake_params_buffers, constant_attrs, produce_guards_callback, transform, pre_dispatch, decomp_table, _check_autograd_state, _is_torch_jit_trace)\u001b[0m\n\u001b[1;32m 627\u001b[0m \u001b[38;5;66;03m# This _reparametrize_module makes sure inputs and module.params/buffers have the same fake_mode,\u001b[39;00m\n\u001b[1;32m 628\u001b[0m \u001b[38;5;66;03m# otherwise aot_export_module will error out because it sees a mix of fake_modes.\u001b[39;00m\n\u001b[1;32m 629\u001b[0m \u001b[38;5;66;03m# And we want aot_export_module to use the fake_tensor mode in dynamo to keep the pipeline easy to reason about.\u001b[39;00m\n\u001b[1;32m 630\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mnn\u001b[38;5;241m.\u001b[39mutils\u001b[38;5;241m.\u001b[39mstateless\u001b[38;5;241m.\u001b[39m_reparametrize_module(\n\u001b[1;32m 631\u001b[0m mod,\n\u001b[1;32m 632\u001b[0m fake_params_buffers,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 635\u001b[0m stack_weights\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m 636\u001b[0m ), grad_safe_guard, _ignore_backend_decomps(), _compiling_state_context(): \u001b[38;5;66;03m# type: ignore[attr-defined]\u001b[39;00m\n\u001b[0;32m--> 637\u001b[0m gm, graph_signature \u001b[38;5;241m=\u001b[39m 
\u001b[43mtransform\u001b[49m\u001b[43m(\u001b[49m\u001b[43maot_export_module\u001b[49m\u001b[43m)\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 638\u001b[0m \u001b[43m \u001b[49m\u001b[43mmod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 639\u001b[0m \u001b[43m \u001b[49m\u001b[43mfake_args\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 640\u001b[0m \u001b[43m \u001b[49m\u001b[43mtrace_joint\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 641\u001b[0m \u001b[43m \u001b[49m\u001b[43mpre_dispatch\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpre_dispatch\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 642\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecompositions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdecomp_table\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 643\u001b[0m \u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfake_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 644\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 646\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_maybe_fixup_gm_and_output_node_meta\u001b[39m(old_gm, new_gm):\n\u001b[1;32m 647\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(old_gm, torch\u001b[38;5;241m.\u001b[39mfx\u001b[38;5;241m.\u001b[39mGraphModule):\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:1611\u001b[0m, in \u001b[0;36m_non_strict_export.._tuplify_outputs.._aot_export_non_strict\u001b[0;34m(mod, args, kwargs, **flags)\u001b[0m\n\u001b[1;32m 1605\u001b[0m new_preserved_call_signatures \u001b[38;5;241m=\u001b[39m [\n\u001b[1;32m 1606\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m_export_root.\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m+\u001b[39m i \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m preserve_module_call_signature\n\u001b[1;32m 1607\u001b[0m 
]\n\u001b[1;32m 1608\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m _wrap_submodules(\n\u001b[1;32m 1609\u001b[0m wrapped_mod, new_preserved_call_signatures, module_call_specs\n\u001b[1;32m 1610\u001b[0m ):\n\u001b[0;32m-> 1611\u001b[0m gm, sig \u001b[38;5;241m=\u001b[39m \u001b[43maot_export\u001b[49m\u001b[43m(\u001b[49m\u001b[43mwrapped_mod\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mflags\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1612\u001b[0m log\u001b[38;5;241m.\u001b[39mdebug(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mExported program from AOTAutograd:\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m\"\u001b[39m, gm)\n\u001b[1;32m 1614\u001b[0m sig\u001b[38;5;241m.\u001b[39mparameters \u001b[38;5;241m=\u001b[39m pytree\u001b[38;5;241m.\u001b[39mtree_map(_strip_root, sig\u001b[38;5;241m.\u001b[39mparameters)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py:1246\u001b[0m, in \u001b[0;36maot_export_module\u001b[0;34m(mod, args, decompositions, trace_joint, output_loss_index, pre_dispatch, dynamic_shapes, kwargs)\u001b[0m\n\u001b[1;32m 1243\u001b[0m full_args\u001b[38;5;241m.\u001b[39mextend(args)\n\u001b[1;32m 1245\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m ctx():\n\u001b[0;32m-> 1246\u001b[0m fx_g, metadata, in_spec, out_spec \u001b[38;5;241m=\u001b[39m \u001b[43m_aot_export_function\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1247\u001b[0m \u001b[43m \u001b[49m\u001b[43mfn_to_trace\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1248\u001b[0m \u001b[43m \u001b[49m\u001b[43mfull_args\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1249\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mdecompositions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdecompositions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1250\u001b[0m \u001b[43m \u001b[49m\u001b[43mnum_params_buffers\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mparams_len\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1251\u001b[0m \u001b[43m \u001b[49m\u001b[43mno_tangents\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 1252\u001b[0m \u001b[43m \u001b[49m\u001b[43mpre_dispatch\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpre_dispatch\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1253\u001b[0m \u001b[43m \u001b[49m\u001b[43mdynamic_shapes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdynamic_shapes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1254\u001b[0m \u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1255\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1256\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m trace_joint:\n\u001b[1;32m 1258\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mflattened_joint\u001b[39m(\u001b[38;5;241m*\u001b[39margs):\n\u001b[1;32m 1259\u001b[0m \u001b[38;5;66;03m# The idea here is that the joint graph that AOTAutograd creates has some strict properties:\u001b[39;00m\n\u001b[1;32m 1260\u001b[0m \u001b[38;5;66;03m# (1) It accepts two arguments (primals, tangents), and pytree_flattens them\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1273\u001b[0m \u001b[38;5;66;03m# This function \"fixes\" both of the above by removing any tangent inputs,\u001b[39;00m\n\u001b[1;32m 1274\u001b[0m \u001b[38;5;66;03m# and removing pytrees from the original FX graph.\u001b[39;00m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py:1480\u001b[0m, in 
\u001b[0;36m_aot_export_function\u001b[0;34m(func, args, num_params_buffers, decompositions, no_tangents, pre_dispatch, dynamic_shapes, kwargs)\u001b[0m\n\u001b[1;32m 1477\u001b[0m fake_mode, shape_env \u001b[38;5;241m=\u001b[39m construct_fake_mode(flat_args, aot_config)\n\u001b[1;32m 1478\u001b[0m fake_flat_args \u001b[38;5;241m=\u001b[39m process_inputs(flat_args, aot_config, fake_mode, shape_env)\n\u001b[0;32m-> 1480\u001b[0m fx_g, meta \u001b[38;5;241m=\u001b[39m \u001b[43mcreate_aot_dispatcher_function\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1481\u001b[0m \u001b[43m \u001b[49m\u001b[43mflat_fn\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1482\u001b[0m \u001b[43m \u001b[49m\u001b[43mfake_flat_args\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1483\u001b[0m \u001b[43m \u001b[49m\u001b[43maot_config\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1484\u001b[0m \u001b[43m \u001b[49m\u001b[43mfake_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1485\u001b[0m \u001b[43m \u001b[49m\u001b[43mshape_env\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1486\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1487\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m fx_g, meta, in_spec, out_spec\u001b[38;5;241m.\u001b[39mspec\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py:522\u001b[0m, in \u001b[0;36mcreate_aot_dispatcher_function\u001b[0;34m(flat_fn, fake_flat_args, aot_config, fake_mode, shape_env)\u001b[0m\n\u001b[1;32m 514\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcreate_aot_dispatcher_function\u001b[39m(\n\u001b[1;32m 515\u001b[0m flat_fn,\n\u001b[1;32m 516\u001b[0m fake_flat_args: FakifiedFlatArgs,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 519\u001b[0m shape_env: Optional[ShapeEnv],\n\u001b[1;32m 520\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tuple[Callable, ViewAndMutationMeta]:\n\u001b[1;32m 521\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m 
dynamo_timed(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcreate_aot_dispatcher_function\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[0;32m--> 522\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_create_aot_dispatcher_function\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 523\u001b[0m \u001b[43m \u001b[49m\u001b[43mflat_fn\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfake_flat_args\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43maot_config\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfake_mode\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mshape_env\u001b[49m\n\u001b[1;32m 524\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py:623\u001b[0m, in \u001b[0;36m_create_aot_dispatcher_function\u001b[0;34m(flat_fn, fake_flat_args, aot_config, fake_mode, shape_env)\u001b[0m\n\u001b[1;32m 621\u001b[0m ctx \u001b[38;5;241m=\u001b[39m nullcontext()\n\u001b[1;32m 622\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m ctx:\n\u001b[0;32m--> 623\u001b[0m fw_metadata \u001b[38;5;241m=\u001b[39m \u001b[43mrun_functionalized_fw_and_collect_metadata\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 624\u001b[0m \u001b[43m \u001b[49m\u001b[43mflat_fn\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 625\u001b[0m \u001b[43m \u001b[49m\u001b[43mstatic_input_indices\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43maot_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstatic_input_indices\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 626\u001b[0m \u001b[43m \u001b[49m\u001b[43mkeep_input_mutations\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43maot_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mkeep_inference_input_mutations\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 627\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mis_train\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mneeds_autograd\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 628\u001b[0m \u001b[43m \u001b[49m\u001b[43mpre_dispatch\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43maot_config\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpre_dispatch\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 629\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43m_dup_fake_script_obj\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfake_flat_args\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 631\u001b[0m req_subclass_dispatch \u001b[38;5;241m=\u001b[39m requires_subclass_dispatch(\n\u001b[1;32m 632\u001b[0m fake_flat_args, fw_metadata\n\u001b[1;32m 633\u001b[0m )\n\u001b[1;32m 635\u001b[0m output_and_mutation_safe \u001b[38;5;241m=\u001b[39m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28many\u001b[39m(\n\u001b[1;32m 636\u001b[0m x\u001b[38;5;241m.\u001b[39mrequires_grad\n\u001b[1;32m 637\u001b[0m \u001b[38;5;66;03m# view-type operations preserve requires_grad even in no_grad.\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 652\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m x \u001b[38;5;129;01min\u001b[39;00m fw_metadata\u001b[38;5;241m.\u001b[39minput_info\n\u001b[1;32m 653\u001b[0m )\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_functorch/_aot_autograd/collect_metadata_analysis.py:173\u001b[0m, in \u001b[0;36mrun_functionalized_fw_and_collect_metadata..inner\u001b[0;34m(*flat_args)\u001b[0m\n\u001b[1;32m 170\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m disable_above, mode, suppress_pending:\n\u001b[1;32m 171\u001b[0m \u001b[38;5;66;03m# precondition: The passed in function already handles unflattening inputs + flattening outputs\u001b[39;00m\n\u001b[1;32m 172\u001b[0m flat_f_args \u001b[38;5;241m=\u001b[39m pytree\u001b[38;5;241m.\u001b[39mtree_map(_to_fun, 
flat_args)\n\u001b[0;32m--> 173\u001b[0m flat_f_outs \u001b[38;5;241m=\u001b[39m \u001b[43mf\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mflat_f_args\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 174\u001b[0m \u001b[38;5;66;03m# We didn't do any tracing, so we don't need to process the\u001b[39;00m\n\u001b[1;32m 175\u001b[0m \u001b[38;5;66;03m# unbacked symbols, they will just disappear into the ether.\u001b[39;00m\n\u001b[1;32m 176\u001b[0m \u001b[38;5;66;03m# Also, prevent memoization from applying.\u001b[39;00m\n\u001b[1;32m 177\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m fake_mode:\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_functorch/_aot_autograd/utils.py:182\u001b[0m, in \u001b[0;36mcreate_tree_flattened_fn..flat_fn\u001b[0;34m(*flat_args)\u001b[0m\n\u001b[1;32m 180\u001b[0m \u001b[38;5;28;01mnonlocal\u001b[39;00m out_spec\n\u001b[1;32m 181\u001b[0m args, kwargs \u001b[38;5;241m=\u001b[39m pytree\u001b[38;5;241m.\u001b[39mtree_unflatten(flat_args, tensor_args_spec)\n\u001b[0;32m--> 182\u001b[0m tree_out \u001b[38;5;241m=\u001b[39m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 183\u001b[0m flat_out, spec \u001b[38;5;241m=\u001b[39m pytree\u001b[38;5;241m.\u001b[39mtree_flatten(tree_out)\n\u001b[1;32m 184\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m flat_out:\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_functorch/_aot_autograd/traced_function_transforms.py:863\u001b[0m, in \u001b[0;36mcreate_functional_call..functional_call\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 859\u001b[0m out \u001b[38;5;241m=\u001b[39m PropagateUnbackedSymInts(mod)\u001b[38;5;241m.\u001b[39mrun(\n\u001b[1;32m 
860\u001b[0m \u001b[38;5;241m*\u001b[39margs[params_len:], \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs\n\u001b[1;32m 861\u001b[0m )\n\u001b[1;32m 862\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 863\u001b[0m out \u001b[38;5;241m=\u001b[39m \u001b[43mmod\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m[\u001b[49m\u001b[43mparams_len\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 865\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(out, (\u001b[38;5;28mtuple\u001b[39m, \u001b[38;5;28mlist\u001b[39m)):\n\u001b[1;32m 866\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[1;32m 867\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mGraph output must be a (). This is so that we can avoid \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 868\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpytree processing of the outputs. 
Please change the module to \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 869\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mhave tuple outputs or use aot_module instead.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 870\u001b[0m )\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/module.py:1736\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1734\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1735\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1736\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/module.py:1747\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1742\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1743\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1744\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1747\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1749\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1750\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:1598\u001b[0m, in \u001b[0;36m_non_strict_export.._tuplify_outputs.._aot_export_non_strict..Wrapper.forward\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1594\u001b[0m tree_out \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mfx\u001b[38;5;241m.\u001b[39mInterpreter(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_export_root)\u001b[38;5;241m.\u001b[39mrun(\n\u001b[1;32m 1595\u001b[0m \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs\n\u001b[1;32m 1596\u001b[0m )\n\u001b[1;32m 1597\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1598\u001b[0m tree_out \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_export_root\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1599\u001b[0m flat_outs, out_spec \u001b[38;5;241m=\u001b[39m pytree\u001b[38;5;241m.\u001b[39mtree_flatten(tree_out)\n\u001b[1;32m 1600\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mtuple\u001b[39m(flat_outs)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/module.py:1736\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1734\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1735\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1736\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/module.py:1747\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1742\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1743\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1744\u001b[0m 
\u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1747\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1749\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1750\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", "Cell \u001b[0;32mIn[39], line 46\u001b[0m, in \u001b[0;36mStyleTTS2.forward\u001b[0;34m(self, tokens)\u001b[0m\n\u001b[1;32m 42\u001b[0m pred_aln_trg\u001b[38;5;241m=\u001b[39mtorch\u001b[38;5;241m.\u001b[39mvstack(pred_aln_trg_list)\n\u001b[1;32m 44\u001b[0m en \u001b[38;5;241m=\u001b[39m d\u001b[38;5;241m.\u001b[39mtranspose(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m2\u001b[39m) \u001b[38;5;241m@\u001b[39m pred_aln_trg\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39mto(device)\n\u001b[0;32m---> 46\u001b[0m F0_pred, N_pred \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodel\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mpredictor\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mF0Ntrain\u001b[49m\u001b[43m(\u001b[49m\u001b[43men\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43ms\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 47\u001b[0m t_en \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mmodel[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtext_encoder\u001b[39m\u001b[38;5;124m\"\u001b[39m]\u001b[38;5;241m.\u001b[39minference(tokens)\n\u001b[1;32m 48\u001b[0m asr \u001b[38;5;241m=\u001b[39m t_en \u001b[38;5;241m@\u001b[39m pred_aln_trg\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39mto(device)\n", "File \u001b[0;32m~/Projects/DeepLearning/TTS/Kokoro-82M/models.py:471\u001b[0m, in \u001b[0;36mProsodyPredictor.F0Ntrain\u001b[0;34m(self, x, s)\u001b[0m\n\u001b[1;32m 466\u001b[0m x1 \u001b[38;5;241m=\u001b[39m x\u001b[38;5;241m.\u001b[39mtranspose(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m2\u001b[39m)\n\u001b[1;32m 467\u001b[0m \u001b[38;5;66;03m# torch._check(x1.dim() == 3, lambda: print(f\"Expected 3D tensor, got {x1.dim()}D tensor\"))\u001b[39;00m\n\u001b[1;32m 468\u001b[0m \u001b[38;5;66;03m# torch._check(x1.shape[1] > 0, lambda: print(f\"Shape 2, got {x1.shape[1]}\"))\u001b[39;00m\n\u001b[1;32m 469\u001b[0m \u001b[38;5;66;03m# torch._check(x1.shape[2] > 0, lambda: print(f\"Shape 2, got {x1.shape[2]}\"))\u001b[39;00m\n\u001b[1;32m 470\u001b[0m \u001b[38;5;66;03m# torch._check(x.shape[2] > 0, lambda: print(f\"Shape 2, got {x.shape[2]}\"))\u001b[39;00m\n\u001b[0;32m--> 471\u001b[0m x2, _temp \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mshared\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx1\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 472\u001b[0m \u001b[38;5;66;03m# torch._check(x.shape[2] > 0, lambda: print(f\"Shape 2, got {x.size(2)}\"))\u001b[39;00m\n\u001b[1;32m 474\u001b[0m F0 \u001b[38;5;241m=\u001b[39m x2\u001b[38;5;241m.\u001b[39mtranspose(\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m, \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m2\u001b[39m)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/module.py:1736\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1734\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1735\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1736\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/module.py:1747\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1742\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1743\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1744\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m 
(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1747\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1749\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1750\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1123\u001b[0m, in \u001b[0;36mLSTM.forward\u001b[0;34m(self, input, hx)\u001b[0m\n\u001b[1;32m 1120\u001b[0m hx \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpermute_hidden(hx, sorted_indices)\n\u001b[1;32m 1122\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m batch_sizes \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m-> 1123\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43m_VF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlstm\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1124\u001b[0m \u001b[43m 
\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1125\u001b[0m \u001b[43m \u001b[49m\u001b[43mhx\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1126\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_flat_weights\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1127\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1128\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnum_layers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1129\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdropout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1130\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtraining\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1131\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbidirectional\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1132\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbatch_first\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1133\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1134\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1135\u001b[0m result \u001b[38;5;241m=\u001b[39m _VF\u001b[38;5;241m.\u001b[39mlstm(\n\u001b[1;32m 1136\u001b[0m \u001b[38;5;28minput\u001b[39m,\n\u001b[1;32m 1137\u001b[0m batch_sizes,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1144\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbidirectional,\n\u001b[1;32m 1145\u001b[0m )\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_export/non_strict_utils.py:520\u001b[0m, in 
\u001b[0;36m_NonStrictTorchFunctionHandler.__torch_function__\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 512\u001b[0m log\u001b[38;5;241m.\u001b[39mdebug(\n\u001b[1;32m 513\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m called at \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m:\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m in \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 514\u001b[0m func\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 517\u001b[0m frame\u001b[38;5;241m.\u001b[39mf_code\u001b[38;5;241m.\u001b[39mco_name,\n\u001b[1;32m 518\u001b[0m )\n\u001b[1;32m 519\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 520\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 521\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m GuardOnDataDependentSymNode \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m 522\u001b[0m _suggest_fixes_for_data_dependent_error_non_strict(e)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_decomp/decompositions.py:3476\u001b[0m, in \u001b[0;36mlstm_impl\u001b[0;34m(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first)\u001b[0m\n\u001b[1;32m 3474\u001b[0m hidden \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlist\u001b[39m(\u001b[38;5;28mzip\u001b[39m(hx[\u001b[38;5;241m0\u001b[39m], hx[\u001b[38;5;241m1\u001b[39m]))\n\u001b[1;32m 3475\u001b[0m layer_fn \u001b[38;5;241m=\u001b[39m select_one_layer_lstm_function(\u001b[38;5;28minput\u001b[39m, hx, params)\n\u001b[0;32m-> 3476\u001b[0m out, final_hiddens 
\u001b[38;5;241m=\u001b[39m \u001b[43m_rnn_helper\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 3477\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3478\u001b[0m \u001b[43m \u001b[49m\u001b[43mhidden\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3479\u001b[0m \u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3480\u001b[0m \u001b[43m \u001b[49m\u001b[43mhas_biases\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3481\u001b[0m \u001b[43m \u001b[49m\u001b[43mnum_layers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3482\u001b[0m \u001b[43m \u001b[49m\u001b[43mdropout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3483\u001b[0m \u001b[43m \u001b[49m\u001b[43mtrain\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3484\u001b[0m \u001b[43m \u001b[49m\u001b[43mbidirectional\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3485\u001b[0m \u001b[43m \u001b[49m\u001b[43mbatch_first\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3486\u001b[0m \u001b[43m \u001b[49m\u001b[43mlayer_fn\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3487\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 3488\u001b[0m final_hiddens \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlist\u001b[39m(\u001b[38;5;28mzip\u001b[39m(\u001b[38;5;241m*\u001b[39mfinal_hiddens))\n\u001b[1;32m 3489\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m out, torch\u001b[38;5;241m.\u001b[39mstack(final_hiddens[\u001b[38;5;241m0\u001b[39m], \u001b[38;5;241m0\u001b[39m), torch\u001b[38;5;241m.\u001b[39mstack(final_hiddens[\u001b[38;5;241m1\u001b[39m], \u001b[38;5;241m0\u001b[39m)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_decomp/decompositions.py:3151\u001b[0m, in \u001b[0;36m_rnn_helper\u001b[0;34m(input, hidden, params, has_biases, num_layers, dropout, train, bidirectional, batch_first, layer_fn)\u001b[0m\n\u001b[1;32m 3147\u001b[0m cur_params, cur_hidden, bidir_params, bidir_hidden 
\u001b[38;5;241m=\u001b[39m params_hiddens(\n\u001b[1;32m 3148\u001b[0m params, hidden, i, bidirectional\n\u001b[1;32m 3149\u001b[0m )\n\u001b[1;32m 3150\u001b[0m dropout \u001b[38;5;241m=\u001b[39m dropout \u001b[38;5;28;01mif\u001b[39;00m (train \u001b[38;5;129;01mand\u001b[39;00m num_layers \u001b[38;5;241m<\u001b[39m i \u001b[38;5;241m-\u001b[39m \u001b[38;5;241m1\u001b[39m) \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;241m0.0\u001b[39m\n\u001b[0;32m-> 3151\u001b[0m fwd_inp, fwd_hidden \u001b[38;5;241m=\u001b[39m \u001b[43mlayer_fn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcur_hidden\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcur_params\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mhas_biases\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 3152\u001b[0m final_hiddens\u001b[38;5;241m.\u001b[39mappend(fwd_hidden)\n\u001b[1;32m 3154\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m bidirectional:\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_decomp/decompositions.py:3333\u001b[0m, in \u001b[0;36mone_layer_lstm\u001b[0;34m(inp, hidden, params, has_biases, reverse)\u001b[0m\n\u001b[1;32m 3331\u001b[0m precomputed_input \u001b[38;5;241m=\u001b[39m precomputed_input\u001b[38;5;241m.\u001b[39mflip(\u001b[38;5;241m0\u001b[39m) \u001b[38;5;28;01mif\u001b[39;00m reverse \u001b[38;5;28;01melse\u001b[39;00m precomputed_input\n\u001b[1;32m 3332\u001b[0m step_output \u001b[38;5;241m=\u001b[39m []\n\u001b[0;32m-> 3333\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m inp \u001b[38;5;129;01min\u001b[39;00m precomputed_input:\n\u001b[1;32m 3334\u001b[0m hx, cx \u001b[38;5;241m=\u001b[39m lstm_cell(inp, hx, cx, hh_weight, hh_bias, hr_weight, chunk_dim\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m2\u001b[39m)\n\u001b[1;32m 3335\u001b[0m step_output\u001b[38;5;241m.\u001b[39mappend(hx)\n", "File 
\u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_tensor.py:1119\u001b[0m, in \u001b[0;36mTensor.__iter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1110\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m torch\u001b[38;5;241m.\u001b[39m_C\u001b[38;5;241m.\u001b[39m_get_tracing_state():\n\u001b[1;32m 1111\u001b[0m warnings\u001b[38;5;241m.\u001b[39mwarn(\n\u001b[1;32m 1112\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mIterating over a tensor might cause the trace to be incorrect. \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 1113\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mPassing a tensor of different shape won\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mt change the number of \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1117\u001b[0m stacklevel\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m2\u001b[39m,\n\u001b[1;32m 1118\u001b[0m )\n\u001b[0;32m-> 1119\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28miter\u001b[39m(\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43munbind\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m)\u001b[49m)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_subclasses/functional_tensor.py:534\u001b[0m, in \u001b[0;36mFunctionalTensorMode.__torch_dispatch__\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 525\u001b[0m outs_wrapped \u001b[38;5;241m=\u001b[39m pytree\u001b[38;5;241m.\u001b[39mtree_map_only(\n\u001b[1;32m 526\u001b[0m torch\u001b[38;5;241m.\u001b[39mTensor, wrap, outs_unwrapped\n\u001b[1;32m 527\u001b[0m )\n\u001b[1;32m 528\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 529\u001b[0m \u001b[38;5;66;03m# When we dispatch to the C++ functionalization kernel, we might need to jump back to the\u001b[39;00m\n\u001b[1;32m 530\u001b[0m \u001b[38;5;66;03m# PreDispatch mode stack afterwards, to handle any other PreDispatch 
modes underneath\u001b[39;00m\n\u001b[1;32m 531\u001b[0m \u001b[38;5;66;03m# FunctionalTensorMode. If we call func() directly, we would need to exclude PreDispatch\u001b[39;00m\n\u001b[1;32m 532\u001b[0m \u001b[38;5;66;03m# from the TLS in order to avoid infinite looping, but this would prevent us from coming\u001b[39;00m\n\u001b[1;32m 533\u001b[0m \u001b[38;5;66;03m# back to PreDispatch later\u001b[39;00m\n\u001b[0;32m--> 534\u001b[0m outs_unwrapped \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_op_dk\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 535\u001b[0m \u001b[43m \u001b[49m\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_C\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mDispatchKey\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mFunctionalize\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 536\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs_unwrapped\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 537\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs_unwrapped\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 538\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 539\u001b[0m \u001b[38;5;66;03m# We don't allow any mutation on result of dropout or _to_copy\u001b[39;00m\n\u001b[1;32m 540\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mexport:\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/utils/_stats.py:21\u001b[0m, in \u001b[0;36mcount..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 19\u001b[0m simple_call_counter[fn\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[1;32m 20\u001b[0m simple_call_counter[fn\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m] \u001b[38;5;241m=\u001b[39m 
simple_call_counter[fn\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m] \u001b[38;5;241m+\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m---> 21\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py:1238\u001b[0m, in \u001b[0;36mFakeTensorMode.__torch_dispatch__\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 1234\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m (\n\u001b[1;32m 1235\u001b[0m torch\u001b[38;5;241m.\u001b[39m_C\u001b[38;5;241m.\u001b[39m_get_dispatch_mode(torch\u001b[38;5;241m.\u001b[39m_C\u001b[38;5;241m.\u001b[39m_TorchDispatchModeKey\u001b[38;5;241m.\u001b[39mFAKE) \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1236\u001b[0m ), func\n\u001b[1;32m 1237\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 1238\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdispatch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtypes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1239\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m:\n\u001b[1;32m 1240\u001b[0m log\u001b[38;5;241m.\u001b[39mexception(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mfake tensor raised TypeError\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", "File 
\u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py:1692\u001b[0m, in \u001b[0;36mFakeTensorMode.dispatch\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 1689\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 1691\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcache_enabled:\n\u001b[0;32m-> 1692\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cached_dispatch_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtypes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1693\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1694\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dispatch_impl(func, types, args, kwargs)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py:1348\u001b[0m, in \u001b[0;36mFakeTensorMode._cached_dispatch_impl\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 1345\u001b[0m FakeTensorMode\u001b[38;5;241m.\u001b[39mcache_bypasses[e\u001b[38;5;241m.\u001b[39mreason] \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m 1347\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m output \u001b[38;5;129;01mis\u001b[39;00m _UNASSIGNED:\n\u001b[0;32m-> 1348\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dispatch_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mtypes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1350\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m output\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py:1943\u001b[0m, in \u001b[0;36mFakeTensorMode._dispatch_impl\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 1933\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m func \u001b[38;5;129;01min\u001b[39;00m decomposition_table \u001b[38;5;129;01mand\u001b[39;00m (\n\u001b[1;32m 1934\u001b[0m has_symbolic_sizes\n\u001b[1;32m 1935\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m (\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1940\u001b[0m )\n\u001b[1;32m 1941\u001b[0m ):\n\u001b[1;32m 1942\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m:\n\u001b[0;32m-> 1943\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mdecomposition_table\u001b[49m\u001b[43m[\u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m]\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1945\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m:\n\u001b[1;32m 1946\u001b[0m \u001b[38;5;66;03m# Decomposes CompositeImplicitAutograd ops\u001b[39;00m\n\u001b[1;32m 1947\u001b[0m r \u001b[38;5;241m=\u001b[39m func\u001b[38;5;241m.\u001b[39mdecompose(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_refs/__init__.py:3956\u001b[0m, in \u001b[0;36munbind\u001b[0;34m(t, dim)\u001b[0m\n\u001b[1;32m 3953\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ()\n\u001b[1;32m 
3954\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 3955\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mtuple\u001b[39m(\n\u001b[0;32m-> 3956\u001b[0m torch\u001b[38;5;241m.\u001b[39msqueeze(s, dim) \u001b[38;5;28;01mfor\u001b[39;00m s \u001b[38;5;129;01min\u001b[39;00m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtensor_split\u001b[49m\u001b[43m(\u001b[49m\u001b[43mt\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mshape\u001b[49m\u001b[43m[\u001b[49m\u001b[43mdim\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdim\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 3957\u001b[0m )\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/utils/_stats.py:21\u001b[0m, in \u001b[0;36mcount..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 19\u001b[0m simple_call_counter[fn\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[1;32m 20\u001b[0m simple_call_counter[fn\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m] \u001b[38;5;241m=\u001b[39m simple_call_counter[fn\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m] \u001b[38;5;241m+\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m---> 21\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py:1238\u001b[0m, in \u001b[0;36mFakeTensorMode.__torch_dispatch__\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 1234\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m 
(\n\u001b[1;32m 1235\u001b[0m torch\u001b[38;5;241m.\u001b[39m_C\u001b[38;5;241m.\u001b[39m_get_dispatch_mode(torch\u001b[38;5;241m.\u001b[39m_C\u001b[38;5;241m.\u001b[39m_TorchDispatchModeKey\u001b[38;5;241m.\u001b[39mFAKE) \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1236\u001b[0m ), func\n\u001b[1;32m 1237\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 1238\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdispatch\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtypes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1239\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m:\n\u001b[1;32m 1240\u001b[0m log\u001b[38;5;241m.\u001b[39mexception(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mfake tensor raised TypeError\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py:1692\u001b[0m, in \u001b[0;36mFakeTensorMode.dispatch\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 1689\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m func(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 1691\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcache_enabled:\n\u001b[0;32m-> 1692\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_cached_dispatch_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtypes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1693\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1694\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dispatch_impl(func, types, args, kwargs)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py:1348\u001b[0m, in \u001b[0;36mFakeTensorMode._cached_dispatch_impl\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 1345\u001b[0m FakeTensorMode\u001b[38;5;241m.\u001b[39mcache_bypasses[e\u001b[38;5;241m.\u001b[39mreason] \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m 1347\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m output \u001b[38;5;129;01mis\u001b[39;00m _UNASSIGNED:\n\u001b[0;32m-> 1348\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dispatch_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtypes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1350\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m output\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py:1947\u001b[0m, in \u001b[0;36mFakeTensorMode._dispatch_impl\u001b[0;34m(self, func, types, args, kwargs)\u001b[0m\n\u001b[1;32m 1943\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m decomposition_table[func](\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 1945\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m:\n\u001b[1;32m 1946\u001b[0m \u001b[38;5;66;03m# Decomposes 
CompositeImplicitAutograd ops\u001b[39;00m\n\u001b[0;32m-> 1947\u001b[0m r \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdecompose\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1948\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m r \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mNotImplemented\u001b[39m:\n\u001b[1;32m 1949\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m r\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_ops.py:759\u001b[0m, in \u001b[0;36mOpOverload.decompose\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 757\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpy_kernels[dk](\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 758\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m torch\u001b[38;5;241m.\u001b[39m_C\u001b[38;5;241m.\u001b[39m_dispatch_has_kernel_for_dispatch_key(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mname(), dk):\n\u001b[0;32m--> 759\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_op_dk\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdk\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 760\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 761\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mNotImplemented\u001b[39m\n", "File 
\u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/fx/experimental/sym_node.py:429\u001b[0m, in \u001b[0;36mSymNode.guard_int\u001b[0;34m(self, file, line)\u001b[0m\n\u001b[1;32m 426\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mguard_int\u001b[39m(\u001b[38;5;28mself\u001b[39m, file, line):\n\u001b[1;32m 427\u001b[0m \u001b[38;5;66;03m# TODO: use the file/line for some useful diagnostic on why a\u001b[39;00m\n\u001b[1;32m 428\u001b[0m \u001b[38;5;66;03m# guard occurred\u001b[39;00m\n\u001b[0;32m--> 429\u001b[0m r \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mshape_env\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mevaluate_expr\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexpr\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mhint\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfx_node\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfx_node\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 430\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 431\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mint\u001b[39m(r)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/fx/experimental/recording.py:262\u001b[0m, in \u001b[0;36mrecord_shapeenv_event..decorator..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 255\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 256\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m args[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m.\u001b[39mis_recording: \u001b[38;5;66;03m# type: ignore[has-type]\u001b[39;00m\n\u001b[1;32m 257\u001b[0m \u001b[38;5;66;03m# If ShapeEnv is already recording an event, call the wrapped\u001b[39;00m\n\u001b[1;32m 258\u001b[0m 
\u001b[38;5;66;03m# function directly.\u001b[39;00m\n\u001b[1;32m 259\u001b[0m \u001b[38;5;66;03m#\u001b[39;00m\n\u001b[1;32m 260\u001b[0m \u001b[38;5;66;03m# NB: here, we skip the check of whether all ShapeEnv instances\u001b[39;00m\n\u001b[1;32m 261\u001b[0m \u001b[38;5;66;03m# are equal, in favor of a faster dispatch.\u001b[39;00m\n\u001b[0;32m--> 262\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m retlog(\u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 264\u001b[0m \u001b[38;5;66;03m# Retrieve an instance of ShapeEnv.\u001b[39;00m\n\u001b[1;32m 265\u001b[0m \u001b[38;5;66;03m# Assumption: the collection of args and kwargs may not reference\u001b[39;00m\n\u001b[1;32m 266\u001b[0m \u001b[38;5;66;03m# different ShapeEnv instances.\u001b[39;00m\n\u001b[1;32m 267\u001b[0m \u001b[38;5;28mself\u001b[39m \u001b[38;5;241m=\u001b[39m _extract_shape_env_and_assert_equal(args, kwargs)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/fx/experimental/symbolic_shapes.py:5122\u001b[0m, in \u001b[0;36mShapeEnv.evaluate_expr\u001b[0;34m(self, orig_expr, hint, fx_node, size_oblivious, forcing_spec)\u001b[0m\n\u001b[1;32m 5117\u001b[0m \u001b[38;5;129m@lru_cache\u001b[39m(\u001b[38;5;241m256\u001b[39m)\n\u001b[1;32m 5118\u001b[0m \u001b[38;5;129m@record_shapeenv_event\u001b[39m(save_tracked_fakes\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m 5119\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mevaluate_expr\u001b[39m(\u001b[38;5;28mself\u001b[39m, orig_expr: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msympy.Expr\u001b[39m\u001b[38;5;124m\"\u001b[39m, hint\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m, 
fx_node\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 5120\u001b[0m size_oblivious: \u001b[38;5;28mbool\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m, \u001b[38;5;241m*\u001b[39m, forcing_spec: \u001b[38;5;28mbool\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m):\n\u001b[1;32m 5121\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 5122\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_evaluate_expr\u001b[49m\u001b[43m(\u001b[49m\u001b[43morig_expr\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mhint\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfx_node\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43msize_oblivious\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mforcing_spec\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mforcing_spec\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 5123\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[1;32m 5124\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlog\u001b[38;5;241m.\u001b[39mwarning(\n\u001b[1;32m 5125\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mfailed during evaluate_expr(\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m, hint=\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m, size_oblivious=\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m, forcing_spec=\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 5126\u001b[0m orig_expr, hint, size_oblivious, forcing_spec\n\u001b[1;32m 5127\u001b[0m )\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/fx/experimental/symbolic_shapes.py:5238\u001b[0m, in \u001b[0;36mShapeEnv._evaluate_expr\u001b[0;34m(self, orig_expr, hint, fx_node, size_oblivious, 
forcing_spec)\u001b[0m\n\u001b[1;32m 5236\u001b[0m concrete_val \u001b[38;5;241m=\u001b[39m unsound_result\n\u001b[1;32m 5237\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 5238\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_make_data_dependent_error(\n\u001b[1;32m 5239\u001b[0m expr\u001b[38;5;241m.\u001b[39mxreplace(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mvar_to_val),\n\u001b[1;32m 5240\u001b[0m expr,\n\u001b[1;32m 5241\u001b[0m size_oblivious_result\u001b[38;5;241m=\u001b[39msize_oblivious_result\n\u001b[1;32m 5242\u001b[0m )\n\u001b[1;32m 5243\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 5244\u001b[0m expr \u001b[38;5;241m=\u001b[39m new_expr\n", "\u001b[0;31mGuardOnDataDependentSymNode\u001b[0m: Could not extract specialized integer from data-dependent expression u0 (unhinted: u0). (Size-like symbols: u0)\n\nPotential framework code culprit (scroll up for full backtrace):\n File \"/rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_ops.py\", line 759, in decompose\n return self._op_dk(dk, *args, **kwargs)\n\nFor more information, run with TORCH_LOGS=\"dynamic\"\nFor extended logs when we create symbols, also add TORCHDYNAMO_EXTENDED_DEBUG_CREATE_SYMBOL=\"u0\"\nIf you suspect the guard was triggered from C++, add TORCHDYNAMO_EXTENDED_DEBUG_CPP=1\nFor more debugging help, see https://docs.google.com/document/d/1HSuTTVvYH1pTew89Rtpeu84Ht3nQEFTYhAX3Ypa_xJs/edit?usp=sharing\n\nFor C++ stack trace, run with TORCHDYNAMO_EXTENDED_DEBUG_CPP=1\n\nThe following call raised this error:\n File \"/rhome/eingerman/Projects/DeepLearning/TTS/Kokoro-82M/models.py\", line 471, in F0Ntrain\n x2, _temp = self.shared(x1)\n\nTo fix the error, insert one of the following checks before this call:\n 1. torch._check(x.shape[2])\n 2. 
torch._check(~x.shape[2])\n\n(These suggested fixes were derived by replacing `u0` with x.shape[2] or x1.shape[1] in u0 and its negation.)" ] } ], "source": [ "os.environ['TORCH_LOGS'] = '+dynamic'\n", "os.environ['TORCH_LOGS'] = '+export'\n", "os.environ['TORCHDYNAMO_EXTENDED_DEBUG_GUARD_ADDED']=\"u0 >= 0\"\n", "os.environ['TORCHDYNAMO_EXTENDED_DEBUG_CPP']=\"1\"\n", "os.environ['TORCHDYNAMO_EXTENDED_DEBUG_CREATE_SYMBOL']=\"u0\"\n", "\n", "class StyleTTS2(torch.nn.Module):\n", " def __init__(self, model, voicepack):\n", " super().__init__()\n", " self.model = model\n", " self.voicepack = voicepack\n", " \n", " def forward(self, tokens):\n", " speed = 1.\n", " # tokens = torch.nn.functional.pad(tokens, (0, 510 - tokens.shape[-1]))\n", " device = tokens.device\n", " input_lengths = torch.LongTensor([tokens.shape[-1]]).to(device)\n", "\n", " text_mask = length_to_mask(input_lengths).to(device)\n", " bert_dur = self.model['bert'](tokens, attention_mask=(~text_mask).int())\n", "\n", " d_en = self.model[\"bert_encoder\"](bert_dur).transpose(-1, -2)\n", "\n", " ref_s = self.voicepack[tokens.shape[1]]\n", " s = ref_s[:, 128:]\n", "\n", " d = self.model[\"predictor\"].text_encoder.inference(d_en, s)\n", " x, _ = self.model[\"predictor\"].lstm(d)\n", "\n", " duration = self.model[\"predictor\"].duration_proj(x)\n", " duration = torch.sigmoid(duration).sum(axis=-1) / speed\n", " pred_dur = torch.round(duration).clamp(min=1).long()\n", " \n", " c_start = F.pad(pred_dur,(1,0), \"constant\").cumsum(dim=1)[0,0:-1]\n", " c_end = c_start + pred_dur[0,:]\n", " indices = torch.arange(0, pred_dur.sum().item()).long().to(device)\n", "\n", " pred_aln_trg_list=[]\n", " for cs, ce in zip(c_start, c_end):\n", " row = torch.where((indices>=cs) & (indices 670\u001b[0m \u001b[43mproduce_guards_callback\u001b[49m\u001b[43m(\u001b[49m\u001b[43mgm\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 671\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (ConstraintViolationError, ValueRangeError) 
\u001b[38;5;28;01mas\u001b[39;00m e:\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:1655\u001b[0m, in \u001b[0;36m_non_strict_export.._produce_guards_callback\u001b[0;34m(gm)\u001b[0m\n\u001b[1;32m 1654\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_produce_guards_callback\u001b[39m(gm):\n\u001b[0;32m-> 1655\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mproduce_guards_and_solve_constraints\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 1656\u001b[0m \u001b[43m \u001b[49m\u001b[43mfake_mode\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfake_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1657\u001b[0m \u001b[43m \u001b[49m\u001b[43mgm\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mgm\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1658\u001b[0m \u001b[43m \u001b[49m\u001b[43mdynamic_shapes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtransformed_dynamic_shapes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1659\u001b[0m \u001b[43m \u001b[49m\u001b[43mequalities_inputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mequalities_inputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1660\u001b[0m \u001b[43m \u001b[49m\u001b[43moriginal_signature\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moriginal_signature\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1661\u001b[0m \u001b[43m \u001b[49m\u001b[43m_is_torch_jit_trace\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_is_torch_jit_trace\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1662\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_export/non_strict_utils.py:305\u001b[0m, in \u001b[0;36mproduce_guards_and_solve_constraints\u001b[0;34m(fake_mode, gm, dynamic_shapes, equalities_inputs, original_signature, _is_torch_jit_trace)\u001b[0m\n\u001b[1;32m 304\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m 
constraint_violation_error:\n\u001b[0;32m--> 305\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m constraint_violation_error\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/_export/non_strict_utils.py:270\u001b[0m, in \u001b[0;36mproduce_guards_and_solve_constraints\u001b[0;34m(fake_mode, gm, dynamic_shapes, equalities_inputs, original_signature, _is_torch_jit_trace)\u001b[0m\n\u001b[1;32m 269\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 270\u001b[0m \u001b[43mshape_env\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mproduce_guards\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 271\u001b[0m \u001b[43m \u001b[49m\u001b[43mplaceholders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 272\u001b[0m \u001b[43m \u001b[49m\u001b[43msources\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 273\u001b[0m \u001b[43m \u001b[49m\u001b[43minput_contexts\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minput_contexts\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 274\u001b[0m \u001b[43m \u001b[49m\u001b[43mequalities_inputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mequalities_inputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 275\u001b[0m \u001b[43m \u001b[49m\u001b[43mignore_static\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 276\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 277\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m ConstraintViolationError \u001b[38;5;28;01mas\u001b[39;00m e:\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/fx/experimental/symbolic_shapes.py:4178\u001b[0m, in \u001b[0;36mShapeEnv.produce_guards\u001b[0;34m(self, placeholders, sources, source_ref, guards, input_contexts, equalities_inputs, _simplified, ignore_static)\u001b[0m\n\u001b[1;32m 4177\u001b[0m err \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;124m'\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(error_msgs)\n\u001b[0;32m-> 4178\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m ConstraintViolationError(\n\u001b[1;32m 4179\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mConstraints violated (\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mdebug_names\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m)! \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 4180\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mFor more information, run with TORCH_LOGS=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m+dynamic\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m 4181\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00merr\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 4182\u001b[0m )\n\u001b[1;32m 4183\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(warn_msgs) \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m:\n", "\u001b[0;31mConstraintViolationError\u001b[0m: Constraints violated (token_len)! 
For more information, run with TORCH_LOGS=\"+dynamic\".\n - Not all values of token_len = L['args'][0][0].size()[0] in the specified range are valid because token_len was inferred to be a constant (143).\nSuggested fixes:\n token_len = 143", "\nDuring handling of the above exception, another exception occurred:\n", "\u001b[0;31mUserError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[33], line 61\u001b[0m\n\u001b[1;32m 58\u001b[0m dynamic_shapes \u001b[38;5;241m=\u001b[39m {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtokens0\u001b[39m\u001b[38;5;124m\"\u001b[39m:{\u001b[38;5;241m0\u001b[39m:token_len}}\n\u001b[1;32m 60\u001b[0m \u001b[38;5;66;03m# with torch.no_grad():\u001b[39;00m\n\u001b[0;32m---> 61\u001b[0m export_mod \u001b[38;5;241m=\u001b[39m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexport\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexport\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtest_model\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[43mtokens\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdynamic_shapes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdynamic_shapes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstrict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m)\u001b[49m\n\u001b[1;32m 62\u001b[0m \u001b[38;5;66;03m# export_mod = torch.export.export(test_model, args=( tokens[0,:], ), strict=False).run_decompositions()\u001b[39;00m\n\u001b[1;32m 63\u001b[0m \u001b[38;5;28mprint\u001b[39m(export_mod)\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/__init__.py:270\u001b[0m, in 
\u001b[0;36mexport\u001b[0;34m(mod, args, kwargs, dynamic_shapes, strict, preserve_module_call_signature)\u001b[0m\n\u001b[1;32m 264\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(mod, torch\u001b[38;5;241m.\u001b[39mjit\u001b[38;5;241m.\u001b[39mScriptModule):\n\u001b[1;32m 265\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m 266\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mExporting a ScriptModule is not supported. \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 267\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMaybe try converting your ScriptModule to an ExportedProgram \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 268\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124musing `TS2EPConverter(mod, args, kwargs).convert()` instead.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 269\u001b[0m )\n\u001b[0;32m--> 270\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_export\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 271\u001b[0m \u001b[43m \u001b[49m\u001b[43mmod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 272\u001b[0m \u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 273\u001b[0m \u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 274\u001b[0m \u001b[43m \u001b[49m\u001b[43mdynamic_shapes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 275\u001b[0m \u001b[43m \u001b[49m\u001b[43mstrict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstrict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 276\u001b[0m \u001b[43m \u001b[49m\u001b[43mpreserve_module_call_signature\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpreserve_module_call_signature\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 277\u001b[0m \u001b[43m \u001b[49m\u001b[43mpre_dispatch\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[1;32m 278\u001b[0m 
\u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:1017\u001b[0m, in \u001b[0;36m_log_export_wrapper..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 1010\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1011\u001b[0m log_export_usage(\n\u001b[1;32m 1012\u001b[0m event\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mexport.error.unclassified\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 1013\u001b[0m \u001b[38;5;28mtype\u001b[39m\u001b[38;5;241m=\u001b[39merror_type,\n\u001b[1;32m 1014\u001b[0m message\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mstr\u001b[39m(e),\n\u001b[1;32m 1015\u001b[0m flags\u001b[38;5;241m=\u001b[39m_EXPORT_FLAGS,\n\u001b[1;32m 1016\u001b[0m )\n\u001b[0;32m-> 1017\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[1;32m 1018\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[1;32m 1019\u001b[0m _EXPORT_FLAGS \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:990\u001b[0m, in \u001b[0;36m_log_export_wrapper..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 988\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 989\u001b[0m start \u001b[38;5;241m=\u001b[39m time\u001b[38;5;241m.\u001b[39mtime()\n\u001b[0;32m--> 990\u001b[0m ep \u001b[38;5;241m=\u001b[39m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 991\u001b[0m end \u001b[38;5;241m=\u001b[39m time\u001b[38;5;241m.\u001b[39mtime()\n\u001b[1;32m 992\u001b[0m log_export_usage(\n\u001b[1;32m 993\u001b[0m 
event\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mexport.time\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 994\u001b[0m metrics\u001b[38;5;241m=\u001b[39mend \u001b[38;5;241m-\u001b[39m start,\n\u001b[1;32m 995\u001b[0m flags\u001b[38;5;241m=\u001b[39m_EXPORT_FLAGS,\n\u001b[1;32m 996\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mget_ep_stats(ep),\n\u001b[1;32m 997\u001b[0m )\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/exported_program.py:114\u001b[0m, in \u001b[0;36m_disable_prexisiting_fake_mode..wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 111\u001b[0m \u001b[38;5;129m@functools\u001b[39m\u001b[38;5;241m.\u001b[39mwraps(fn)\n\u001b[1;32m 112\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mwrapper\u001b[39m(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 113\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m unset_fake_temporarily():\n\u001b[0;32m--> 114\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:1880\u001b[0m, in \u001b[0;36m_export\u001b[0;34m(mod, args, kwargs, dynamic_shapes, strict, preserve_module_call_signature, pre_dispatch, allow_complex_guards_as_runtime_asserts, _is_torch_jit_trace)\u001b[0m\n\u001b[1;32m 1877\u001b[0m \u001b[38;5;66;03m# Call the appropriate export function based on the strictness of tracing.\u001b[39;00m\n\u001b[1;32m 1878\u001b[0m export_func \u001b[38;5;241m=\u001b[39m _strict_export \u001b[38;5;28;01mif\u001b[39;00m strict \u001b[38;5;28;01melse\u001b[39;00m _non_strict_export\n\u001b[0;32m-> 
1880\u001b[0m export_artifact \u001b[38;5;241m=\u001b[39m \u001b[43mexport_func\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore[operator]\u001b[39;49;00m\n\u001b[1;32m 1881\u001b[0m \u001b[43m \u001b[49m\u001b[43mmod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1882\u001b[0m \u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1883\u001b[0m \u001b[43m \u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1884\u001b[0m \u001b[43m \u001b[49m\u001b[43mdynamic_shapes\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1885\u001b[0m \u001b[43m \u001b[49m\u001b[43mpreserve_module_call_signature\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1886\u001b[0m \u001b[43m \u001b[49m\u001b[43mpre_dispatch\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1887\u001b[0m \u001b[43m \u001b[49m\u001b[43moriginal_state_dict\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1888\u001b[0m \u001b[43m \u001b[49m\u001b[43moriginal_in_spec\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1889\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_complex_guards_as_runtime_asserts\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1890\u001b[0m \u001b[43m \u001b[49m\u001b[43m_is_torch_jit_trace\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1891\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1892\u001b[0m export_graph_signature: ExportGraphSignature \u001b[38;5;241m=\u001b[39m export_artifact\u001b[38;5;241m.\u001b[39maten\u001b[38;5;241m.\u001b[39msig\n\u001b[1;32m 1894\u001b[0m forward_arg_names \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 1895\u001b[0m _get_forward_arg_names(mod, args, kwargs) \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m _is_torch_jit_trace \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1896\u001b[0m )\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:1683\u001b[0m, in 
\u001b[0;36m_non_strict_export\u001b[0;34m(mod, args, kwargs, dynamic_shapes, preserve_module_call_signature, pre_dispatch, original_state_dict, orig_in_spec, allow_complex_guards_as_runtime_asserts, _is_torch_jit_trace, dispatch_tracing_mode)\u001b[0m\n\u001b[1;32m 1667\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m _fakify_script_objects(mod, fake_args, fake_kwargs, fake_mode) \u001b[38;5;28;01mas\u001b[39;00m (\n\u001b[1;32m 1668\u001b[0m patched_mod,\n\u001b[1;32m 1669\u001b[0m new_fake_args,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1672\u001b[0m map_fake_to_real,\n\u001b[1;32m 1673\u001b[0m ):\n\u001b[1;32m 1674\u001b[0m _to_aten_func \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 1675\u001b[0m _export_to_aten_ir_make_fx\n\u001b[1;32m 1676\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m dispatch_tracing_mode \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmake_fx\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1681\u001b[0m )\n\u001b[1;32m 1682\u001b[0m )\n\u001b[0;32m-> 1683\u001b[0m aten_export_artifact \u001b[38;5;241m=\u001b[39m \u001b[43m_to_aten_func\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore[operator]\u001b[39;49;00m\n\u001b[1;32m 1684\u001b[0m \u001b[43m \u001b[49m\u001b[43mpatched_mod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1685\u001b[0m \u001b[43m \u001b[49m\u001b[43mnew_fake_args\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1686\u001b[0m \u001b[43m \u001b[49m\u001b[43mnew_fake_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1687\u001b[0m \u001b[43m \u001b[49m\u001b[43mfake_params_buffers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1688\u001b[0m \u001b[43m \u001b[49m\u001b[43mnew_fake_constant_attrs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1689\u001b[0m \u001b[43m \u001b[49m\u001b[43mproduce_guards_callback\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_produce_guards_callback\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1690\u001b[0m 
\u001b[43m \u001b[49m\u001b[43mtransform\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_tuplify_outputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 1691\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1692\u001b[0m \u001b[38;5;66;03m# aten_export_artifact.constants contains only fake script objects, we need to map them back\u001b[39;00m\n\u001b[1;32m 1693\u001b[0m aten_export_artifact\u001b[38;5;241m.\u001b[39mconstants \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 1694\u001b[0m fqn: map_fake_to_real[obj] \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(obj, FakeScriptObject) \u001b[38;5;28;01melse\u001b[39;00m obj\n\u001b[1;32m 1695\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m fqn, obj \u001b[38;5;129;01min\u001b[39;00m aten_export_artifact\u001b[38;5;241m.\u001b[39mconstants\u001b[38;5;241m.\u001b[39mitems()\n\u001b[1;32m 1696\u001b[0m }\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/export/_trace.py:672\u001b[0m, in \u001b[0;36m_export_to_aten_ir\u001b[0;34m(mod, fake_args, fake_kwargs, fake_params_buffers, constant_attrs, produce_guards_callback, transform, pre_dispatch, decomp_table, _check_autograd_state, _is_torch_jit_trace)\u001b[0m\n\u001b[1;32m 670\u001b[0m produce_guards_callback(gm)\n\u001b[1;32m 671\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (ConstraintViolationError, ValueRangeError) \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m--> 672\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m UserError(UserErrorType\u001b[38;5;241m.\u001b[39mCONSTRAINT_VIOLATION, \u001b[38;5;28mstr\u001b[39m(e)) \u001b[38;5;66;03m# noqa: B904\u001b[39;00m\n\u001b[1;32m 674\u001b[0m \u001b[38;5;66;03m# Run runtime asserts pass before creating input/output specs, since size-related CSE/DCE might affect output signature.\u001b[39;00m\n\u001b[1;32m 675\u001b[0m \u001b[38;5;66;03m# Overwrite output specs afterwards.\u001b[39;00m\n\u001b[1;32m 676\u001b[0m flat_fake_args 
\u001b[38;5;241m=\u001b[39m pytree\u001b[38;5;241m.\u001b[39mtree_leaves((fake_args, fake_kwargs))\n", "\u001b[0;31mUserError\u001b[0m: Constraints violated (token_len)! For more information, run with TORCH_LOGS=\"+dynamic\".\n - Not all values of token_len = L['args'][0][0].size()[0] in the specified range are valid because token_len was inferred to be a constant (143).\nSuggested fixes:\n token_len = 143" ] } ], "source": [ "os.environ['TORCH_LOGS'] = '+dynamic'\n", "os.environ['TORCH_LOGS'] = '+export'\n", "class test(torch.nn.Module):\n", " def __init__(self, model, voicepack):\n", " super().__init__()\n", " self.model = model\n", " self.voicepack = voicepack\n", " self.model.text_encoder.lstm.flatten_parameters()\n", " \n", " def forward(self, tokens0):\n", " tokens = tokens0.unsqueeze(0)\n", " print(tokens.shape)\n", " # speed = 1.\n", " # # tokens = torch.nn.functional.pad(tokens, (0, 510 - tokens.shape[-1]))\n", " # device = tokens.device\n", " input_lengths = torch.LongTensor([tokens0.shape[-1]]).to(device)\n", "\n", " # text_mask = length_to_mask(input_lengths).to(device)\n", " # bert_dur = self.model['bert'](tokens, attention_mask=(~text_mask).int())\n", "\n", " # d_en = self.model[\"bert_encoder\"](bert_dur).transpose(-1, -2)\n", "\n", " # ref_s = self.voicepack[tokens.shape[1]]\n", " # s = ref_s[:, 128:]\n", "\n", " # d = self.model[\"predictor\"].text_encoder.inference(d_en, s)\n", " # x, _ = self.model[\"predictor\"].lstm(d)\n", "\n", " # duration = self.model[\"predictor\"].duration_proj(x)\n", " # duration = torch.sigmoid(duration).sum(axis=-1) / speed\n", " # pred_dur = torch.round(duration).clamp(min=1).long()\n", " \n", " # c_start = F.pad(pred_dur,(1,0), \"constant\").cumsum(dim=1)[0,0:-1]\n", " # c_end = c_start + pred_dur[0,:]\n", " # indices = torch.arange(0, pred_dur.sum().item()).long().to(device)\n", "\n", " # pred_aln_trg_list=[]\n", " # for cs, ce in zip(c_start, c_end):\n", " # row = torch.where((indices>=cs) & (indices" ] }, 
"metadata": {}, "output_type": "display_data" } ], "source": [ "import torch.nn.functional as F\n", "\n", "# pred_aln_trg = torch.zeros(input_lengths, pred_dur.sum().item())\n", "c_start = F.pad(pred_dur,(1,0), \"constant\").cumsum(dim=1)[0,0:-1]\n", "c_end = c_start + pred_dur[0,:]\n", "indices = torch.arange(0, pred_dur.sum().item()).to(device)\n", "\n", "pred_aln_trg_list=[]\n", "for cs, ce in zip(c_start, c_end):\n", " row = torch.where((indices>=cs) & (indices 41\u001b[0m pred_aln_trg \u001b[38;5;241m=\u001b[39m \u001b[43mcreate_alignment_matrix\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_lengths\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mitem\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpred_dur\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 42\u001b[0m pl\u001b[38;5;241m.\u001b[39mimshow(pred_aln_trg)\n", "Cell \u001b[0;32mIn[48], line 22\u001b[0m, in \u001b[0;36mcreate_alignment_matrix\u001b[0;34m(input_lengths, pred_dur)\u001b[0m\n\u001b[1;32m 19\u001b[0m col_indices \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39marange(pred_dur\u001b[38;5;241m.\u001b[39mmax()\u001b[38;5;241m.\u001b[39mitem())\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39mrepeat(input_lengths, \u001b[38;5;241m1\u001b[39m)\n\u001b[1;32m 21\u001b[0m \u001b[38;5;66;03m# Create a mask based on durations\u001b[39;00m\n\u001b[0;32m---> 22\u001b[0m mask \u001b[38;5;241m=\u001b[39m \u001b[43mcol_indices\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m<\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mpred_dur\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43munsqueeze\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 24\u001b[0m \u001b[38;5;66;03m# Create offset indices for the columns\u001b[39;00m\n\u001b[1;32m 25\u001b[0m offset \u001b[38;5;241m=\u001b[39m 
torch\u001b[38;5;241m.\u001b[39mcat((torch\u001b[38;5;241m.\u001b[39mtensor([\u001b[38;5;241m0\u001b[39m]), cum_dur[\u001b[38;5;241m0\u001b[39m, :\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m]))\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m1\u001b[39m)\u001b[38;5;241m.\u001b[39mrepeat(\u001b[38;5;241m1\u001b[39m, pred_dur\u001b[38;5;241m.\u001b[39mmax()\u001b[38;5;241m.\u001b[39mitem())\n", "\u001b[0;31mRuntimeError\u001b[0m: The size of tensor a (23) must match the size of tensor b (143) at non-singleton dimension 2" ] } ], "source": [ "\n", "\n", "def create_alignment_matrix(input_lengths, pred_dur):\n", " \"\"\"Creates an alignment matrix without explicit loops.\n", "\n", " Args:\n", " input_lengths: Number of input units (int).\n", " pred_dur: Predicted durations (torch.Tensor of shape (1, input_lengths)).\n", "\n", " Returns:\n", " pred_aln_trg: Alignment matrix (torch.Tensor of shape (input_lengths, pred_dur.sum())).\n", " \"\"\"\n", " total_duration = pred_dur.sum().item()\n", " pred_aln_trg = torch.zeros(input_lengths, total_duration)\n", "\n", " # Calculate cumulative durations\n", " cum_dur = torch.cumsum(pred_dur, dim=1)\n", "\n", " # Create indices for filling the matrix\n", " row_indices = torch.arange(input_lengths).unsqueeze(1).repeat(1, pred_dur.max().item())\n", " col_indices = torch.arange(pred_dur.max().item()).unsqueeze(0).repeat(input_lengths, 1)\n", "\n", " # Create a mask based on durations\n", " mask = col_indices < pred_dur.unsqueeze(1)\n", "\n", " # Create offset indices for the columns\n", " offset = torch.cat((torch.tensor([0]), cum_dur[0, :-1])).unsqueeze(1).repeat(1, pred_dur.max().item())\n", "\n", " # Apply the mask and offset to generate the final column indices\n", " final_col_indices = (col_indices + offset) * mask\n", "\n", " # Flatten indices and create a flattened index tensor\n", " flat_row_indices = row_indices[mask].long()\n", " flat_col_indices = final_col_indices[mask].long()\n", " flat_indices = 
torch.stack([flat_row_indices, flat_col_indices], dim=1)\n", "\n", " # Scatter ones into the alignment matrix\n", " pred_aln_trg[flat_indices.T[0], flat_indices.T[1]] = 1\n", "\n", " return pred_aln_trg\n", "\n", "\n", "pred_aln_trg = create_alignment_matrix(input_lengths.item(), pred_dur)\n", "pl.imshow(pred_aln_trg)\n" ] }, { "cell_type": "code", "execution_count": 47, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "tensor([143])" ] }, "execution_count": 47, "metadata": {}, "output_type": "execute_result" } ], "source": [ "input_lengths" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "pred_aln_trg = torch.zeros(input_lengths, pred_dur.sum().item())\n", "c_frame = 0\n", "\n", "for i in range(pred_aln_trg.size(0)):\n", " pred_aln_trg[i, c_frame:c_frame + pred_dur[0,i].item()] = 1\n", " c_frame += pred_dur[0,i].item()" ] }, { "cell_type": "code", "execution_count": 44, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 44, "metadata": {}, "output_type": "execute_result" } ], "source": [ "style_model.eval" ] }, { "cell_type": "code", "execution_count": 23, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "CustomAlbert(\n", " (embeddings): AlbertEmbeddings(\n", " (word_embeddings): Embedding(178, 128, padding_idx=0)\n", " (position_embeddings): Embedding(512, 128)\n", " (token_type_embeddings): Embedding(2, 128)\n", " (LayerNorm): LayerNorm((128,), eps=1e-12, elementwise_affine=True)\n", " (dropout): Dropout(p=0, inplace=False)\n", " )\n", " (encoder): AlbertTransformer(\n", " (embedding_hidden_mapping_in): Linear(in_features=128, out_features=768, bias=True)\n", " (albert_layer_groups): ModuleList(\n", " (0): AlbertLayerGroup(\n", " (albert_layers): ModuleList(\n", " (0): AlbertLayer(\n", " (full_layer_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n", " (attention): AlbertAttention(\n", " (query): Linear(in_features=768, out_features=768, 
bias=True)\n", " (key): Linear(in_features=768, out_features=768, bias=True)\n", " (value): Linear(in_features=768, out_features=768, bias=True)\n", " (attention_dropout): Dropout(p=0, inplace=False)\n", " (output_dropout): Dropout(p=0, inplace=False)\n", " (dense): Linear(in_features=768, out_features=768, bias=True)\n", " (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n", " )\n", " (ffn): Linear(in_features=768, out_features=2048, bias=True)\n", " (ffn_output): Linear(in_features=2048, out_features=768, bias=True)\n", " (activation): NewGELUActivation()\n", " (dropout): Dropout(p=0, inplace=False)\n", " )\n", " )\n", " )\n", " )\n", " )\n", " (pooler): Linear(in_features=768, out_features=768, bias=True)\n", " (pooler_activation): Tanh()\n", ")" ] }, "execution_count": 23, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model['bert']" ] }, { "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [ { "ename": "TypeError", "evalue": "only integer tensors of a single element can be converted to an index", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[17], line 11\u001b[0m\n\u001b[1;32m 8\u001b[0m pred_aln_trg1 \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mzeros(input_lengths, pred_dur\u001b[38;5;241m.\u001b[39msum()\u001b[38;5;241m.\u001b[39mitem(), dtype\u001b[38;5;241m=\u001b[39mtorch\u001b[38;5;241m.\u001b[39mfloat32)\n\u001b[1;32m 9\u001b[0m batch_indices \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39marange(input_lengths\u001b[38;5;241m.\u001b[39mitem())\u001b[38;5;241m.\u001b[39munsqueeze(\u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m---> 11\u001b[0m \u001b[43mpred_aln_trg1\u001b[49m\u001b[43m[\u001b[49m\u001b[43mbatch_indices\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mstart_indices\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mend_indices\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m]\u001b[49m \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m 13\u001b[0m pl\u001b[38;5;241m.\u001b[39mimshow(pred_aln_trg1)\n", "\u001b[0;31mTypeError\u001b[0m: only integer tensors of a single element can be converted to an index" ] } ], "source": [ "# Process durations\n", "\n", "cumsum_dur = torch.cumsum(pred_dur, dim=1).to(device)\n", "end_indices = cumsum_dur - 1\n", "start_indices = torch.cat([torch.zeros(1, 1, dtype=torch.long).to(device), end_indices[:, :-1] + 1], dim=1)\n", "\n", "# Create binary alignment target\n", "pred_aln_trg1 = torch.zeros(input_lengths, pred_dur.sum().item(), dtype=torch.float32)\n", "batch_indices = torch.arange(input_lengths.item()).unsqueeze(1)\n", "\n", "pred_aln_trg1[batch_indices, start_indices: end_indices + 1] = 1\n", "\n", "pl.imshow(pred_aln_trg1)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "batch_indices" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "torch.Size([143, 329])" ] }, "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ "pred_aln_trg1 = torch.zeros(input_lengths, pred_dur.sum().item()).to(device)\n", "a = torch.arange(pred_aln_trg1.size(0))[:, None].repeat(1, pred_dur.size(1)).to(device)\n", "b = (torch.arange(pred_dur.size(1)).repeat(pred_aln_trg1.size(0), 1).to(device) < pred_dur).to(torch.float32).to(device)\n", "print(pred_aln_trg.dtype, pred_aln_trg1.dtype, a.dtype, b.dtype)\n", "print(a.device, b.device, pred_dur.device)\n", "pred_aln_trg1.scatter_(1, \n", " a, \n", " b)\n", "\n", "pl.imshow(pred_aln_trg1.detach().cpu().numpy())" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, 
"outputs": [ { "ename": "RuntimeError", "evalue": "Expected index [1, 25] to be smaller than self [143, 329] apart from dimension 1 and to be smaller size than src [1, 1]", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[8], line 18\u001b[0m\n\u001b[1;32m 16\u001b[0m \u001b[38;5;66;03m# Use scatter_add_ to set the appropriate slices to 1\u001b[39;00m\n\u001b[1;32m 17\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(pred_dur\u001b[38;5;241m.\u001b[39msize(\u001b[38;5;241m1\u001b[39m)):\n\u001b[0;32m---> 18\u001b[0m \t\u001b[43mmask\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mscatter_add_\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m(\u001b[49m\u001b[43mstart_indices\u001b[49m\u001b[43m[\u001b[49m\u001b[43m:\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mi\u001b[49m\u001b[43m:\u001b[49m\u001b[43mi\u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m 
\u001b[49m\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43marange\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpred_dur\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmax\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43munsqueeze\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mclamp\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mmax\u001b[39;49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpred_aln_trg\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msize\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m-\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvalues\u001b[49m\u001b[43m[\u001b[49m\u001b[43m:\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mi\u001b[49m\u001b[43m:\u001b[49m\u001b[43mi\u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 20\u001b[0m \u001b[38;5;66;03m# Apply the mask to pred_aln_trg\u001b[39;00m\n\u001b[1;32m 21\u001b[0m pred_aln_trg \u001b[38;5;241m=\u001b[39m mask\n", "\u001b[0;31mRuntimeError\u001b[0m: Expected index [1, 25] to be smaller than self [143, 329] apart from dimension 1 and to be smaller size than src [1, 1]" ] } ], "source": [ "# Calculate the cumulative sum of durations to get the end indices\n", "cumulative_durations = torch.cumsum(pred_dur, dim=1).to(device)\n", "\n", "# Calculate the start indices by shifting the cumulative durations\n", "start_indices = cumulative_durations - pred_dur\n", "\n", "# Create a tensor of indices for pred_aln_trg\n", "indices = torch.arange(pred_aln_trg.size(1)).to(device)\n", 
"\n", "# Create a mask tensor initialized to zeros\n", "mask = torch.zeros_like(pred_aln_trg).to(device)\n", "\n", "# Create a tensor to hold the values to scatter\n", "values = torch.ones_like(pred_dur, dtype=pred_aln_trg.dtype).to(device)\n", "\n", "# Use scatter_ to set the appropriate slices to 1\n", "mask.scatter_(1, start_indices.unsqueeze(2) + torch.arange(pred_dur.max()).unsqueeze(0).unsqueeze(0).to(device), values.unsqueeze(2))\n", "\n", "# Apply the mask to pred_aln_trg\n", "pred_aln_trg = mask" ] }, { "cell_type": "code", "execution_count": 63, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "device(type='cpu')" ] }, "execution_count": 63, "metadata": {}, "output_type": "execute_result" } ], "source": [ "torch.arange(pred_dur.size(1)).repeat(pred_aln_trg1.size(0), 1).device" ] }, { "cell_type": "code", "execution_count": 49, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 49, "metadata": {}, "output_type": "execute_result" }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAigAAAEICAYAAAB1SQ8uAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAyTklEQVR4nO3deXRT1d4+8CdJm3SgTemYFtpS5rGICDUOiNLLoJcLggOICohwQUClqFCVyTuUF3ydUe5PveCrIIoyXFBRppYLlAKFUsZCodACbRmbdKBpkrN/f1QDoQVaaHKS5vmslbVy9tlNvtmm+PTsfc5RCCEEiIiIiFyIUu4CiIiIiK7HgEJEREQuhwGFiIiIXA4DChEREbkcBhQiIiJyOQwoRERE5HIYUIiIiMjlMKAQERGRy2FAISIiIpfDgEJEREQuR9aAsmDBArRo0QI+Pj5ISEjAzp075SyHiIiIXIRsAeW7775DUlISZs2ahT179qBr167o168fzp07J1dJRERE5CIUct0sMCEhAT169MAnn3wCAJAkCdHR0Zg8eTKmT58uR0lERETkIrzkeNOqqipkZmYiOTnZ1qZUKpGYmIj09PQa/U0mE0wmk21bkiRcunQJISEhUCgUTqmZiIiI7owQAqWlpYiKioJSefNJHFkCyoULF2C1WhEREWHXHhERgSNHjtTon5KSgjlz5jirPCIiInKggoICNG/e/KZ9ZAko9ZWcnIykpCTbtsFgQExMDB7Ao/CCt4yV2TM+3QOfzvgEr772Evx3HIO1xCh3SURERC7DAjO24mcEBATcsq8sASU0NBQqlQrFxcV27cXFxdDpdDX6azQaaDSaGu1e8IaXwnUCSuh/jmD29qfR/ocT2F3UArrBh+UuiYiIyHX8vuq1LsszZDmLR61Wo3v37ti4caOtTZIkbNy4EXq9Xo6SGoRUWQnL6TNIXxOP0lNanJirh1fLFnKXRURE5HZkO804KSkJn3/+Ob766iscPnwYEyZMQHl5OUaPHi1XSQ0m+h/bEZEB/DZ8Pso6hUNZh0NZREREdJVsa1CefvppnD9/HjNnzkRRURHuuusurFu3rsbCWXcVtCILk7YNR+TSXBwdG4Wwv+TIXRIREZHbkO06KHfCaDRCq9WiNwa51BqU2uTPvA+mcCu8L1cfrArIB0I+r3kqNRERUWNnEWakYjUMBgMCAwNv2pf34nGwmHe2Q/dfBVJHzkfqyPkIf+YUVIGBgFIld2lEREQuiwHFCbRrsjHmkecw5pHncP7rWEzduw3mxG5yl0VEROSy3OI6KO5OqqgAcvMAACFB/nhxyyh49/ZGYLQeIV9yuoeIiOh6DChOJnYfQNvRQNNtwah6QIUry/whXakEJKvcpREREbkMTvHIpPQZfxQsao1J+zJh6ne33OUQERG5FB5BkYnlVAFC9mvxSsZwqO73gtdd9wEAYn8ogvXYCZmrIyIikhcDiozE7gNoNQII+G8ovoxbAwAYcGIKAk+ehjBXyVwdERGRfBhQXMCV5/0xzL/6CrqKd8/BMCoagQOOy1wVERGRfBhQXIAl75Tt+YVdelRFmVE+9T40/w+ne4iIyDNxkayLaTEjHbE/KrB9ynu41DMcCo0GCo0GqMOdH4mIiBoLBhQX5Jt2CE8MHAPzsEsYti8Pw/bl4cqgHnKXRURE5DSc4nFBUnk5sPcgyvbp8b+WRACAuZsXfGKqz/SJWlcM61GuUSEiosaLAcWFtXj76lVmpY3RWNt+BQDggdKXEXLi6roVIQle6I2IiBoVBhQ34T1BjUHaFwAAV5KNGPx6oW3fd2fugbpvPuB+N6YmIiKqFQOKm7h2Ssd8SI8v1ffbtkuMfggYHwVck090Wy7CeuioM0skIiJqMAwobujaqR8AaHr/XVj13YfwVqhsbT1TJiP8iJPWqQiJR2+IiKhBMaA0AqqsYxj89Di7ttKXruCZ8SVOef+vvumHZnO3O+W9iIjIMzCgNAJSeTkU27L
s2rwT78OqkK5Oef8r4RIuvqivdZ/SDIQs3weposIptRARUePAgNJIxcxx4hGN98ORMWdBrbtOWirwcupwSPlXrjZyOoiIiG6BAYXuWPuPCtFv9Yu17rP4qtDqu8OI8TXZ2r5d/RBiZ6bX2p+IiAhgQKEGYMk7BdU19xO6lndAAHYVRSM/oKmtrSpYgmHEvWi6an/1RemIiIiuw0vdk0NJpaWIHHwYyj4Ftof6ogo/pLwLRVSE3OUREZGL4hEUcrpWi89gxK4khP37BOL8FSgx++HsU8GwnCqQuzQiInIRDCjkdJa8U/ArPo+jL8ai0uoNs1WFC/2aIzS7KbAjW+7yiIjIBTCgkCykigpEDj4MMwClvz/e2bcBL6cPR+sdcldGRESugGtQSHZSRQXeGzcCXqd8ELUjAF5xsXKXREREMmNAIfkJAa9NmdAeAwxVPih+JAqKHl3kroqIiGTU4AElJSUFPXr0QEBAAMLDwzF48GDk5OTY9enduzcUCoXdY/z48Q1dCrmZpl+l40rfUiRPW4ITSczORESerMH/L5CWloaJEydix44dWL9+PcxmM/r27Yvy6653MXbsWBQWFtoe8+bNa+hSyA1JJhM+mfwUlMf8ELUjAFE7ApCXUvtl9ImIqPFq8EWy69ats9tevHgxwsPDkZmZiV69etna/fz8oNPpGvrtyd0JAfWvu6EN1wN9q5vM4WZcGdzzmj6A/6bDkEpL5amRiIgczuFn8RgMBgBAcHCwXfuSJUvwzTffQKfTYeDAgZgxYwb8/PxqfQ2TyQST6eql0o1Go+MKJpcQ9HU6zn5d/TxwihpbPv3Uts8kzBg0aDSw+4BM1RERkaM5NKBIkoRXX30V999/Pzp37mxrf+aZZxAbG4uoqChkZ2dj2rRpyMnJwYoVK2p9nZSUFMyZM8eRpZILa776LB4o+KttWygVMM+5iJZBwbBISlT8NRjWQ0dlrJCIiBqaQgjH3Vp2woQJ+OWXX7B161Y0b978hv02bdqEPn36IDc3F61ataqxv7YjKNHR0eiNQfBSeDukdnJhShUurG4Fve4UJCiw67Nu8C+yAALw25YDK4+wERG5JIswIxWrYTAYEBgYeNO+DjuCMmnSJKxduxZbtmy5aTgBgISEBAC4YUDRaDTQaDQOqZPckGRF6MCjOAZA4eWFEft+watNT8IsrHj06TFQbs2Su0IiIrpDDR5QhBCYPHkyVq5cidTUVMTFxd3yZ7KysgAAkZGRDV0ONXLCYsHKaX3xXaASQgGUTzUi9u+RkIQC4lUtpH2H5S6RiIhuQ4MHlIkTJ2Lp0qVYvXo1AgICUFRUBADQarXw9fXF8ePHsXTpUjz66KMICQlBdnY2pkyZgl69eiE+Pr6hyyEP4LN2J3wAQKGA6al26KQtBABs6h6LUEVHSFmHZK2PiIjqr8HXoCgUilrbFy1ahFGjRqGgoADPPvssDhw4gPLyckRHR+Pxxx/H22+/fcv5qD8YjUZotVquQaEbU6pw394rSD3XBuo/nZK7GiIigsxrUG6Vd6Kjo5GWltbQb0tkT7Ji46wHcKGLFyLWVzcV7ItEq9d4N0IiInfAuxlTo+W7aidC0RO9nzgGAPg+rgmsve8GAHhfvsL1KURELowBhRo131U7sX119RlgASMC8OuS6gu+Dc0dgCsPyVkZERHdDAMKNX6/TzuGbCtEwqyJAICKCAWarvNHwAx/iF375ayOiIhqwVvGksew5J1CyBfpCPkiHeF7zRjaPAsXujaBqlM7uUsjIqLrMKCQR9L8vAu/xjdF93FZ8Ft4Ue5yiIjoOpziIc8lWXFgfjwut1fB5z8hAADj0aY804eIyAXwCAp5tCbLMxCxy4xxrbdiXOutCOt0HooeXaCq4zV5iIjIMRhQyOOpf92NVfGRWBUfCa/PQ7F61b9RmthB7rKIiDwaAwqREBAWC4TFgsA9hUj4n1dw+s9WnJinl7syIiKPxYBCdA3LyXxEfLQdTZpWoHm
3s1De1ZHTPUREMmBAIapFsyePwrogAsvX/huG/h3lLoeIyOPwLB6iWgiLBQFZRdB/kISyRDPUQ6vvtB37oQKK7ftkro6IqPFjQCG6AcvJfES9m48zKzphZue1AIC58c8gwtgeAKDIPwur0ShniUREjRYDCtEtNB+ei0WK6rN6wn4qwLK3lwMA+k2fAu03vGYKEZEjMKAQ3YIwmSB+f274d3PcFzMVAFDxgAUXO+vRcnq6fMURETVSDChE9aBdsgPa35/nL++CLp3PoqxdawCAwlQFy8l8+YojImpEGFCIblPss0dxvncXfLfhQygBzL9wP7LuVtjunkxERLePAYXoNgmTCX6HivDQ568DACSNQNWXZkAh4F2sRlzyDoYVIqLbxIBCdAcsBacR887p6o174zH1m2+hUkhYduFenGnTEpAkKExmWApOy1soEZGbYUAhaig7D+KDu6svj2+6pw2+WP8hfBTAwssJyOjuC2GxyFwgEZH7YEAhaiiS1XZdFJ+jRRjw1esQCkB4AeZPq8OJ90UvxL21E5CsclZKROTyGFCIHMBy+gxiZ54BACi6dcL0H76Fj8KMH0vuwcEvW0Bhvno0RbpwCVJ5uVylEhG5JAYUIgcT+45gXs/eAABz51h8sOFT+CmuHkEZ/N4b0H24XabqiIhcEwMKkaNJVlgvXgIAqI+q8fi3SRCKa3bHSTi2IMG27WVQouWsTAhzlbMrJSJyGQwoRE5kKSxCXHKRXdvRf9+DlQ8vsG1vKu+AjV/EAybnBRRRWsb7ChGRS2FAIZJZ+1dy8KbPn23b1laRmLNhEYJVlU6rod/PU9B2wk6nvR8R0a00eECZPXs25syZY9fWrl07HDlyBABQWVmJqVOnYtmyZTCZTOjXrx8+/fRTRERENHQpRG5BKi0FSktt2yoAz/0wCZK38y7ypgSQ+/69NdpD9ygQ9DXvNUREzueQIyidOnXChg0brr6J19W3mTJlCn766ScsX74cWq0WkyZNwpAhQ7Bt2zZHlELkdqznz6PlG+ed+p5npt+H5ePfrdH+59CJCPktvHrjSiWngYjIaRwSULy8vKDT6Wq0GwwGfPnll1i6dCkeeeQRAMCiRYvQoUMH7NixA/feW/MvOCJyvOiP9+G1r5+o0e4z0hf/3LEGAPD45olo+8JuZ5dGRB7KIQHl2LFjiIqKgo+PD/R6PVJSUhATE4PMzEyYzWYkJiba+rZv3x4xMTFIT09nQCGSiVReXuu1WCJ2RmJI5MsAAKVVgby5erT54AQsRcXOLpGIPIyyoV8wISEBixcvxrp16/DZZ58hLy8PDz74IEpLS1FUVAS1Wo2goCC7n4mIiEBRUVHtLwjAZDLBaDTaPYjI8bw3ZKLN5Ay0mZyBJnlK/DDsfVS1joTS31/u0oiokWvwIygDBgywPY+Pj0dCQgJiY2Px/fffw9fX97ZeMyUlpcbCWyJyrqgv9iF5zTAkrMjE0n090GbkHrlLIqJGrMGPoFwvKCgIbdu2RW5uLnQ6HaqqqlBSUmLXp7i4uNY1K39ITk6GwWCwPQoKChxcNRFdTyovh+VkAb7/5QGgRI2Tf9PDS8ez74jIMRweUMrKynD8+HFERkaie/fu8Pb2xsaNG237c3JykJ+fD71ef8PX0Gg0CAwMtHsQkQwkK+KS06E9psSa59+tnu7x85O7KiJqhBo8oLz22mtIS0vDyZMnsX37djz++ONQqVQYPnw4tFotxowZg6SkJGzevBmZmZkYPXo09Ho9F8gSuRHdV/vx6p+eR6cPDyBnYXu5yyGiRqjB16CcPn0aw4cPx8WLFxEWFoYHHngAO3bsQFhYGADg/fffh1KpxNChQ+0u1EZE7kMqLQXKK/DThp6An4T8mfeh5ecnYCm88WJ3IqL6UAghnHe5ygZiNBqh1WrRG4PgpfCWuxwij3Z+vB6r3pyPMc9NhteeXACAVFYGuN8/LUTkYBZhRipWw2Aw3HK5Bu/FQ0R3JGLJAYxPG42
WXx3B48F7UCVU+PSZoRC79stdGhG5MQYUIrojUmkpcKQMG7ckYENE9XoU7z/7QvWn+2x9AvMkBH67Q64SicgNMaAQ0Z0TAq1e+z2AKFXosacKU0MybLv/fPBZKFdXn+0jrFYIk0mOKonIjTCgEFHDkqzY80RrjNB0tDWVPxKCpKzqG4i+lTUIsU9x+oeIbo4BhYganDU3z247LLQbkjMfBwCYDRqcfaN6+qfJaQmBSzn1Q0Q1MaAQkcMp0/YiLq36uWHEvdjwP+8BAJ46NgTSj5rqHZKAMFfJVCERuRoGFCJyquA1h/DkgdEAgGJ9EKZmV69VSTnYH82GHJSzNCJyIQwoRORUVqMR2Fd9R/LQJnfhnwf6AwAqSnxR9Gr11I9/kYSAZZz6IfJkDChEJBvFtiw031b9vPyJBPz2wQcAgBdP9cOlH9UQFjMv+EbkoRhQiMglBP52GEMGvwAAuNAtAOOyV2PZK4/C+7fdMldGRHJgQCEil2A1GoHdBwAAIcou+OxoL5gS1GiqTUCT5Rm3+GkiamwYUIjI9ezcj/BBQMi2ptA8ZsHZH1WAZJW7KiJyIqXcBRAR3cjlv0Yg+9+d0S/7MkwDeshdDhE5EQMKEbks6cARhGaVYXlBN5zr7o2KIQlyl0RETsIpHiJybTv3I3AAELnFCN2gUhxfqbi6j2f4EDVaPIJCRG7hyuRQ7Px/3fDgvit4cN8VnF3RQe6SiMiBeASFiNyCtO8wQtVdsLG4HQDAZPLC5VF6AIBPiRW+q3bKWR4RNTAGFCJyG2LXfqj/VP28Wb97sOHfC6BSKPFmcTwyV6s45UPUiDCgEJFb8s04hr7Pj4VQAMYWavTZk263f9vfE+D/I6+fQuSuGFCIyC1ZSwzw2pgJAAi7qyP2XW4GpeLqEZRLHVVQVfWEzxpO/RC5IwYUInJ7UtYh4BHg2ku5NV3ngz8/vQ8bfgriRd6I3BDP4iGiRingLV98935ftMnwQodML5z/TztAqZK7LCKqIwYUImqUROZBhGVcxtkrgThdEQSFQqD0yR7wiouVuzQiqgMGFCJqtKQDR1De6zxKH7yA0H/4YPN7H+Pso83kLouI6oABhYg8gurQSfQdPxGGnpUoXNUBCi8uwSNyZQwoROQRrEYjfNbshKpYA39NFcoGded0D5ELY0AhIo/S8o10aGf44rcPP8bpQZzuIXJVDR5QWrRoAYVCUeMxceJEAEDv3r1r7Bs/fnxDl0FEdEPKo/no9/JkGLtU4ezKjpzuIXJBDf5buWvXLlitV685cODAAfzpT3/Ck08+aWsbO3Ys3nnnHdu2n59fQ5dBRHRDVqMRfisy4HWvHuGxZah47G4oLQKqKxK8Nu/hJfOJXECDB5SwsDC77blz56JVq1Z46KGHbG1+fn7Q6XQN/dZERPXS8o10KO/qiNVrP0ITpQ++L9NiUXwHSJWVcpdG5PEcelyzqqoK33zzDZKSkqBQKGztS5YswTfffAOdToeBAwdixowZPIpCRLJQnDiNvlNfBRSASatAxC+n4Ku0IvdiKJoPz4UwmeQukcgjOTSgrFq1CiUlJRg1apSt7ZlnnkFsbCyioqKQnZ2NadOmIScnBytWrLjh65hMJpiu+UfCaDQ6smwi8iBWoxEB3+0AADRt2QLNx1xGE5UJFqFEWWI8/A8Ww3IyX+YqiTyPQgjHTbb269cParUaa9asuWGfTZs2oU+fPsjNzUWrVq1q7TN79mzMmTOnRntvDIKXwrvB6iUi+oOqY1v836+L8NCnr6N5yna5yyFqFCzCjFSshsFgQGBg4E37Ouw041OnTmHDhg148cUXb9ovISEBAJCbm3vDPsnJyTAYDLZHQUFBg9ZKRHQ9UVCIgdOmoiLGAmxsbnucWdEJSk5JEzmcw6Z4Fi1ahPDwcDz22GM37ZeVlQUAiIyMvGEfjUYDjUbTkOUREd2UVFqKwKU7cDFej3tD82ztx3zCUdirE1QmK5RVEhTp+3m3ZCIHcEh
AkSQJixYtwsiRI+F1zfUFjh8/jqVLl+LRRx9FSEgIsrOzMWXKFPTq1Qvx8fGOKIWI6I7ETU/HjmS1bVvVuim+3vg+IlV+2GZSYm73h2G9fFnGCokaJ4cElA0bNiA/Px8vvPCCXbtarcaGDRvwwQcfoLy8HNHR0Rg6dCjefvttR5RBRNQwrlmqJ84WY8iM1yFUgMVXAe9vzsPHKwgXSv0RO/oUpNJSGQslajwcukjWUYxGI7RaLRfJEpGsvJpFoc1/ziFSbcCJK6E4OqMTvMotUFgFFBkHOPVDdJ36LJLl9Z2JiG6T5cxZHO6hwmEEwismDJ+mfoTW3hrsrZIwu+ejsJ4/L3eJRG6LAYWI6E78fpREOncBz/1tKiRvQFIrIH1+Geq1rRHyRbrMBRK5JwYUIqIGIFVUIOTL6jCiCgtDh5EXsbpDT4Te0xliz2FO9xDVk8Oug0JE5Kms58/jYIISQgXMXf4lvMJD5S6JyO3wCAoRkQMIiwUtf6jEyFNTUPVxKby9g1F5RY3WL+XztGSiOmBAISJyEOXWLDQ7EoIuz19EB9+zOF0VjM3d7odPTiEsZ87KXR6RS+MUDxGRA1kvXMS+nmos69IC6QNa4p0vPsfh5Gi5yyJyeTyCQkTkYMJcBaA6rEz630lQtpRw6vsuAAD/DU0Q+v94pg/R9XgEhYjISYTJhPAF26G5oMTsrmsxu+taXOpmhapTOyi8+Pci0bUYUIiInCx6/k581bUdvuraDupLKsz/aTFU0c3kLovIpTCyExE5mbBYICwWAEDMuko8VTYVV+ZUounWKE73EP2OR1CIiGSk/O9exC44gGFdduNSdwtUbVtxuocIDChERLKzGo3Ye38TaIq8Mf+3b6BswbN8iBjTiYhcgFRRgegNV/CEdSoqp1chJEOHkM853UOei0dQiIhchPK/e9Hif/dhxD0ZuJhghqp1HKd7yGMxoBARuRCpvByZDwbBJ1+NuRu+hbJlrNwlEcmC0ZyIyMVIpaVonnYFT3lNgSnJipDMcE73kMfhERQiIhekTNuLuLn78Lx+Gy49YIJXbDSgVMldFpHTMKAQEbkoqaICux4MgW+OD1LSlkPVrqXcJRE5DQMKEZELsxqNiNxeiSeWTcGRicG49IJe7pKInIJrUIiIXJwqdQ9a7fBBz4xSfBPUE+G/NYPlbBEgWeUujchheASFiMgNSJWV2PVIBHwO+uKd/66EsmMbuUsicigGFCIiN2G9eAmR6ZV4+seXkTM2iNM91KhxioeIyI2oUvegdboGCTvL8G34PQj/OQKWcxc43UONDo+gEBG5GWEyYVdiFDR7/TErfS2UnTndQ40PAwoRkRuynj+PiJ2VGLZmEo49H4TLozjdQ40Lp3iIiNyUKnUP2m5TI2F3OZa36IbQn8IAAKKyElJpqbzFEd2heh9B2bJlCwYOHIioqCgoFAqsWrXKbr8QAjNnzkRkZCR8fX2RmJiIY8eO2fW5dOkSRowYgcDAQAQFBWHMmDEoKyu7ow9CROSJhLkKu/s1h1d6IP628yf8bedPyP1XK7nLIrpj9Q4o5eXl6Nq1KxYsWFDr/nnz5uGjjz7CwoULkZGRAX9/f/Tr1w+VlZW2PiNGjMDBgwexfv16rF27Flu2bMG4ceNu/1MQEXkwS1ExwjMr8cSvk/DEr5NgPe+Dk//Qw6t5M7lLI7ptCiGEuO0fViiwcuVKDB48GED10ZOoqChMnToVr732GgDAYDAgIiICixcvxrBhw3D48GF07NgRu3btwj333AMAWLduHR599FGcPn0aUVFRt3xfo9EIrVaL3hgEL4X37ZZPRNQoXRinx/dvzce4F16BescRSOXlcpdEBACwCDNSsRoGgwGBgYE37dugi2Tz8vJQVFSExMREW5tWq0VCQgLS06vvxJmeno6goCBbOAGAxMREKJVKZGRk1Pq6JpMJRqPR7kFERLULX5KNyX2eQ+uUQzj+RWu5yyG6LQ0aUIqKigAAERERdu0
RERG2fUVFRQgPD7fb7+XlheDgYFuf66WkpECr1doe0dHRDVk2EVGjIpWXw3r8JFLX3wXLBR+cmnMfp3vI7bjFacbJyckwGAy2R0FBgdwlERG5NiHQ4q10hOxVYsPoeahsp4PSx0fuqojqrEEDik6nAwAUFxfbtRcXF9v26XQ6nDt3zm6/xWLBpUuXbH2up9FoEBgYaPcgIqJbC/0uG+MSRyLmH0dxfHFbucshqrMGDShxcXHQ6XTYuHGjrc1oNCIjIwN6ffVFhPR6PUpKSpCZmWnrs2nTJkiShISEhIYsh4jI40nl5bAeO4GtqZ1hvuSDgrfuQ8Fb98Hw7L1yl0Z0U/W+UFtZWRlyc3Nt23l5ecjKykJwcDBiYmLw6quv4u9//zvatGmDuLg4zJgxA1FRUbYzfTp06ID+/ftj7NixWLhwIcxmMyZNmoRhw4bV6QweIiKqJyHQcno6Lo/S4+e/zQcAPHF4BJQr/at3WywQJpOcFRLVUO+Asnv3bjz88MO27aSkJADAyJEjsXjxYrzxxhsoLy/HuHHjUFJSggceeADr1q2DzzVzn0uWLMGkSZPQp08fKJVKDB06FB999FEDfBwiIrqRkB8P4PmdowEAht6heDVrAwDgrexBaD70oJylEdVwR9dBkQuvg0JEdGekB+5C3kvVz61GNfxPXvP3qgBivzsNy8l8eYqjRqs+10HhvXiIiDyQcmsWWm2tfl467F789u57tn1WCAw88gr8CqtPeBBVVYD7/S1Lbo4BhYjIw2l/OognD42ya9O8X4hR844CAP718lCof90tQ2XkyRhQiIg8nFRaCmQfsWs7uVuPdy4/BgAQejXUHe+r8XPN1hTCmpvnlBrJ8zCgEBFRDS2npdueN90WjH/HrqvRp0/Ry9CeOuOwGoTFzKklD8aAQkREN2V8IRhDmoyu0W6ZcwlPznTcQtpPP3gcof9Kv3VHapQYUIiI6KasObm1thuy9fjQ8nCt+xpCZRxgnVxzakmXboTYfcBh70uugQGFiIhuS9ybjj26cXzpXch47vMa7d0WvoKYrKv/+xKSACSrQ2sh52NAISIil9Tu7RL8JXxMjfYrL5jx2L7ztu2s0mic/ZOierEvNRoMKERE5JIsJ04CJ2q2B9x3H75u1tO2XWFSw/t5LSIyOPXTmDCgEBGRW4l8bztw9bpyCIuLxSep/0L/b15Hi0wFz/xpJBr0bsZERETOZj1diL8OnwhzkISwbVqogrRyl0QNgAGFiIjcmjBXQbF9HwKOqnDwvA7FT3eEonsnucuiO8SAQkREjYLuw+2Ierkc3785H8efDAQUCrlLojvAgEJERI2G9Wwxxj87GVY/CSFbg6Bq2lTukug2MaAQEVGjIcxVUP53LwKOq5BnDMa5Ie2h6MbpHnfEgEJERI2O7sPtCH7xCpbOfBe5zwTKXQ7dBgYUIiJqlKznLmDCCy9DeAsE/DcUqpBguUuiemBAISKiRkmYq+C1MRNNTipx2eSHC39uB2XXDnKXRXXEgEJERI2a7oPt8HnWhEVz3sPR0bxGirtgQCEiokbPevEyJo1/GZAATZoOqtAQuUuiW2BAISKiRk+Yq6BetwsBJ5WQhAKX+reBMr693GXRTTCgEBGRx4j4aDukJy1Y+LcPkfNXTve4MgYUIiLyKFKJAa9MngxlhRKKTc2gigiXuySqBQMKERF5FGGugs/anWiSr0CguhKXH2kJVad2cpdF12FAISIijxTx8XaUDRL48J8f4/BkTve4GgYUIiLyWJLBiKlJk+BlVKFqfSyne1xIvQPKli1bMHDgQERFRUGhUGDVqlW2fWazGdOmTUOXLl3g7++PqKgoPP/88zh79qzda7Ro0QIKhcLuMXfu3Dv+MERERPUhLBb4rcyAf4ECLQIuwfBQS6g6tpW7LMJtBJTy8nJ07doVCxYsqLGvoqICe/bswYwZM7Bnzx6sWLECOTk5+Mtf/lKj7zvvvIPCwkLbY/Lkybf3CYiIiO5
QxMfbUTzAC/PmforDUzjd4wq86vsDAwYMwIABA2rdp9VqsX79eru2Tz75BD179kR+fj5iYmJs7QEBAdDpdPV9eyIiIoeQSksx7Y0J8OqhRPm6ltCOroClsEjusjyWw9egGAwGKBQKBAUF2bXPnTsXISEh6NatG+bPnw+LxeLoUoiIiG5IWCzw/yEDTfIVuDu0AIb7Y6Fq11rusjxWvY+g1EdlZSWmTZuG4cOHIzDw6u2uX375Zdx9990IDg7G9u3bkZycjMLCQrz33nu1vo7JZILJZLJtG41GR5ZNREQeLHzBduQu0WJO5hd4MW002r4gd0WeyWEBxWw246mnnoIQAp999pndvqSkJNvz+Ph4qNVq/PWvf0VKSgo0Gk2N10pJScGcOXMcVSoREZEdqawcb789Fl5dFbj8UxuEjSuH5czZW/8gNRiHTPH8EU5OnTqF9evX2x09qU1CQgIsFgtOnjxZ6/7k5GQYDAbbo6CgwAFVExERVRMWCwK/3YEmBQo8HHUMhnujoWrbSu6yPEqDB5Q/wsmxY8ewYcMGhITc+o6RWVlZUCqVCA+v/fxzjUaDwMBAuwcREZGjhS/Yjv0PBeKNuV/jyNs8u8eZ6j3FU1ZWhtzcXNt2Xl4esrKyEBwcjMjISDzxxBPYs2cP1q5dC6vViqKi6hXQwcHBUKvVSE9PR0ZGBh5++GEEBAQgPT0dU6ZMwbPPPoumTZs23CcjIiJqAFJFBf4553l4dVbg/H/aIfKlMlhOn5G7rEZPIYQQ9fmB1NRUPPzwwzXaR44cidmzZyMuLq7Wn9u8eTN69+6NPXv24KWXXsKRI0dgMpkQFxeH5557DklJSbWuP6mN0WiEVqtFbwyCl8K7PuUTERHdlvMT9BgyYTN+m9kLAdnnYDlxUu6S3I5FmJGK1TAYDLecDal3QHEFDChEROR0CgVUAQEYtXsf3tw1BK1G7JW7IrdTn4DCe/EQERHVhRCwlpXj3X8+A2WBD86s6ASv2Gi5q2q0GFCIiIjqSrKi6eJ0NDkJvNhuG0q7RcIrLlbuqholBhQiIqJ6CvvXDvymj8ELc1fh2D+D5C6nUXLolWSJiIgapd+nez6ZPxRoA5z+sRMAQJmhRdS87TIX1zgwoBAREd0OyYqQL9OBsXpMeXwdAGCWaSCUXTsAAJQlZbCc4oVFbxcDChER0R0I+XInvv6/6psK+k/ww/KfFgIA7t05Bs2GyFmZe2NAISIiuhOSFcJkBQBEphmgV1ffb64qWCDv264AAHWWP5r9D6d+6oMBhYiIqIGIvQcR9fvlUS6P0uPNoT8CAN70HgxVx7YAAIWxnFeirQMGFCIiIgdo+n878a/v4wEAPiMDsOTX/wUAJGaNQuhAOStzDwwoREREjiBZIVVUAAAi0g24f+FrAABLgMDl/7sbAOB30IdTPzfAgEJERORgUtYhRGdVPzeMuBezn1gCAHgzaDBUbVsBABTlV2A5c1amCl0PAwoREZETBX23Gx/+pycAQPlUMBZvfBcAMOTgc/DvL2dlroUBhYiIyImExQJRWgoACNt1GQ99+ToAwOIjYPky2K5vh/dKYT2Y4/QaXQEDChERkUyk7COIya5+XvZkAlL+5wu7/dPXj0fTimvu9VNpgqWwyIkVyocBhYiIyAUErMzE3PUP2bW1/uUQ/tnsZ9v2+BNPAr2dXJhMGFCIiIhcgLBYYC0x2LXtX3IfHonqdLWPF2BZGO3s0moVu1pA88suh70+AwoREZGLCv/E/hRk02M9MO+TT2Wqxt7IwlfQ8kDzOvcXBiMshot17s+AQkRE5CZ8ft2L2T1c41Qf07smfLR1WZ37P/r162j+1pY692dAISIichPCYoH1Qt2PQjiSbl0b9D/3ep37S/4Cx+ffA7y+uk793TKgCCEAABaYASFzMURERB7I99v/Iubbuvc/Pv8ebO3zJWJx9f/jN6MQdenlYk6fPo3oaNdYJERERET1U1BQgObNb75+xS0DiiRJyMn
JQceOHVFQUIDAwEC5S3JpRqMR0dHRHKs64njVHceq7jhW9cPxqjt3GishBEpLSxEVFQWlUnnTvm45xaNUKtGsWTMAQGBgoMv/B3EVHKv64XjVHceq7jhW9cPxqjt3GSutVlunfjePL0REREQyYEAhIiIil+O2AUWj0WDWrFnQaDRyl+LyOFb1w/GqO45V3XGs6ofjVXeNdazccpEsERERNW5uewSFiIiIGi8GFCIiInI5DChERETkchhQiIiIyOW4ZUBZsGABWrRoAR8fHyQkJGDnzp1yl+QSZs+eDYVCYfdo3769bX9lZSUmTpyIkJAQNGnSBEOHDkVxcbGMFTvPli1bMHDgQERFRUGhUGDVqlV2+4UQmDlzJiIjI+Hr64vExEQcO3bMrs+lS5cwYsQIBAYGIigoCGPGjEFZWZkTP4Vz3GqsRo0aVeN71r+//d1VPWWsUlJS0KNHDwQEBCA8PByDBw9GTk6OXZ+6/N7l5+fjscceg5+fH8LDw/H666/DYrE486M4RV3Gq3fv3jW+X+PHj7fr4wnj9dlnnyE+Pt528TW9Xo9ffvnFtt8TvlduF1C+++47JCUlYdasWdizZw+6du2Kfv364dy5c3KX5hI6deqEwsJC22Pr1q22fVOmTMGaNWuwfPlypKWl4ezZsxgyZIiM1TpPeXk5unbtigULFtS6f968efjoo4+wcOFCZGRkwN/fH/369UNlZaWtz4gRI3Dw4EGsX78ea9euxZYtWzBu3DhnfQSnudVYAUD//v3tvmfffmt/xzBPGau0tDRMnDgRO3bswPr162E2m9G3b1+Ul5fb+tzq985qteKxxx5DVVUVtm/fjq+++gqLFy/GzJkz5fhIDlWX8QKAsWPH2n2/5s2bZ9vnKePVvHlzzJ07F5mZmdi9ezceeeQRDBo0CAcPHgTgId8r4WZ69uwpJk6caNu2Wq0iKipKpKSkyFiVa5g1a5bo2rVrrftKSkqEt7e3WL58ua3t8OHDAoBIT093UoWuAYBYuXKlbVuSJKHT6cT8+fNtbSUlJUKj0Yhvv/1WCCHEoUOHBACxa9cuW59ffvlFKBQKcebMGafV7mzXj5UQQowcOVIMGjTohj/jqWMlhBDnzp0TAERaWpoQom6/dz///LNQKpWiqKjI1uezzz4TgYGBwmQyOfcDONn14yWEEA899JB45ZVXbvgznjxeTZs2FV988YXHfK/c6ghKVVUVMjMzkZiYaGtTKpVITExEenq6jJW5jmPHjiEqKgotW7bEiBEjkJ+fDwDIzMyE2Wy2G7v27dsjJibG48cuLy8PRUVFdmOj1WqRkJBgG5v09HQEBQXhnnvusfVJTEyEUqlERkaG02uWW2pqKsLDw9GuXTtMmDABFy9etO3z5LEyGAwAgODgYAB1+71LT09Hly5dEBERYevTr18/GI1G21/LjdX14/WHJUuWIDQ0FJ07d0ZycjIqKips+zxxvKxWK5YtW4by8nLo9XqP+V651c0CL1y4AKvVajfgABAREYEjR47IVJXrSEhIwOLFi9GuXTsUFhZizpw5ePDBB3HgwAEUFRVBrVYjKCjI7mciIiJQVFQkT8Eu4o/PX9v36o99RUVFCA8Pt9vv5eWF4OBgjxu//v37Y8iQIYiLi8Px48fx5ptvYsCAAUhPT4dKpfLYsZIkCa+++iruv/9+dO7cGQDq9HtXVFRU63fvj32NVW3jBQDPPPMMYmNjERUVhezsbEybNg05OTlYsWIFAM8ar/3790Ov16OyshJNmjTBypUr0bFjR2RlZXnE98qtAgrd3IABA2zP4+PjkZCQgNjYWHz//ffw9fWVsTJqTIYNG2Z73qVLF8THx6NVq1ZITU1Fnz59ZKxMXhMnTsSBAwfs1n3Rjd1ovK5dq9SlSxdERkaiT58+OH78OFq1auXsMmXVrl07ZGVlwWAw4IcffsDIkSORlpYmd1lO41ZTPKGhoVCpVDVWKhcXF0On08lUlesKCgpC27ZtkZu
bC51Oh6qqKpSUlNj14djB9vlv9r3S6XQ1FmJbLBZcunTJ48evZcuWCA0NRW5uLgDPHKtJkyZh7dq12Lx5M5o3b25rr8vvnU6nq/W798e+xuhG41WbhIQEALD7fnnKeKnVarRu3Rrdu3dHSkoKunbtig8//NBjvlduFVDUajW6d++OjRs32tokScLGjRuh1+tlrMw1lZWV4fjx44iMjET37t3h7e1tN3Y5OTnIz8/3+LGLi4uDTqezGxuj0YiMjAzb2Oj1epSUlCAzM9PWZ9OmTZAkyfYPqKc6ffo0Ll68iMjISACeNVZCCEyaNAkrV67Epk2bEBcXZ7e/Lr93er0e+/fvtwt169evR2BgIDp27OicD+Iktxqv2mRlZQGA3ffLU8brepIkwWQyec73Su5VuvW1bNkyodFoxOLFi8WhQ4fEuHHjRFBQkN1KZU81depUkZqaKvLy8sS2bdtEYmKiCA0NFefOnRNCCDF+/HgRExMjNm3aJHbv3i30er3Q6/UyV+0cpaWlYu/evWLv3r0CgHjvvffE3r17xalTp4QQQsydO1cEBQWJ1atXi+zsbDFo0CARFxcnrly5YnuN/v37i27duomMjAyxdetW0aZNGzF8+HC5PpLD3GysSktLxWuvvSbS09NFXl6e2LBhg7j77rtFmzZtRGVlpe01PGWsJkyYILRarUhNTRWFhYW2R0VFha3PrX7vLBaL6Ny5s+jbt6/IysoS69atE2FhYSI5OVmOj+RQtxqv3Nxc8c4774jdu3eLvLw8sXr1atGyZUvRq1cv22t4ynhNnz5dpKWliby8PJGdnS2mT58uFAqF+O2334QQnvG9cruAIoQQH3/8sYiJiRFqtVr07NlT7NixQ+6SXMLTTz8tIiMjhVqtFs2aNRNPP/20yM3Nte2/cuWKeOmll0TTpk2Fn5+fePzxx0VhYaGMFTvP5s2bBYAaj5EjRwohqk81njFjhoiIiBAajUb06dNH5OTk2L3GxYsXxfDhw0WTJk1EYGCgGD16tCgtLZXh0zjWzcaqoqJC9O3bV4SFhQlvb28RGxsrxo4dW+MPBE8Zq9rGCYBYtGiRrU9dfu9OnjwpBgwYIHx9fUVoaKiYOnWqMJvNTv40jner8crPzxe9evUSwcHBQqPRiNatW4vXX39dGAwGu9fxhPF64YUXRGxsrFCr1SIsLEz06dPHFk6E8IzvlUIIIZx3vIaIiIjo1txqDQoRERF5BgYUIiIicjkMKERERORyGFCIiIjI5TCgEBERkcthQCEiIiKXw4BCRERELocBhYiIiFwOAwoRERG5HAYUIiIicjkMKERERORyGFCIiIjI5fx/8BB3b1NDTSwAAAAASUVORK5CYII=", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "pl.imshow(pred_aln_trg.detach().cpu().numpy())" ] }, { "cell_type": "code", "execution_count": 45, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "torch.Size([1, 143])\n" ] }, { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAigAAAGdCAYAAAA44ojeAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABBzUlEQVR4nO3deXhU5d3/8Xf2fRJCSCYDAcIiO4lljaC1QlkEooJalSJ1feovWAUXitVa6oJaW1tbK61Pq20Vtz6igYoWQaDUGCCYsIdVAkwmCQnJJIFMkpnz+wMYiaKSkDDb53Vdc13MuU8m3zuQmQ/n3EuQYRgGIiIiIl4k2NMFiIiIiHyZAoqIiIh4HQUUERER8ToKKCIiIuJ1FFBERETE6yigiIiIiNdRQBERERGvo4AiIiIiXifU0wW0hcvlwmq1EhcXR1BQkKfLERERkXNgGAa1tbVYLBaCg7/5GolPBhSr1UpaWpqnyxAREZE2OHToEN26dfvGc3wyoMTFxQEnO2gymTxcjYiIiJwLu91OWlqa+3P8m/hkQDl9W8dkMimgiIiI+JhzGZ6hQbIiIiLidRRQRERExOsooIiIiIjXUUARERERr6OAIiIiIl5HAUVERES8jgKKiIiIeB0FFBEREfE6CigiIiLidRRQRERExOsooIiIiIjXUUARERERr6OAIiIiIgA4XQbr9xxl/j+3sKa43KO1+ORuxiIiItJ+qo838tuP9rB8SylH6xwA1DU2c3m/ZI/VpIAiIiIS4B745xZW7igDICE6jMmDU5n+na4erUkBRUREJIBV1jlYvevk7Zw/3HQxEwaaCQ/1/AgQBRQREZEAtnxLKU6XwdBu8UwdavF0OW6ej0giIiLiMUs/OwLA1ZmevaXzZQooIiIiAerA0XoKD1UTEhzEtAzvuXoCCigiIiIB691TV0/G9kmiS1yEh6tpSQFFREQkABmGwbuFJwPKNRd71+0dUEAREREJSJ8dquZg5XGiw0OYMCjF0+V8hQKKiIhIADp9e2fiIDPR4d43qVcBRUREJACt210BwLSMVA9XcnYKKCIiIgHG6TI4Un0CgP5mk4erOTsFFBERkQBTXttAk9MgNDiIFFOkp8s5KwUUERGRAHP42MmrJ5aEKEKCgzxczdkpoIiIiASYw8eOA9CtU5SHK/l6CigiIiIB5nDVySsoCigiIiLiNU7f4unWKdrDlXw9BRQREZEAc7hat3hERETEy+gKioiIiHgVp8vAWq0xKCIiIuJFfGENFFBAERERCSi+sAYKKKCIiIgEFF9YAwUUUERERAKKL6yBAgooIiIiAcUXZvCAAoqIiEhA8YU1UEABRUREJKDoCoqIiIh4FV9ZAwVaGVBefPFFhg4dislkwmQykZWVxYoVK9ztDQ0N5OTk0LlzZ2JjY5kxYwZlZWUtXqOkpIQpU6YQHR1NcnIyDzzwAM3Nze3TGxEREflavrIGCrQyoHTr1o2nnnqKgoICNm3axBVXXMFVV13F9u3bAZg7dy7Lli3j7bffZu3atVitVqZPn+7+eqfTyZQpU2hsbOSTTz7hb3/7G6+88go///nP27dXIiIi8hWnb++kJkR69RooAEGGYRjn8wKJiYn86le/4tprr6VLly4sWbKEa6+9FoBdu3YxYMAA8vLyGD16NCtWrGDq1KlYrVZSUlIAWLx4MfPnz6
eiooLw8PBz+p52u534+HhqamowmUznU76IiEjAWPrZYea+WURWr868fufoC/79W/P53eYxKE6nkzfeeIP6+nqysrIoKCigqamJ8ePHu8/p378/3bt3Jy8vD4C8vDyGDBniDicAEydOxG63u6/CiIiISMfwlTVQAEJb+wVbt24lKyuLhoYGYmNjWbp0KQMHDqSwsJDw8HASEhJanJ+SkoLNZgPAZrO1CCen20+3fR2Hw4HD4XA/t9vtrS1bREQk4PnKDB5owxWUfv36UVhYSH5+PnfddRezZ89mx44dHVGb26JFi4iPj3c/0tLSOvT7iYiI+CNfWQMF2hBQwsPD6dOnD8OGDWPRokVkZGTwu9/9DrPZTGNjI9XV1S3OLysrw2w2A2A2m78yq+f089PnnM2CBQuoqalxPw4dOtTaskVERALeF1dQ/DCgfJnL5cLhcDBs2DDCwsJYtWqVu624uJiSkhKysrIAyMrKYuvWrZSXl7vPWblyJSaTiYEDB37t94iIiHBPbT79EBERkXO3u6yWI6cCSlcfCCitGoOyYMECJk+eTPfu3amtrWXJkiWsWbOGDz/8kPj4eG677TbmzZtHYmIiJpOJu+++m6ysLEaPPjlSeMKECQwcOJBZs2bxzDPPYLPZePjhh8nJySEiIqJDOigiIhLoDhyt56aX8ml2GYzo2YmuCX4WUMrLy7n55pspLS0lPj6eoUOH8uGHH/L9738fgOeee47g4GBmzJiBw+Fg4sSJ/PGPf3R/fUhICMuXL+euu+4iKyuLmJgYZs+ezS9/+cv27ZWIiIgAcPjYcWa+9ClH6xz0N8fx0s3DCQry7jVQoB3WQfEErYMiIiLy7crsDVy3OI+SquP07hLDm/+TRVKs5+5YXJB1UERERMR7VdY5mPm/+ZRUHad7YjSv3T7ao+GktRRQRERE/EzN8SZ++JcN7C2vIzU+ktduH4U53rv33vkyBRQRERE/cqy+kdkvb2BnqZ2k2Aheu30UaYnevzDbl7V6JVkRERHxTmuKy3ngn1uoqHXQKTqM124fRa8usZ4uq00UUERERHzciUYnT76/k398ehCAPsmx/OGmi+lnjvNwZW2ngCIiIuLDCg9VM+/NQvYfrQfgljE9mT+pP5FhIR6u7PwooIiIiPigZqeLFz7ex/Or9+B0GZhNkTx7XQZj+yZ5urR2oYAiIiLiY/ZX1DH3rSKKDlUDMC3DwuNXDSY+OsyzhbUjBRQREREfYRgGr+WX8MS/dnKiyYkpMpTHrh7MVZldPV1au1NAERER8QHl9gYe/L8trCmuAOCS3p159roMLD6wr05bKKCIiIh4uQ+2lbLgna0cO95EeGgw8yf155ZLehIc7P176rSVAoqIiIiXqm1oYuGyHfyz4DAAA1NN/PaGTC5K8d3pw+dKAUVERMQL5e+vZN5bRRypPkFQENz13d7cO/4iwkMDYxF4BRQREREv4mh28puVu/nzuv0YBnTrFMVzP8hkRM9ET5d2QSmgiIiIeIldNjv3vlHILlstANcP78YjUwcSF+k/04fPlQKKiIiIhzU0OflH3kF+9WExjU4XiTHhPHnNECYNNnu6NI9RQBEREfGAZqeL9XuPkltk5d/by6hzNANwRf9knpoxhOS4SA9X6FkKKCIiIheIy2VQUHKM3EIr728tpbK+0d1miY9kzhV9uXFkGkFB/jt9+FwpoIiIiHQgwzDYbrWzrMjKsiIr1poGd1tiTDhThqSSnWlhWPdOfr2uSWspoIiIiHSA/RV15J4KJfsq6t3HYyNCmTAohewMC2P6JBEWEhjThltLAUVERKSdlNacYHlRKblFVrYeqXEfDw8NZlz/ZLIzLHyvfzKRYSEerNI3KKCIiIich6r6Rt7fejKUbPy8CsM4eTwkOIixfZLIzrAwYVBKQE4VPh8KKCIiIq1U52jm39tt5BZZWb/nKM0uw902smci0zItXDnYTOfYCA9W6dsUUERERM5BQ5OTNcUVLCuy8t
HOMhzNLnfbIIuJ7AwLUzMsdPXT3YUvNAUUERGRr9HsdPHJvkpyi6x8uM1G7am1SgB6JcUwLcNCdqaF3l1iPVilf1JAEREROYPLZbC55Bi5RSfXKjla98VaJanxkSdDSYaFQRaT1ivpQAooIiIS8AzDYGdprXta8JHqE+62TtFhTBmaSnZGV4b30FolF4oCioiIBKzPj9aTW2Qlt8jK3vI69/GY8BAmDjIzLdPCWK1V4hEKKCIiElBsNQ0s33IylGw53HKtkiv6JZOdaeEKrVXicQooIiLi947VN7Jim43coiPkH2i5VsmYM9YqMWmtEq+hgCIiIn6p3tHMyh1l5BZZWbe7osVaJSN6diI7w8LkIakkaa0Sr6SAIiIifsPRfHKtktwiK6t2ltHQ9MVaJQNTTWRnWpimtUp8ggKKiIj4NKfLIG9fJblFR1ixzUZtwxdrlaSfXqskw0KfZK1V4ksUUERExOcYhsHmkmqWFVlZvqWUo3UOd5vZFMm0jJPTggd31VolvkoBRUREfIJhGOyyfbFWyeFjLdcqmTwklewMCyN7JmqtEj+ggCIiIl7tYGU9uYUnpwXv+dJaJRMGmcnOsDC2r9Yq8TcKKCIi4nXK7A0s31JKbpGVokPV7uPhIcF8r38XsjO6ckX/ZKLCtVaJv1JAERERr1B9/NRaJYVWPj1Q6V6rJDgI91olEwebtVZJgFBAERERjzneeGqtkkIr6/ZU0OT8Yq2SYT1OrlVy5ZBUusRprZJAo4AiIiIesbe8lpteyqe89osZOANSTWRnWJiWkUq3TtEerE48TQFFREQuuIOV9e5w0jUhihnf6Up2poU+yXGeLk28hAKKiIhcUEeqT7jDSb+UON64czSdYsI9XZZ4Gc3JEhGRC+ZYfSM//N98jlSfoFdSDK/ePkrhRM5KAUVERC4IwzB48P+2cOBoPd06RfHaHaM0+FW+lgKKiIhcEK/ll7ByRxnhIcH8adYwUuO1YZ98vVYFlEWLFjFixAji4uJITk7m6quvpri4uMU5l19+OUFBQS0eP/7xj1ucU1JSwpQpU4iOjiY5OZkHHniA5uZmRETEP+0pq+Wx5TsAeHBSPwZZ4j1ckXi7Vg2SXbt2LTk5OYwYMYLm5mYeeughJkyYwI4dO4iJiXGfd8cdd/DLX/7S/Tw6+oupYk6nkylTpmA2m/nkk08oLS3l5ptvJiwsjCeffLIduiQiIt6kocnJ3a9/hqPZxWUXdeHWMemeLkl8QKsCygcffNDi+SuvvEJycjIFBQVcdtll7uPR0dGYzeazvsa///1vduzYwUcffURKSgqZmZk89thjzJ8/n1/84heEh2uwlIiIP/lnwWF22WrpHBPOs9cN1UZ+ck7OawxKTU0NAImJiS2Ov/baayQlJTF48GAWLFjA8ePH3W15eXkMGTKElJQU97GJEydit9vZvn37Wb+Pw+HAbre3eIiIiG84vZfOzNE9SI6L9Gwx4jPavA6Ky+Xi3nvvZcyYMQwePNh9/KabbqJHjx5YLBa2bNnC/PnzKS4u5p133gHAZrO1CCeA+7nNZjvr91q0aBELFy5sa6kiIuJBu2y1AAxMNXm4EvElbQ4oOTk5bNu2jfXr17c4fuedd7r/PGTIEFJTUxk3bhz79u2jd+/ebfpeCxYsYN68ee7ndrudtLS0thUuIiIXTLPTxe6ykwFlQKpWiZVz16ZbPHPmzGH58uV8/PHHdOvW7RvPHTVqFAB79+4FwGw2U1ZW1uKc08+/btxKREQEJpOpxUNERLzf55XHcTS7iA4PIU1760grtCqgGIbBnDlzWLp0KatXryY9/dtHYhcWFgKQmpoKQFZWFlu3bqW8vNx9zsqVKzGZTAwcOLA15YiIiJfbZTs5ZvCilDgNjpVWadUtnpycHJYsWcJ7771HXFyce8xIfHw8UVFR7Nu3jyVLlnDllVfSuXNntmzZwty5c7nssssYOnQoABMmTGDgwIHMmjWLZ555BpvNxsMPP0
xOTg4REVpRUETEn+wq1e0daZtWXUF58cUXqamp4fLLLyc1NdX9ePPNNwEIDw/no48+YsKECfTv35/77ruPGTNmsGzZMvdrhISEsHz5ckJCQsjKyuKHP/whN998c4t1U0RExD+cvoLS36xb89I6rbqCYhjGN7anpaWxdu3ab32dHj168P7777fmW4uIiA/aeeoKSn+zrqBI62gvHhER6RD2hiaOVJ8AdAVFWk8BRUREOkTxqfVPLPGRxEeHebga8TUKKCIi0iF2lZ4af6IF2qQNFFBERKRD7LRp/Im0nQKKiIh0CF1BkfOhgCIiIu3O5TLcY1AG6AqKtIECioiItLvDx05Q3+gkPCSY9KQYT5cjPkgBRURE2t3OUwu09U2JJTREHzXSevpXIyIi7W6Xe4E2jT+RtlFAERGRdnWsvpHXN5QAMKSrAoq0jQKKiIi0G8Mw+Ok7W7DZG+jVJYbrR6R5uiTxUQooIiLSbl7fcIgPt5cRFhLE8zdcTHR4q7Z8E3FTQBERkXaxt7yWXy7fDsD8Sf0Z3DXewxWJL1NAERGR87Z2dwU3vZRPQ5OLS/smceuYdE+XJD5O195ERKTNTjQ6eWrFTv6WdxCA3l1i+PX1GQQHB3m4MvF1CigiItImVfWN3PTSp+w6tWLs7Kwe/HTyAKLCQzxcmfgDBRQREWm1mhNN3PzXfHbZakmKjeDX12fw3Yu6eLos8SMKKCIi0ir1jmZueXkD247Y6RwTzht3jqJPsvbbkfalQbIiInLOKusc3PrKRjaXVBMfFcY/blM4kY6hKygiInJOVu8q48F/buFoXSOxEaH87daRDLRopVjpGAooIiLyjeodzTzx/k6W5J9cvv6ilFh++4OLFU6kQymgiIjI19pccox5bxbyeeVxAG4fm879E/sRGaaZOtKxFFBEROQr6h3N/GntPv7w8V5cBqTGR/Lr6zK4pE+Sp0uTAKGAIiIiADianawtriC3yMpHO8toaHIBcHWmhYVXDSY+KszDFUogUUAREQlgTpdB3r5KcouO8ME2G/aGZndbz87R3DehH9MyLB6sUAKVAoqISIAxDIPPDlWTW2hl+ZZSjtY53G0ppgimDrWQnWFhaLd4goK0ZL14hgKKiEiA2GWzk1toZdkWK4eqTriPJ0SHMXlwKtkZFkamJxKifXTECyigiIj4uS2Hq3lo6Va2HbG7j0WHhzBhYArZmRbG9ulCeKjW7RTvooAiIuKnmp0uXlyzj9+t2kOzyyA8JJjL+3UhO9PCuP4p2tRPvJoCioiIn2locrJ6Vzl/XrefwkPVAEwZkspjVw8mMSbcs8WJnCMFFBERP9DkdLF+71GWFVr5944y6hwnZ+PERYby2FWDuSrTogGv4lMUUEREfJTLZbDx8ypyi6y8v7WUY8eb3G1dE6KYlmHh5qweWBKiPFilSNsooIiI+BDDMNh2xE5u0RGWbymltKbB3ZYUG86UIalkZ1r4TvdOumIiPk0BRUTEB+yrqDs5RbjIyv6j9e7jcZGhTBpkJjvTQlavzoSGaDaO+AcFFBERL2WtPsGyIiu5RVa2W7+YIhwRGsz4ASenCH/3oi7auE/8kgKKiIgXqaxz8P7WUnKLrGz8/Jj7eGhwEJf2TSI708L3B5qJjdDbt/g3/QsXEfGw2oYmPtxeRm6Rlf/uPYrTZQAQFAQjeyaSnWlh8uBUTRGWgKKAIiLiAQ1NTj7eVU5ukZVVu8ppbHa524Z0jSc7w8LUjFRS4zUDRwKTAoqIyAXS5HTx371HyS2y8u/tX6xVAtC7SwzZGV2ZlpFKry6xHqxSxDsooIiIdCCXy2DTwWPkFh3h/a02quob3W1dE6KYmnFyk76BqSZNCxY5gwKKiEg7MwyD7VY7y4pOTgu2nrFWSeeYcK4ckspVp9YqCdbOwSJnpYAiItJO9lfUkXtqWvD+ijPWKokIZcKptUrG9NZaJSLnQgFFROQ81DuaWZJfwntFR9h2pOVaJeMGJJOdYeHyfs
laq0SklRRQRETaqODgMea9VcjByuMAhAQHMbZPEtkZFiYMSiEuMszDFYr4rlZdZ1y0aBEjRowgLi6O5ORkrr76aoqLi1uc09DQQE5ODp07dyY2NpYZM2ZQVlbW4pySkhKmTJlCdHQ0ycnJPPDAAzQ3NyMi4gsczU5+/e9irlv8CQcrj2OJj+Sxqwez4aFx/O3WkcwY1k3hROQ8teoKytq1a8nJyWHEiBE0Nzfz0EMPMWHCBHbs2EFMTAwAc+fO5V//+hdvv/028fHxzJkzh+nTp/Pf//4XAKfTyZQpUzCbzXzyySeUlpZy8803ExYWxpNPPtn+PRQRaQdOl0H+/kr3zsH2hpP/qbrm4q78InsQ8VEKJCLtKcgwDKOtX1xRUUFycjJr167lsssuo6amhi5durBkyRKuvfZaAHbt2sWAAQPIy8tj9OjRrFixgqlTp2K1WklJSQFg8eLFzJ8/n4qKCsLDv32lRLvdTnx8PDU1NZhMpraWLyJyTnaX1XLvG4XsKP1ijInZFMkjUwcyZWiqBysT8S2t+fw+rzEoNTU1ACQmJgJQUFBAU1MT48ePd5/Tv39/unfv7g4oeXl5DBkyxB1OACZOnMhdd93F9u3bufjii7/yfRwOBw6Ho0UHRUQ6mstl8PInn/P0B7tobHYRFxnK1KGpZGd0ZWR6IiGaIizSYdocUFwuF/feey9jxoxh8ODBANhsNsLDw0lISGhxbkpKCjabzX3OmeHkdPvptrNZtGgRCxcubGupIiKtUlnn4P1tNv656RBFh0/+R+yK/sk8NWMIyXGRHq5OJDC0OaDk5OSwbds21q9f3571nNWCBQuYN2+e+7ndbictLa3Dv6+IBI7ahib+fWrDvvVnbNgXFRbCw1MHcNPI7lrpVeQCalNAmTNnDsuXL2fdunV069bNfdxsNtPY2Eh1dXWLqyhlZWWYzWb3ORs2bGjxeqdn+Zw+58siIiKIiIhoS6kiIt+opPI4T3+4i492lOE4y4Z92ZkWUky6aiJyobUqoBiGwd13383SpUtZs2YN6enpLdqHDRtGWFgYq1atYsaMGQAUFxdTUlJCVlYWAFlZWTzxxBOUl5eTnJwMwMqVKzGZTAwcOLA9+iQi8q0Mw+DtTYdZuGw79Y1O4IsN+7IzLaQnxXi4QpHA1qqAkpOTw5IlS3jvvfeIi4tzjxmJj48nKiqK+Ph4brvtNubNm0diYiImk4m7776brKwsRo8eDcCECRMYOHAgs2bN4plnnsFms/Hwww+Tk5OjqyQickEcrXOw4J2trNxx8urtyPREfj51IIMs2rBPxFu0aprx1/3ivvzyy/zoRz8CTi7Udt999/H666/jcDiYOHEif/zjH1vcvjl48CB33XUXa9asISYmhtmzZ/PUU08RGnpueUnTjEWkrT7aUcZP39nC0bpGwkKCuH9CP26/tJdm5IhcAK35/D6vdVA8RQFFRFqr3tHM4//awesbDgHQLyWO536QyUCL3kNELpQLtg6KiIgvOHPPnKAguH1sOvdN6KcN/ES8mAKKiPitJqeL51ft4YWP9+IywBIfybPXZ3BJ7yRPlyYi30IBRUT8Ur2jmVte3siGz6sA7Zkj4msUUETE7zQ0Obnj75vY8HkVcRGhLJoxhKlDLZ4uS0RaQQFFRPxKY7OLu14t4JN9lcSEh/D320ZycfdOni5LRFpJAUVE/EKz08Un+yp56T/7+c+eo0SGBfPXH41QOBHxUQooIuLTak408fyqPbxXeISjdY0AhIcE8+dZwxnVq7OHqxORtlJAERGf9cneo9z3dhGlNQ0AdIoO48ohqdw0qjuDLPEerk5EzocCioj4nIYmJ89+WMz/rj8AQI/O0TwyZSDf7deFsJBgD1cnIu1BAUVEfMp2aw1z3yxkd1kdADeO7M7DUwYQE6G3MxF/ot9oEfEJzU4XL/3nAL9ZWUyT0yApNpynZwxl3IAUT5cmIh1AAUVEvJbLZVBQcozcQivvby2lsv7kIN
gJA1NYNH0InWO1A7qIv1JAERGvYhgG2612lhVZWVZkxXpqACxAUmw4D07sz3XDu33t7uoi4h8UUETEK+yvqCP3VCjZV1HvPh4XEcqEQWayMy2M6d2ZUA2CFQkICigi4jGlNSdYXlRKbpGVrUdq3McjQoMZNyCZ7AwLl/dL1q7DIgFIAUVELrjdZbUsXLadT/ZVYhgnj4UEB3Fp3ySyMyx8f2AKcZHa1E8kkCmgiMgF43IZ/PW/B3jmw2Iam10AjOyZyLRMC1cONmvQq4i4KaCISIdzugzyD1Tyh9V7+WRfJQBX9E9mYfYg0hKjPVydiHgjBRQR6TBl9gb+tHY/y7dYKa91ABAVFsLDUwdw08jumokjIl9LAUVEOoStpoHr/vQJh6pOABAfFcbkwWb+57u9SU+K8XB1IuLtFFBEpN1V1Dq46X8/5VDVCfc+OZdd1IXwUE0RFpFzo4AiIu2q+ngjs/6Sz/6Keizxkbx2+yi6ddI4ExFpHQUUEWk3Ww/XcM+bn7G/op4ucRG8dsdohRMRaRMFFBE5b81OFy+u2cfvVu2h2WWQHBfBq7eP0lgTEWkzBRQROS8HK+uZ+2Yhm0uqAZg82MyT1wyhU0y4ZwsTEZ+mgCIibWIYBm9sPMRjy3dwvNFJXEQoC68axDUXd9X0YRE5bwooItIqjmYna4orePXTg/xnz1EARqYn8pvrMzTeRETajQKKiHwrp8sgb18luUVHWLHNRm1DMwDhIcHcP/Eibhvbi5BgXTURkfajgCIiZ2UYBptLqllWZGX5llKO1jncbWZTJFOHpnLDyO70SY71YJUi4q8UUESkhV02O+8VWllWZOXwsRPu4wnRYVw5JJXsDAsjeyYSrCsmItKBFFBEBFtNA/8sOERukZXdZXXu49HhIUwcZCY7w8LYvkmEhWglWBG5MBRQRAKYYRi8s/kIj+Zup87xxbiSy/t1ITvTwrj+KUSFh3i4ShEJRAooIgGqqr6Rny3dyoptNgAyusUzc3QPJg4yEx8V5uHqRCTQKaCIBKCPi8t58J9bqKh1EBYSxNzvX8T/XNZbM3FExGsooIgEkOONzTz5/k5e/bQEgL7JsTz3g0wGd433cGUiIi0poIgEiM9KjjHvrSIOHK0H4NYx6Tw4qR+RYRpjIiLeRwFFxM81OV38YfVe/vDxXpwuA7Mpkl9fn8GYPkmeLk1E5GspoIj4sf0Vdcx9q4iiQ9UAZGdYeOyqwcRHaxCsiHg3BRQRP2QYBq/ml/DEv3bQ0OTCFBnKY1cP5qrMrp4uTUTknCigiPiZo3UO7n+7iDXFFQCM6dOZZ6/LIDU+ysOViYicOwUUET9SVd/IzJfyKS6rJTw0mJ9O6s+PLumpZelFxOcooIj4iZoTTdz815PhJMUUwd9vHUU/c5ynyxIRaRMFFBE/UO9o5paXN7DtiJ3OMeG8dvto7TIsIj5NAUXEx+0pq+WeNwrZUWrHFBnKP24bpXAiIj6v1VuTrlu3jmnTpmGxWAgKCuLdd99t0f6jH/2IoKCgFo9Jkya1OKeqqoqZM2diMplISEjgtttuo66uDhE5dy6XwV/XH2DK79ezo9ROQnQYf7t1JAMtJk+XJiJy3lp9BaW+vp6MjAxuvfVWpk+fftZzJk2axMsvv+x+HhER0aJ95syZlJaWsnLlSpqamrjlllu48847WbJkSWvLEQk45fYGlm0pZelnh9l2xA7Ady/qwq+uHUqyKdLD1YmItI9WB5TJkyczefLkbzwnIiICs9l81radO3fywQcfsHHjRoYPHw7A73//e6688kqeffZZLBZLa0sS8Xs1x5tYsa2U3CIrefsrMYyTxyPDgvnZlQP44egeBAVppo6I+I8OGYOyZs0akpOT6dSpE1dccQWPP/44nTt3BiAvL4+EhAR3OAEYP348wcHB5Ofnc80113zl9RwOBw6Hw/3cbrd3RNkiXuV4YzMrd5SxrMjK2t0VNDkNd9t3uieQnWFhylALXeIivuFVRE
R8U7sHlEmTJjF9+nTS09PZt28fDz30EJMnTyYvL4+QkBBsNhvJycktiwgNJTExEZvNdtbXXLRoEQsXLmzvUkW8TmOzi7W7K8gtsvLRjjJONDndbf3NcWRnWpg21EJaYrQHqxQR6XjtHlBuuOEG95+HDBnC0KFD6d27N2vWrGHcuHFtes0FCxYwb94893O73U5aWtp51yriDZwug/z9leQWWVmxzUbNiSZ3W/fEaLIzLGRnWrgoRWuaiEjg6PBpxr169SIpKYm9e/cybtw4zGYz5eXlLc5pbm6mqqrqa8etREREfGWgrYivO3C0nr/nfc6/tpRSXvvFLczkuAimDj0ZSjK6xWtsiYgEpA4PKIcPH6ayspLU1FQAsrKyqK6upqCggGHDhgGwevVqXC4Xo0aN6uhyRDzO5TL4e97nLFqxC0ezC4D4qDAmDzaTnWlhVHpnQrQ0vYgEuFYHlLq6Ovbu3et+fuDAAQoLC0lMTCQxMZGFCxcyY8YMzGYz+/bt48EHH6RPnz5MnDgRgAEDBjBp0iTuuOMOFi9eTFNTE3PmzOGGG27QDB7xe7aaBh74ZxH/2XMUOLmR3y2XpHPZRV0ID231skQiIn4ryDAM49tP+8KaNWv43ve+95Xjs2fP5sUXX+Tqq6/ms88+o7q6GovFwoQJE3jsscdISUlxn1tVVcWcOXNYtmwZwcHBzJgxg+eff57Y2HNb/dJutxMfH09NTQ0mkxalEt9QVd/Ilb/7DzZ7A5FhwTx05QBmaXqwiASQ1nx+tzqgeAMFFPE1hmFwx98L+GhnGelJMbx083AtRy8iAac1n9+6pixyAbyaX8JHO8sIDwnmDzddrHAiIvItFFBEOtjusloeX74DgPmT+zPIEu/hikREvJ8CikgHcroMfvL6ZziaXXz3oi7ccklPT5ckIuITFFBEOtB2aw27bLXERoTy7HUZBGv6sIjIOVFAEelA+yrqABhoMWnPHBGRVlBAEelAByrqAejdJcbDlYiI+BYFFJEOtO/oyYDSK0mzdkREWkMBRaQD7T91BSU9SVdQRERaQwFFpIO4XAafn76Cols8IiKtooAi0kFs9gZONDkJDQ4iLTHa0+WIiPgUBRSRDnL69k73xGjCQvSrJiLSGnrXFOkgB46enGKs2zsiIq2ngCLSQfZVnB5/ohk8IiKtpYAi0kH2H9UMHhGRtlJAEekg+0+tIttLAUVEpNUUUEQ6QEOTkyPVJwDd4hERaQsFFJEOcLDyOIYBcZGhJMWGe7ocERGfo4Ai0gHOvL0TFKQdjEVEWksBRaQD7D+qGTwiIudDAUWkA5xepE0DZEVE2kYBRaQD7D+1SFu6FmkTEWkTBRSRDnDg9C2eJN3iERFpCwUUkXZWVd9I9fEmQIu0iYi0lQKKSDvbXVYLgCU+kqjwEA9XIyLimxRQRNpRmb2B+f+3BYCh3RI8W4yIiA9TQBFpJ5V1Dmb+bz4HK4+TlhjFo9kDPV2SiIjPUkARaQc1x5v44V82sLe8jtT4SJbcPprU+ChPlyUi4rMUUETOU52jmdkvb2BnqZ2k2HBevX0UaYnRni5LRMSnKaCInIcTjU5ufWUjhYeqSYgO49XbR9Fbq8eKiJw3BRSRNnI0O7nzH5vYcKCKuIhQ/n7rSPqbTZ4uS0TEL4R6ugARX1RSeZy5bxVScPAYUWEhvHzLCM3aERFpRwooIq1gGAZvbzrMwmXbqW90EhsRyp9mDWN4z0RPlyYi4lcUUETO0dE6Bwve2crKHWUAjOyZyK+vz9CAWBGRDqCAInIOVu0sY/7/beFoXSNhIUHcN6Efd1zai5DgIE+XJiLilxRQRL5BvaOZx/+1k9c3lABwUUosz/0gk0GWeA9XJiLi3xRQRL5GwcFjzHurkIOVxwG4fWw690/sR2SY9tcREeloCigiX9LkdPH8qj288PFeXMbJTf+evS6DS/okebo0EZGAoYAicoa95XXMfbOQrUdqALg608LCqwYTHxXm4cpERA
KLAooIJxddez2/hKc+2EVDk4v4qDAev3ow0zIsni5NRCQgKaBIwHK5DPL2V/Je4RE+2GbD3tAMwKV9k/jVtRmY4yM9XKGISOBSQJGAVFpzgvvfLuK/eyvdx1JMEfy/y/swa3QPgjV9WETEoxRQJOC8V3iER97dhr2hmaiwEK75TleyMyyM7JmoYCIi4iUUUCRg1Bxv4uH3trGsyApAZloCz/0gk/SkGA9XJiIiX6aAIgFh/Z6j3P92ETZ7AyHBQfzkir7kfK83oSHa0FtExBu1+t153bp1TJs2DYvFQlBQEO+++26LdsMw+PnPf05qaipRUVGMHz+ePXv2tDinqqqKmTNnYjKZSEhI4LbbbqOuru68OiLyZc1OF//ZU8HcNwv54V/ysdkb6JUUwzt3XcI94/sqnIiIeLFWv0PX19eTkZHBCy+8cNb2Z555hueff57FixeTn59PTEwMEydOpKGhwX3OzJkz2b59OytXrmT58uWsW7eOO++8s+29EDnFMAwKDlbx6HvbGL1oFbP+soGlnx0B4OasHvzrJ5eSkZbg2SJFRORbBRmGYbT5i4OCWLp0KVdffTVw8sPBYrFw3333cf/99wNQU1NDSkoKr7zyCjfccAM7d+5k4MCBbNy4keHDhwPwwQcfcOWVV3L48GEslm9fd8JutxMfH09NTQ0mk6mt5YufMAyDnaW15BZZWVZk5Uj1CXdbp+gwrhySyoxh3fhO904erFJERFrz+d2uY1AOHDiAzWZj/Pjx7mPx8fGMGjWKvLw8brjhBvLy8khISHCHE4Dx48cTHBxMfn4+11xzTXuWJH6stqGJv33yOe8WWtlb/sUtwpjwECYOMjMt08LYPkmE6VaOiIjPadeAYrPZAEhJSWlxPCUlxd1ms9lITk5uWURoKImJie5zvszhcOBwONzP7XZ7e5YtPmjDgSrmvVXI4WMnr5aEhwbzvX5dyM7oyrgBydrQT0TEx/nELJ5FixaxcOFCT5chXuBEo5PnV+9h8dp9GAZ0TYjinvF9mTTYjClS++WIiPiLdg0oZrMZgLKyMlJTU93Hy8rKyMzMdJ9TXl7e4uuam5upqqpyf/2XLViwgHnz5rmf2+120tLS2rN08WKNzS7W763gvUIrK3eUcbzRCcC1w7rx6LSBxCmYiIj4nXYNKOnp6ZjNZlatWuUOJHa7nfz8fO666y4AsrKyqK6upqCggGHDhgGwevVqXC4Xo0aNOuvrRkREEBER0Z6lipdzuQzyD1SRW2RlxbZSqo83udvSEqP42ZUDmDQ49RteQUREfFmrA0pdXR179+51Pz9w4ACFhYUkJibSvXt37r33Xh5//HH69u1Leno6jzzyCBaLxT3TZ8CAAUyaNIk77riDxYsX09TUxJw5c7jhhhvOaQaP+L9tR2qY91Yhu8u+GPiaFBvB1KGpZGdauDgtgaAgLUkvIuLPWh1QNm3axPe+9z3389O3XmbPns0rr7zCgw8+SH19PXfeeSfV1dWMHTuWDz74gMjIL3aGfe2115gzZw7jxo0jODiYGTNm8Pzzz7dDd8SXOV0Gi9fu47cf7abJaRAXGcrkwWayM7oyuleiFlYTEQkg57UOiqdoHRT/U1J5nHlvFbLp4DEAJg5KYdH0oSTGhHu4MhERaS8eWwdFpLUMw+DtTYdZuGw79Y1OYiNC+UX2IGZ8p6tu44iIBDAFFPEIp8sg/0Alf11/gI92npzVNbJnIr++PoO0xGgPVyciIp6mgCIXVEWtg8Vr97GsyEp57cnF98JCgrhvQj/uuLQXIcG6aiIiIgoocgF9uN3Ggne2UlXfCIApMpTJg1O5ZWxP+ps1lkhERL6ggCIdrs7RzC+XbeetTYcBGJBqYt73L+Kyi5KICNWS9CIi8lUKKNKhNn5+cs+cQ1UnCAqC/7msN3O/31fBREREvpECinSIxmYXz320271nTrdOUfzm+kxGpid6ujQREfEBCijS7naX1XLvG4XsKD256/R1w7rxc+
2ZIyIiraCAIu3G5TJ4+ZPPefqDXTQ2u0iMCefJa4YwafDZN4EUERH5Ogoo0i4ampz8+NUC1hRXAPC9fl14+tqhJMdFfstXioiIfJUCipw3R7OT//lHAWt3VxAVFsLDUwdw08juWglWRETaTAFFzkuT08XdSz5zh5N/3DaS4T01EFZERM6PtoeVNmtocjLvrSL+vaOM8NBg/nf2cIUTERFpF7qCIm2yw2pn7puFFJfVEhocxOIffocxfZI8XZaIiPgJBRRplTpHM3/P+5znVu6myWmQFBvOs9dlcHm/ZE+XJiIifkQBRb5VQ5OTNcUVLCuysmpXGQ1NLgC+PzCFp6YPoXNshIcrFBERf6OAImfV7HSRt7+S3EIrH2y3UdvQ7G5LT4rhrst7c92wbpqpIyIiHUIBRdwMw2BzyTFyC638a2spR+sa3W1mUyTTMlLJzujK4K4mBRMREelQCigCwN7yWua9VcSWwzXuY52iw5g8JJXsDAsjeyYSHKxQIiIiF4YCSoBzuQz+nvc5i1bswtHsIjo8hImDzGRnWBjbN4mwEM1EFxGRC08BJUDVnGjiw+023tp4iE0HjwFwad8kfnVtBuZ4LU8vIiKepYASYGpONPHwu9v4cJuNRufJ2TgRocH8bMoAZo3uobElIiLiFRRQAki9o5lbXt7A5pJqAPomx5KdYeGa73SlW6dozxYnIiJyBgWUANHQ5OT2v21ic0k1pshQ/vqjEQzr0UlXTERExCspoASAxmYXP361gLz9lcRGhPL320aRmZbg6bJERES+lqZoBIAXPt7LmuIKIsOC+euPRiiciIiI11NA8XPW6hP8ad0+AJ65NoOR6dptWEREvJ8Cip97asUuGppcjExPZNrQVE+XIyIick4UUPzYps+ryC2yEhQEP586UANiRUTEZyig+CmXy2Dhsh0A/GB4GoO7xnu4IhERkXOngOKn3is6wtYjNcRGhHLfhH6eLkdERKRVFFD81Ec7ygG4dUxPusRFeLgaERGR1lFA8VP7KuoAyOye4NlCRERE2kABxQ85XQb7j9YD0KdLnIerERERaT0FFD90qOo4jc0uIkKD6dopytPliIiItJoCih/aW37y9k6vLrGEBGtqsYiI+B4FFD+099T4kz7JsR6uREREpG0UUPzQ6SsofboooIiIiG9SQPFD7oCiKygiIuKjFFD8jGEY7inGCigiIuKrFFD8TEWtg9qGZoKDoGdStKfLERERaRMFFD9z+vZO98RoIkJDPFyNiIhI2yig+BnN4BEREX+ggOJnTl9B6a2AIiIiPkwBxc9oirGIiPiDdg8ov/jFLwgKCmrx6N+/v7u9oaGBnJwcOnfuTGxsLDNmzKCsrKy9ywhYmmIsIiL+oEOuoAwaNIjS0lL3Y/369e62uXPnsmzZMt5++23Wrl2L1Wpl+vTpHVFGwLE3NFFe6wB0i0dERHxbaIe8aGgoZrP5K8dramr4y1/+wpIlS7jiiisAePnllxkwYACffvopo0eP7ohyAsa+U1dPUkwRmCLDPFyNiIhI23XIFZQ9e/ZgsVjo1asXM2fOpKSkBICCggKampoYP368+9z+/fvTvXt38vLyvvb1HA4Hdru9xUO+Srd3RETEX7R7QBk1ahSvvPIKH3zwAS+++CIHDhzg0ksvpba2FpvNRnh4OAkJCS2+JiUlBZvN9rWvuWjRIuLj492PtLS09i7bL7inGGuArIiI+Lh2v8UzefJk95+HDh3KqFGj6NGjB2+99RZRUVFtes0FCxYwb94893O73a6Q8iUul8Hmg8cAjT8RERHf1+HTjBMSErjooovYu3cvZrOZxsZGqqurW5xTVlZ21jErp0VERGAymVo85AuGYfDwe9vY+PkxQoKDyOrV2dMliYiInJcODyh1dXXs27eP1NRUhg0bRlhYGKtWrXK3FxcXU1JSQlZWVkeX4pcMw+Dxf+1kSX4JQUHwm+sz6JsS5+myREREzku73+K5//77mTZtGj169M
BqtfLoo48SEhLCjTfeSHx8PLfddhvz5s0jMTERk8nE3XffTVZWlmbwtNFvVu7mL+sPAPD09KFcldnVwxWJiIicv3YPKIcPH+bGG2+ksrKSLl26MHbsWD799FO6dOkCwHPPPUdwcDAzZszA4XAwceJE/vjHP7Z3GQHhj2v28vvVewFYmD2I60doXI6IiPiHIMMwDE8X0Vp2u534+HhqamoCdjzKy/89wMJlOwD46eT+/Pi7vT1ckYiIyDdrzee39uLxQW9sKHGHk5+M66twIiIifqdDVpKVjnG8sZkn/rWT1/JPLnx3x6XpzB3f18NViYiItD8FFB/xWckx5r5ZyOeVxwG46/LePDixH0FBQR6uTEREpP0poHi5JqeL36/eywsf78XpMkiNj+TX12VwSZ8kT5cmIiLSYRRQvNi+ijrmvlnIlsM1AFydaWHhVYOJj9JGgCIi4t8UULxMs9PFf/dVkltoZfkWK45mF6bIUJ64ZgjTMiyeLk9EROSCUEDxAi6XQUHJMXILrby/tZTK+kZ329g+STx7XQbm+EgPVigiInJhKaB4kMtl8PInn/PX9Qc4Un3CfTwxJpwrh5jJzujKiJ6dNBBWREQCjgKKhxw+dpz73y7i0/1VAMRGhDJhUArZGRbG9EkiLERL1IiISOBSQLnAao438V7REX71QTG1jmaiw0NYMLk/1w1PIzIsxNPliYiIeAUFlAvgeGMzH+0sJ7fQyrrdFTQ6XQBc3D2B567PpGdSjIcrFBER8S4KKB2ksdnFf/ZUkFtkZeWOMo43Ot1t/VLi+MGING7O6kGobuWIiIh8hQJKOzIMg0/3V5FbdIQV22xUH29yt6UlRpGdYSE7oyv9zHEerFJERMT7KaC0k3J7Aw/+3xbWFFe4j3WJi2Dq0FSyMyxkpiVoNo6IiMg5UkBpByu2lvLQ0q0cO95ERGgw11zclewMC6N6dSYkWKFERESktRRQ2qiqvpH3t5aSW2hlw+cnpwoPspj47Q8y6ZuiWzgiIiLnQwGlFeoczfx7u43cIivr9xyl2WUAEBx0cnfhe8ZdRHioBr2KiIicLwWUb9HQ5GRNcTm5RVZW7SzH0exytw3uaiI7w8LUoRYsCVEerFJERMS/KKCcxZkb9v17u41aR7O7rVeXmFOzcSz06hLrwSpFRET8lwLKGbYdqeGtTYd4f2spR+u+2LDPEh/JtAwL0zIsDLKYNBtHRESkgymgnOG/e4/y97yDQMsN+4b36ESwZuOIiIhcMAooZ5iWYaG4rFYb9omIiHiYAsoZLAlR/Ob6TE+XISIiEvB0iUBERES8jgKKiIiIeB0FFBEREfE6CigiIiLidRRQRERExOsooIiIiIjXUUARERERr6OAIiIiIl5HAUVERES8jgKKiIiIeB0FFBEREfE6CigiIiLidRRQRERExOv45G7GhmEAYLfbPVyJiIiInKvTn9unP8e/iU8GlNraWgDS0tI8XImIiIi0Vm1tLfHx8d94TpBxLjHGy7hcLqxWK3FxcQQFBbXra9vtdtLS0jh06BAmk6ldX9vbBXLfIbD7H8h9h8DufyD3HQK7/57ou2EY1NbWYrFYCA7+5lEmPnkFJTg4mG7dunXo9zCZTAH3j/W0QO47BHb/A7nvENj9D+S+Q2D3/0L3/duunJymQbIiIiLidRRQRERExOsooHxJREQEjz76KBEREZ4u5YIL5L5DYPc/kPsOgd3/QO47BHb/vb3vPjlIVkRERPybrqCIiIiI11FAEREREa+jgCIiIiJeRwFFREREvI4CyhleeOEFevbsSWRkJKNGjWLDhg2eLqndLVq0iBEjRhAXF0dycjJXX301xcXFLc5paGggJyeHzp07Exsby4wZMygrK/NQxR3rqaeeIigoiHvvvdd9zJ/7f+TIEX74wx/SuXNnoqKiGDJkCJs2bXK3G4bBz3/+c1JTU4mKimL8+PHs2bPHgxW3H6fTySOPPEJ6ejpRUVH07t2bxx57rMWeIP
7U/3Xr1jFt2jQsFgtBQUG8++67LdrPpa9VVVXMnDkTk8lEQkICt912G3V1dRewF23zTX1vampi/vz5DBkyhJiYGCwWCzfffDNWq7XFa/hq3+Hb/+7P9OMf/5igoCB++9vftjjuDf1XQDnlzTffZN68eTz66KNs3ryZjIwMJk6cSHl5uadLa1dr164lJyeHTz/9lJUrV9LU1MSECROor693nzN37lyWLVvG22+/zdq1a7FarUyfPt2DVXeMjRs38qc//YmhQ4e2OO6v/T927BhjxowhLCyMFStWsGPHDn7961/TqVMn9znPPPMMzz//PIsXLyY/P5+YmBgmTpxIQ0ODBytvH08//TQvvvgif/jDH9i5cydPP/00zzzzDL///e/d5/hT/+vr68nIyOCFF144a/u59HXmzJls376dlStXsnz5ctatW8edd955obrQZt/U9+PHj7N582YeeeQRNm/ezDvvvENxcTHZ2dktzvPVvsO3/92ftnTpUj799FMsFstX2ryi/4YYhmEYI0eONHJyctzPnU6nYbFYjEWLFnmwqo5XXl5uAMbatWsNwzCM6upqIywszHj77bfd5+zcudMAjLy8PE+V2e5qa2uNvn37GitXrjS++93vGvfcc49hGP7d//nz5xtjx4792naXy2WYzWbjV7/6lftYdXW1ERERYbz++usXosQONWXKFOPWW29tcWz69OnGzJkzDcPw7/4DxtKlS93Pz6WvO3bsMABj48aN7nNWrFhhBAUFGUeOHLlgtZ+vL/f9bDZs2GAAxsGDBw3D8J++G8bX9//w4cNG165djW3bthk9evQwnnvuOXebt/RfV1CAxsZGCgoKGD9+vPtYcHAw48ePJy8vz4OVdbyamhoAEhMTASgoKKCpqanFz6J///50797dr34WOTk5TJkypUU/wb/7n5uby/Dhw7nuuutITk7m4osv5qWXXnK3HzhwAJvN1qLv8fHxjBo1yuf7DnDJJZewatUqdu/eDUBRURHr169n8uTJgP/3/0zn0te8vDwSEhIYPny4+5zx48cTHBxMfn7+Ba+5I9XU1BAUFERCQgLg/313uVzMmjWLBx54gEGDBn2l3Vv675ObBba3o0eP4nQ6SUlJaXE8JSWFXbt2eaiqjudyubj33nsZM2YMgwcPBsBmsxEeHu7+RT0tJSUFm83mgSrb3xtvvMHmzZvZuHHjV9r8uf/79+/nxRdfZN68eTz00ENs3LiRn/zkJ4SHhzN79mx3/872e+DrfQf46U9/it1up3///oSEhOB0OnniiSeYOXMmgN/3/0zn0lebzUZycnKL9tDQUBITE/3q59HQ0MD8+fO58cYb3Rvm+Xvfn376aUJDQ/nJT35y1nZv6b8CSgDLyclh27ZtrF+/3tOlXDCHDh3innvuYeXKlURGRnq6nAvK5XIxfPhwnnzySQAuvvhitm3bxuLFi5k9e7aHq+t4b731Fq+99hpLlixh0KBBFBYWcu+992KxWAKi//JVTU1NXH/99RiGwYsvvujpci6IgoICfve737F582aCgoI8Xc430i0eICkpiZCQkK/M1CgrK8NsNnuoqo41Z84cli9fzscff0y3bt3cx81mM42NjVRXV7c4319+FgUFBZSXl/Od73yH0NBQQkNDWbt2Lc8//zyhoaGkpKT4bf9TU1MZOHBgi2MDBgygpKQEwN0/f/09eOCBB/jpT3/KDTfcwJAhQ5g1axZz585l0aJFgP/3/0zn0lez2fyVSQLNzc1UVVX5xc/jdDg5ePAgK1eudF89Af/u+3/+8x/Ky8vp3r27+z3w4MGD3HffffTs2RPwnv4roADh4eEMGzaMVatWuY+5XC5WrVpFVlaWBytrf4ZhMGfOHJYuXcrq1atJT09v0T5s2DDCwsJa/CyKi4spKSnxi5/FuHHj2Lp1K4WFhe7H8OHDmTlzpvvP/tr/MWPGfGVK+e7du+nRowcA6enpmM3mFn232+3k5+f7fN/h5OyN4O
CWb3khISG4XC7A//t/pnPpa1ZWFtXV1RQUFLjPWb16NS6Xi1GjRl3wmtvT6XCyZ88ePvroIzp37tyi3Z/7PmvWLLZs2dLiPdBisfDAAw/w4YcfAl7U/ws2HNfLvfHGG0ZERITxyiuvGDt27DDuvPNOIyEhwbDZbJ4urV3dddddRnx8vLFmzRqjtLTU/Th+/Lj7nB//+MdG9+7djdWrVxubNm0ysrKyjKysLA9W3bHOnMVjGP7b/w0bNhihoaHGE088YezZs8d47bXXjOjoaOPVV191n/PUU08ZCQkJxnvvvWds2bLFuOqqq4z09HTjxIkTHqy8fcyePdvo2rWrsXz5cuPAgQPGO++8YyQlJRkPPvig+xx/6n9tba3x2WefGZ999pkBGL/5zW+Mzz77zD1T5Vz6OmnSJOPiiy828vPzjfXr1xt9+/Y1brzxRk916Zx9U98bGxuN7Oxso1u3bkZhYWGL90GHw+F+DV/tu2F8+9/9l315Fo9heEf/FVDO8Pvf/97o3r27ER4ebowcOdL49NNPPV1SuwPO+nj55Zfd55w4ccL4f//v/xmdOnUyoqOjjWuuucYoLS31XNEd7MsBxZ/7v2zZMmPw4MFGRESE0b9/f+PPf/5zi3aXy2U88sgjRkpKihEREWGMGzfOKC4u9lC17ctutxv33HOP0b17dyMyMtLo1auX8bOf/azFh5I/9f/jjz8+6+/67NmzDcM4t75WVlYaN954oxEbG2uYTCbjlltuMWpraz3Qm9b5pr4fOHDga98HP/74Y/dr+GrfDePb/+6/7GwBxRv6H2QYZyyjKCIiIuIFNAZFREREvI4CioiIiHgdBRQRERHxOgooIiIi4nUUUERERMTrKKCIiIiI11FAEREREa+jgCIiIiJeRwFFREREvI4CioiIiHgdBRQRERHxOgooIiIi4nX+PyTsSd33y6+VAAAAAElFTkSuQmCC", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "print(pred_dur.shape)\n", "pl.plot(pred_dur[0,:].detach().cpu().numpy().cumsum());" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[50, 157, 43, 135, 16, 53, 135, 46, 16, 43, 102, 16, 56, 156, 57, 135, 6, 16, 102, 62, 61, 16, 70, 56, 16, 138, 56, 156, 72, 56, 61, 85, 123, 83, 44, 83, 54, 16, 53, 65, 156, 86, 61, 62, 131, 83, 56, 4, 16, 54, 156, 43, 102, 53, 16, 156, 72, 61, 53, 102, 112, 16, 70, 56, 16, 138, 56, 44, 156, 76, 158, 123, 56, 16, 62, 131, 156, 43, 102, 54, 46, 16, 102, 48, 16, 81, 47, 102, 54, 16, 54, 156, 51, 158, 46, 16, 70, 16, 92, 156, 135, 46, 16, 54, 156, 43, 102, 48, 4, 16, 81, 47, 102, 16, 50, 156, 72, 64, 83, 56, 62, 16, 156, 51, 158, 64, 83, 56, 16, 44, 157, 102, 56, 16, 44, 156, 76, 158, 123, 56, 4]\n" ] } ], "source": [ "ps = phonemize(text)\n", "tokens = tokenize(ps)\n", "print(tokens)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "from models import build_model\n", "import torch\n", "device = \"cpu\" #'cuda' if torch.cuda.is_available() else 'cpu'\n", "model = build_model('kokoro-v0_19.pth', device)\n", "voicepack = torch.load('voices/af.pt', weights_only=True).to(device)" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "bert = model[\"bert\"]" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "embeddings.word_embeddings.weight torch.Size([178, 128])\n", "embeddings.position_embeddings.weight torch.Size([512, 128])\n", "embeddings.token_type_embeddings.weight torch.Size([2, 128])\n", "embeddings.LayerNorm.weight torch.Size([128])\n", "embeddings.LayerNorm.bias torch.Size([128])\n", "encoder.embedding_hidden_mapping_in.weight torch.Size([768, 128])\n", "encoder.embedding_hidden_mapping_in.bias 
torch.Size([768])\n", "encoder.albert_layer_groups.0.albert_layers.0.full_layer_layer_norm.weight torch.Size([768])\n", "encoder.albert_layer_groups.0.albert_layers.0.full_layer_layer_norm.bias torch.Size([768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.query.weight torch.Size([768, 768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.query.bias torch.Size([768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.key.weight torch.Size([768, 768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.key.bias torch.Size([768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.value.weight torch.Size([768, 768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.value.bias torch.Size([768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.dense.weight torch.Size([768, 768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.dense.bias torch.Size([768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.LayerNorm.weight torch.Size([768])\n", "encoder.albert_layer_groups.0.albert_layers.0.attention.LayerNorm.bias torch.Size([768])\n", "encoder.albert_layer_groups.0.albert_layers.0.ffn.weight torch.Size([2048, 768])\n", "encoder.albert_layer_groups.0.albert_layers.0.ffn.bias torch.Size([2048])\n", "encoder.albert_layer_groups.0.albert_layers.0.ffn_output.weight torch.Size([768, 2048])\n", "encoder.albert_layer_groups.0.albert_layers.0.ffn_output.bias torch.Size([768])\n", "pooler.weight torch.Size([768, 768])\n", "pooler.bias torch.Size([768])\n" ] } ], "source": [ "# show all parameters of model bert\n", "for name, param in bert.named_parameters():\n", "    print(name, param.shape)\n", "    # print(param)\n", "    # print(param.requires_grad)\n", "    # break" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Testing LSTM export" ] }, { "cell_type": "code", "execution_count": 
null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "x1.shape=torch.Size([1, 300, 256])\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/onnx/symbolic_opset9.py:4279: UserWarning: Exporting a model to ONNX with a batch_size other than 1, with a variable length with LSTM can cause an error when running the ONNX model with a different batch size. Make sure to save the model with a batch size of 1, or define the initial states (h0/c0) as inputs of the model. \n", " warnings.warn(\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Exported graph: graph(%x : Float(*, 300, 128, strides=[38400, 128, 1], requires_grad=0, device=cpu),\n", " %onnx::LSTM_194 : Float(2, 1024, strides=[1024, 1], requires_grad=0, device=cpu),\n", " %onnx::LSTM_195 : Float(2, 512, 128, strides=[65536, 128, 1], requires_grad=0, device=cpu),\n", " %onnx::LSTM_196 : Float(2, 512, 128, strides=[65536, 128, 1], requires_grad=0, device=cpu)):\n", " %/lstm/Shape_output_0 : Long(3, strides=[1], device=cpu) = onnx::Shape[onnx_name=\"/lstm/Shape\"](%x), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1081:0\n", " %/lstm/Constant_output_0 : Long(device=cpu) = onnx::Constant[value={0}, onnx_name=\"/lstm/Constant\"](), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1081:0\n", " %/lstm/Gather_output_0 : Long(device=cpu) = onnx::Gather[axis=0, onnx_name=\"/lstm/Gather\"](%/lstm/Shape_output_0, %/lstm/Constant_output_0), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1081:0\n", " %/lstm/Constant_1_output_0 : Long(1, strides=[1], requires_grad=0, device=cpu) 
= onnx::Constant[value={2}, onnx_name=\"/lstm/Constant_1\"](), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm\n", " %onnx::Unsqueeze_16 : Long(1, strides=[1], device=cpu) = onnx::Constant[value={0}]()\n", " %/lstm/Unsqueeze_output_0 : Long(1, strides=[1], device=cpu) = onnx::Unsqueeze[onnx_name=\"/lstm/Unsqueeze\"](%/lstm/Gather_output_0, %onnx::Unsqueeze_16), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm\n", " %/lstm/Constant_2_output_0 : Long(1, strides=[1], requires_grad=0, device=cpu) = onnx::Constant[value={128}, onnx_name=\"/lstm/Constant_2\"](), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm\n", " %/lstm/Concat_output_0 : Long(3, strides=[1], device=cpu) = onnx::Concat[axis=0, onnx_name=\"/lstm/Concat\"](%/lstm/Constant_1_output_0, %/lstm/Unsqueeze_output_0, %/lstm/Constant_2_output_0), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1085:0\n", " %/lstm/ConstantOfShape_output_0 : Float(*, *, *, strides=[128, 128, 1], requires_grad=0, device=cpu) = onnx::ConstantOfShape[value={0}, onnx_name=\"/lstm/ConstantOfShape\"](%/lstm/Concat_output_0), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1085:0\n", " %/lstm/Transpose_output_0 : Float(300, *, 128, device=cpu) = onnx::Transpose[perm=[1, 0, 2], onnx_name=\"/lstm/Transpose\"](%x), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1123:0\n", " %onnx::LSTM_23 : Tensor? 
= prim::Constant(), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1123:0\n", " %/lstm/LSTM_output_0 : Float(300, 2, *, 128, device=cpu), %/lstm/LSTM_output_1 : Float(2, *, 128, strides=[128, 128, 1], requires_grad=1, device=cpu), %/lstm/LSTM_output_2 : Float(2, *, 128, strides=[128, 128, 1], requires_grad=1, device=cpu) = onnx::LSTM[direction=\"bidirectional\", hidden_size=128, onnx_name=\"/lstm/LSTM\"](%/lstm/Transpose_output_0, %onnx::LSTM_195, %onnx::LSTM_196, %onnx::LSTM_194, %onnx::LSTM_23, %/lstm/ConstantOfShape_output_0, %/lstm/ConstantOfShape_output_0), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1123:0\n", " %/lstm/Transpose_1_output_0 : Float(300, *, 2, 128, device=cpu) = onnx::Transpose[perm=[0, 2, 1, 3], onnx_name=\"/lstm/Transpose_1\"](%/lstm/LSTM_output_0), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1123:0\n", " %/lstm/Constant_3_output_0 : Long(3, strides=[1], device=cpu) = onnx::Constant[value= 0 0 -1 [ CPULongType{3} ], onnx_name=\"/lstm/Constant_3\"](), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1123:0\n", " %/lstm/Reshape_output_0 : Float(300, *, 256, device=cpu) = onnx::Reshape[allowzero=0, onnx_name=\"/lstm/Reshape\"](%/lstm/Transpose_1_output_0, %/lstm/Constant_3_output_0), scope: __main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1123:0\n", " %151 : Float(*, 300, 256, strides=[256, 256, 1], requires_grad=1, device=cpu) = onnx::Transpose[perm=[1, 0, 2], onnx_name=\"/lstm/Transpose_2\"](%/lstm/Reshape_output_0), scope: 
__main__.Model::/torch.nn.modules.rnn.LSTM::lstm # /rhome/eingerman/mambaforge/envs/styletts2/lib/python3.10/site-packages/torch/nn/modules/rnn.py:1123:0\n", " return (%151)\n", "\n" ] }, { "ename": "AttributeError", "evalue": "'NoneType' object has no attribute 'graph'", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[2], line 37\u001b[0m\n\u001b[1;32m 34\u001b[0m export_mod \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39monnx\u001b[38;5;241m.\u001b[39mexport(model\u001b[38;5;241m=\u001b[39mmodel, args\u001b[38;5;241m=\u001b[39m( xa, ), dynamic_axes\u001b[38;5;241m=\u001b[39mdynamic_shapes, input_names\u001b[38;5;241m=\u001b[39m[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mx\u001b[39m\u001b[38;5;124m\"\u001b[39m], f\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmodel.onnx\u001b[39m\u001b[38;5;124m\"\u001b[39m, verbose\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m, dynamo\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m)\n\u001b[1;32m 35\u001b[0m \u001b[38;5;66;03m# export_mod.save(\"model.onnx\")\u001b[39;00m\n\u001b[1;32m 36\u001b[0m \u001b[38;5;66;03m# export_mod.save_diagnostics(\"model_diagnostics.sarif\")\u001b[39;00m\n\u001b[0;32m---> 37\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[43mexport_mod\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgraph\u001b[49m)\n", "\u001b[0;31mAttributeError\u001b[0m: 'NoneType' object has no attribute 'graph'" ] } ], "source": [ "import torch\n", "# os.environ['TORCHDYNAMO_EXTENDED_DEBUG_GUARD_ADDED']=\"Eq(s0, 384)\"\n", "\n", "# model class containing a single bidirectional LSTM layer\n", "class Model(torch.nn.Module):\n", " def __init__(self):\n", " super().__init__()\n", " self.lstm = torch.nn.LSTM(128, 128, 1, bidirectional=True, batch_first=True)\n", " #initialize lstm 
weights\n", "        for name, param in self.lstm.named_parameters():\n", "            if 'weight' in name:\n", "                torch.nn.init.orthogonal_(param)\n", "            elif 'bias' in name:\n", "                torch.nn.init.zeros_(param)\n", "\n", "    def forward(self, x):\n", "        x1 = x.transpose(-1,-2)\n", "        # print(f\"{x.shape=} {x1.shape=}\")\n", "        x2, _ = self.lstm(x)\n", "        return x2\n", "\n", "model = Model()\n", "model = model.to(\"cpu\")\n", "model.eval()\n", "\n", "# initial input to LSTM in variable x\n", "xa = torch.zeros((1, 300, 128)).to(\"cpu\")\n", "x1 = model(xa)\n", "print(f\"{x1.shape=}\")\n", "ntokens = torch.export.Dim(\"ntokens\", min=3)\n", "# axis 1 is the (variable) token/sequence axis for this batch_first input; axis 0 is the batch\n", "dynamic_shapes = {\"x\": {1: \"ntokens\"}}\n", "\n", "# scripted = torch.jit.script(model)\n", "torch.onnx.export(model=model, args=( xa, ), dynamic_axes=dynamic_shapes, input_names=[\"x\"], f=\"model.onnx\", verbose=True, dynamo=False)\n", "# export_mod.save(\"model.onnx\")\n", "# export_mod.save_diagnostics(\"model_diagnostics.sarif\")\n", "# print(export_mod.graph)" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "torch.Size([1, 143])\n" ] } ], "source": [ "from kokoro import phonemize, tokenize\n", "from models_scripting import load_plbert\n", "bert = load_plbert()\n", "\n", "text = \"How could I know? It's an unanswerable question. Like asking an unborn child if they'll lead a good life. 
They haven't even been born.\"\n", "ps = phonemize(text, \"a\")\n", "tokens = tokenize(ps)\n", "tokens = torch.LongTensor([[0, *tokens, 0]]).to(device)\n", "dynamic_shapes = {\"tokens\":{1:'ntokens'}}\n", "print(tokens.shape)\n", "torch.onnx.export(model=bert, args=( tokens, ), dynamic_axes=dynamic_shapes, input_names=[\"tokens\"], f=\"bert.onnx\", verbose=False, dynamo=False)\n" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "ename": "Fail", "evalue": "[ONNXRuntimeError] : 1 : FAIL : Load model from style_model.onnx failed:Node (/Transpose_9) Op (Transpose) [TypeInferenceError] Invalid attribute perm {1, -1, 0}, input shape = {0, 0, 128}", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mFail\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[6], line 6\u001b[0m\n\u001b[1;32m 3\u001b[0m onnx_model \u001b[38;5;241m=\u001b[39m onnx\u001b[38;5;241m.\u001b[39mload(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mstyle_model.onnx\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 4\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01monnxruntime\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mort\u001b[39;00m\n\u001b[0;32m----> 6\u001b[0m ort_session \u001b[38;5;241m=\u001b[39m \u001b[43mort\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mInferenceSession\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mstyle_model.onnx\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 7\u001b[0m outputs \u001b[38;5;241m=\u001b[39m ort_session\u001b[38;5;241m.\u001b[39mrun(\u001b[38;5;28;01mNone\u001b[39;00m, {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtokens\u001b[39m\u001b[38;5;124m\"\u001b[39m: tokens\u001b[38;5;241m.\u001b[39mnumpy()})\n", "File 
\u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py:465\u001b[0m, in \u001b[0;36mInferenceSession.__init__\u001b[0;34m(self, path_or_bytes, sess_options, providers, provider_options, **kwargs)\u001b[0m\n\u001b[1;32m 462\u001b[0m disabled_optimizers \u001b[38;5;241m=\u001b[39m kwargs\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mdisabled_optimizers\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 464\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 465\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_create_inference_session\u001b[49m\u001b[43m(\u001b[49m\u001b[43mproviders\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mprovider_options\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdisabled_optimizers\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 466\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mValueError\u001b[39;00m, \u001b[38;5;167;01mRuntimeError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m 467\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_enable_fallback:\n", "File \u001b[0;32m~/mambaforge/envs/styletts2/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py:526\u001b[0m, in \u001b[0;36mInferenceSession._create_inference_session\u001b[0;34m(self, providers, provider_options, disabled_optimizers)\u001b[0m\n\u001b[1;32m 523\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_register_ep_custom_ops(session_options, providers, provider_options, available_providers)\n\u001b[1;32m 525\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_model_path:\n\u001b[0;32m--> 526\u001b[0m sess \u001b[38;5;241m=\u001b[39m 
\u001b[43mC\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mInferenceSession\u001b[49m\u001b[43m(\u001b[49m\u001b[43msession_options\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_model_path\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_read_config_from_model\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 527\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 528\u001b[0m sess \u001b[38;5;241m=\u001b[39m C\u001b[38;5;241m.\u001b[39mInferenceSession(session_options, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_model_bytes, \u001b[38;5;28;01mFalse\u001b[39;00m, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_read_config_from_model)\n", "\u001b[0;31mFail\u001b[0m: [ONNXRuntimeError] : 1 : FAIL : Load model from style_model.onnx failed:Node (/Transpose_9) Op (Transpose) [TypeInferenceError] Invalid attribute perm {1, -1, 0}, input shape = {0, 0, 128}" ] } ], "source": [ "import onnx\n", "\n", "onnx_model = onnx.load(\"style_model.onnx\")\n", "import onnxruntime as ort\n", "\n", "ort_session = ort.InferenceSession(\"style_model.onnx\")\n", "outputs = ort_session.run(None, {\"tokens\": tokens.numpy()})" ] } ], "metadata": { "kernelspec": { "display_name": "styletts2", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.14" } }, "nbformat": 4, "nbformat_minor": 2 }