diff --git a/Season2.step_into_llm/16.Practical-cases/shufflenet/mindspore_shufflenet.ipynb b/Season2.step_into_llm/16.Practical-cases/shufflenet/mindspore_shufflenet.ipynb index 29d5d5d..631ed7e 100644 --- a/Season2.step_into_llm/16.Practical-cases/shufflenet/mindspore_shufflenet.ipynb +++ b/Season2.step_into_llm/16.Practical-cases/shufflenet/mindspore_shufflenet.ipynb @@ -43,15 +43,213 @@ "cell_type": "code", "execution_count": 1, "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Looking in indexes: http://mirrors.aliyun.com/pypi/simple\n", + "Collecting mindspore==2.3.1\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/d2/12/c6e2c689616eb66b61009b6f47e7460fa567952916691a9da30febd5f6dd/mindspore-2.3.1-cp310-none-any.whl (328.8 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m328.8/328.8 MB\u001b[0m \u001b[31m913.4 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:08\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: asttokens>=2.0.4 in ./miniconda3/lib/python3.10/site-packages (from mindspore==2.3.1) (3.0.0)\n", + "Requirement already satisfied: packaging>=20.0 in ./miniconda3/lib/python3.10/site-packages (from mindspore==2.3.1) (24.2)\n", + "Collecting astunparse>=1.6.3\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/2b/03/13dde6512ad7b4557eb792fbcf0c653af6076b81e5941d36ec61f7ce6028/astunparse-1.6.3-py2.py3-none-any.whl (12 kB)\n", + "Requirement already satisfied: pillow>=6.2.0 in ./miniconda3/lib/python3.10/site-packages (from mindspore==2.3.1) (11.1.0)\n", + "Requirement already satisfied: scipy>=1.5.4 in ./miniconda3/lib/python3.10/site-packages (from mindspore==2.3.1) (1.15.2)\n", + "Requirement already satisfied: numpy<2.0.0,>=1.20.0 in ./miniconda3/lib/python3.10/site-packages (from mindspore==2.3.1) (1.26.4)\n", + "Requirement already satisfied: psutil>=5.6.1 in ./miniconda3/lib/python3.10/site-packages (from mindspore==2.3.1) (7.0.0)\n", + "Requirement already satisfied: protobuf>=3.13.0 in ./miniconda3/lib/python3.10/site-packages (from mindspore==2.3.1) (6.30.1)\n", + "Requirement already satisfied: six<2.0,>=1.6.1 in ./miniconda3/lib/python3.10/site-packages (from astunparse>=1.6.3->mindspore==2.3.1) (1.16.0)\n", + "Requirement already satisfied: wheel<1.0,>=0.23.0 in ./miniconda3/lib/python3.10/site-packages (from astunparse>=1.6.3->mindspore==2.3.1) (0.37.1)\n", + "Installing collected packages: astunparse, mindspore\n", + "Successfully installed astunparse-1.6.3 mindspore-2.3.1\n", + "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. 
It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n", + "\u001b[0m" + ] + } + ], + "source": [ + "!pip install mindspore==2.3.1" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Looking in indexes: http://mirrors.aliyun.com/pypi/simple\n", + "Collecting mindnlp==0.3.1\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/72/37/ef313c23fd587c3d1f46b0741c98235aecdfd93b4d6d446376f3db6a552c/mindnlp-0.3.1-py3-none-any.whl (5.7 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m5.7/5.7 MB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hCollecting pytest==7.2.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/67/68/a5eb36c3a8540594b6035e6cdae40c1ef1b6a2bfacbecc3d1a544583c078/pytest-7.2.0-py3-none-any.whl (316 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m316.8/316.8 kB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: mindspore in ./miniconda3/lib/python3.10/site-packages (from mindnlp==0.3.1) (2.3.1)\n", + "Requirement already satisfied: requests in ./miniconda3/lib/python3.10/site-packages (from mindnlp==0.3.1) (2.32.3)\n", + "Collecting datasets\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/20/34/a08b0ee99715eaba118cbe19a71f7b5e2425c2718ef96007c325944a1152/datasets-3.6.0-py3-none-any.whl (491 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m491.5/491.5 kB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hCollecting safetensors\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/5d/9a/add3e6fef267658075c5a41573c26d42d80c935cdc992384dfae435feaef/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (459 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m459.5/459.5 kB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hCollecting addict\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl (3.8 kB)\n", + "Collecting pyctcdecode\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/a5/8a/93e2118411ae5e861d4f4ce65578c62e85d0f1d9cb389bd63bd57130604e/pyctcdecode-0.5.0-py2.py3-none-any.whl (39 kB)\n", + "Requirement already satisfied: tqdm in ./miniconda3/lib/python3.10/site-packages (from mindnlp==0.3.1) (4.64.1)\n", + "Collecting evaluate\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/a2/e7/cbca9e2d2590eb9b5aa8f7ebabe1beb1498f9462d2ecede5c9fd9735faaf/evaluate-0.4.3-py3-none-any.whl (84 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m84.0/84.0 kB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting tokenizers\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/4d/7a/a209b29f971a9fdc1da86f917fe4524564924db50d13f0724feed37b2a4d/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (2.9 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.9/2.9 MB\u001b[0m 
\u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hCollecting ml-dtypes\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/a4/a4/9321cae435d6140f9b0e7af8334456a854b60e3a9c6101280a16e3594965/ml_dtypes-0.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (4.6 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.6/4.6 MB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hCollecting jieba\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/c6/cb/18eeb235f833b726522d7ebed54f2278ce28ba9438e3135ab0278d9792a2/jieba-0.42.1.tar.gz (19.2 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m19.2/19.2 MB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25ldone\n", + "\u001b[?25hCollecting sentencepiece\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/fd/46/316c1ba6c52b97de76aff7b9da678f7afbb52136afb2987c474d95630e65/sentencepiece-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (1.3 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m1.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m0m\n", + "\u001b[?25hCollecting regex\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/78/a2/6dd36e16341ab95e4c6073426561b9bfdeb1a9c9b63ab1b579c2e96cb105/regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (782 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m782.5/782.5 kB\u001b[0m \u001b[31m1.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hCollecting iniconfig\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl (6.0 kB)\n", + "Requirement already satisfied: pluggy<2.0,>=0.12 in ./miniconda3/lib/python3.10/site-packages (from pytest==7.2.0->mindnlp==0.3.1) (1.0.0)\n", + "Requirement already satisfied: exceptiongroup>=1.0.0rc8 in ./miniconda3/lib/python3.10/site-packages (from pytest==7.2.0->mindnlp==0.3.1) (1.2.2)\n", + "Requirement already satisfied: tomli>=1.0.0 in ./miniconda3/lib/python3.10/site-packages (from pytest==7.2.0->mindnlp==0.3.1) (2.2.1)\n", + "Requirement already satisfied: attrs>=19.2.0 in ./miniconda3/lib/python3.10/site-packages (from pytest==7.2.0->mindnlp==0.3.1) (25.3.0)\n", + "Requirement already satisfied: packaging in ./miniconda3/lib/python3.10/site-packages (from pytest==7.2.0->mindnlp==0.3.1) (24.2)\n", + "Collecting huggingface-hub>=0.24.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/3a/bf/6002da17ec1c7a47bedeb216812929665927c70b6e7500b3c7bf36f01bdd/huggingface_hub-0.31.1-py3-none-any.whl (484 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m484.3/484.3 kB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hCollecting xxhash\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/20/ee/b8a99ebbc6d1113b3a3f09e747fa318c3cde5b04bd9c197688fadf0eeae8/xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (220 kB)\n", + "\u001b[2K 
\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m220.9/220.9 kB\u001b[0m \u001b[31m2.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hCollecting pyarrow>=15.0.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/21/d1/71d91b2791b829c9e98f1e0d85be66ed93aff399f80abb99678511847eaa/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl (40.7 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m40.7/40.7 MB\u001b[0m \u001b[31m976.1 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m0:01\u001b[0m\n", + "\u001b[?25hCollecting tqdm\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl (78 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m78.5/78.5 kB\u001b[0m \u001b[31m985.4 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: filelock in ./miniconda3/lib/python3.10/site-packages (from datasets->mindnlp==0.3.1) (3.18.0)\n", + "Requirement already satisfied: pyyaml>=5.1 in ./miniconda3/lib/python3.10/site-packages (from datasets->mindnlp==0.3.1) (6.0.2)\n", + "Collecting dill<0.3.9,>=0.3.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/c9/7a/cef76fd8438a42f96db64ddaa85280485a9c395e7df3db8158cfec1eee34/dill-0.3.8-py3-none-any.whl (116 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m116.3/116.3 kB\u001b[0m \u001b[31m942.1 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hCollecting pandas\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/ed/12/86c1747ea27989d7a4064f806ce2bae2c6d575b950be087837bdfcabacc9/pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl (66.5 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m66.5/66.5 MB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:02\u001b[0mm\n", + "\u001b[?25hRequirement already satisfied: numpy>=1.17 in ./miniconda3/lib/python3.10/site-packages (from datasets->mindnlp==0.3.1) (1.26.4)\n", + "Requirement already satisfied: fsspec[http]<=2025.3.0,>=2023.1.0 in ./miniconda3/lib/python3.10/site-packages (from datasets->mindnlp==0.3.1) (2025.3.0)\n", + "Collecting multiprocess<0.70.17\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl (134 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.8/134.8 kB\u001b[0m \u001b[31m1.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: charset-normalizer<4,>=2 in ./miniconda3/lib/python3.10/site-packages (from requests->mindnlp==0.3.1) (2.0.4)\n", + "Requirement already satisfied: certifi>=2017.4.17 in ./miniconda3/lib/python3.10/site-packages (from requests->mindnlp==0.3.1) (2022.12.7)\n", + "Requirement already satisfied: idna<4,>=2.5 in ./miniconda3/lib/python3.10/site-packages (from requests->mindnlp==0.3.1) (3.4)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in ./miniconda3/lib/python3.10/site-packages (from requests->mindnlp==0.3.1) (1.26.13)\n", + "Requirement already satisfied: pillow>=6.2.0 in ./miniconda3/lib/python3.10/site-packages (from 
mindspore->mindnlp==0.3.1) (11.1.0)\n", + "Requirement already satisfied: protobuf>=3.13.0 in ./miniconda3/lib/python3.10/site-packages (from mindspore->mindnlp==0.3.1) (6.30.1)\n", + "Requirement already satisfied: psutil>=5.6.1 in ./miniconda3/lib/python3.10/site-packages (from mindspore->mindnlp==0.3.1) (7.0.0)\n", + "Requirement already satisfied: asttokens>=2.0.4 in ./miniconda3/lib/python3.10/site-packages (from mindspore->mindnlp==0.3.1) (3.0.0)\n", + "Requirement already satisfied: astunparse>=1.6.3 in ./miniconda3/lib/python3.10/site-packages (from mindspore->mindnlp==0.3.1) (1.6.3)\n", + "Requirement already satisfied: scipy>=1.5.4 in ./miniconda3/lib/python3.10/site-packages (from mindspore->mindnlp==0.3.1) (1.15.2)\n", + "Collecting hypothesis<7,>=6.14\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/b6/c7/78597bcec48e1585ea9029deb2bf2341516e90dd615a3db498413d68a4cc/hypothesis-6.131.15-py3-none-any.whl (501 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m501.1/501.1 kB\u001b[0m \u001b[31m1.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hCollecting pygtrie<3.0,>=2.1\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/ec/cd/bd196b2cf014afb1009de8b0f05ecd54011d881944e62763f3c1b1e8ef37/pygtrie-2.5.0-py3-none-any.whl (25 kB)\n", + "Requirement already satisfied: six<2.0,>=1.6.1 in ./miniconda3/lib/python3.10/site-packages (from astunparse>=1.6.3->mindspore->mindnlp==0.3.1) (1.16.0)\n", + "Requirement already satisfied: wheel<1.0,>=0.23.0 in ./miniconda3/lib/python3.10/site-packages (from astunparse>=1.6.3->mindspore->mindnlp==0.3.1) (0.37.1)\n", + "Collecting aiohttp!=4.0.0a0,!=4.0.0a1\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/4c/ca/67d816ef075e8ac834b5f1f6b18e8db7d170f7aebaf76f1be462ea10cab0/aiohttp-3.11.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (1.6 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: typing-extensions>=3.7.4.3 in ./miniconda3/lib/python3.10/site-packages (from huggingface-hub>=0.24.0->datasets->mindnlp==0.3.1) (4.12.2)\n", + "Collecting hf-xet<2.0.0,>=1.1.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/2e/01/d94553f91d85746e0862f24d239da88d10f5ce252b028565744e982432f4/hf_xet-1.1.0-cp37-abi3-manylinux_2_28_aarch64.whl (52.0 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m52.0/52.0 MB\u001b[0m \u001b[31m1.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hCollecting sortedcontainers<3.0.0,>=2.1.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl (29 kB)\n", + "Collecting tzdata>=2022.7\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl (347 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m347.8/347.8 kB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: python-dateutil>=2.8.2 in ./miniconda3/lib/python3.10/site-packages (from pandas->datasets->mindnlp==0.3.1) 
(2.9.0.post0)\n", + "Collecting pytz>=2020.1\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl (509 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m509.2/509.2 kB\u001b[0m \u001b[31m1.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hCollecting yarl<2.0,>=1.17.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/c8/21/e0aa650bcee881fb804331faa2c0f9a5d6be7609970b2b6e3cdd414e174b/yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (327 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m327.3/327.3 kB\u001b[0m \u001b[31m1.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n", + "\u001b[?25hCollecting async-timeout<6.0,>=4.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl (6.2 kB)\n", + "Collecting multidict<7.0,>=4.5\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/64/28/5a7bf4e7422613ea80f9ebc529d3845b20a422cfa94d4355504ac98047ee/multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (220 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m220.2/220.2 kB\u001b[0m \u001b[31m624.5 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hCollecting propcache>=0.2.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/ad/6c/d01f9dfbbdc613305e0a831016844987a1fb4861dd221cd4c69b1216b43f/propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (206 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m206.1/206.1 kB\u001b[0m \u001b[31m917.3 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hCollecting aiohappyeyeballs>=2.3.0\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl (15 kB)\n", + "Collecting aiosignal>=1.1.2\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl (7.6 kB)\n", + "Collecting frozenlist>=1.1.1\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/8e/09/93f0293e8a95c05eea7cf9277fef8929fb4d0a2234ad9394cd2a6b6a6bb4/frozenlist-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl (287 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m287.2/287.2 kB\u001b[0m \u001b[31m846.8 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n", + "\u001b[?25hBuilding wheels for collected packages: jieba\n", + " Building wheel for jieba (setup.py) ... 
\u001b[?25ldone\n", + "\u001b[?25h Created wheel for jieba: filename=jieba-0.42.1-py3-none-any.whl size=19314459 sha256=8878060d5ebb8fc4dc19f8eaacb52aed0e7fca70a3d4410110294399f8ee594d\n", + " Stored in directory: /root/.cache/pip/wheels/1e/38/87/a0f2d9504eee018eea490f7821cb9142b6a9304c318dba4fdd\n", + "Successfully built jieba\n", + "Installing collected packages: sortedcontainers, sentencepiece, pytz, pygtrie, jieba, addict, xxhash, tzdata, tqdm, safetensors, regex, pyarrow, propcache, multidict, ml-dtypes, iniconfig, hypothesis, hf-xet, frozenlist, dill, async-timeout, aiohappyeyeballs, yarl, pytest, pyctcdecode, pandas, multiprocess, huggingface-hub, aiosignal, tokenizers, aiohttp, datasets, evaluate, mindnlp\n", + " Attempting uninstall: tqdm\n", + " Found existing installation: tqdm 4.64.1\n", + " Uninstalling tqdm-4.64.1:\n", + " Successfully uninstalled tqdm-4.64.1\n", + "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "te 0.4.0 requires cloudpickle, which is not installed.\u001b[0m\u001b[31m\n", + "\u001b[0mSuccessfully installed addict-2.4.0 aiohappyeyeballs-2.6.1 aiohttp-3.11.18 aiosignal-1.3.2 async-timeout-5.0.1 datasets-3.6.0 dill-0.3.8 evaluate-0.4.3 frozenlist-1.6.0 hf-xet-1.1.0 huggingface-hub-0.31.1 hypothesis-6.131.15 iniconfig-2.1.0 jieba-0.42.1 mindnlp-0.3.1 ml-dtypes-0.5.1 multidict-6.4.3 multiprocess-0.70.16 pandas-2.2.3 propcache-0.3.1 pyarrow-20.0.0 pyctcdecode-0.5.0 pygtrie-2.5.0 pytest-7.2.0 pytz-2025.2 regex-2024.11.6 safetensors-0.5.3 sentencepiece-0.2.0 sortedcontainers-2.4.0 tokenizers-0.21.1 tqdm-4.67.1 tzdata-2025.2 xxhash-3.5.0 yarl-1.20.0\n", + "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n", + "\u001b[0m" + ] + } + ], + "source": [ + "!pip install mindnlp==0.3.1" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "[WARNING] GE_ADPT(168277,ffffb39b2010,python):2024-12-19-17:59:30.790.628 [mindspore/ccsrc/utils/dlopen_macro.h:163] DlsymAscend] Dynamically load symbol aclmdlBundleGetModelId failed, result = /usr/local/Ascend/ascend-toolkit/latest/lib64/libascendcl.so: undefined symbol: aclmdlBundleGetModelId\n", - "[WARNING] GE_ADPT(168277,ffffb39b2010,python):2024-12-19-17:59:30.790.684 [mindspore/ccsrc/utils/dlopen_macro.h:163] DlsymAscend] Dynamically load symbol aclmdlBundleLoadFromMem failed, result = /usr/local/Ascend/ascend-toolkit/latest/lib64/libascendcl.so: undefined symbol: aclmdlBundleLoadFromMem\n", - "[WARNING] GE_ADPT(168277,ffffb39b2010,python):2024-12-19-17:59:30.790.702 [mindspore/ccsrc/utils/dlopen_macro.h:163] DlsymAscend] Dynamically load symbol aclmdlBundleUnload failed, result = /usr/local/Ascend/ascend-toolkit/latest/lib64/libascendcl.so: undefined symbol: aclmdlBundleUnload\n", - "[WARNING] ME(168277:281473695031312,MainProcess):2024-12-19-17:59:30.912.829 [mindspore/run_check/_check_version.py:396] Can not find the tbe operator implementation(need by mindspore-ascend). Please check whether the Environment Variable PYTHONPATH is set. 
For details, refer to the installation guidelines: https://www.mindspore.cn/install\n" + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:43:58.496.000 [mindspore/run_check/_check_version.py:357] MindSpore version 2.3.1 and Ascend AI software package (Ascend Data Center Solution)version 7.6 does not match, the version of software package expect one of ['7.2', '7.3']. Please refer to the match info on: https://www.mindspore.cn/install\n", + "/root/miniconda3/lib/python3.10/site-packages/numpy/core/getlimits.py:549: UserWarning: The value of the smallest subnormal for type is zero.\n", + " setattr(self, word, getattr(machar, word).flat[0])\n", + "/root/miniconda3/lib/python3.10/site-packages/numpy/core/getlimits.py:89: UserWarning: The value of the smallest subnormal for type is zero.\n", + " return self._float_to_str(self.smallest_subnormal)\n", + "/root/miniconda3/lib/python3.10/site-packages/numpy/core/getlimits.py:549: UserWarning: The value of the smallest subnormal for type is zero.\n", + " setattr(self, word, getattr(machar, word).flat[0])\n", + "/root/miniconda3/lib/python3.10/site-packages/numpy/core/getlimits.py:89: UserWarning: The value of the smallest subnormal for type is zero.\n", + " return self._float_to_str(self.smallest_subnormal)\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:01.127.000 [mindspore/run_check/_check_version.py:375] MindSpore version 2.3.1 and \"te\" wheel package version 7.6 does not match. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:01.129.000 [mindspore/run_check/_check_version.py:382] MindSpore version 2.3.1 and \"hccl\" wheel package version 7.6 does not match. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:01.130.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 3\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:02.132.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 2\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:03.133.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 1\n" ] } ], @@ -118,7 +316,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -193,7 +391,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -272,16 +470,47 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Looking in indexes: http://mirrors.aliyun.com/pypi/simple\n", + "Collecting download\n", + " Downloading http://mirrors.aliyun.com/pypi/packages/37/45/01e7455a9659528e77a414b222326d4c525796e4f571bbabcb2e0ff3d1f4/download-0.3.5-py3-none-any.whl (8.8 kB)\n", + "Requirement already satisfied: tqdm in ./miniconda3/lib/python3.10/site-packages (from download) (4.67.1)\n", + "Requirement already satisfied: requests in ./miniconda3/lib/python3.10/site-packages (from download) (2.32.3)\n", + "Requirement already satisfied: six in ./miniconda3/lib/python3.10/site-packages (from download) (1.16.0)\n", + "Requirement already satisfied: certifi>=2017.4.17 in 
./miniconda3/lib/python3.10/site-packages (from requests->download) (2022.12.7)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in ./miniconda3/lib/python3.10/site-packages (from requests->download) (1.26.13)\n", + "Requirement already satisfied: idna<4,>=2.5 in ./miniconda3/lib/python3.10/site-packages (from requests->download) (3.4)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in ./miniconda3/lib/python3.10/site-packages (from requests->download) (2.0.4)\n", + "Installing collected packages: download\n", + "Successfully installed download-0.3.5\n", + "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n", + "\u001b[0m" + ] + } + ], + "source": [ + "!pip install download" + ] + }, + { + "cell_type": "code", + "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ + "Creating data folder...\n", "Downloading data from https://mindspore-website.obs.cn-north-4.myhuaweicloud.com/notebook/datasets/cifar-10-binary.tar.gz (162.2 MB)\n", "\n", - "file_sizes: 100%|████████████████████████████| 170M/170M [00:14<00:00, 11.4MB/s]\n", + "file_sizes: 100%|████████████████████████████| 170M/170M [00:04<00:00, 40.0MB/s]\n", "Extracting tar.gz file...\n", "Successfully downloaded / unzipped to ./dataset\n" ] @@ -292,7 +521,7 @@ "'./dataset'" ] }, - "execution_count": 12, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -307,7 +536,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ @@ -355,7 +584,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 10, "metadata": { "scrolled": true }, @@ -364,8 +593,18 @@ "name": "stderr", "output_type": "stream", "text": [ - "[WARNING] ME(7712:281473579339792,MainProcess):2024-12-19-16:16:17.982.151 [mindspore/run_check/_check_version.py:396] Can not find the tbe operator implementation(need by mindspore-ascend). Please check whether the Environment Variable PYTHONPATH is set. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n", - "[WARNING] ME(7712:281473579339792,MainProcess):2024-12-19-16:16:17.984.986 [mindspore/run_check/_check_version.py:396] Can not find the tbe operator implementation(need by mindspore-ascend). Please check whether the Environment Variable PYTHONPATH is set. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n" + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:55.430.000 [mindspore/run_check/_check_version.py:357] MindSpore version 2.3.1 and Ascend AI software package (Ascend Data Center Solution)version 7.6 does not match, the version of software package expect one of ['7.2', '7.3']. Please refer to the match info on: https://www.mindspore.cn/install\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:55.431.000 [mindspore/run_check/_check_version.py:375] MindSpore version 2.3.1 and \"te\" wheel package version 7.6 does not match. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:55.432.000 [mindspore/run_check/_check_version.py:382] MindSpore version 2.3.1 and \"hccl\" wheel package version 7.6 does not match. 
For details, refer to the installation guidelines: https://www.mindspore.cn/install\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:55.432.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 3\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:56.434.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 2\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:57.436.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 1\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:58.439.000 [mindspore/run_check/_check_version.py:357] MindSpore version 2.3.1 and Ascend AI software package (Ascend Data Center Solution)version 7.6 does not match, the version of software package expect one of ['7.2', '7.3']. Please refer to the match info on: https://www.mindspore.cn/install\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:58.440.000 [mindspore/run_check/_check_version.py:375] MindSpore version 2.3.1 and \"te\" wheel package version 7.6 does not match. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:58.441.000 [mindspore/run_check/_check_version.py:382] MindSpore version 2.3.1 and \"hccl\" wheel package version 7.6 does not match. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:58.441.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 3\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:44:59.443.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 2\n", + "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-17:45:00.445.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 1\n" ] }, { @@ -373,15 +612,14 @@ "output_type": "stream", "text": [ "model size is 2.0x\n", - "============== Starting Training ==============\n", - "-\r" + "============== Starting Training ==============\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "/usr/local/Ascend/ascend-toolkit/8.0.RC3.alpha001/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[32, 2147483647], [48, 48], [16, 63], [16, 63]] to [[32, 2147483647], [48, 48], [16, 63], (16, 63)].\n", + "/usr/local/Ascend/ascend-toolkit/8.0.0/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[32, 2147483647], [48, 48], [16, 63], [16, 63]] to [[32, 2147483647], [48, 48], [16, 63], (16, 63)].\n", " warnings.warn(to_print)\n" ] }, @@ -389,14 +627,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "\\\r" + ".." 
] }, { "name": "stderr", "output_type": "stream", "text": [ - "/usr/local/Ascend/ascend-toolkit/8.0.RC3.alpha001/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[32, 2147483647], [480, 480], [16, 63], [16, 63]] to [[32, 2147483647], [480, 480], [16, 63], (16, 63)].\n", + "/usr/local/Ascend/ascend-toolkit/8.0.0/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[32, 2147483647], [480, 480], [16, 63], [16, 63]] to [[32, 2147483647], [480, 480], [16, 63], (16, 63)].\n", " warnings.warn(to_print)\n" ] }, @@ -404,14 +642,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "|\r" + "." ] }, { "name": "stderr", "output_type": "stream", "text": [ - "/usr/local/Ascend/ascend-toolkit/8.0.RC3.alpha001/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[32, 2147483647], [960, 960], [4, 15], [4, 15]] to [[32, 2147483647], [960, 960], [4, 15], (4, 15)].\n", + "/usr/local/Ascend/ascend-toolkit/8.0.0/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[32, 2147483647], [960, 960], [4, 15], [4, 15]] to [[32, 2147483647], [960, 960], [4, 15], (4, 15)].\n", " warnings.warn(to_print)\n" ] }, @@ -419,14 +657,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "/\r" + "." ] }, { "name": "stderr", "output_type": "stream", "text": [ - "/usr/local/Ascend/ascend-toolkit/8.0.RC3.alpha001/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[32, 2147483647], [1920, 1920], [7, 15], [7, 15]] to [[32, 2147483647], [1920, 1920], [7, 15], (7, 15)].\n", + "/usr/local/Ascend/ascend-toolkit/8.0.0/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[32, 2147483647], [1920, 1920], [7, 15], [7, 15]] to [[32, 2147483647], [1920, 1920], [7, 15], (7, 15)].\n", " warnings.warn(to_print)\n" ] }, @@ -434,10787 +672,10610 @@ "name": "stdout", "output_type": "stream", "text": [ - "epoch: 1 step: 1, loss is 2.75980544090271\n", - "epoch: 1 step: 2, loss is 2.5670511722564697\n", - "epoch: 1 step: 3, loss is 2.4105427265167236\n", - "epoch: 1 step: 4, loss is 2.3655662536621094\n", - "epoch: 1 step: 5, loss is 2.4783384799957275\n", - "epoch: 1 step: 6, loss is 2.557666301727295\n", - "epoch: 1 step: 7, loss is 2.7726364135742188\n", - "epoch: 1 step: 8, loss is 2.708310127258301\n", - "epoch: 1 step: 9, loss is 2.580253839492798\n", - "epoch: 1 step: 10, loss is 2.372993230819702\n", - "epoch: 1 step: 11, loss is 2.3711836338043213\n", - "epoch: 1 step: 12, loss is 2.3730173110961914\n", - "epoch: 1 step: 13, loss is 2.4239094257354736\n", - "epoch: 1 step: 14, loss is 2.5057613849639893\n", - "epoch: 1 step: 15, loss is 2.52937388420105\n", - "epoch: 1 step: 16, loss is 2.354022741317749\n", - "epoch: 1 step: 17, loss is 2.2720425128936768\n", - "epoch: 1 step: 18, loss is 2.3076610565185547\n", - "epoch: 1 step: 19, loss is 2.3866496086120605\n", - "epoch: 1 step: 20, loss is 2.404115676879883\n", - "epoch: 1 step: 21, loss is 2.37735652923584\n", - "epoch: 1 step: 22, loss is 2.303785800933838\n", - "epoch: 1 step: 23, loss is 2.309417247772217\n", - "epoch: 1 step: 24, loss is 2.3778350353240967\n", - "epoch: 1 step: 25, loss is 2.2510104179382324\n", - "epoch: 1 step: 26, loss is 2.2096996307373047\n", - 
"epoch: 1 step: 27, loss is 2.280653953552246\n", - "epoch: 1 step: 28, loss is 2.336916446685791\n", - "epoch: 1 step: 29, loss is 2.2206225395202637\n", - "epoch: 1 step: 30, loss is 2.1820828914642334\n", - "epoch: 1 step: 31, loss is 2.191270351409912\n", - "epoch: 1 step: 32, loss is 2.2589340209960938\n", - "epoch: 1 step: 33, loss is 2.2834393978118896\n", - "epoch: 1 step: 34, loss is 2.2658863067626953\n", - "epoch: 1 step: 35, loss is 2.233153820037842\n", - "epoch: 1 step: 36, loss is 2.170480728149414\n", - "epoch: 1 step: 37, loss is 2.245612144470215\n", - "epoch: 1 step: 38, loss is 2.1554932594299316\n", - "epoch: 1 step: 39, loss is 2.161050319671631\n", - "epoch: 1 step: 40, loss is 2.17901611328125\n", - "epoch: 1 step: 41, loss is 2.231091022491455\n", - "epoch: 1 step: 42, loss is 2.172175884246826\n", - "epoch: 1 step: 43, loss is 2.13637113571167\n", - "epoch: 1 step: 44, loss is 2.1085221767425537\n", - "epoch: 1 step: 45, loss is 2.149837017059326\n", - "epoch: 1 step: 46, loss is 2.1344566345214844\n", - "epoch: 1 step: 47, loss is 2.1971163749694824\n", - "epoch: 1 step: 48, loss is 2.1419458389282227\n", - "epoch: 1 step: 49, loss is 2.100971221923828\n", - "epoch: 1 step: 50, loss is 2.0694847106933594\n", - "epoch: 1 step: 51, loss is 1.9892370700836182\n", - "epoch: 1 step: 52, loss is 2.112950325012207\n", - "epoch: 1 step: 53, loss is 2.124391555786133\n", - "epoch: 1 step: 54, loss is 2.105656862258911\n", - "epoch: 1 step: 55, loss is 2.022850275039673\n", - "epoch: 1 step: 56, loss is 2.024585485458374\n", - "epoch: 1 step: 57, loss is 2.020308494567871\n", - "epoch: 1 step: 58, loss is 2.108119010925293\n", - "epoch: 1 step: 59, loss is 2.0770769119262695\n", - "epoch: 1 step: 60, loss is 2.1304168701171875\n", - "epoch: 1 step: 61, loss is 2.028489589691162\n", - "epoch: 1 step: 62, loss is 2.001586437225342\n", - "epoch: 1 step: 63, loss is 2.0726583003997803\n", - "epoch: 1 step: 64, loss is 2.0158157348632812\n", - "epoch: 1 step: 65, loss is 2.0902786254882812\n", - "epoch: 1 step: 66, loss is 2.0740370750427246\n", - "epoch: 1 step: 67, loss is 2.02785325050354\n", - "epoch: 1 step: 68, loss is 2.0279736518859863\n", - "epoch: 1 step: 69, loss is 2.0466506481170654\n", - "epoch: 1 step: 70, loss is 2.074131488800049\n", - "epoch: 1 step: 71, loss is 2.058871030807495\n", - "epoch: 1 step: 72, loss is 2.019763708114624\n", - "epoch: 1 step: 73, loss is 2.046131134033203\n", - "epoch: 1 step: 74, loss is 1.9550176858901978\n", - "epoch: 1 step: 75, loss is 2.0520830154418945\n", - "epoch: 1 step: 76, loss is 2.1202194690704346\n", - "epoch: 1 step: 77, loss is 2.0659918785095215\n", - "epoch: 1 step: 78, loss is 2.027775526046753\n", - "epoch: 1 step: 79, loss is 1.9887727499008179\n", - "epoch: 1 step: 80, loss is 1.9759016036987305\n", - "epoch: 1 step: 81, loss is 2.0818727016448975\n", - "epoch: 1 step: 82, loss is 2.0271098613739014\n", - "epoch: 1 step: 83, loss is 2.029927968978882\n", - "epoch: 1 step: 84, loss is 2.0566868782043457\n", - "epoch: 1 step: 85, loss is 1.9924031496047974\n", - "epoch: 1 step: 86, loss is 1.996401071548462\n", - "epoch: 1 step: 87, loss is 1.9768834114074707\n", - "epoch: 1 step: 88, loss is 2.0737290382385254\n", - "epoch: 1 step: 89, loss is 2.0161893367767334\n", - "epoch: 1 step: 90, loss is 1.9706740379333496\n", - "epoch: 1 step: 91, loss is 2.0001158714294434\n", - "epoch: 1 step: 92, loss is 2.046380043029785\n", - "epoch: 1 step: 93, loss is 2.0490479469299316\n", - "epoch: 1 step: 94, loss is 
2.0252761840820312\n", - "epoch: 1 step: 95, loss is 2.043809652328491\n", - "epoch: 1 step: 96, loss is 1.9563770294189453\n", - "epoch: 1 step: 97, loss is 2.049975872039795\n", - "epoch: 1 step: 98, loss is 1.9703243970870972\n", - "epoch: 1 step: 99, loss is 1.9601037502288818\n", - "epoch: 1 step: 100, loss is 2.0626094341278076\n", - "epoch: 1 step: 101, loss is 2.057330369949341\n", - "epoch: 1 step: 102, loss is 1.9933860301971436\n", - "epoch: 1 step: 103, loss is 1.9648717641830444\n", - "epoch: 1 step: 104, loss is 1.9323358535766602\n", - "epoch: 1 step: 105, loss is 2.0042471885681152\n", - "epoch: 1 step: 106, loss is 1.9503452777862549\n", - "epoch: 1 step: 107, loss is 2.063812255859375\n", - "epoch: 1 step: 108, loss is 2.0226542949676514\n", - "epoch: 1 step: 109, loss is 1.9879087209701538\n", - "epoch: 1 step: 110, loss is 2.0272059440612793\n", - "epoch: 1 step: 111, loss is 2.0028254985809326\n", - "epoch: 1 step: 112, loss is 1.9343068599700928\n", - "epoch: 1 step: 113, loss is 2.0272648334503174\n", - "epoch: 1 step: 114, loss is 1.972496747970581\n", - "epoch: 1 step: 115, loss is 2.008842945098877\n", - "epoch: 1 step: 116, loss is 1.990540862083435\n", - "epoch: 1 step: 117, loss is 2.0785160064697266\n", - "epoch: 1 step: 118, loss is 2.073641300201416\n", - "epoch: 1 step: 119, loss is 1.9699013233184814\n", - "epoch: 1 step: 120, loss is 1.9664998054504395\n", - "epoch: 1 step: 121, loss is 1.9853829145431519\n", - "epoch: 1 step: 122, loss is 1.9614557027816772\n", - "epoch: 1 step: 123, loss is 2.0395641326904297\n", - "epoch: 1 step: 124, loss is 1.9586012363433838\n", - "epoch: 1 step: 125, loss is 1.935078501701355\n", - "epoch: 1 step: 126, loss is 1.917393445968628\n", - "epoch: 1 step: 127, loss is 1.9316489696502686\n", - "epoch: 1 step: 128, loss is 1.9924571514129639\n", - "epoch: 1 step: 129, loss is 1.9180103540420532\n", - "epoch: 1 step: 130, loss is 1.8830822706222534\n", - "epoch: 1 step: 131, loss is 1.9893871545791626\n", - "epoch: 1 step: 132, loss is 2.0457324981689453\n", - "epoch: 1 step: 133, loss is 1.9473958015441895\n", - "epoch: 1 step: 134, loss is 1.9713988304138184\n", - "epoch: 1 step: 135, loss is 1.9245795011520386\n", - "epoch: 1 step: 136, loss is 2.0111937522888184\n", - "epoch: 1 step: 137, loss is 1.9782899618148804\n", - "epoch: 1 step: 138, loss is 2.011812448501587\n", - "epoch: 1 step: 139, loss is 1.9299283027648926\n", - "epoch: 1 step: 140, loss is 1.9381563663482666\n", - "epoch: 1 step: 141, loss is 1.8691617250442505\n", - "epoch: 1 step: 142, loss is 1.9355195760726929\n", - "epoch: 1 step: 143, loss is 2.0354208946228027\n", - "epoch: 1 step: 144, loss is 1.9860905408859253\n", - "epoch: 1 step: 145, loss is 1.8859858512878418\n", - "epoch: 1 step: 146, loss is 1.9505501985549927\n", - "epoch: 1 step: 147, loss is 1.9749855995178223\n", - "epoch: 1 step: 148, loss is 2.0081610679626465\n", - "epoch: 1 step: 149, loss is 1.9252101182937622\n", - "epoch: 1 step: 150, loss is 1.9293466806411743\n", - "epoch: 1 step: 151, loss is 1.880336880683899\n", - "epoch: 1 step: 152, loss is 1.9007970094680786\n", - "epoch: 1 step: 153, loss is 1.9256908893585205\n", - "epoch: 1 step: 154, loss is 1.903590440750122\n", - "epoch: 1 step: 155, loss is 1.9196360111236572\n", - "epoch: 1 step: 156, loss is 1.8820894956588745\n", - "epoch: 1 step: 157, loss is 1.878994107246399\n", - "epoch: 1 step: 158, loss is 1.950648307800293\n", - "epoch: 1 step: 159, loss is 1.9154410362243652\n", - "epoch: 1 step: 160, loss is 
1.8783516883850098\n", - "epoch: 1 step: 161, loss is 1.9601423740386963\n", - "epoch: 1 step: 162, loss is 1.8808218240737915\n", - "epoch: 1 step: 163, loss is 1.9126462936401367\n", - "epoch: 1 step: 164, loss is 1.9157588481903076\n", - "epoch: 1 step: 165, loss is 1.8644219636917114\n", - "epoch: 1 step: 166, loss is 1.8545265197753906\n", - "epoch: 1 step: 167, loss is 1.8698030710220337\n", - "epoch: 1 step: 168, loss is 1.9071085453033447\n", - "epoch: 1 step: 169, loss is 1.916622281074524\n", - "epoch: 1 step: 170, loss is 1.9241015911102295\n", - "epoch: 1 step: 171, loss is 1.9427170753479004\n", - "epoch: 1 step: 172, loss is 1.8719244003295898\n", - "epoch: 1 step: 173, loss is 1.9249184131622314\n", - "epoch: 1 step: 174, loss is 1.8090195655822754\n", - "epoch: 1 step: 175, loss is 1.916797161102295\n", - "epoch: 1 step: 176, loss is 1.9270663261413574\n", - "epoch: 1 step: 177, loss is 1.9432189464569092\n", - "epoch: 1 step: 178, loss is 1.8819589614868164\n", - "epoch: 1 step: 179, loss is 1.9404748678207397\n", - "epoch: 1 step: 180, loss is 1.8541083335876465\n", - "epoch: 1 step: 181, loss is 1.9062340259552002\n", - "epoch: 1 step: 182, loss is 1.9527924060821533\n", - "epoch: 1 step: 183, loss is 1.8839168548583984\n", - "epoch: 1 step: 184, loss is 1.9268646240234375\n", - "epoch: 1 step: 185, loss is 1.8324300050735474\n", - "epoch: 1 step: 186, loss is 1.8515335321426392\n", - "epoch: 1 step: 187, loss is 1.8711837530136108\n", - "epoch: 1 step: 188, loss is 1.9212442636489868\n", - "epoch: 1 step: 189, loss is 1.9718868732452393\n", - "epoch: 1 step: 190, loss is 1.9413766860961914\n", - "epoch: 1 step: 191, loss is 1.915600061416626\n", - "epoch: 1 step: 192, loss is 1.8985328674316406\n", - "epoch: 1 step: 193, loss is 1.8649976253509521\n", - "epoch: 1 step: 194, loss is 1.8688143491744995\n", - "epoch: 1 step: 195, loss is 1.8243794441223145\n", - "Train epoch time: 225782.011 ms, per step time: 1157.856 ms\n", - "epoch: 2 step: 1, loss is 1.8835930824279785\n", - "epoch: 2 step: 2, loss is 1.8765673637390137\n", - "epoch: 2 step: 3, loss is 1.9059536457061768\n", - "epoch: 2 step: 4, loss is 1.8868937492370605\n", - "epoch: 2 step: 5, loss is 1.839110016822815\n", - "epoch: 2 step: 6, loss is 1.8309670686721802\n", - "epoch: 2 step: 7, loss is 1.8669013977050781\n", - "epoch: 2 step: 8, loss is 1.9598655700683594\n", - "epoch: 2 step: 9, loss is 1.849687099456787\n", - "epoch: 2 step: 10, loss is 1.8534170389175415\n", - "epoch: 2 step: 11, loss is 1.908738374710083\n", - "epoch: 2 step: 12, loss is 1.8766202926635742\n", - "epoch: 2 step: 13, loss is 1.8670843839645386\n", - "epoch: 2 step: 14, loss is 1.9053030014038086\n", - "epoch: 2 step: 15, loss is 1.8498446941375732\n", - "epoch: 2 step: 16, loss is 1.83674955368042\n", - "epoch: 2 step: 17, loss is 1.8222582340240479\n", - "epoch: 2 step: 18, loss is 1.8349320888519287\n", - "epoch: 2 step: 19, loss is 1.8130180835723877\n", - "epoch: 2 step: 20, loss is 1.8537371158599854\n", - "epoch: 2 step: 21, loss is 1.8492515087127686\n", - "epoch: 2 step: 22, loss is 1.8609511852264404\n", - "epoch: 2 step: 23, loss is 1.8824775218963623\n", - "epoch: 2 step: 24, loss is 1.8832416534423828\n", - "epoch: 2 step: 25, loss is 1.9459093809127808\n", - "epoch: 2 step: 26, loss is 1.8674077987670898\n", - "epoch: 2 step: 27, loss is 1.9142320156097412\n", - "epoch: 2 step: 28, loss is 1.8707239627838135\n", - "epoch: 2 step: 29, loss is 1.818388819694519\n", - "epoch: 2 step: 30, loss is 1.868898868560791\n", - 
"epoch: 2 step: 31, loss is 1.875065565109253\n", - "epoch: 2 step: 32, loss is 1.85364830493927\n", - "epoch: 2 step: 33, loss is 1.8230578899383545\n", - "epoch: 2 step: 34, loss is 1.8882098197937012\n", - "epoch: 2 step: 35, loss is 1.885534405708313\n", - "epoch: 2 step: 36, loss is 1.9048235416412354\n", - "epoch: 2 step: 37, loss is 1.8643020391464233\n", - "epoch: 2 step: 38, loss is 1.827202320098877\n", - "epoch: 2 step: 39, loss is 1.8662165403366089\n", - "epoch: 2 step: 40, loss is 1.9586678743362427\n", - "epoch: 2 step: 41, loss is 1.7446067333221436\n", - "epoch: 2 step: 42, loss is 1.820096492767334\n", - "epoch: 2 step: 43, loss is 1.836179494857788\n", - "epoch: 2 step: 44, loss is 1.842527151107788\n", - "epoch: 2 step: 45, loss is 1.8931591510772705\n", - "epoch: 2 step: 46, loss is 1.9003570079803467\n", - "epoch: 2 step: 47, loss is 1.9502613544464111\n", - "epoch: 2 step: 48, loss is 1.8058934211730957\n", - "epoch: 2 step: 49, loss is 1.869890570640564\n", - "epoch: 2 step: 50, loss is 1.8696047067642212\n", - "epoch: 2 step: 51, loss is 1.8107106685638428\n", - "epoch: 2 step: 52, loss is 1.8398702144622803\n", - "epoch: 2 step: 53, loss is 1.8218626976013184\n", - "epoch: 2 step: 54, loss is 1.8827303647994995\n", - "epoch: 2 step: 55, loss is 1.8509058952331543\n", - "epoch: 2 step: 56, loss is 1.820065975189209\n", - "epoch: 2 step: 57, loss is 1.840116262435913\n", - "epoch: 2 step: 58, loss is 1.8949873447418213\n", - "epoch: 2 step: 59, loss is 1.8778489828109741\n", - "epoch: 2 step: 60, loss is 1.8530941009521484\n", - "epoch: 2 step: 61, loss is 1.769039273262024\n", - "epoch: 2 step: 62, loss is 1.8116259574890137\n", - "epoch: 2 step: 63, loss is 1.797375202178955\n", - "epoch: 2 step: 64, loss is 1.7704265117645264\n", - "epoch: 2 step: 65, loss is 1.90865957736969\n", - "epoch: 2 step: 66, loss is 1.8896136283874512\n", - "epoch: 2 step: 67, loss is 1.823585033416748\n", - "epoch: 2 step: 68, loss is 1.8057748079299927\n", - "epoch: 2 step: 69, loss is 1.8464064598083496\n", - "epoch: 2 step: 70, loss is 1.9253573417663574\n", - "epoch: 2 step: 71, loss is 1.756753921508789\n", - "epoch: 2 step: 72, loss is 1.8651765584945679\n", - "epoch: 2 step: 73, loss is 1.7854652404785156\n", - "epoch: 2 step: 74, loss is 1.8520358800888062\n", - "epoch: 2 step: 75, loss is 1.7570254802703857\n", - "epoch: 2 step: 76, loss is 1.7863357067108154\n", - "epoch: 2 step: 77, loss is 1.7877100706100464\n", - "epoch: 2 step: 78, loss is 1.8751368522644043\n", - "epoch: 2 step: 79, loss is 1.8790359497070312\n", - "epoch: 2 step: 80, loss is 1.7300336360931396\n", - "epoch: 2 step: 81, loss is 1.8071209192276\n", - "epoch: 2 step: 82, loss is 1.872692346572876\n", - "epoch: 2 step: 83, loss is 1.859041452407837\n", - "epoch: 2 step: 84, loss is 1.8289787769317627\n", - "epoch: 2 step: 85, loss is 1.8514699935913086\n", - "epoch: 2 step: 86, loss is 1.7746024131774902\n", - "epoch: 2 step: 87, loss is 1.8034545183181763\n", - "epoch: 2 step: 88, loss is 1.8302220106124878\n", - "epoch: 2 step: 89, loss is 1.835216999053955\n", - "epoch: 2 step: 90, loss is 1.7966628074645996\n", - "epoch: 2 step: 91, loss is 1.7283095121383667\n", - "epoch: 2 step: 92, loss is 1.7857308387756348\n", - "epoch: 2 step: 93, loss is 1.7471954822540283\n", - "epoch: 2 step: 94, loss is 1.8245375156402588\n", - "epoch: 2 step: 95, loss is 1.8201725482940674\n", - "epoch: 2 step: 96, loss is 1.8172305822372437\n", - "epoch: 2 step: 97, loss is 1.7599267959594727\n", - "epoch: 2 step: 98, loss 
is 1.7652099132537842\n", - "epoch: 2 step: 99, loss is 1.8111861944198608\n", - "epoch: 2 step: 100, loss is 1.7994215488433838\n", - "epoch: 2 step: 101, loss is 1.84322190284729\n", - "epoch: 2 step: 102, loss is 1.8316501379013062\n", - "epoch: 2 step: 103, loss is 1.8635199069976807\n", - "epoch: 2 step: 104, loss is 1.8726353645324707\n", - "epoch: 2 step: 105, loss is 1.785705804824829\n", - "epoch: 2 step: 106, loss is 1.818791389465332\n", - "epoch: 2 step: 107, loss is 1.8749570846557617\n", - "epoch: 2 step: 108, loss is 1.6863653659820557\n", - "epoch: 2 step: 109, loss is 1.8321771621704102\n", - "epoch: 2 step: 110, loss is 1.7761744260787964\n", - "epoch: 2 step: 111, loss is 1.879426121711731\n", - "epoch: 2 step: 112, loss is 1.838904619216919\n", - "epoch: 2 step: 113, loss is 1.81224524974823\n", - "epoch: 2 step: 114, loss is 1.8092156648635864\n", - "epoch: 2 step: 115, loss is 1.8144543170928955\n", - "epoch: 2 step: 116, loss is 1.7913870811462402\n", - "epoch: 2 step: 117, loss is 1.8155028820037842\n", - "epoch: 2 step: 118, loss is 1.7962980270385742\n", - "epoch: 2 step: 119, loss is 1.7516214847564697\n", - "epoch: 2 step: 120, loss is 1.771040439605713\n", - "epoch: 2 step: 121, loss is 1.7596282958984375\n", - "epoch: 2 step: 122, loss is 1.8537299633026123\n", - "epoch: 2 step: 123, loss is 1.8120876550674438\n", - "epoch: 2 step: 124, loss is 1.8149526119232178\n", - "epoch: 2 step: 125, loss is 1.7668054103851318\n", - "epoch: 2 step: 126, loss is 1.768244743347168\n", - "epoch: 2 step: 127, loss is 1.8183424472808838\n", - "epoch: 2 step: 128, loss is 1.744974136352539\n", - "epoch: 2 step: 129, loss is 1.7528072595596313\n", - "epoch: 2 step: 130, loss is 1.6339621543884277\n", - "epoch: 2 step: 131, loss is 1.7914652824401855\n", - "epoch: 2 step: 132, loss is 1.75065279006958\n", - "epoch: 2 step: 133, loss is 1.7865480184555054\n", - "epoch: 2 step: 134, loss is 1.7826173305511475\n", - "epoch: 2 step: 135, loss is 1.7941867113113403\n", - "epoch: 2 step: 136, loss is 1.7378052473068237\n", - "epoch: 2 step: 137, loss is 1.744471549987793\n", - "epoch: 2 step: 138, loss is 1.8432142734527588\n", - "epoch: 2 step: 139, loss is 1.6913396120071411\n", - "epoch: 2 step: 140, loss is 1.78922700881958\n", - "epoch: 2 step: 141, loss is 1.8243529796600342\n", - "epoch: 2 step: 142, loss is 1.7244917154312134\n", - "epoch: 2 step: 143, loss is 1.7133386135101318\n", - "epoch: 2 step: 144, loss is 1.817855715751648\n", - "epoch: 2 step: 145, loss is 1.681628942489624\n", - "epoch: 2 step: 146, loss is 1.8370722532272339\n", - "epoch: 2 step: 147, loss is 1.8401905298233032\n", - "epoch: 2 step: 148, loss is 1.8338029384613037\n", - "epoch: 2 step: 149, loss is 1.783419132232666\n", - "epoch: 2 step: 150, loss is 1.8037354946136475\n", - "epoch: 2 step: 151, loss is 1.7952284812927246\n", - "epoch: 2 step: 152, loss is 1.7852938175201416\n", - "epoch: 2 step: 153, loss is 1.8060368299484253\n", - "epoch: 2 step: 154, loss is 1.7099151611328125\n", - "epoch: 2 step: 155, loss is 1.7653192281723022\n", - "epoch: 2 step: 156, loss is 1.7221190929412842\n", - "epoch: 2 step: 157, loss is 1.771467685699463\n", - "epoch: 2 step: 158, loss is 1.7833179235458374\n", - "epoch: 2 step: 159, loss is 1.7899360656738281\n", - "epoch: 2 step: 160, loss is 1.8668212890625\n", - "epoch: 2 step: 161, loss is 1.758234977722168\n", - "epoch: 2 step: 162, loss is 1.7473156452178955\n", - "epoch: 2 step: 163, loss is 1.7216883897781372\n", - "epoch: 2 step: 164, loss is 
1.7644455432891846\n",
- "epoch: 2 step: 165, loss is 1.7482396364212036\n",
- [... per-step loss lines for epochs 2-8 elided: over 195 steps per epoch, the loss drifts down from roughly 1.8 in epoch 2 to roughly 1.4 in epoch 8; the seven per-epoch summaries below close epochs 2 through 8, and epoch 9's output resumes after the sketch below ...]
- "Train epoch time: 115257.921 ms, per step time: 591.066 ms\n",
- "Train epoch time: 109768.354 ms, per step time: 562.915 ms\n",
- "Train epoch time: 105716.095 ms, per step time: 542.134 ms\n",
- "Train epoch time: 108087.211 ms, per step time: 554.293 ms\n",
- "Train epoch time: 100865.807 ms, per step time: 517.261 ms\n",
- "Train epoch time: 105356.687 ms, per step time: 540.291 ms\n",
- "Train epoch time: 102444.866 ms, per step time: 525.358 ms\n",
1.3725731372833252\n", - "epoch: 9 step: 60, loss is 1.4050712585449219\n", - "epoch: 9 step: 61, loss is 1.380196452140808\n", - "epoch: 9 step: 62, loss is 1.5016015768051147\n", - "epoch: 9 step: 63, loss is 1.3762125968933105\n", - "epoch: 9 step: 64, loss is 1.400113821029663\n", - "epoch: 9 step: 65, loss is 1.390522837638855\n", - "epoch: 9 step: 66, loss is 1.436388373374939\n", - "epoch: 9 step: 67, loss is 1.4666852951049805\n", - "epoch: 9 step: 68, loss is 1.3964097499847412\n", - "epoch: 9 step: 69, loss is 1.3992280960083008\n", - "epoch: 9 step: 70, loss is 1.403984546661377\n", - "epoch: 9 step: 71, loss is 1.5064489841461182\n", - "epoch: 9 step: 72, loss is 1.4924342632293701\n", - "epoch: 9 step: 73, loss is 1.4470736980438232\n", - "epoch: 9 step: 74, loss is 1.4246807098388672\n", - "epoch: 9 step: 75, loss is 1.4689629077911377\n", - "epoch: 9 step: 76, loss is 1.4128447771072388\n", - "epoch: 9 step: 77, loss is 1.4700579643249512\n", - "epoch: 9 step: 78, loss is 1.3486058712005615\n", - "epoch: 9 step: 79, loss is 1.4582267999649048\n", - "epoch: 9 step: 80, loss is 1.396195650100708\n", - "epoch: 9 step: 81, loss is 1.3688485622406006\n", - "epoch: 9 step: 82, loss is 1.3706858158111572\n", - "epoch: 9 step: 83, loss is 1.316467046737671\n", - "epoch: 9 step: 84, loss is 1.5192853212356567\n", - "epoch: 9 step: 85, loss is 1.459778904914856\n", - "epoch: 9 step: 86, loss is 1.4018524885177612\n", - "epoch: 9 step: 87, loss is 1.4267604351043701\n", - "epoch: 9 step: 88, loss is 1.4821763038635254\n", - "epoch: 9 step: 89, loss is 1.4102380275726318\n", - "epoch: 9 step: 90, loss is 1.4617805480957031\n", - "epoch: 9 step: 91, loss is 1.4174072742462158\n", - "epoch: 9 step: 92, loss is 1.4345035552978516\n", - "epoch: 9 step: 93, loss is 1.3794127702713013\n", - "epoch: 9 step: 94, loss is 1.4041898250579834\n", - "epoch: 9 step: 95, loss is 1.3837155103683472\n", - "epoch: 9 step: 96, loss is 1.437793493270874\n", - "epoch: 9 step: 97, loss is 1.4495553970336914\n", - "epoch: 9 step: 98, loss is 1.4666194915771484\n", - "epoch: 9 step: 99, loss is 1.3122203350067139\n", - "epoch: 9 step: 100, loss is 1.4746571779251099\n", - "epoch: 9 step: 101, loss is 1.3794174194335938\n", - "epoch: 9 step: 102, loss is 1.467755675315857\n", - "epoch: 9 step: 103, loss is 1.583702564239502\n", - "epoch: 9 step: 104, loss is 1.4728593826293945\n", - "epoch: 9 step: 105, loss is 1.447981834411621\n", - "epoch: 9 step: 106, loss is 1.3983805179595947\n", - "epoch: 9 step: 107, loss is 1.4462443590164185\n", - "epoch: 9 step: 108, loss is 1.3800263404846191\n", - "epoch: 9 step: 109, loss is 1.5332759618759155\n", - "epoch: 9 step: 110, loss is 1.3966152667999268\n", - "epoch: 9 step: 111, loss is 1.505787968635559\n", - "epoch: 9 step: 112, loss is 1.5261991024017334\n", - "epoch: 9 step: 113, loss is 1.4829952716827393\n", - "epoch: 9 step: 114, loss is 1.4459309577941895\n", - "epoch: 9 step: 115, loss is 1.4073562622070312\n", - "epoch: 9 step: 116, loss is 1.4061671495437622\n", - "epoch: 9 step: 117, loss is 1.4721447229385376\n", - "epoch: 9 step: 118, loss is 1.361497163772583\n", - "epoch: 9 step: 119, loss is 1.4536912441253662\n", - "epoch: 9 step: 120, loss is 1.424666166305542\n", - "epoch: 9 step: 121, loss is 1.4328957796096802\n", - "epoch: 9 step: 122, loss is 1.334822654724121\n", - "epoch: 9 step: 123, loss is 1.443067193031311\n", - "epoch: 9 step: 124, loss is 1.3541892766952515\n", - "epoch: 9 step: 125, loss is 1.3579010963439941\n", - "epoch: 9 step: 126, 
loss is 1.3874437808990479\n", - "epoch: 9 step: 127, loss is 1.4592578411102295\n", - "epoch: 9 step: 128, loss is 1.5056045055389404\n", - "epoch: 9 step: 129, loss is 1.4291508197784424\n", - "epoch: 9 step: 130, loss is 1.4102530479431152\n", - "epoch: 9 step: 131, loss is 1.4127767086029053\n", - "epoch: 9 step: 132, loss is 1.44157874584198\n", - "epoch: 9 step: 133, loss is 1.4844645261764526\n", - "epoch: 9 step: 134, loss is 1.4196217060089111\n", - "epoch: 9 step: 135, loss is 1.39158034324646\n", - "epoch: 9 step: 136, loss is 1.4368102550506592\n", - "epoch: 9 step: 137, loss is 1.420792579650879\n", - "epoch: 9 step: 138, loss is 1.3750956058502197\n", - "epoch: 9 step: 139, loss is 1.4317573308944702\n", - "epoch: 9 step: 140, loss is 1.4562273025512695\n", - "epoch: 9 step: 141, loss is 1.4701918363571167\n", - "epoch: 9 step: 142, loss is 1.4579381942749023\n", - "epoch: 9 step: 143, loss is 1.4216327667236328\n", - "epoch: 9 step: 144, loss is 1.4019056558609009\n", - "epoch: 9 step: 145, loss is 1.3909857273101807\n", - "epoch: 9 step: 146, loss is 1.3865052461624146\n", - "epoch: 9 step: 147, loss is 1.567305088043213\n", - "epoch: 9 step: 148, loss is 1.3793929815292358\n", - "epoch: 9 step: 149, loss is 1.3975459337234497\n", - "epoch: 9 step: 150, loss is 1.3994706869125366\n", - "epoch: 9 step: 151, loss is 1.4336097240447998\n", - "epoch: 9 step: 152, loss is 1.4594308137893677\n", - "epoch: 9 step: 153, loss is 1.402277946472168\n", - "epoch: 9 step: 154, loss is 1.5039217472076416\n", - "epoch: 9 step: 155, loss is 1.41551673412323\n", - "epoch: 9 step: 156, loss is 1.4417976140975952\n", - "epoch: 9 step: 157, loss is 1.4270507097244263\n", - "epoch: 9 step: 158, loss is 1.406843900680542\n", - "epoch: 9 step: 159, loss is 1.4105967283248901\n", - "epoch: 9 step: 160, loss is 1.457517385482788\n", - "epoch: 9 step: 161, loss is 1.4588218927383423\n", - "epoch: 9 step: 162, loss is 1.3877910375595093\n", - "epoch: 9 step: 163, loss is 1.4787626266479492\n", - "epoch: 9 step: 164, loss is 1.3771215677261353\n", - "epoch: 9 step: 165, loss is 1.405901312828064\n", - "epoch: 9 step: 166, loss is 1.421569585800171\n", - "epoch: 9 step: 167, loss is 1.460265874862671\n", - "epoch: 9 step: 168, loss is 1.4700508117675781\n", - "epoch: 9 step: 169, loss is 1.438446044921875\n", - "epoch: 9 step: 170, loss is 1.4892833232879639\n", - "epoch: 9 step: 171, loss is 1.4415148496627808\n", - "epoch: 9 step: 172, loss is 1.4261375665664673\n", - "epoch: 9 step: 173, loss is 1.4154267311096191\n", - "epoch: 9 step: 174, loss is 1.4122343063354492\n", - "epoch: 9 step: 175, loss is 1.4426662921905518\n", - "epoch: 9 step: 176, loss is 1.4262408018112183\n", - "epoch: 9 step: 177, loss is 1.4288458824157715\n", - "epoch: 9 step: 178, loss is 1.3773396015167236\n", - "epoch: 9 step: 179, loss is 1.4386996030807495\n", - "epoch: 9 step: 180, loss is 1.5087385177612305\n", - "epoch: 9 step: 181, loss is 1.4953683614730835\n", - "epoch: 9 step: 182, loss is 1.3578590154647827\n", - "epoch: 9 step: 183, loss is 1.4742990732192993\n", - "epoch: 9 step: 184, loss is 1.3722326755523682\n", - "epoch: 9 step: 185, loss is 1.3456703424453735\n", - "epoch: 9 step: 186, loss is 1.4324419498443604\n", - "epoch: 9 step: 187, loss is 1.3626792430877686\n", - "epoch: 9 step: 188, loss is 1.382414698600769\n", - "epoch: 9 step: 189, loss is 1.51063871383667\n", - "epoch: 9 step: 190, loss is 1.3704649209976196\n", - "epoch: 9 step: 191, loss is 1.4699370861053467\n", - "epoch: 9 step: 192, loss is 
1.370850682258606\n", - "epoch: 9 step: 193, loss is 1.3789448738098145\n", - "epoch: 9 step: 194, loss is 1.370216965675354\n", - "epoch: 9 step: 195, loss is 1.3970491886138916\n", - "Train epoch time: 107645.555 ms, per step time: 552.028 ms\n", - "epoch: 10 step: 1, loss is 1.2965832948684692\n", - "epoch: 10 step: 2, loss is 1.3624372482299805\n", - "epoch: 10 step: 3, loss is 1.2861902713775635\n", - "epoch: 10 step: 4, loss is 1.3241184949874878\n", - "epoch: 10 step: 5, loss is 1.394718050956726\n", - "epoch: 10 step: 6, loss is 1.3413889408111572\n", - "epoch: 10 step: 7, loss is 1.4911961555480957\n", - "epoch: 10 step: 8, loss is 1.417178750038147\n", - "epoch: 10 step: 9, loss is 1.3227043151855469\n", - "epoch: 10 step: 10, loss is 1.4835829734802246\n", - "epoch: 10 step: 11, loss is 1.412089228630066\n", - "epoch: 10 step: 12, loss is 1.40829598903656\n", - "epoch: 10 step: 13, loss is 1.3629873991012573\n", - "epoch: 10 step: 14, loss is 1.3833584785461426\n", - "epoch: 10 step: 15, loss is 1.411811113357544\n", - "epoch: 10 step: 16, loss is 1.398743748664856\n", - "epoch: 10 step: 17, loss is 1.4473192691802979\n", - "epoch: 10 step: 18, loss is 1.3827720880508423\n", - "epoch: 10 step: 19, loss is 1.4063584804534912\n", - "epoch: 10 step: 20, loss is 1.3422629833221436\n", - "epoch: 10 step: 21, loss is 1.334032416343689\n", - "epoch: 10 step: 22, loss is 1.414698600769043\n", - "epoch: 10 step: 23, loss is 1.3855944871902466\n", - "epoch: 10 step: 24, loss is 1.4665839672088623\n", - "epoch: 10 step: 25, loss is 1.354512333869934\n", - "epoch: 10 step: 26, loss is 1.3405847549438477\n", - "epoch: 10 step: 27, loss is 1.297778844833374\n", - "epoch: 10 step: 28, loss is 1.3773537874221802\n", - "epoch: 10 step: 29, loss is 1.3269649744033813\n", - "epoch: 10 step: 30, loss is 1.422094464302063\n", - "epoch: 10 step: 31, loss is 1.4265559911727905\n", - "epoch: 10 step: 32, loss is 1.418647289276123\n", - "epoch: 10 step: 33, loss is 1.389244794845581\n", - "epoch: 10 step: 34, loss is 1.3263553380966187\n", - "epoch: 10 step: 35, loss is 1.275538444519043\n", - "epoch: 10 step: 36, loss is 1.3983631134033203\n", - "epoch: 10 step: 37, loss is 1.4519941806793213\n", - "epoch: 10 step: 38, loss is 1.3014628887176514\n", - "epoch: 10 step: 39, loss is 1.4144717454910278\n", - "epoch: 10 step: 40, loss is 1.4499932527542114\n", - "epoch: 10 step: 41, loss is 1.3236216306686401\n", - "epoch: 10 step: 42, loss is 1.4025533199310303\n", - "epoch: 10 step: 43, loss is 1.4357435703277588\n", - "epoch: 10 step: 44, loss is 1.3669359683990479\n", - "epoch: 10 step: 45, loss is 1.4060128927230835\n", - "epoch: 10 step: 46, loss is 1.4033373594284058\n", - "epoch: 10 step: 47, loss is 1.3929178714752197\n", - "epoch: 10 step: 48, loss is 1.4031305313110352\n", - "epoch: 10 step: 49, loss is 1.4559919834136963\n", - "epoch: 10 step: 50, loss is 1.3836954832077026\n", - "epoch: 10 step: 51, loss is 1.3887559175491333\n", - "epoch: 10 step: 52, loss is 1.3818857669830322\n", - "epoch: 10 step: 53, loss is 1.3133264780044556\n", - "epoch: 10 step: 54, loss is 1.4239494800567627\n", - "epoch: 10 step: 55, loss is 1.405240535736084\n", - "epoch: 10 step: 56, loss is 1.3808413743972778\n", - "epoch: 10 step: 57, loss is 1.4532716274261475\n", - "epoch: 10 step: 58, loss is 1.3615643978118896\n", - "epoch: 10 step: 59, loss is 1.3954668045043945\n", - "epoch: 10 step: 60, loss is 1.4080253839492798\n", - "epoch: 10 step: 61, loss is 1.5060691833496094\n", - "epoch: 10 step: 62, loss is 
1.3712656497955322\n", - "epoch: 10 step: 63, loss is 1.4440624713897705\n", - "epoch: 10 step: 64, loss is 1.4231438636779785\n", - "epoch: 10 step: 65, loss is 1.4151415824890137\n", - "epoch: 10 step: 66, loss is 1.4696968793869019\n", - "epoch: 10 step: 67, loss is 1.3374946117401123\n", - "epoch: 10 step: 68, loss is 1.447821021080017\n", - "epoch: 10 step: 69, loss is 1.373509168624878\n", - "epoch: 10 step: 70, loss is 1.3768022060394287\n", - "epoch: 10 step: 71, loss is 1.4237534999847412\n", - "epoch: 10 step: 72, loss is 1.3670027256011963\n", - "epoch: 10 step: 73, loss is 1.3372024297714233\n", - "epoch: 10 step: 74, loss is 1.3745112419128418\n", - "epoch: 10 step: 75, loss is 1.3913434743881226\n", - "epoch: 10 step: 76, loss is 1.50279700756073\n", - "epoch: 10 step: 77, loss is 1.378105878829956\n", - "epoch: 10 step: 78, loss is 1.4290138483047485\n", - "epoch: 10 step: 79, loss is 1.3755422830581665\n", - "epoch: 10 step: 80, loss is 1.3978270292282104\n", - "epoch: 10 step: 81, loss is 1.418957233428955\n", - "epoch: 10 step: 82, loss is 1.3231757879257202\n", - "epoch: 10 step: 83, loss is 1.3601419925689697\n", - "epoch: 10 step: 84, loss is 1.3891749382019043\n", - "epoch: 10 step: 85, loss is 1.3336032629013062\n", - "epoch: 10 step: 86, loss is 1.4387264251708984\n", - "epoch: 10 step: 87, loss is 1.447472333908081\n", - "epoch: 10 step: 88, loss is 1.3845824003219604\n", - "epoch: 10 step: 89, loss is 1.3586456775665283\n", - "epoch: 10 step: 90, loss is 1.4330099821090698\n", - "epoch: 10 step: 91, loss is 1.4640119075775146\n", - "epoch: 10 step: 92, loss is 1.4041026830673218\n", - "epoch: 10 step: 93, loss is 1.4550449848175049\n", - "epoch: 10 step: 94, loss is 1.5469350814819336\n", - "epoch: 10 step: 95, loss is 1.3541643619537354\n", - "epoch: 10 step: 96, loss is 1.3415396213531494\n", - "epoch: 10 step: 97, loss is 1.3518762588500977\n", - "epoch: 10 step: 98, loss is 1.360211730003357\n", - "epoch: 10 step: 99, loss is 1.4328492879867554\n", - "epoch: 10 step: 100, loss is 1.4450207948684692\n", - "epoch: 10 step: 101, loss is 1.477220892906189\n", - "epoch: 10 step: 102, loss is 1.3989790678024292\n", - "epoch: 10 step: 103, loss is 1.4099704027175903\n", - "epoch: 10 step: 104, loss is 1.3115266561508179\n", - "epoch: 10 step: 105, loss is 1.3436390161514282\n", - "epoch: 10 step: 106, loss is 1.3612253665924072\n", - "epoch: 10 step: 107, loss is 1.4591314792633057\n", - "epoch: 10 step: 108, loss is 1.4166926145553589\n", - "epoch: 10 step: 109, loss is 1.3011889457702637\n", - "epoch: 10 step: 110, loss is 1.4686541557312012\n", - "epoch: 10 step: 111, loss is 1.3920936584472656\n", - "epoch: 10 step: 112, loss is 1.4055709838867188\n", - "epoch: 10 step: 113, loss is 1.4412797689437866\n", - "epoch: 10 step: 114, loss is 1.4033015966415405\n", - "epoch: 10 step: 115, loss is 1.3964070081710815\n", - "epoch: 10 step: 116, loss is 1.3559627532958984\n", - "epoch: 10 step: 117, loss is 1.3963572978973389\n", - "epoch: 10 step: 118, loss is 1.349740982055664\n", - "epoch: 10 step: 119, loss is 1.4372875690460205\n", - "epoch: 10 step: 120, loss is 1.358196496963501\n", - "epoch: 10 step: 121, loss is 1.3117228746414185\n", - "epoch: 10 step: 122, loss is 1.3786026239395142\n", - "epoch: 10 step: 123, loss is 1.3010280132293701\n", - "epoch: 10 step: 124, loss is 1.4654299020767212\n", - "epoch: 10 step: 125, loss is 1.3337533473968506\n", - "epoch: 10 step: 126, loss is 1.364565372467041\n", - "epoch: 10 step: 127, loss is 1.2704663276672363\n", - 
"epoch: 10 step: 128, loss is 1.3564231395721436\n", - "epoch: 10 step: 129, loss is 1.4180039167404175\n", - "epoch: 10 step: 130, loss is 1.3639845848083496\n", - "epoch: 10 step: 131, loss is 1.3803002834320068\n", - "epoch: 10 step: 132, loss is 1.3628630638122559\n", - "epoch: 10 step: 133, loss is 1.3558411598205566\n", - "epoch: 10 step: 134, loss is 1.4883337020874023\n", - "epoch: 10 step: 135, loss is 1.333411455154419\n", - "epoch: 10 step: 136, loss is 1.3035935163497925\n", - "epoch: 10 step: 137, loss is 1.3214057683944702\n", - "epoch: 10 step: 138, loss is 1.3928450345993042\n", - "epoch: 10 step: 139, loss is 1.3657970428466797\n", - "epoch: 10 step: 140, loss is 1.3745687007904053\n", - "epoch: 10 step: 141, loss is 1.4447153806686401\n", - "epoch: 10 step: 142, loss is 1.2999922037124634\n", - "epoch: 10 step: 143, loss is 1.3633801937103271\n", - "epoch: 10 step: 144, loss is 1.3480429649353027\n", - "epoch: 10 step: 145, loss is 1.3235867023468018\n", - "epoch: 10 step: 146, loss is 1.3890784978866577\n", - "epoch: 10 step: 147, loss is 1.4675740003585815\n", - "epoch: 10 step: 148, loss is 1.4685134887695312\n", - "epoch: 10 step: 149, loss is 1.377044916152954\n", - "epoch: 10 step: 150, loss is 1.3216993808746338\n", - "epoch: 10 step: 151, loss is 1.290349006652832\n", - "epoch: 10 step: 152, loss is 1.413101315498352\n", - "epoch: 10 step: 153, loss is 1.3955527544021606\n", - "epoch: 10 step: 154, loss is 1.4087172746658325\n", - "epoch: 10 step: 155, loss is 1.2923272848129272\n", - "epoch: 10 step: 156, loss is 1.346842646598816\n", - "epoch: 10 step: 157, loss is 1.3531287908554077\n", - "epoch: 10 step: 158, loss is 1.3451437950134277\n", - "epoch: 10 step: 159, loss is 1.4337797164916992\n", - "epoch: 10 step: 160, loss is 1.475834846496582\n", - "epoch: 10 step: 161, loss is 1.4216883182525635\n", - "epoch: 10 step: 162, loss is 1.3326752185821533\n", - "epoch: 10 step: 163, loss is 1.3601791858673096\n", - "epoch: 10 step: 164, loss is 1.4948947429656982\n", - "epoch: 10 step: 165, loss is 1.4416465759277344\n", - "epoch: 10 step: 166, loss is 1.4442185163497925\n", - "epoch: 10 step: 167, loss is 1.4152555465698242\n", - "epoch: 10 step: 168, loss is 1.4029054641723633\n", - "epoch: 10 step: 169, loss is 1.3682323694229126\n", - "epoch: 10 step: 170, loss is 1.4172747135162354\n", - "epoch: 10 step: 171, loss is 1.4432785511016846\n", - "epoch: 10 step: 172, loss is 1.4246838092803955\n", - "epoch: 10 step: 173, loss is 1.371138572692871\n", - "epoch: 10 step: 174, loss is 1.3486088514328003\n", - "epoch: 10 step: 175, loss is 1.4125900268554688\n", - "epoch: 10 step: 176, loss is 1.3639887571334839\n", - "epoch: 10 step: 177, loss is 1.3915724754333496\n", - "epoch: 10 step: 178, loss is 1.3451502323150635\n", - "epoch: 10 step: 179, loss is 1.3965051174163818\n", - "epoch: 10 step: 180, loss is 1.380311369895935\n", - "epoch: 10 step: 181, loss is 1.3846101760864258\n", - "epoch: 10 step: 182, loss is 1.398587703704834\n", - "epoch: 10 step: 183, loss is 1.3646464347839355\n", - "epoch: 10 step: 184, loss is 1.4128984212875366\n", - "epoch: 10 step: 185, loss is 1.369757890701294\n", - "epoch: 10 step: 186, loss is 1.3501085042953491\n", - "epoch: 10 step: 187, loss is 1.4274100065231323\n", - "epoch: 10 step: 188, loss is 1.3888895511627197\n", - "epoch: 10 step: 189, loss is 1.3530522584915161\n", - "epoch: 10 step: 190, loss is 1.3565177917480469\n", - "epoch: 10 step: 191, loss is 1.3507171869277954\n", - "epoch: 10 step: 192, loss is 
1.3254823684692383\n", - "epoch: 10 step: 193, loss is 1.4154565334320068\n", - "epoch: 10 step: 194, loss is 1.3216679096221924\n", - "epoch: 10 step: 195, loss is 1.4296022653579712\n", - "Train epoch time: 101595.825 ms, per step time: 521.004 ms\n", - "epoch: 11 step: 1, loss is 1.3514986038208008\n", - "epoch: 11 step: 2, loss is 1.3705165386199951\n", - "epoch: 11 step: 3, loss is 1.3199777603149414\n", - "epoch: 11 step: 4, loss is 1.3809438943862915\n", - "epoch: 11 step: 5, loss is 1.2884817123413086\n", - "epoch: 11 step: 6, loss is 1.4112142324447632\n", - "epoch: 11 step: 7, loss is 1.355219841003418\n", - "epoch: 11 step: 8, loss is 1.414138913154602\n", - "epoch: 11 step: 9, loss is 1.4002182483673096\n", - "epoch: 11 step: 10, loss is 1.3864917755126953\n", - "epoch: 11 step: 11, loss is 1.3203208446502686\n", - "epoch: 11 step: 12, loss is 1.3462626934051514\n", - "epoch: 11 step: 13, loss is 1.2533496618270874\n", - "epoch: 11 step: 14, loss is 1.40065598487854\n", - "epoch: 11 step: 15, loss is 1.3974335193634033\n", - "epoch: 11 step: 16, loss is 1.4740949869155884\n", - "epoch: 11 step: 17, loss is 1.3100659847259521\n", - "epoch: 11 step: 18, loss is 1.3775184154510498\n", - "epoch: 11 step: 19, loss is 1.3206316232681274\n", - "epoch: 11 step: 20, loss is 1.3319069147109985\n", - "epoch: 11 step: 21, loss is 1.3000259399414062\n", - "epoch: 11 step: 22, loss is 1.4466540813446045\n", - "epoch: 11 step: 23, loss is 1.4565842151641846\n", - "epoch: 11 step: 24, loss is 1.436469554901123\n", - "epoch: 11 step: 25, loss is 1.3870550394058228\n", - "epoch: 11 step: 26, loss is 1.4553287029266357\n", - "epoch: 11 step: 27, loss is 1.2967276573181152\n", - "epoch: 11 step: 28, loss is 1.3419265747070312\n", - "epoch: 11 step: 29, loss is 1.367044448852539\n", - "epoch: 11 step: 30, loss is 1.264862060546875\n", - "epoch: 11 step: 31, loss is 1.411987543106079\n", - "epoch: 11 step: 32, loss is 1.3183720111846924\n", - "epoch: 11 step: 33, loss is 1.3933228254318237\n", - "epoch: 11 step: 34, loss is 1.3272223472595215\n", - "epoch: 11 step: 35, loss is 1.2883217334747314\n", - "epoch: 11 step: 36, loss is 1.3552230596542358\n", - "epoch: 11 step: 37, loss is 1.3874359130859375\n", - "epoch: 11 step: 38, loss is 1.3859970569610596\n", - "epoch: 11 step: 39, loss is 1.3702952861785889\n", - "epoch: 11 step: 40, loss is 1.442229986190796\n", - "epoch: 11 step: 41, loss is 1.2251743078231812\n", - "epoch: 11 step: 42, loss is 1.365355134010315\n", - "epoch: 11 step: 43, loss is 1.3616288900375366\n", - "epoch: 11 step: 44, loss is 1.4360814094543457\n", - "epoch: 11 step: 45, loss is 1.3755671977996826\n", - "epoch: 11 step: 46, loss is 1.3499059677124023\n", - "epoch: 11 step: 47, loss is 1.300149917602539\n", - "epoch: 11 step: 48, loss is 1.3271315097808838\n", - "epoch: 11 step: 49, loss is 1.3486077785491943\n", - "epoch: 11 step: 50, loss is 1.4033631086349487\n", - "epoch: 11 step: 51, loss is 1.3766133785247803\n", - "epoch: 11 step: 52, loss is 1.3841084241867065\n", - "epoch: 11 step: 53, loss is 1.4159090518951416\n", - "epoch: 11 step: 54, loss is 1.4055582284927368\n", - "epoch: 11 step: 55, loss is 1.3272042274475098\n", - "epoch: 11 step: 56, loss is 1.2775148153305054\n", - "epoch: 11 step: 57, loss is 1.3210208415985107\n", - "epoch: 11 step: 58, loss is 1.4437336921691895\n", - "epoch: 11 step: 59, loss is 1.2408883571624756\n", - "epoch: 11 step: 60, loss is 1.3514443635940552\n", - "epoch: 11 step: 61, loss is 1.3840934038162231\n", - "epoch: 11 step: 62, 
loss is 1.415282964706421\n", - "epoch: 11 step: 63, loss is 1.2683483362197876\n", - "epoch: 11 step: 64, loss is 1.3667012453079224\n", - "epoch: 11 step: 65, loss is 1.383507251739502\n", - "epoch: 11 step: 66, loss is 1.3947486877441406\n", - "epoch: 11 step: 67, loss is 1.3435380458831787\n", - "epoch: 11 step: 68, loss is 1.353773832321167\n", - "epoch: 11 step: 69, loss is 1.432517409324646\n", - "epoch: 11 step: 70, loss is 1.3472764492034912\n", - "epoch: 11 step: 71, loss is 1.4028894901275635\n", - "epoch: 11 step: 72, loss is 1.3879528045654297\n", - "epoch: 11 step: 73, loss is 1.3442697525024414\n", - "epoch: 11 step: 74, loss is 1.4034984111785889\n", - "epoch: 11 step: 75, loss is 1.3146501779556274\n", - "epoch: 11 step: 76, loss is 1.3686045408248901\n", - "epoch: 11 step: 77, loss is 1.3054349422454834\n", - "epoch: 11 step: 78, loss is 1.4089261293411255\n", - "epoch: 11 step: 79, loss is 1.4178318977355957\n", - "epoch: 11 step: 80, loss is 1.380204439163208\n", - "epoch: 11 step: 81, loss is 1.3763902187347412\n", - "epoch: 11 step: 82, loss is 1.373510479927063\n", - "epoch: 11 step: 83, loss is 1.365464448928833\n", - "epoch: 11 step: 84, loss is 1.3540847301483154\n", - "epoch: 11 step: 85, loss is 1.4299991130828857\n", - "epoch: 11 step: 86, loss is 1.378877878189087\n", - "epoch: 11 step: 87, loss is 1.3229223489761353\n", - "epoch: 11 step: 88, loss is 1.3692700862884521\n", - "epoch: 11 step: 89, loss is 1.2946447134017944\n", - "epoch: 11 step: 90, loss is 1.3349860906600952\n", - "epoch: 11 step: 91, loss is 1.3907686471939087\n", - "epoch: 11 step: 92, loss is 1.3047585487365723\n", - "epoch: 11 step: 93, loss is 1.4182707071304321\n", - "epoch: 11 step: 94, loss is 1.4012025594711304\n", - "epoch: 11 step: 95, loss is 1.4166676998138428\n", - "epoch: 11 step: 96, loss is 1.3014649152755737\n", - "epoch: 11 step: 97, loss is 1.308947205543518\n", - "epoch: 11 step: 98, loss is 1.3489638566970825\n", - "epoch: 11 step: 99, loss is 1.2714179754257202\n", - "epoch: 11 step: 100, loss is 1.3834896087646484\n", - "epoch: 11 step: 101, loss is 1.4287711381912231\n", - "epoch: 11 step: 102, loss is 1.382741928100586\n", - "epoch: 11 step: 103, loss is 1.3602871894836426\n", - "epoch: 11 step: 104, loss is 1.3250287771224976\n", - "epoch: 11 step: 105, loss is 1.3959355354309082\n", - "epoch: 11 step: 106, loss is 1.3501150608062744\n", - "epoch: 11 step: 107, loss is 1.3617260456085205\n", - "epoch: 11 step: 108, loss is 1.3895246982574463\n", - "epoch: 11 step: 109, loss is 1.2615770101547241\n", - "epoch: 11 step: 110, loss is 1.2695530652999878\n", - "epoch: 11 step: 111, loss is 1.3917005062103271\n", - "epoch: 11 step: 112, loss is 1.3721950054168701\n", - "epoch: 11 step: 113, loss is 1.3860143423080444\n", - "epoch: 11 step: 114, loss is 1.3357609510421753\n", - "epoch: 11 step: 115, loss is 1.266148567199707\n", - "epoch: 11 step: 116, loss is 1.394500732421875\n", - "epoch: 11 step: 117, loss is 1.374269723892212\n", - "epoch: 11 step: 118, loss is 1.4328869581222534\n", - "epoch: 11 step: 119, loss is 1.4372501373291016\n", - "epoch: 11 step: 120, loss is 1.318766474723816\n", - "epoch: 11 step: 121, loss is 1.2610046863555908\n", - "epoch: 11 step: 122, loss is 1.3094037771224976\n", - "epoch: 11 step: 123, loss is 1.3522298336029053\n", - "epoch: 11 step: 124, loss is 1.3229259252548218\n", - "epoch: 11 step: 125, loss is 1.4207537174224854\n", - "epoch: 11 step: 126, loss is 1.42739737033844\n", - "epoch: 11 step: 127, loss is 1.365236520767212\n", - 
"epoch: 11 step: 128, loss is 1.4127171039581299\n", - "epoch: 11 step: 129, loss is 1.2993857860565186\n", - "epoch: 11 step: 130, loss is 1.300777792930603\n", - "epoch: 11 step: 131, loss is 1.3890771865844727\n", - "epoch: 11 step: 132, loss is 1.4688968658447266\n", - "epoch: 11 step: 133, loss is 1.3597408533096313\n", - "epoch: 11 step: 134, loss is 1.3276069164276123\n", - "epoch: 11 step: 135, loss is 1.3636209964752197\n", - "epoch: 11 step: 136, loss is 1.369603157043457\n", - "epoch: 11 step: 137, loss is 1.375201940536499\n", - "epoch: 11 step: 138, loss is 1.3906245231628418\n", - "epoch: 11 step: 139, loss is 1.3657907247543335\n", - "epoch: 11 step: 140, loss is 1.452655553817749\n", - "epoch: 11 step: 141, loss is 1.3102095127105713\n", - "epoch: 11 step: 142, loss is 1.2915419340133667\n", - "epoch: 11 step: 143, loss is 1.3614193201065063\n", - "epoch: 11 step: 144, loss is 1.2567229270935059\n", - "epoch: 11 step: 145, loss is 1.4643816947937012\n", - "epoch: 11 step: 146, loss is 1.3137043714523315\n", - "epoch: 11 step: 147, loss is 1.2624988555908203\n", - "epoch: 11 step: 148, loss is 1.3650047779083252\n", - "epoch: 11 step: 149, loss is 1.36278235912323\n", - "epoch: 11 step: 150, loss is 1.283724069595337\n", - "epoch: 11 step: 151, loss is 1.3203036785125732\n", - "epoch: 11 step: 152, loss is 1.361722707748413\n", - "epoch: 11 step: 153, loss is 1.376830816268921\n", - "epoch: 11 step: 154, loss is 1.3566585779190063\n", - "epoch: 11 step: 155, loss is 1.3124173879623413\n", - "epoch: 11 step: 156, loss is 1.2711949348449707\n", - "epoch: 11 step: 157, loss is 1.2913358211517334\n", - "epoch: 11 step: 158, loss is 1.4583773612976074\n", - "epoch: 11 step: 159, loss is 1.2532050609588623\n", - "epoch: 11 step: 160, loss is 1.4087457656860352\n", - "epoch: 11 step: 161, loss is 1.3923335075378418\n", - "epoch: 11 step: 162, loss is 1.2906339168548584\n", - "epoch: 11 step: 163, loss is 1.3523333072662354\n", - "epoch: 11 step: 164, loss is 1.3478566408157349\n", - "epoch: 11 step: 165, loss is 1.3769158124923706\n", - "epoch: 11 step: 166, loss is 1.3983933925628662\n", - "epoch: 11 step: 167, loss is 1.3652666807174683\n", - "epoch: 11 step: 168, loss is 1.354666829109192\n", - "epoch: 11 step: 169, loss is 1.3988981246948242\n", - "epoch: 11 step: 170, loss is 1.3378368616104126\n", - "epoch: 11 step: 171, loss is 1.3551857471466064\n", - "epoch: 11 step: 172, loss is 1.3677846193313599\n", - "epoch: 11 step: 173, loss is 1.3591129779815674\n", - "epoch: 11 step: 174, loss is 1.336830973625183\n", - "epoch: 11 step: 175, loss is 1.294440507888794\n", - "epoch: 11 step: 176, loss is 1.3856513500213623\n", - "epoch: 11 step: 177, loss is 1.4490528106689453\n", - "epoch: 11 step: 178, loss is 1.2735486030578613\n", - "epoch: 11 step: 179, loss is 1.3973665237426758\n", - "epoch: 11 step: 180, loss is 1.385074496269226\n", - "epoch: 11 step: 181, loss is 1.2603427171707153\n", - "epoch: 11 step: 182, loss is 1.4360918998718262\n", - "epoch: 11 step: 183, loss is 1.4103286266326904\n", - "epoch: 11 step: 184, loss is 1.3210636377334595\n", - "epoch: 11 step: 185, loss is 1.289481520652771\n", - "epoch: 11 step: 186, loss is 1.3785433769226074\n", - "epoch: 11 step: 187, loss is 1.3512091636657715\n", - "epoch: 11 step: 188, loss is 1.4189457893371582\n", - "epoch: 11 step: 189, loss is 1.3827811479568481\n", - "epoch: 11 step: 190, loss is 1.411993384361267\n", - "epoch: 11 step: 191, loss is 1.4060397148132324\n", - "epoch: 11 step: 192, loss is 
1.2939680814743042\n", - "epoch: 11 step: 193, loss is 1.387575626373291\n", - "epoch: 11 step: 194, loss is 1.3169260025024414\n", - "epoch: 11 step: 195, loss is 1.3644827604293823\n", - "Train epoch time: 109156.335 ms, per step time: 559.776 ms\n", - "epoch: 12 step: 1, loss is 1.3749120235443115\n", - "epoch: 12 step: 2, loss is 1.3257428407669067\n", - "epoch: 12 step: 3, loss is 1.3339729309082031\n", - "epoch: 12 step: 4, loss is 1.345080852508545\n", - "epoch: 12 step: 5, loss is 1.3282959461212158\n", - "epoch: 12 step: 6, loss is 1.332105040550232\n", - "epoch: 12 step: 7, loss is 1.4509965181350708\n", - "epoch: 12 step: 8, loss is 1.3731828927993774\n", - "epoch: 12 step: 9, loss is 1.3712350130081177\n", - "epoch: 12 step: 10, loss is 1.2894232273101807\n", - "epoch: 12 step: 11, loss is 1.3279643058776855\n", - "epoch: 12 step: 12, loss is 1.3612538576126099\n", - "epoch: 12 step: 13, loss is 1.3893709182739258\n", - "epoch: 12 step: 14, loss is 1.5033447742462158\n", - "epoch: 12 step: 15, loss is 1.3631224632263184\n", - "epoch: 12 step: 16, loss is 1.346184492111206\n", - "epoch: 12 step: 17, loss is 1.211869478225708\n", - "epoch: 12 step: 18, loss is 1.3692021369934082\n", - "epoch: 12 step: 19, loss is 1.315664529800415\n", - "epoch: 12 step: 20, loss is 1.3213762044906616\n", - "epoch: 12 step: 21, loss is 1.3617274761199951\n", - "epoch: 12 step: 22, loss is 1.3757624626159668\n", - "epoch: 12 step: 23, loss is 1.2932567596435547\n", - "epoch: 12 step: 24, loss is 1.3488984107971191\n", - "epoch: 12 step: 25, loss is 1.2407503128051758\n", - "epoch: 12 step: 26, loss is 1.3898820877075195\n", - "epoch: 12 step: 27, loss is 1.3436524868011475\n", - "epoch: 12 step: 28, loss is 1.37770676612854\n", - "epoch: 12 step: 29, loss is 1.2700433731079102\n", - "epoch: 12 step: 30, loss is 1.370192050933838\n", - "epoch: 12 step: 31, loss is 1.4042953252792358\n", - "epoch: 12 step: 32, loss is 1.2976502180099487\n", - "epoch: 12 step: 33, loss is 1.3905391693115234\n", - "epoch: 12 step: 34, loss is 1.3600332736968994\n", - "epoch: 12 step: 35, loss is 1.370139718055725\n", - "epoch: 12 step: 36, loss is 1.3202131986618042\n", - "epoch: 12 step: 37, loss is 1.3199766874313354\n", - "epoch: 12 step: 38, loss is 1.3705322742462158\n", - "epoch: 12 step: 39, loss is 1.4356493949890137\n", - "epoch: 12 step: 40, loss is 1.3903864622116089\n", - "epoch: 12 step: 41, loss is 1.4473252296447754\n", - "epoch: 12 step: 42, loss is 1.4008510112762451\n", - "epoch: 12 step: 43, loss is 1.212838888168335\n", - "epoch: 12 step: 44, loss is 1.364315390586853\n", - "epoch: 12 step: 45, loss is 1.406559944152832\n", - "epoch: 12 step: 46, loss is 1.40316641330719\n", - "epoch: 12 step: 47, loss is 1.389106035232544\n", - "epoch: 12 step: 48, loss is 1.399122953414917\n", - "epoch: 12 step: 49, loss is 1.399647831916809\n", - "epoch: 12 step: 50, loss is 1.3538447618484497\n", - "epoch: 12 step: 51, loss is 1.3019393682479858\n", - "epoch: 12 step: 52, loss is 1.3091504573822021\n", - "epoch: 12 step: 53, loss is 1.2999556064605713\n", - "epoch: 12 step: 54, loss is 1.3275054693222046\n", - "epoch: 12 step: 55, loss is 1.3532236814498901\n", - "epoch: 12 step: 56, loss is 1.3464151620864868\n", - "epoch: 12 step: 57, loss is 1.2939003705978394\n", - "epoch: 12 step: 58, loss is 1.29934561252594\n", - "epoch: 12 step: 59, loss is 1.2517552375793457\n", - "epoch: 12 step: 60, loss is 1.2833586931228638\n", - "epoch: 12 step: 61, loss is 1.3098225593566895\n", - "epoch: 12 step: 62, loss is 
1.342951774597168\n", - "epoch: 12 step: 63, loss is 1.335723638534546\n", - "epoch: 12 step: 64, loss is 1.4209569692611694\n", - "epoch: 12 step: 65, loss is 1.3660608530044556\n", - "epoch: 12 step: 66, loss is 1.3221166133880615\n", - "epoch: 12 step: 67, loss is 1.3616020679473877\n", - "epoch: 12 step: 68, loss is 1.4362266063690186\n", - "epoch: 12 step: 69, loss is 1.3845769166946411\n", - "epoch: 12 step: 70, loss is 1.3931061029434204\n", - "epoch: 12 step: 71, loss is 1.3252878189086914\n", - "epoch: 12 step: 72, loss is 1.335828185081482\n", - "epoch: 12 step: 73, loss is 1.3358795642852783\n", - "epoch: 12 step: 74, loss is 1.3375921249389648\n", - "epoch: 12 step: 75, loss is 1.3766423463821411\n", - "epoch: 12 step: 76, loss is 1.3925431966781616\n", - "epoch: 12 step: 77, loss is 1.367783784866333\n", - "epoch: 12 step: 78, loss is 1.3847731351852417\n", - "epoch: 12 step: 79, loss is 1.327022910118103\n", - "epoch: 12 step: 80, loss is 1.3467707633972168\n", - "epoch: 12 step: 81, loss is 1.3389359712600708\n", - "epoch: 12 step: 82, loss is 1.3532925844192505\n", - "epoch: 12 step: 83, loss is 1.2030620574951172\n", - "epoch: 12 step: 84, loss is 1.31825852394104\n", - "epoch: 12 step: 85, loss is 1.3440210819244385\n", - "epoch: 12 step: 86, loss is 1.3510026931762695\n", - "epoch: 12 step: 87, loss is 1.317724347114563\n", - "epoch: 12 step: 88, loss is 1.4584540128707886\n", - "epoch: 12 step: 89, loss is 1.3655003309249878\n", - "epoch: 12 step: 90, loss is 1.3314133882522583\n", - "epoch: 12 step: 91, loss is 1.3347185850143433\n", - "epoch: 12 step: 92, loss is 1.3305401802062988\n", - "epoch: 12 step: 93, loss is 1.3862367868423462\n", - "epoch: 12 step: 94, loss is 1.2733185291290283\n", - "epoch: 12 step: 95, loss is 1.3003748655319214\n", - "epoch: 12 step: 96, loss is 1.368822455406189\n", - "epoch: 12 step: 97, loss is 1.361835241317749\n", - "epoch: 12 step: 98, loss is 1.2483099699020386\n", - "epoch: 12 step: 99, loss is 1.2921302318572998\n", - "epoch: 12 step: 100, loss is 1.3071764707565308\n", - "epoch: 12 step: 101, loss is 1.3578846454620361\n", - "epoch: 12 step: 102, loss is 1.484748363494873\n", - "epoch: 12 step: 103, loss is 1.4120935201644897\n", - "epoch: 12 step: 104, loss is 1.2663170099258423\n", - "epoch: 12 step: 105, loss is 1.310514211654663\n", - "epoch: 12 step: 106, loss is 1.3853429555892944\n", - "epoch: 12 step: 107, loss is 1.3467544317245483\n", - "epoch: 12 step: 108, loss is 1.4428993463516235\n", - "epoch: 12 step: 109, loss is 1.3221195936203003\n", - "epoch: 12 step: 110, loss is 1.3741698265075684\n", - "epoch: 12 step: 111, loss is 1.3167011737823486\n", - "epoch: 12 step: 112, loss is 1.242370367050171\n", - "epoch: 12 step: 113, loss is 1.3208401203155518\n", - "epoch: 12 step: 114, loss is 1.283278226852417\n", - "epoch: 12 step: 115, loss is 1.3911306858062744\n", - "epoch: 12 step: 116, loss is 1.3273272514343262\n", - "epoch: 12 step: 117, loss is 1.3542145490646362\n", - "epoch: 12 step: 118, loss is 1.375185489654541\n", - "epoch: 12 step: 119, loss is 1.3990886211395264\n", - "epoch: 12 step: 120, loss is 1.397849678993225\n", - "epoch: 12 step: 121, loss is 1.3174793720245361\n", - "epoch: 12 step: 122, loss is 1.3419415950775146\n", - "epoch: 12 step: 123, loss is 1.3498806953430176\n", - "epoch: 12 step: 124, loss is 1.3221936225891113\n", - "epoch: 12 step: 125, loss is 1.4351340532302856\n", - "epoch: 12 step: 126, loss is 1.4098035097122192\n", - "epoch: 12 step: 127, loss is 1.3253614902496338\n", - "epoch: 
12 step: 128, loss is 1.284562110900879\n", - "epoch: 12 step: 129, loss is 1.3135451078414917\n", - "epoch: 12 step: 130, loss is 1.3734666109085083\n", - "epoch: 12 step: 131, loss is 1.2726982831954956\n", - "epoch: 12 step: 132, loss is 1.3447588682174683\n", - "epoch: 12 step: 133, loss is 1.3854541778564453\n", - "epoch: 12 step: 134, loss is 1.3437473773956299\n", - "epoch: 12 step: 135, loss is 1.3496053218841553\n", - "epoch: 12 step: 136, loss is 1.3877677917480469\n", - "epoch: 12 step: 137, loss is 1.3551610708236694\n", - "epoch: 12 step: 138, loss is 1.3138344287872314\n", - "epoch: 12 step: 139, loss is 1.3242584466934204\n", - "epoch: 12 step: 140, loss is 1.3433794975280762\n", - "epoch: 12 step: 141, loss is 1.36911141872406\n", - "epoch: 12 step: 142, loss is 1.429419994354248\n", - "epoch: 12 step: 143, loss is 1.324636459350586\n", - "epoch: 12 step: 144, loss is 1.3358206748962402\n", - "epoch: 12 step: 145, loss is 1.2827239036560059\n", - "epoch: 12 step: 146, loss is 1.3973894119262695\n", - "epoch: 12 step: 147, loss is 1.3126802444458008\n", - "epoch: 12 step: 148, loss is 1.272353172302246\n", - "epoch: 12 step: 149, loss is 1.263411283493042\n", - "epoch: 12 step: 150, loss is 1.3647053241729736\n", - "epoch: 12 step: 151, loss is 1.2865593433380127\n", - "epoch: 12 step: 152, loss is 1.3357665538787842\n", - "epoch: 12 step: 153, loss is 1.4210436344146729\n", - "epoch: 12 step: 154, loss is 1.3121615648269653\n", - "epoch: 12 step: 155, loss is 1.4346314668655396\n", - "epoch: 12 step: 156, loss is 1.2400988340377808\n", - "epoch: 12 step: 157, loss is 1.3275915384292603\n", - "epoch: 12 step: 158, loss is 1.321425199508667\n", - "epoch: 12 step: 159, loss is 1.355749249458313\n", - "epoch: 12 step: 160, loss is 1.3264704942703247\n", - "epoch: 12 step: 161, loss is 1.3915637731552124\n", - "epoch: 12 step: 162, loss is 1.419718861579895\n", - "epoch: 12 step: 163, loss is 1.4108872413635254\n", - "epoch: 12 step: 164, loss is 1.2778005599975586\n", - "epoch: 12 step: 165, loss is 1.278543472290039\n", - "epoch: 12 step: 166, loss is 1.3421764373779297\n", - "epoch: 12 step: 167, loss is 1.328843355178833\n", - "epoch: 12 step: 168, loss is 1.3402674198150635\n", - "epoch: 12 step: 169, loss is 1.3722070455551147\n", - "epoch: 12 step: 170, loss is 1.2757922410964966\n", - "epoch: 12 step: 171, loss is 1.254894495010376\n", - "epoch: 12 step: 172, loss is 1.333903193473816\n", - "epoch: 12 step: 173, loss is 1.3079614639282227\n", - "epoch: 12 step: 174, loss is 1.2909159660339355\n", - "epoch: 12 step: 175, loss is 1.309578537940979\n", - "epoch: 12 step: 176, loss is 1.3100945949554443\n", - "epoch: 12 step: 177, loss is 1.33547043800354\n", - "epoch: 12 step: 178, loss is 1.3288025856018066\n", - "epoch: 12 step: 179, loss is 1.3678605556488037\n", - "epoch: 12 step: 180, loss is 1.4968376159667969\n", - "epoch: 12 step: 181, loss is 1.3110452890396118\n", - "epoch: 12 step: 182, loss is 1.28890061378479\n", - "epoch: 12 step: 183, loss is 1.3732596635818481\n", - "epoch: 12 step: 184, loss is 1.3541789054870605\n", - "epoch: 12 step: 185, loss is 1.3579795360565186\n", - "epoch: 12 step: 186, loss is 1.3036326169967651\n", - "epoch: 12 step: 187, loss is 1.3820055723190308\n", - "epoch: 12 step: 188, loss is 1.3236181735992432\n", - "epoch: 12 step: 189, loss is 1.2932544946670532\n", - "epoch: 12 step: 190, loss is 1.3079922199249268\n", - "epoch: 12 step: 191, loss is 1.3040754795074463\n", - "epoch: 12 step: 192, loss is 1.2884989976882935\n", - 
"epoch: 12 step: 193, loss is 1.299836277961731\n", - "epoch: 12 step: 194, loss is 1.3422300815582275\n", - "epoch: 12 step: 195, loss is 1.275468349456787\n", - "Train epoch time: 111763.841 ms, per step time: 573.148 ms\n", - "epoch: 13 step: 1, loss is 1.3198258876800537\n", - "epoch: 13 step: 2, loss is 1.267249584197998\n", - "epoch: 13 step: 3, loss is 1.377860426902771\n", - "epoch: 13 step: 4, loss is 1.2958390712738037\n", - "epoch: 13 step: 5, loss is 1.3649100065231323\n", - "epoch: 13 step: 6, loss is 1.2953672409057617\n", - "epoch: 13 step: 7, loss is 1.2863819599151611\n", - "epoch: 13 step: 8, loss is 1.3077056407928467\n", - "epoch: 13 step: 9, loss is 1.2955645322799683\n", - "epoch: 13 step: 10, loss is 1.2726750373840332\n", - "epoch: 13 step: 11, loss is 1.3051942586898804\n", - "epoch: 13 step: 12, loss is 1.3631467819213867\n", - "epoch: 13 step: 13, loss is 1.332863211631775\n", - "epoch: 13 step: 14, loss is 1.300406813621521\n", - "epoch: 13 step: 15, loss is 1.4065595865249634\n", - "epoch: 13 step: 16, loss is 1.3207926750183105\n", - "epoch: 13 step: 17, loss is 1.4314912557601929\n", - "epoch: 13 step: 18, loss is 1.2905689477920532\n", - "epoch: 13 step: 19, loss is 1.3968850374221802\n", - "epoch: 13 step: 20, loss is 1.3385591506958008\n", - "epoch: 13 step: 21, loss is 1.3743515014648438\n", - "epoch: 13 step: 22, loss is 1.3417283296585083\n", - "epoch: 13 step: 23, loss is 1.2500780820846558\n", - "epoch: 13 step: 24, loss is 1.3430250883102417\n", - "epoch: 13 step: 25, loss is 1.2321382761001587\n", - "epoch: 13 step: 26, loss is 1.3366892337799072\n", - "epoch: 13 step: 27, loss is 1.2943620681762695\n", - "epoch: 13 step: 28, loss is 1.2736753225326538\n", - "epoch: 13 step: 29, loss is 1.3176244497299194\n", - "epoch: 13 step: 30, loss is 1.2949817180633545\n", - "epoch: 13 step: 31, loss is 1.3017407655715942\n", - "epoch: 13 step: 32, loss is 1.250291109085083\n", - "epoch: 13 step: 33, loss is 1.378483533859253\n", - "epoch: 13 step: 34, loss is 1.325563669204712\n", - "epoch: 13 step: 35, loss is 1.2774980068206787\n", - "epoch: 13 step: 36, loss is 1.3602851629257202\n", - "epoch: 13 step: 37, loss is 1.3271067142486572\n", - "epoch: 13 step: 38, loss is 1.2774046659469604\n", - "epoch: 13 step: 39, loss is 1.2986865043640137\n", - "epoch: 13 step: 40, loss is 1.2404587268829346\n", - "epoch: 13 step: 41, loss is 1.2723153829574585\n", - "epoch: 13 step: 42, loss is 1.252101182937622\n", - "epoch: 13 step: 43, loss is 1.3344852924346924\n", - "epoch: 13 step: 44, loss is 1.2985411882400513\n", - "epoch: 13 step: 45, loss is 1.3431930541992188\n", - "epoch: 13 step: 46, loss is 1.2865036725997925\n", - "epoch: 13 step: 47, loss is 1.321958065032959\n", - "epoch: 13 step: 48, loss is 1.2221448421478271\n", - "epoch: 13 step: 49, loss is 1.2524559497833252\n", - "epoch: 13 step: 50, loss is 1.2174897193908691\n", - "epoch: 13 step: 51, loss is 1.3686397075653076\n", - "epoch: 13 step: 52, loss is 1.2955166101455688\n", - "epoch: 13 step: 53, loss is 1.2829947471618652\n", - "epoch: 13 step: 54, loss is 1.352844476699829\n", - "epoch: 13 step: 55, loss is 1.3140009641647339\n", - "epoch: 13 step: 56, loss is 1.3189575672149658\n", - "epoch: 13 step: 57, loss is 1.332587718963623\n", - "epoch: 13 step: 58, loss is 1.3261804580688477\n", - "epoch: 13 step: 59, loss is 1.3275057077407837\n", - "epoch: 13 step: 60, loss is 1.3038597106933594\n", - "epoch: 13 step: 61, loss is 1.188060998916626\n", - "epoch: 13 step: 62, loss is 1.327467918395996\n", - 
"epoch: 13 step: 63, loss is 1.2759475708007812\n", - "epoch: 13 step: 64, loss is 1.3519041538238525\n", - "epoch: 13 step: 65, loss is 1.293632984161377\n", - "epoch: 13 step: 66, loss is 1.3712116479873657\n", - "epoch: 13 step: 67, loss is 1.3739862442016602\n", - "epoch: 13 step: 68, loss is 1.2954466342926025\n", - "epoch: 13 step: 69, loss is 1.2557052373886108\n", - "epoch: 13 step: 70, loss is 1.2708330154418945\n", - "epoch: 13 step: 71, loss is 1.3180299997329712\n", - "epoch: 13 step: 72, loss is 1.324101448059082\n", - "epoch: 13 step: 73, loss is 1.329204797744751\n", - "epoch: 13 step: 74, loss is 1.3019959926605225\n", - "epoch: 13 step: 75, loss is 1.2676588296890259\n", - "epoch: 13 step: 76, loss is 1.325836420059204\n", - "epoch: 13 step: 77, loss is 1.2323129177093506\n", - "epoch: 13 step: 78, loss is 1.3478059768676758\n", - "epoch: 13 step: 79, loss is 1.3038560152053833\n", - "epoch: 13 step: 80, loss is 1.278468370437622\n", - "epoch: 13 step: 81, loss is 1.2795864343643188\n", - "epoch: 13 step: 82, loss is 1.2769668102264404\n", - "epoch: 13 step: 83, loss is 1.326011061668396\n", - "epoch: 13 step: 84, loss is 1.258583903312683\n", - "epoch: 13 step: 85, loss is 1.3920756578445435\n", - "epoch: 13 step: 86, loss is 1.2842274904251099\n", - "epoch: 13 step: 87, loss is 1.2911574840545654\n", - "epoch: 13 step: 88, loss is 1.253190279006958\n", - "epoch: 13 step: 89, loss is 1.2638144493103027\n", - "epoch: 13 step: 90, loss is 1.3445768356323242\n", - "epoch: 13 step: 91, loss is 1.2812072038650513\n", - "epoch: 13 step: 92, loss is 1.2581911087036133\n", - "epoch: 13 step: 93, loss is 1.2116434574127197\n", - "epoch: 13 step: 94, loss is 1.2935380935668945\n", - "epoch: 13 step: 95, loss is 1.3030837774276733\n", - "epoch: 13 step: 96, loss is 1.3300658464431763\n", - "epoch: 13 step: 97, loss is 1.2988314628601074\n", - "epoch: 13 step: 98, loss is 1.253922462463379\n", - "epoch: 13 step: 99, loss is 1.387947916984558\n", - "epoch: 13 step: 100, loss is 1.3204050064086914\n", - "epoch: 13 step: 101, loss is 1.2914122343063354\n", - "epoch: 13 step: 102, loss is 1.3566731214523315\n", - "epoch: 13 step: 103, loss is 1.2503315210342407\n", - "epoch: 13 step: 104, loss is 1.1947333812713623\n", - "epoch: 13 step: 105, loss is 1.3553577661514282\n", - "epoch: 13 step: 106, loss is 1.3109077215194702\n", - "epoch: 13 step: 107, loss is 1.2634453773498535\n", - "epoch: 13 step: 108, loss is 1.2581074237823486\n", - "epoch: 13 step: 109, loss is 1.31442129611969\n", - "epoch: 13 step: 110, loss is 1.2802975177764893\n", - "epoch: 13 step: 111, loss is 1.346985101699829\n", - "epoch: 13 step: 112, loss is 1.320293664932251\n", - "epoch: 13 step: 113, loss is 1.2657285928726196\n", - "epoch: 13 step: 114, loss is 1.3546600341796875\n", - "epoch: 13 step: 115, loss is 1.3065539598464966\n", - "epoch: 13 step: 116, loss is 1.3750945329666138\n", - "epoch: 13 step: 117, loss is 1.3447623252868652\n", - "epoch: 13 step: 118, loss is 1.4035773277282715\n", - "epoch: 13 step: 119, loss is 1.3662588596343994\n", - "epoch: 13 step: 120, loss is 1.3946096897125244\n", - "epoch: 13 step: 121, loss is 1.3096232414245605\n", - "epoch: 13 step: 122, loss is 1.369698166847229\n", - "epoch: 13 step: 123, loss is 1.3456752300262451\n", - "epoch: 13 step: 124, loss is 1.396632194519043\n", - "epoch: 13 step: 125, loss is 1.38190758228302\n", - "epoch: 13 step: 126, loss is 1.3114848136901855\n", - "epoch: 13 step: 127, loss is 1.2863233089447021\n", - "epoch: 13 step: 128, loss is 
1.3047760725021362\n", - "epoch: 13 step: 129, loss is 1.2533119916915894\n", - "epoch: 13 step: 130, loss is 1.3993151187896729\n", - "epoch: 13 step: 131, loss is 1.4202632904052734\n", - "epoch: 13 step: 132, loss is 1.367031455039978\n", - "epoch: 13 step: 133, loss is 1.2697088718414307\n", - "epoch: 13 step: 134, loss is 1.2962384223937988\n", - "epoch: 13 step: 135, loss is 1.289229154586792\n", - "epoch: 13 step: 136, loss is 1.327449917793274\n", - "epoch: 13 step: 137, loss is 1.2646713256835938\n", - "epoch: 13 step: 138, loss is 1.3277664184570312\n", - "epoch: 13 step: 139, loss is 1.3029582500457764\n", - "epoch: 13 step: 140, loss is 1.2564985752105713\n", - "epoch: 13 step: 141, loss is 1.362924575805664\n", - "epoch: 13 step: 142, loss is 1.272240161895752\n", - "epoch: 13 step: 143, loss is 1.4446988105773926\n", - "epoch: 13 step: 144, loss is 1.3845510482788086\n", - "epoch: 13 step: 145, loss is 1.186936378479004\n", - "epoch: 13 step: 146, loss is 1.2852944135665894\n", - "epoch: 13 step: 147, loss is 1.2454169988632202\n", - "epoch: 13 step: 148, loss is 1.3380701541900635\n", - "epoch: 13 step: 149, loss is 1.3749141693115234\n", - "epoch: 13 step: 150, loss is 1.299337387084961\n", - "epoch: 13 step: 151, loss is 1.2815678119659424\n", - "epoch: 13 step: 152, loss is 1.365984559059143\n", - "epoch: 13 step: 153, loss is 1.2224669456481934\n", - "epoch: 13 step: 154, loss is 1.2980725765228271\n", - "epoch: 13 step: 155, loss is 1.2019745111465454\n", - "epoch: 13 step: 156, loss is 1.2537386417388916\n", - "epoch: 13 step: 157, loss is 1.33438241481781\n", - "epoch: 13 step: 158, loss is 1.3377866744995117\n", - "epoch: 13 step: 159, loss is 1.3218154907226562\n", - "epoch: 13 step: 160, loss is 1.2359482049942017\n", - "epoch: 13 step: 161, loss is 1.2482713460922241\n", - "epoch: 13 step: 162, loss is 1.2941745519638062\n", - "epoch: 13 step: 163, loss is 1.3449593782424927\n", - "epoch: 13 step: 164, loss is 1.3553290367126465\n", - "epoch: 13 step: 165, loss is 1.3531560897827148\n", - "epoch: 13 step: 166, loss is 1.2489161491394043\n", - "epoch: 13 step: 167, loss is 1.2581281661987305\n", - "epoch: 13 step: 168, loss is 1.2859731912612915\n", - "epoch: 13 step: 169, loss is 1.3678596019744873\n", - "epoch: 13 step: 170, loss is 1.2936376333236694\n", - "epoch: 13 step: 171, loss is 1.3384499549865723\n", - "epoch: 13 step: 172, loss is 1.2707794904708862\n", - "epoch: 13 step: 173, loss is 1.3316196203231812\n", - "epoch: 13 step: 174, loss is 1.3763166666030884\n", - "epoch: 13 step: 175, loss is 1.2546569108963013\n", - "epoch: 13 step: 176, loss is 1.350092887878418\n", - "epoch: 13 step: 177, loss is 1.3350321054458618\n", - "epoch: 13 step: 178, loss is 1.3616435527801514\n", - "epoch: 13 step: 179, loss is 1.2652897834777832\n", - "epoch: 13 step: 180, loss is 1.2306301593780518\n", - "epoch: 13 step: 181, loss is 1.3009637594223022\n", - "epoch: 13 step: 182, loss is 1.3185278177261353\n", - "epoch: 13 step: 183, loss is 1.2985941171646118\n", - "epoch: 13 step: 184, loss is 1.347240686416626\n", - "epoch: 13 step: 185, loss is 1.2906683683395386\n", - "epoch: 13 step: 186, loss is 1.2780156135559082\n", - "epoch: 13 step: 187, loss is 1.3213268518447876\n", - "epoch: 13 step: 188, loss is 1.2610639333724976\n", - "epoch: 13 step: 189, loss is 1.2599551677703857\n", - "epoch: 13 step: 190, loss is 1.1890722513198853\n", - "epoch: 13 step: 191, loss is 1.3777897357940674\n", - "epoch: 13 step: 192, loss is 1.2825472354888916\n", - "epoch: 13 step: 193, 
loss is 1.3848485946655273\n",
- "epoch: 13 step: 194, loss is 1.276253342628479\n",
- "epoch: 13 step: 195, loss is 1.306337594985962\n",
- "Train epoch time: 106675.159 ms, per step time: 547.052 ms\n",
[... epoch 14, steps 1-194 elided; per-step loss ~1.14-1.42 ...]
- "epoch: 14 step: 195, loss is 1.3362916707992554\n",
- "Train epoch time: 106469.879 ms, per step time: 545.999 ms\n",
[... epoch 15, steps 1-194 elided; per-step loss ~1.14-1.41 ...]
- "epoch: 15 step: 195, loss is 1.1925673484802246\n",
- "Train epoch time: 117850.298 ms, per step time: 604.361 ms\n",
[... epoch 16, steps 1-194 elided; per-step loss ~1.10-1.39 ...]
- "epoch: 16 step: 195, loss is 1.2275688648223877\n",
- "Train epoch time: 117204.454 ms, per step time: 601.048 ms\n",
[... epoch 17, steps 1-194 elided; per-step loss ~1.12-1.43 ...]
- "epoch: 17 step: 195, loss is 1.2714698314666748\n",
- "Train epoch time: 111485.809 ms, per step time: 571.722 ms\n",
[... epoch 18, steps 1-194 elided; per-step loss ~1.09-1.35 ...]
- "epoch: 18 step: 195, loss is 1.1512067317962646\n",
- "Train epoch time: 108420.523 ms, per step time: 556.003 ms\n",
[... epoch 19, steps 1-194 elided; per-step loss ~1.05-1.31 ...]
- "epoch: 19 step: 195, loss is 1.191091537475586\n",
- "Train epoch time: 101225.978 ms, per step time: 519.108 ms\n",
[... epoch 20, steps 1-193 elided; per-step loss ~1.06-1.30 ...]
- "epoch: 20 step: 194, loss is 1.19902503490448\n",
- "epoch: 20 step: 195, loss is 1.1567087173461914\n", - "Train epoch time: 104173.424 ms, per step time: 534.223 ms\n", - "epoch: 21 step: 1, loss is 1.1921052932739258\n", - "epoch: 21 step: 2, loss is 1.0701342821121216\n", - "epoch: 21 step: 3, loss is 1.1753308773040771\n", - "epoch: 21 step: 4, loss is 1.1313111782073975\n", - "epoch: 21 step: 5, loss is 1.1417230367660522\n", - "epoch: 21 step: 6, loss is 1.0305631160736084\n", - "epoch: 21 step: 7, loss is 1.1922510862350464\n", - "epoch: 21 step: 8, loss is 1.1823357343673706\n", - "epoch: 21 step: 9, loss is 1.129448413848877\n", - "epoch: 21 step: 10, loss is 1.1250107288360596\n", - "epoch: 21 step: 11, loss is 1.1208312511444092\n", - "epoch: 21 step: 12, loss is 1.1533703804016113\n", - "epoch: 21 step: 13, loss is 1.0750772953033447\n", - "epoch: 21 step: 14, loss is 1.0881654024124146\n", - "epoch: 21 step: 15, loss is 1.1426184177398682\n", - "epoch: 21 step: 16, loss is 1.2459871768951416\n", - "epoch: 21 step: 17, loss is 1.072798728942871\n", - "epoch: 21 step: 18, loss is 1.0991919040679932\n", - "epoch: 21 step: 19, loss is 1.1213173866271973\n", - "epoch: 21 step: 20, loss is 1.0883537530899048\n", - "epoch: 21 step: 21, loss is 1.1365002393722534\n", - "epoch: 21 step: 22, loss is 1.104675531387329\n", - "epoch: 21 step: 23, loss is 1.1440393924713135\n", - "epoch: 21 step: 24, loss is 1.1484761238098145\n", - "epoch: 21 step: 25, loss is 1.1014875173568726\n", - "epoch: 21 step: 26, loss is 1.118294358253479\n", - "epoch: 21 step: 27, loss is 1.1814875602722168\n", - "epoch: 21 step: 28, loss is 1.1103239059448242\n", - "epoch: 21 step: 29, loss is 1.1203694343566895\n", - "epoch: 21 step: 30, loss is 1.1217091083526611\n", - "epoch: 21 step: 31, loss is 1.2158026695251465\n", - "epoch: 21 step: 32, loss is 1.1644526720046997\n", - "epoch: 21 step: 33, loss is 1.1890766620635986\n", - "epoch: 21 step: 34, loss is 1.122287631034851\n", - "epoch: 21 step: 35, loss is 1.121174931526184\n", - "epoch: 21 step: 36, loss is 1.1157550811767578\n", - "epoch: 21 step: 37, loss is 1.128007411956787\n", - "epoch: 21 step: 38, loss is 1.1591732501983643\n", - "epoch: 21 step: 39, loss is 1.118435025215149\n", - "epoch: 21 step: 40, loss is 1.1228094100952148\n", - "epoch: 21 step: 41, loss is 1.1012872457504272\n", - "epoch: 21 step: 42, loss is 1.2191131114959717\n", - "epoch: 21 step: 43, loss is 1.1286933422088623\n", - "epoch: 21 step: 44, loss is 1.1683429479599\n", - "epoch: 21 step: 45, loss is 1.1217944622039795\n", - "epoch: 21 step: 46, loss is 1.1995563507080078\n", - "epoch: 21 step: 47, loss is 1.0857417583465576\n", - "epoch: 21 step: 48, loss is 1.1569418907165527\n", - "epoch: 21 step: 49, loss is 1.2251319885253906\n", - "epoch: 21 step: 50, loss is 1.1091125011444092\n", - "epoch: 21 step: 51, loss is 1.1549124717712402\n", - "epoch: 21 step: 52, loss is 1.1356117725372314\n", - "epoch: 21 step: 53, loss is 1.1109840869903564\n", - "epoch: 21 step: 54, loss is 1.1460250616073608\n", - "epoch: 21 step: 55, loss is 1.1362437009811401\n", - "epoch: 21 step: 56, loss is 1.1517187356948853\n", - "epoch: 21 step: 57, loss is 1.0988221168518066\n", - "epoch: 21 step: 58, loss is 1.1581555604934692\n", - "epoch: 21 step: 59, loss is 1.169415831565857\n", - "epoch: 21 step: 60, loss is 1.1740953922271729\n", - "epoch: 21 step: 61, loss is 1.1024632453918457\n", - "epoch: 21 step: 62, loss is 1.0984084606170654\n", - "epoch: 21 step: 63, loss is 1.2059848308563232\n", - "epoch: 21 step: 64, loss is 1.145885944366455\n", 
- "epoch: 21 step: 65, loss is 1.2163082361221313\n", - "epoch: 21 step: 66, loss is 1.108540415763855\n", - "epoch: 21 step: 67, loss is 1.1952319145202637\n", - "epoch: 21 step: 68, loss is 1.1441410779953003\n", - "epoch: 21 step: 69, loss is 1.1583361625671387\n", - "epoch: 21 step: 70, loss is 1.102898120880127\n", - "epoch: 21 step: 71, loss is 1.125910758972168\n", - "epoch: 21 step: 72, loss is 1.1942943334579468\n", - "epoch: 21 step: 73, loss is 1.2117856740951538\n", - "epoch: 21 step: 74, loss is 1.1190205812454224\n", - "epoch: 21 step: 75, loss is 1.107731580734253\n", - "epoch: 21 step: 76, loss is 1.1000752449035645\n", - "epoch: 21 step: 77, loss is 1.1882604360580444\n", - "epoch: 21 step: 78, loss is 1.1377122402191162\n", - "epoch: 21 step: 79, loss is 1.1691741943359375\n", - "epoch: 21 step: 80, loss is 1.2381455898284912\n", - "epoch: 21 step: 81, loss is 1.1835287809371948\n", - "epoch: 21 step: 82, loss is 1.136803150177002\n", - "epoch: 21 step: 83, loss is 1.193105936050415\n", - "epoch: 21 step: 84, loss is 1.1319094896316528\n", - "epoch: 21 step: 85, loss is 1.134192705154419\n", - "epoch: 21 step: 86, loss is 1.0962910652160645\n", - "epoch: 21 step: 87, loss is 1.1167593002319336\n", - "epoch: 21 step: 88, loss is 1.124763011932373\n", - "epoch: 21 step: 89, loss is 1.1915029287338257\n", - "epoch: 21 step: 90, loss is 1.154558539390564\n", - "epoch: 21 step: 91, loss is 1.1266372203826904\n", - "epoch: 21 step: 92, loss is 1.1204588413238525\n", - "epoch: 21 step: 93, loss is 1.0840075016021729\n", - "epoch: 21 step: 94, loss is 1.1026766300201416\n", - "epoch: 21 step: 95, loss is 1.1278811693191528\n", - "epoch: 21 step: 96, loss is 1.109001874923706\n", - "epoch: 21 step: 97, loss is 1.2332653999328613\n", - "epoch: 21 step: 98, loss is 1.0993515253067017\n", - "epoch: 21 step: 99, loss is 1.1307837963104248\n", - "epoch: 21 step: 100, loss is 1.1563172340393066\n", - "epoch: 21 step: 101, loss is 1.15267014503479\n", - "epoch: 21 step: 102, loss is 1.136658787727356\n", - "epoch: 21 step: 103, loss is 1.1215237379074097\n", - "epoch: 21 step: 104, loss is 1.1939799785614014\n", - "epoch: 21 step: 105, loss is 1.2076579332351685\n", - "epoch: 21 step: 106, loss is 1.1646722555160522\n", - "epoch: 21 step: 107, loss is 1.2286417484283447\n", - "epoch: 21 step: 108, loss is 1.0636025667190552\n", - "epoch: 21 step: 109, loss is 1.1365132331848145\n", - "epoch: 21 step: 110, loss is 1.1064388751983643\n", - "epoch: 21 step: 111, loss is 1.1925089359283447\n", - "epoch: 21 step: 112, loss is 1.1692545413970947\n", - "epoch: 21 step: 113, loss is 1.012389063835144\n", - "epoch: 21 step: 114, loss is 1.2235348224639893\n", - "epoch: 21 step: 115, loss is 1.161221981048584\n", - "epoch: 21 step: 116, loss is 1.0941426753997803\n", - "epoch: 21 step: 117, loss is 1.1226434707641602\n", - "epoch: 21 step: 118, loss is 1.1540415287017822\n", - "epoch: 21 step: 119, loss is 1.1769740581512451\n", - "epoch: 21 step: 120, loss is 1.2211434841156006\n", - "epoch: 21 step: 121, loss is 1.1179429292678833\n", - "epoch: 21 step: 122, loss is 1.1711430549621582\n", - "epoch: 21 step: 123, loss is 1.0949711799621582\n", - "epoch: 21 step: 124, loss is 1.1699912548065186\n", - "epoch: 21 step: 125, loss is 1.0529214143753052\n", - "epoch: 21 step: 126, loss is 1.1035442352294922\n", - "epoch: 21 step: 127, loss is 1.1384224891662598\n", - "epoch: 21 step: 128, loss is 1.1493457555770874\n", - "epoch: 21 step: 129, loss is 1.1306800842285156\n", - "epoch: 21 step: 130, loss 
is 1.1158616542816162\n", - "epoch: 21 step: 131, loss is 1.1646543741226196\n", - "epoch: 21 step: 132, loss is 1.1338950395584106\n", - "epoch: 21 step: 133, loss is 1.0740561485290527\n", - "epoch: 21 step: 134, loss is 1.1639001369476318\n", - "epoch: 21 step: 135, loss is 1.1926583051681519\n", - "epoch: 21 step: 136, loss is 1.137130856513977\n", - "epoch: 21 step: 137, loss is 1.1674678325653076\n", - "epoch: 21 step: 138, loss is 1.0838972330093384\n", - "epoch: 21 step: 139, loss is 1.1713426113128662\n", - "epoch: 21 step: 140, loss is 1.172694444656372\n", - "epoch: 21 step: 141, loss is 1.1743769645690918\n", - "epoch: 21 step: 142, loss is 1.1856400966644287\n", - "epoch: 21 step: 143, loss is 1.1878764629364014\n", - "epoch: 21 step: 144, loss is 1.1903069019317627\n", - "epoch: 21 step: 145, loss is 1.1692390441894531\n", - "epoch: 21 step: 146, loss is 1.124283790588379\n", - "epoch: 21 step: 147, loss is 1.1382466554641724\n", - "epoch: 21 step: 148, loss is 1.1935675144195557\n", - "epoch: 21 step: 149, loss is 1.1120277643203735\n", - "epoch: 21 step: 150, loss is 1.147344946861267\n", - "epoch: 21 step: 151, loss is 1.1530892848968506\n", - "epoch: 21 step: 152, loss is 1.098280429840088\n", - "epoch: 21 step: 153, loss is 1.2179057598114014\n", - "epoch: 21 step: 154, loss is 1.2466373443603516\n", - "epoch: 21 step: 155, loss is 1.1421725749969482\n", - "epoch: 21 step: 156, loss is 1.0607844591140747\n", - "epoch: 21 step: 157, loss is 1.1394695043563843\n", - "epoch: 21 step: 158, loss is 1.1770168542861938\n", - "epoch: 21 step: 159, loss is 1.0940386056900024\n", - "epoch: 21 step: 160, loss is 1.1483757495880127\n", - "epoch: 21 step: 161, loss is 1.1882250308990479\n", - "epoch: 21 step: 162, loss is 1.1590632200241089\n", - "epoch: 21 step: 163, loss is 1.1781816482543945\n", - "epoch: 21 step: 164, loss is 1.0744845867156982\n", - "epoch: 21 step: 165, loss is 1.1325417757034302\n", - "epoch: 21 step: 166, loss is 1.089174747467041\n", - "epoch: 21 step: 167, loss is 1.1512054204940796\n", - "epoch: 21 step: 168, loss is 1.1409275531768799\n", - "epoch: 21 step: 169, loss is 1.201310396194458\n", - "epoch: 21 step: 170, loss is 1.169210433959961\n", - "epoch: 21 step: 171, loss is 1.2410035133361816\n", - "epoch: 21 step: 172, loss is 1.028815507888794\n", - "epoch: 21 step: 173, loss is 1.1066675186157227\n", - "epoch: 21 step: 174, loss is 1.1109777688980103\n", - "epoch: 21 step: 175, loss is 1.1665771007537842\n", - "epoch: 21 step: 176, loss is 1.148111343383789\n", - "epoch: 21 step: 177, loss is 1.0802497863769531\n", - "epoch: 21 step: 178, loss is 1.1739122867584229\n", - "epoch: 21 step: 179, loss is 1.1767234802246094\n", - "epoch: 21 step: 180, loss is 1.1092647314071655\n", - "epoch: 21 step: 181, loss is 1.2105249166488647\n", - "epoch: 21 step: 182, loss is 1.1116437911987305\n", - "epoch: 21 step: 183, loss is 1.1619771718978882\n", - "epoch: 21 step: 184, loss is 1.1229248046875\n", - "epoch: 21 step: 185, loss is 1.154275894165039\n", - "epoch: 21 step: 186, loss is 1.114675760269165\n", - "epoch: 21 step: 187, loss is 1.1923682689666748\n", - "epoch: 21 step: 188, loss is 1.1858384609222412\n", - "epoch: 21 step: 189, loss is 1.162807822227478\n", - "epoch: 21 step: 190, loss is 1.0937739610671997\n", - "epoch: 21 step: 191, loss is 1.1718971729278564\n", - "epoch: 21 step: 192, loss is 1.2204475402832031\n", - "epoch: 21 step: 193, loss is 1.0986087322235107\n", - "epoch: 21 step: 194, loss is 1.129512071609497\n", - "epoch: 21 step: 195, 
loss is 1.2134814262390137\n", - "Train epoch time: 96557.791 ms, per step time: 495.168 ms\n", - "epoch: 22 step: 1, loss is 1.1057870388031006\n", - "epoch: 22 step: 2, loss is 1.1139236688613892\n", - "epoch: 22 step: 3, loss is 1.1555999517440796\n", - "epoch: 22 step: 4, loss is 1.0924568176269531\n", - "epoch: 22 step: 5, loss is 1.1187453269958496\n", - "epoch: 22 step: 6, loss is 1.133613109588623\n", - "epoch: 22 step: 7, loss is 1.1318658590316772\n", - "epoch: 22 step: 8, loss is 1.192131519317627\n", - "epoch: 22 step: 9, loss is 1.1278152465820312\n", - "epoch: 22 step: 10, loss is 1.202744960784912\n", - "epoch: 22 step: 11, loss is 1.1721439361572266\n", - "epoch: 22 step: 12, loss is 1.0863008499145508\n", - "epoch: 22 step: 13, loss is 1.1525710821151733\n", - "epoch: 22 step: 14, loss is 1.1658072471618652\n", - "epoch: 22 step: 15, loss is 1.136847972869873\n", - "epoch: 22 step: 16, loss is 1.0486916303634644\n", - "epoch: 22 step: 17, loss is 1.0813196897506714\n", - "epoch: 22 step: 18, loss is 1.1514248847961426\n", - "epoch: 22 step: 19, loss is 1.0714279413223267\n", - "epoch: 22 step: 20, loss is 1.0280711650848389\n", - "epoch: 22 step: 21, loss is 1.0710278749465942\n", - "epoch: 22 step: 22, loss is 1.1394612789154053\n", - "epoch: 22 step: 23, loss is 1.0616064071655273\n", - "epoch: 22 step: 24, loss is 1.100270390510559\n", - "epoch: 22 step: 25, loss is 1.067994475364685\n", - "epoch: 22 step: 26, loss is 1.1554644107818604\n", - "epoch: 22 step: 27, loss is 1.132413625717163\n", - "epoch: 22 step: 28, loss is 1.1626719236373901\n", - "epoch: 22 step: 29, loss is 1.1925325393676758\n", - "epoch: 22 step: 30, loss is 1.189226508140564\n", - "epoch: 22 step: 31, loss is 1.2117999792099\n", - "epoch: 22 step: 32, loss is 1.1735248565673828\n", - "epoch: 22 step: 33, loss is 1.1339526176452637\n", - "epoch: 22 step: 34, loss is 1.1265913248062134\n", - "epoch: 22 step: 35, loss is 1.083516240119934\n", - "epoch: 22 step: 36, loss is 1.186711072921753\n", - "epoch: 22 step: 37, loss is 1.0917110443115234\n", - "epoch: 22 step: 38, loss is 1.122393250465393\n", - "epoch: 22 step: 39, loss is 1.0775220394134521\n", - "epoch: 22 step: 40, loss is 1.114711046218872\n", - "epoch: 22 step: 41, loss is 1.0672664642333984\n", - "epoch: 22 step: 42, loss is 1.0923773050308228\n", - "epoch: 22 step: 43, loss is 1.1840053796768188\n", - "epoch: 22 step: 44, loss is 1.138547420501709\n", - "epoch: 22 step: 45, loss is 1.0909572839736938\n", - "epoch: 22 step: 46, loss is 1.1297260522842407\n", - "epoch: 22 step: 47, loss is 1.1536083221435547\n", - "epoch: 22 step: 48, loss is 1.1982747316360474\n", - "epoch: 22 step: 49, loss is 1.0661267042160034\n", - "epoch: 22 step: 50, loss is 1.216001272201538\n", - "epoch: 22 step: 51, loss is 1.195881962776184\n", - "epoch: 22 step: 52, loss is 1.1459851264953613\n", - "epoch: 22 step: 53, loss is 1.1505540609359741\n", - "epoch: 22 step: 54, loss is 1.0748066902160645\n", - "epoch: 22 step: 55, loss is 1.1084421873092651\n", - "epoch: 22 step: 56, loss is 1.127742052078247\n", - "epoch: 22 step: 57, loss is 0.9987824559211731\n", - "epoch: 22 step: 58, loss is 1.0742061138153076\n", - "epoch: 22 step: 59, loss is 1.1558446884155273\n", - "epoch: 22 step: 60, loss is 1.2418932914733887\n", - "epoch: 22 step: 61, loss is 1.1512435674667358\n", - "epoch: 22 step: 62, loss is 1.18963623046875\n", - "epoch: 22 step: 63, loss is 1.1356805562973022\n", - "epoch: 22 step: 64, loss is 1.0749949216842651\n", - "epoch: 22 step: 65, loss is 
1.1138627529144287\n", - "epoch: 22 step: 66, loss is 1.0608296394348145\n", - "epoch: 22 step: 67, loss is 1.1862590312957764\n", - "epoch: 22 step: 68, loss is 1.1027181148529053\n", - "epoch: 22 step: 69, loss is 1.0789560079574585\n", - "epoch: 22 step: 70, loss is 1.196028709411621\n", - "epoch: 22 step: 71, loss is 1.1457781791687012\n", - "epoch: 22 step: 72, loss is 1.184518575668335\n", - "epoch: 22 step: 73, loss is 1.1258783340454102\n", - "epoch: 22 step: 74, loss is 1.1965044736862183\n", - "epoch: 22 step: 75, loss is 1.1160832643508911\n", - "epoch: 22 step: 76, loss is 1.0849354267120361\n", - "epoch: 22 step: 77, loss is 1.1233752965927124\n", - "epoch: 22 step: 78, loss is 1.082011342048645\n", - "epoch: 22 step: 79, loss is 1.0701531171798706\n", - "epoch: 22 step: 80, loss is 1.1088016033172607\n", - "epoch: 22 step: 81, loss is 1.1289912462234497\n", - "epoch: 22 step: 82, loss is 1.1551800966262817\n", - "epoch: 22 step: 83, loss is 1.1657989025115967\n", - "epoch: 22 step: 84, loss is 1.0940277576446533\n", - "epoch: 22 step: 85, loss is 1.2021234035491943\n", - "epoch: 22 step: 86, loss is 1.1075375080108643\n", - "epoch: 22 step: 87, loss is 1.1424462795257568\n", - "epoch: 22 step: 88, loss is 1.126102089881897\n", - "epoch: 22 step: 89, loss is 1.1145858764648438\n", - "epoch: 22 step: 90, loss is 1.1827151775360107\n", - "epoch: 22 step: 91, loss is 1.1285420656204224\n", - "epoch: 22 step: 92, loss is 1.1281323432922363\n", - "epoch: 22 step: 93, loss is 1.083482265472412\n", - "epoch: 22 step: 94, loss is 1.0388284921646118\n", - "epoch: 22 step: 95, loss is 1.0472513437271118\n", - "epoch: 22 step: 96, loss is 1.0967636108398438\n", - "epoch: 22 step: 97, loss is 1.1034371852874756\n", - "epoch: 22 step: 98, loss is 1.1421582698822021\n", - "epoch: 22 step: 99, loss is 1.1247735023498535\n", - "epoch: 22 step: 100, loss is 1.1255205869674683\n", - "epoch: 22 step: 101, loss is 1.066077709197998\n", - "epoch: 22 step: 102, loss is 1.1157441139221191\n", - "epoch: 22 step: 103, loss is 1.0566856861114502\n", - "epoch: 22 step: 104, loss is 1.1286683082580566\n", - "epoch: 22 step: 105, loss is 1.0926494598388672\n", - "epoch: 22 step: 106, loss is 1.1775054931640625\n", - "epoch: 22 step: 107, loss is 1.141067385673523\n", - "epoch: 22 step: 108, loss is 1.1127513647079468\n", - "epoch: 22 step: 109, loss is 1.120466947555542\n", - "epoch: 22 step: 110, loss is 1.0674927234649658\n", - "epoch: 22 step: 111, loss is 1.1168016195297241\n", - "epoch: 22 step: 112, loss is 1.0601282119750977\n", - "epoch: 22 step: 113, loss is 1.2126147747039795\n", - "epoch: 22 step: 114, loss is 1.0950136184692383\n", - "epoch: 22 step: 115, loss is 1.1604738235473633\n", - "epoch: 22 step: 116, loss is 1.0633180141448975\n", - "epoch: 22 step: 117, loss is 1.0476927757263184\n", - "epoch: 22 step: 118, loss is 1.2187708616256714\n", - "epoch: 22 step: 119, loss is 1.2101118564605713\n", - "epoch: 22 step: 120, loss is 1.0943667888641357\n", - "epoch: 22 step: 121, loss is 1.0570751428604126\n", - "epoch: 22 step: 122, loss is 1.0902503728866577\n", - "epoch: 22 step: 123, loss is 1.1060099601745605\n", - "epoch: 22 step: 124, loss is 1.1245768070220947\n", - "epoch: 22 step: 125, loss is 1.0946764945983887\n", - "epoch: 22 step: 126, loss is 1.2332159280776978\n", - "epoch: 22 step: 127, loss is 1.0753830671310425\n", - "epoch: 22 step: 128, loss is 1.1343789100646973\n", - "epoch: 22 step: 129, loss is 1.1489059925079346\n", - "epoch: 22 step: 130, loss is 
1.0653080940246582\n", - "epoch: 22 step: 131, loss is 1.1134395599365234\n", - "epoch: 22 step: 132, loss is 1.1181024312973022\n", - "epoch: 22 step: 133, loss is 1.1542857885360718\n", - "epoch: 22 step: 134, loss is 1.0399237871170044\n", - "epoch: 22 step: 135, loss is 1.0861629247665405\n", - "epoch: 22 step: 136, loss is 1.127682089805603\n", - "epoch: 22 step: 137, loss is 1.196089506149292\n", - "epoch: 22 step: 138, loss is 1.1673725843429565\n", - "epoch: 22 step: 139, loss is 1.1664581298828125\n", - "epoch: 22 step: 140, loss is 1.0579397678375244\n", - "epoch: 22 step: 141, loss is 1.099616527557373\n", - "epoch: 22 step: 142, loss is 1.1683763265609741\n", - "epoch: 22 step: 143, loss is 1.1020599603652954\n", - "epoch: 22 step: 144, loss is 1.055849552154541\n", - "epoch: 22 step: 145, loss is 1.1096962690353394\n", - "epoch: 22 step: 146, loss is 1.1060367822647095\n", - "epoch: 22 step: 147, loss is 1.0919804573059082\n", - "epoch: 22 step: 148, loss is 1.0732414722442627\n", - "epoch: 22 step: 149, loss is 1.0836870670318604\n", - "epoch: 22 step: 150, loss is 1.2175854444503784\n", - "epoch: 22 step: 151, loss is 1.1793617010116577\n", - "epoch: 22 step: 152, loss is 1.0995378494262695\n", - "epoch: 22 step: 153, loss is 1.1928123235702515\n", - "epoch: 22 step: 154, loss is 1.124531865119934\n", - "epoch: 22 step: 155, loss is 1.1263102293014526\n", - "epoch: 22 step: 156, loss is 1.1295742988586426\n", - "epoch: 22 step: 157, loss is 1.0764509439468384\n", - "epoch: 22 step: 158, loss is 1.132375955581665\n", - "epoch: 22 step: 159, loss is 1.1854095458984375\n", - "epoch: 22 step: 160, loss is 1.1245477199554443\n", - "epoch: 22 step: 161, loss is 1.187173843383789\n", - "epoch: 22 step: 162, loss is 1.0998836755752563\n", - "epoch: 22 step: 163, loss is 1.1183044910430908\n", - "epoch: 22 step: 164, loss is 1.087443232536316\n", - "epoch: 22 step: 165, loss is 1.1431686878204346\n", - "epoch: 22 step: 166, loss is 1.150266408920288\n", - "epoch: 22 step: 167, loss is 1.1101069450378418\n", - "epoch: 22 step: 168, loss is 1.1075210571289062\n", - "epoch: 22 step: 169, loss is 1.106501579284668\n", - "epoch: 22 step: 170, loss is 1.1325280666351318\n", - "epoch: 22 step: 171, loss is 1.1748698949813843\n", - "epoch: 22 step: 172, loss is 1.109532356262207\n", - "epoch: 22 step: 173, loss is 1.0745608806610107\n", - "epoch: 22 step: 174, loss is 1.1903640031814575\n", - "epoch: 22 step: 175, loss is 1.1700621843338013\n", - "epoch: 22 step: 176, loss is 1.2008390426635742\n", - "epoch: 22 step: 177, loss is 1.2198768854141235\n", - "epoch: 22 step: 178, loss is 1.138572096824646\n", - "epoch: 22 step: 179, loss is 1.1092687845230103\n", - "epoch: 22 step: 180, loss is 1.2139312028884888\n", - "epoch: 22 step: 181, loss is 1.055828332901001\n", - "epoch: 22 step: 182, loss is 1.091160535812378\n", - "epoch: 22 step: 183, loss is 1.106805443763733\n", - "epoch: 22 step: 184, loss is 1.1469414234161377\n", - "epoch: 22 step: 185, loss is 1.1037912368774414\n", - "epoch: 22 step: 186, loss is 1.1567729711532593\n", - "epoch: 22 step: 187, loss is 1.1527526378631592\n", - "epoch: 22 step: 188, loss is 1.0447196960449219\n", - "epoch: 22 step: 189, loss is 1.055253505706787\n", - "epoch: 22 step: 190, loss is 1.1102721691131592\n", - "epoch: 22 step: 191, loss is 1.1446545124053955\n", - "epoch: 22 step: 192, loss is 1.1523252725601196\n", - "epoch: 22 step: 193, loss is 1.1921484470367432\n", - "epoch: 22 step: 194, loss is 1.065596103668213\n", - "epoch: 22 step: 195, loss 
is 1.1077330112457275\n", - "Train epoch time: 96550.665 ms, per step time: 495.132 ms\n", - "epoch: 23 step: 1, loss is 1.0420126914978027\n", - "epoch: 23 step: 2, loss is 1.099735975265503\n", - "epoch: 23 step: 3, loss is 1.1271454095840454\n", - "epoch: 23 step: 4, loss is 1.079667329788208\n", - "epoch: 23 step: 5, loss is 1.074260950088501\n", - "epoch: 23 step: 6, loss is 1.0702893733978271\n", - "epoch: 23 step: 7, loss is 1.0628925561904907\n", - "epoch: 23 step: 8, loss is 1.109250545501709\n", - "epoch: 23 step: 9, loss is 1.108660340309143\n", - "epoch: 23 step: 10, loss is 1.081648349761963\n", - "epoch: 23 step: 11, loss is 1.071412205696106\n", - "epoch: 23 step: 12, loss is 1.1206388473510742\n", - "epoch: 23 step: 13, loss is 1.0619468688964844\n", - "epoch: 23 step: 14, loss is 1.1640396118164062\n", - "epoch: 23 step: 15, loss is 1.0815362930297852\n", - "epoch: 23 step: 16, loss is 1.1246254444122314\n", - "epoch: 23 step: 17, loss is 1.148111343383789\n", - "epoch: 23 step: 18, loss is 1.0939370393753052\n", - "epoch: 23 step: 19, loss is 1.1357307434082031\n", - "epoch: 23 step: 20, loss is 1.1537823677062988\n", - "epoch: 23 step: 21, loss is 1.1099159717559814\n", - "epoch: 23 step: 22, loss is 1.0810271501541138\n", - "epoch: 23 step: 23, loss is 1.1312618255615234\n", - "epoch: 23 step: 24, loss is 1.096649169921875\n", - "epoch: 23 step: 25, loss is 1.1591715812683105\n", - "epoch: 23 step: 26, loss is 1.0770912170410156\n", - "epoch: 23 step: 27, loss is 1.0089478492736816\n", - "epoch: 23 step: 28, loss is 1.1134425401687622\n", - "epoch: 23 step: 29, loss is 1.1362148523330688\n", - "epoch: 23 step: 30, loss is 1.1841790676116943\n", - "epoch: 23 step: 31, loss is 1.0730592012405396\n", - "epoch: 23 step: 32, loss is 1.105896234512329\n", - "epoch: 23 step: 33, loss is 1.0915123224258423\n", - "epoch: 23 step: 34, loss is 1.244390845298767\n", - "epoch: 23 step: 35, loss is 1.1465954780578613\n", - "epoch: 23 step: 36, loss is 1.2488198280334473\n", - "epoch: 23 step: 37, loss is 1.1303648948669434\n", - "epoch: 23 step: 38, loss is 1.06625497341156\n", - "epoch: 23 step: 39, loss is 1.1351871490478516\n", - "epoch: 23 step: 40, loss is 1.143106460571289\n", - "epoch: 23 step: 41, loss is 1.0997296571731567\n", - "epoch: 23 step: 42, loss is 1.1801092624664307\n", - "epoch: 23 step: 43, loss is 1.1338733434677124\n", - "epoch: 23 step: 44, loss is 1.1757233142852783\n", - "epoch: 23 step: 45, loss is 1.132099986076355\n", - "epoch: 23 step: 46, loss is 1.059287428855896\n", - "epoch: 23 step: 47, loss is 1.1602349281311035\n", - "epoch: 23 step: 48, loss is 1.1087274551391602\n", - "epoch: 23 step: 49, loss is 1.058919906616211\n", - "epoch: 23 step: 50, loss is 1.1383061408996582\n", - "epoch: 23 step: 51, loss is 1.058732509613037\n", - "epoch: 23 step: 52, loss is 1.1311895847320557\n", - "epoch: 23 step: 53, loss is 1.0651788711547852\n", - "epoch: 23 step: 54, loss is 1.0933306217193604\n", - "epoch: 23 step: 55, loss is 1.0521669387817383\n", - "epoch: 23 step: 56, loss is 1.0857175588607788\n", - "epoch: 23 step: 57, loss is 1.126347541809082\n", - "epoch: 23 step: 58, loss is 1.0909123420715332\n", - "epoch: 23 step: 59, loss is 1.138649582862854\n", - "epoch: 23 step: 60, loss is 1.0849061012268066\n", - "epoch: 23 step: 61, loss is 1.1380014419555664\n", - "epoch: 23 step: 62, loss is 1.0749741792678833\n", - "epoch: 23 step: 63, loss is 1.0596951246261597\n", - "epoch: 23 step: 64, loss is 1.0243406295776367\n", - "epoch: 23 step: 65, loss is 
1.170853853225708\n", - "epoch: 23 step: 66, loss is 1.0925712585449219\n", - "epoch: 23 step: 67, loss is 1.1108038425445557\n", - "epoch: 23 step: 68, loss is 1.1158647537231445\n", - "epoch: 23 step: 69, loss is 1.0916780233383179\n", - "epoch: 23 step: 70, loss is 1.1739258766174316\n", - "epoch: 23 step: 71, loss is 1.0683262348175049\n", - "epoch: 23 step: 72, loss is 1.214130163192749\n", - "epoch: 23 step: 73, loss is 1.0012922286987305\n", - "epoch: 23 step: 74, loss is 1.090229868888855\n", - "epoch: 23 step: 75, loss is 1.0642163753509521\n", - "epoch: 23 step: 76, loss is 1.133148431777954\n", - "epoch: 23 step: 77, loss is 1.0365712642669678\n", - "epoch: 23 step: 78, loss is 1.134724497795105\n", - "epoch: 23 step: 79, loss is 1.050230622291565\n", - "epoch: 23 step: 80, loss is 1.1680033206939697\n", - "epoch: 23 step: 81, loss is 1.077506184577942\n", - "epoch: 23 step: 82, loss is 1.2005258798599243\n", - "epoch: 23 step: 83, loss is 1.070518136024475\n", - "epoch: 23 step: 84, loss is 1.1651355028152466\n", - "epoch: 23 step: 85, loss is 1.187951922416687\n", - "epoch: 23 step: 86, loss is 1.1330618858337402\n", - "epoch: 23 step: 87, loss is 1.1270604133605957\n", - "epoch: 23 step: 88, loss is 1.1675716638565063\n", - "epoch: 23 step: 89, loss is 1.0827915668487549\n", - "epoch: 23 step: 90, loss is 1.1031270027160645\n", - "epoch: 23 step: 91, loss is 1.1060575246810913\n", - "epoch: 23 step: 92, loss is 1.1283595561981201\n", - "epoch: 23 step: 93, loss is 1.1641638278961182\n", - "epoch: 23 step: 94, loss is 1.121991515159607\n", - "epoch: 23 step: 95, loss is 1.193777322769165\n", - "epoch: 23 step: 96, loss is 1.154201626777649\n", - "epoch: 23 step: 97, loss is 1.0031192302703857\n", - "epoch: 23 step: 98, loss is 1.1418803930282593\n", - "epoch: 23 step: 99, loss is 1.0654265880584717\n", - "epoch: 23 step: 100, loss is 1.1046638488769531\n", - "epoch: 23 step: 101, loss is 1.116844654083252\n", - "epoch: 23 step: 102, loss is 1.0737988948822021\n", - "epoch: 23 step: 103, loss is 1.1782712936401367\n", - "epoch: 23 step: 104, loss is 1.1282520294189453\n", - "epoch: 23 step: 105, loss is 1.0460577011108398\n", - "epoch: 23 step: 106, loss is 1.1524302959442139\n", - "epoch: 23 step: 107, loss is 1.152945876121521\n", - "epoch: 23 step: 108, loss is 1.102074146270752\n", - "epoch: 23 step: 109, loss is 1.1422843933105469\n", - "epoch: 23 step: 110, loss is 1.1201359033584595\n", - "epoch: 23 step: 111, loss is 1.112854242324829\n", - "epoch: 23 step: 112, loss is 1.1151319742202759\n", - "epoch: 23 step: 113, loss is 1.0872212648391724\n", - "epoch: 23 step: 114, loss is 1.0612903833389282\n", - "epoch: 23 step: 115, loss is 1.026918649673462\n", - "epoch: 23 step: 116, loss is 1.0927183628082275\n", - "epoch: 23 step: 117, loss is 1.131216287612915\n", - "epoch: 23 step: 118, loss is 1.0949969291687012\n", - "epoch: 23 step: 119, loss is 1.0931265354156494\n", - "epoch: 23 step: 120, loss is 1.1404997110366821\n", - "epoch: 23 step: 121, loss is 1.053472876548767\n", - "epoch: 23 step: 122, loss is 1.1262367963790894\n", - "epoch: 23 step: 123, loss is 1.150343418121338\n", - "epoch: 23 step: 124, loss is 1.0660480260849\n", - "epoch: 23 step: 125, loss is 1.033414602279663\n", - "epoch: 23 step: 126, loss is 1.109561800956726\n", - "epoch: 23 step: 127, loss is 1.0779948234558105\n", - "epoch: 23 step: 128, loss is 1.0923174619674683\n", - "epoch: 23 step: 129, loss is 1.1790454387664795\n", - "epoch: 23 step: 130, loss is 1.1247092485427856\n", - "epoch: 23 
step: 131, loss is 1.1069467067718506\n", - "epoch: 23 step: 132, loss is 1.178035020828247\n", - "epoch: 23 step: 133, loss is 1.0654507875442505\n", - "epoch: 23 step: 134, loss is 1.137501835823059\n", - "epoch: 23 step: 135, loss is 1.1127469539642334\n", - "epoch: 23 step: 136, loss is 1.1050828695297241\n", - "epoch: 23 step: 137, loss is 0.9895503520965576\n", - "epoch: 23 step: 138, loss is 1.1231327056884766\n", - "epoch: 23 step: 139, loss is 1.072704792022705\n", - "epoch: 23 step: 140, loss is 1.140213966369629\n", - "epoch: 23 step: 141, loss is 1.1909414529800415\n", - "epoch: 23 step: 142, loss is 1.1219518184661865\n", - "epoch: 23 step: 143, loss is 1.0941047668457031\n", - "epoch: 23 step: 144, loss is 1.0998458862304688\n", - "epoch: 23 step: 145, loss is 1.0940742492675781\n", - "epoch: 23 step: 146, loss is 1.095496416091919\n", - "epoch: 23 step: 147, loss is 1.1275544166564941\n", - "epoch: 23 step: 148, loss is 1.1048732995986938\n", - "epoch: 23 step: 149, loss is 1.0835894346237183\n", - "epoch: 23 step: 150, loss is 1.1628206968307495\n", - "epoch: 23 step: 151, loss is 1.0395472049713135\n", - "epoch: 23 step: 152, loss is 1.1257904767990112\n", - "epoch: 23 step: 153, loss is 1.0448265075683594\n", - "epoch: 23 step: 154, loss is 1.168929100036621\n", - "epoch: 23 step: 155, loss is 1.1050912141799927\n", - "epoch: 23 step: 156, loss is 1.0998780727386475\n", - "epoch: 23 step: 157, loss is 1.0974781513214111\n", - "epoch: 23 step: 158, loss is 1.0884851217269897\n", - "epoch: 23 step: 159, loss is 1.0380859375\n", - "epoch: 23 step: 160, loss is 1.2068121433258057\n", - "epoch: 23 step: 161, loss is 1.0829228162765503\n", - "epoch: 23 step: 162, loss is 1.1500890254974365\n", - "epoch: 23 step: 163, loss is 1.165330171585083\n", - "epoch: 23 step: 164, loss is 1.1311683654785156\n", - "epoch: 23 step: 165, loss is 1.0441009998321533\n", - "epoch: 23 step: 166, loss is 1.1290067434310913\n", - "epoch: 23 step: 167, loss is 1.108944058418274\n", - "epoch: 23 step: 168, loss is 1.107635498046875\n", - "epoch: 23 step: 169, loss is 1.1310901641845703\n", - "epoch: 23 step: 170, loss is 1.0751266479492188\n", - "epoch: 23 step: 171, loss is 1.0947020053863525\n", - "epoch: 23 step: 172, loss is 1.019446849822998\n", - "epoch: 23 step: 173, loss is 1.132136583328247\n", - "epoch: 23 step: 174, loss is 1.0757756233215332\n", - "epoch: 23 step: 175, loss is 1.0834013223648071\n", - "epoch: 23 step: 176, loss is 1.0895262956619263\n", - "epoch: 23 step: 177, loss is 1.0929279327392578\n", - "epoch: 23 step: 178, loss is 1.1086838245391846\n", - "epoch: 23 step: 179, loss is 1.0466564893722534\n", - "epoch: 23 step: 180, loss is 1.111632227897644\n", - "epoch: 23 step: 181, loss is 1.1164880990982056\n", - "epoch: 23 step: 182, loss is 1.1129474639892578\n", - "epoch: 23 step: 183, loss is 1.1050301790237427\n", - "epoch: 23 step: 184, loss is 1.1336846351623535\n", - "epoch: 23 step: 185, loss is 1.1323282718658447\n", - "epoch: 23 step: 186, loss is 1.0980340242385864\n", - "epoch: 23 step: 187, loss is 1.121384859085083\n", - "epoch: 23 step: 188, loss is 1.1679719686508179\n", - "epoch: 23 step: 189, loss is 1.085845708847046\n", - "epoch: 23 step: 190, loss is 1.0491199493408203\n", - "epoch: 23 step: 191, loss is 1.1239583492279053\n", - "epoch: 23 step: 192, loss is 1.0570039749145508\n", - "epoch: 23 step: 193, loss is 1.1567015647888184\n", - "epoch: 23 step: 194, loss is 1.0626479387283325\n", - "epoch: 23 step: 195, loss is 1.0094729661941528\n", - "Train 
epoch time: 101013.625 ms, per step time: 518.019 ms\n", - "epoch: 24 step: 1, loss is 1.0149340629577637\n", - "epoch: 24 step: 2, loss is 1.0656733512878418\n", - "epoch: 24 step: 3, loss is 1.0988367795944214\n", - "epoch: 24 step: 4, loss is 1.0419820547103882\n", - "epoch: 24 step: 5, loss is 1.0940383672714233\n", - "epoch: 24 step: 6, loss is 1.0750219821929932\n", - "epoch: 24 step: 7, loss is 1.066772699356079\n", - "epoch: 24 step: 8, loss is 1.1340692043304443\n", - "epoch: 24 step: 9, loss is 1.207837462425232\n", - "epoch: 24 step: 10, loss is 1.0940717458724976\n", - "epoch: 24 step: 11, loss is 1.101741909980774\n", - "epoch: 24 step: 12, loss is 1.0705571174621582\n", - "epoch: 24 step: 13, loss is 1.0340397357940674\n", - "epoch: 24 step: 14, loss is 1.1341054439544678\n", - "epoch: 24 step: 15, loss is 1.0972232818603516\n", - "epoch: 24 step: 16, loss is 1.1218732595443726\n", - "epoch: 24 step: 17, loss is 1.0522446632385254\n", - "epoch: 24 step: 18, loss is 1.0363849401474\n", - "epoch: 24 step: 19, loss is 1.0300065279006958\n", - "epoch: 24 step: 20, loss is 1.0871447324752808\n", - "epoch: 24 step: 21, loss is 1.0412099361419678\n", - "epoch: 24 step: 22, loss is 1.088841438293457\n", - "epoch: 24 step: 23, loss is 1.041816234588623\n", - "epoch: 24 step: 24, loss is 1.1186373233795166\n", - "epoch: 24 step: 25, loss is 1.1210126876831055\n", - "epoch: 24 step: 26, loss is 1.0748430490493774\n", - "epoch: 24 step: 27, loss is 1.082032561302185\n", - "epoch: 24 step: 28, loss is 1.1616694927215576\n", - "epoch: 24 step: 29, loss is 1.0672612190246582\n", - "epoch: 24 step: 30, loss is 1.0976974964141846\n", - "epoch: 24 step: 31, loss is 1.0183546543121338\n", - "epoch: 24 step: 32, loss is 1.1253126859664917\n", - "epoch: 24 step: 33, loss is 1.0805137157440186\n", - "epoch: 24 step: 34, loss is 1.0839871168136597\n", - "epoch: 24 step: 35, loss is 1.0788066387176514\n", - "epoch: 24 step: 36, loss is 1.1135940551757812\n", - "epoch: 24 step: 37, loss is 1.100695013999939\n", - "epoch: 24 step: 38, loss is 1.0564568042755127\n", - "epoch: 24 step: 39, loss is 1.0432438850402832\n", - "epoch: 24 step: 40, loss is 1.0497615337371826\n", - "epoch: 24 step: 41, loss is 1.1052402257919312\n", - "epoch: 24 step: 42, loss is 1.074357509613037\n", - "epoch: 24 step: 43, loss is 1.0754345655441284\n", - "epoch: 24 step: 44, loss is 1.1187794208526611\n", - "epoch: 24 step: 45, loss is 1.158486247062683\n", - "epoch: 24 step: 46, loss is 1.1193733215332031\n", - "epoch: 24 step: 47, loss is 1.1252005100250244\n", - "epoch: 24 step: 48, loss is 1.1686315536499023\n", - "epoch: 24 step: 49, loss is 1.077506184577942\n", - "epoch: 24 step: 50, loss is 0.9955160617828369\n", - "epoch: 24 step: 51, loss is 1.0416629314422607\n", - "epoch: 24 step: 52, loss is 1.0476529598236084\n", - "epoch: 24 step: 53, loss is 1.1165244579315186\n", - "epoch: 24 step: 54, loss is 1.1126850843429565\n", - "epoch: 24 step: 55, loss is 1.1327506303787231\n", - "epoch: 24 step: 56, loss is 1.1535394191741943\n", - "epoch: 24 step: 57, loss is 1.0998996496200562\n", - "epoch: 24 step: 58, loss is 1.2009762525558472\n", - "epoch: 24 step: 59, loss is 1.1290614604949951\n", - "epoch: 24 step: 60, loss is 1.0846294164657593\n", - "epoch: 24 step: 61, loss is 1.1018847227096558\n", - "epoch: 24 step: 62, loss is 1.0555287599563599\n", - "epoch: 24 step: 63, loss is 1.0728873014450073\n", - "epoch: 24 step: 64, loss is 1.0869959592819214\n", - "epoch: 24 step: 65, loss is 1.0526306629180908\n", - "epoch: 
24 step: 66, loss is 1.0938445329666138\n", - "epoch: 24 step: 67, loss is 1.0982022285461426\n", - "epoch: 24 step: 68, loss is 1.0820813179016113\n", - "epoch: 24 step: 69, loss is 1.1439049243927002\n", - "epoch: 24 step: 70, loss is 1.106379747390747\n", - "epoch: 24 step: 71, loss is 1.0735957622528076\n", - "epoch: 24 step: 72, loss is 1.0705339908599854\n", - "epoch: 24 step: 73, loss is 0.9821990132331848\n", - "epoch: 24 step: 74, loss is 1.10223388671875\n", - "epoch: 24 step: 75, loss is 1.1412644386291504\n", - "epoch: 24 step: 76, loss is 1.083353042602539\n", - "epoch: 24 step: 77, loss is 1.0805102586746216\n", - "epoch: 24 step: 78, loss is 1.0857373476028442\n", - "epoch: 24 step: 79, loss is 1.1129400730133057\n", - "epoch: 24 step: 80, loss is 1.129321813583374\n", - "epoch: 24 step: 81, loss is 1.1183065176010132\n", - "epoch: 24 step: 82, loss is 1.158447265625\n", - "epoch: 24 step: 83, loss is 1.059683084487915\n", - "epoch: 24 step: 84, loss is 1.0833324193954468\n", - "epoch: 24 step: 85, loss is 1.0802512168884277\n", - "epoch: 24 step: 86, loss is 1.0950815677642822\n", - "epoch: 24 step: 87, loss is 1.1294218301773071\n", - "epoch: 24 step: 88, loss is 1.1059508323669434\n", - "epoch: 24 step: 89, loss is 1.1759282350540161\n", - "epoch: 24 step: 90, loss is 1.0779974460601807\n", - "epoch: 24 step: 91, loss is 1.0733767747879028\n", - "epoch: 24 step: 92, loss is 1.1614344120025635\n", - "epoch: 24 step: 93, loss is 1.0910868644714355\n", - "epoch: 24 step: 94, loss is 1.054944634437561\n", - "epoch: 24 step: 95, loss is 1.1366803646087646\n", - "epoch: 24 step: 96, loss is 1.13693106174469\n", - "epoch: 24 step: 97, loss is 1.0923651456832886\n", - "epoch: 24 step: 98, loss is 1.0509752035140991\n", - "epoch: 24 step: 99, loss is 1.1433801651000977\n", - "epoch: 24 step: 100, loss is 1.1243809461593628\n", - "epoch: 24 step: 101, loss is 1.1028223037719727\n", - "epoch: 24 step: 102, loss is 1.1130242347717285\n", - "epoch: 24 step: 103, loss is 1.151925802230835\n", - "epoch: 24 step: 104, loss is 1.0777122974395752\n", - "epoch: 24 step: 105, loss is 1.119873046875\n", - "epoch: 24 step: 106, loss is 1.1358789205551147\n", - "epoch: 24 step: 107, loss is 1.0719413757324219\n", - "epoch: 24 step: 108, loss is 1.0628635883331299\n", - "epoch: 24 step: 109, loss is 1.0503345727920532\n", - "epoch: 24 step: 110, loss is 1.1274672746658325\n", - "epoch: 24 step: 111, loss is 1.0830402374267578\n", - "epoch: 24 step: 112, loss is 1.1192857027053833\n", - "epoch: 24 step: 113, loss is 1.070591688156128\n", - "epoch: 24 step: 114, loss is 1.1368317604064941\n", - "epoch: 24 step: 115, loss is 1.0880318880081177\n", - "epoch: 24 step: 116, loss is 1.1302921772003174\n", - "epoch: 24 step: 117, loss is 1.145160436630249\n", - "epoch: 24 step: 118, loss is 1.1119775772094727\n", - "epoch: 24 step: 119, loss is 1.0518125295639038\n", - "epoch: 24 step: 120, loss is 1.1172714233398438\n", - "epoch: 24 step: 121, loss is 1.1207104921340942\n", - "epoch: 24 step: 122, loss is 1.052086353302002\n", - "epoch: 24 step: 123, loss is 1.1225634813308716\n", - "epoch: 24 step: 124, loss is 1.1011667251586914\n", - "epoch: 24 step: 125, loss is 1.112138032913208\n", - "epoch: 24 step: 126, loss is 1.1802303791046143\n", - "epoch: 24 step: 127, loss is 1.1204378604888916\n", - "epoch: 24 step: 128, loss is 1.1151847839355469\n", - "epoch: 24 step: 129, loss is 1.0922772884368896\n", - "epoch: 24 step: 130, loss is 1.0205130577087402\n", - "epoch: 24 step: 131, loss is 
1.089955449104309\n", - "epoch: 24 step: 132, loss is 1.0827082395553589\n", - "epoch: 24 step: 133, loss is 1.1208171844482422\n", - "epoch: 24 step: 134, loss is 1.1053398847579956\n", - "epoch: 24 step: 135, loss is 1.0449259281158447\n", - "epoch: 24 step: 136, loss is 1.074751615524292\n", - "epoch: 24 step: 137, loss is 1.1305283308029175\n", - "epoch: 24 step: 138, loss is 1.1261173486709595\n", - "epoch: 24 step: 139, loss is 1.0394558906555176\n", - "epoch: 24 step: 140, loss is 1.1261268854141235\n", - "epoch: 24 step: 141, loss is 1.0963863134384155\n", - "epoch: 24 step: 142, loss is 1.045979619026184\n", - "epoch: 24 step: 143, loss is 1.0496078729629517\n", - "epoch: 24 step: 144, loss is 1.1875709295272827\n", - "epoch: 24 step: 145, loss is 1.0643904209136963\n", - "epoch: 24 step: 146, loss is 1.0871621370315552\n", - "epoch: 24 step: 147, loss is 1.1478910446166992\n", - "epoch: 24 step: 148, loss is 1.1920757293701172\n", - "epoch: 24 step: 149, loss is 1.1328034400939941\n", - "epoch: 24 step: 150, loss is 1.0893347263336182\n", - "epoch: 24 step: 151, loss is 1.1048067808151245\n", - "epoch: 24 step: 152, loss is 1.0077087879180908\n", - "epoch: 24 step: 153, loss is 1.0549124479293823\n", - "epoch: 24 step: 154, loss is 1.152020812034607\n", - "epoch: 24 step: 155, loss is 1.1228123903274536\n", - "epoch: 24 step: 156, loss is 1.1045303344726562\n", - "epoch: 24 step: 157, loss is 1.084133267402649\n", - "epoch: 24 step: 158, loss is 1.09710693359375\n", - "epoch: 24 step: 159, loss is 1.074958324432373\n", - "epoch: 24 step: 160, loss is 1.033774971961975\n", - "epoch: 24 step: 161, loss is 1.0859473943710327\n", - "epoch: 24 step: 162, loss is 1.1589056253433228\n", - "epoch: 24 step: 163, loss is 1.2113087177276611\n", - "epoch: 24 step: 164, loss is 1.1745445728302002\n", - "epoch: 24 step: 165, loss is 1.1192365884780884\n", - "epoch: 24 step: 166, loss is 1.068882703781128\n", - "epoch: 24 step: 167, loss is 1.0761008262634277\n", - "epoch: 24 step: 168, loss is 1.0699995756149292\n", - "epoch: 24 step: 169, loss is 1.1537774801254272\n", - "epoch: 24 step: 170, loss is 1.1350986957550049\n", - "epoch: 24 step: 171, loss is 1.0868403911590576\n", - "epoch: 24 step: 172, loss is 1.0896763801574707\n", - "epoch: 24 step: 173, loss is 1.1454657316207886\n", - "epoch: 24 step: 174, loss is 1.1377419233322144\n", - "epoch: 24 step: 175, loss is 1.0885043144226074\n", - "epoch: 24 step: 176, loss is 1.1249531507492065\n", - "epoch: 24 step: 177, loss is 1.1522539854049683\n", - "epoch: 24 step: 178, loss is 1.0993684530258179\n", - "epoch: 24 step: 179, loss is 1.1670578718185425\n", - "epoch: 24 step: 180, loss is 1.035400152206421\n", - "epoch: 24 step: 181, loss is 1.1165781021118164\n", - "epoch: 24 step: 182, loss is 1.075137972831726\n", - "epoch: 24 step: 183, loss is 1.0986744165420532\n", - "epoch: 24 step: 184, loss is 1.1392841339111328\n", - "epoch: 24 step: 185, loss is 1.0902831554412842\n", - "epoch: 24 step: 186, loss is 1.0261082649230957\n", - "epoch: 24 step: 187, loss is 1.0156962871551514\n", - "epoch: 24 step: 188, loss is 1.0787601470947266\n", - "epoch: 24 step: 189, loss is 1.0511951446533203\n", - "epoch: 24 step: 190, loss is 1.0000669956207275\n", - "epoch: 24 step: 191, loss is 1.1091296672821045\n", - "epoch: 24 step: 192, loss is 1.113128900527954\n", - "epoch: 24 step: 193, loss is 1.0678154230117798\n", - "epoch: 24 step: 194, loss is 1.1033992767333984\n", - "epoch: 24 step: 195, loss is 1.0996067523956299\n", - "Train epoch time: 
92146.717 ms, per step time: 472.547 ms\n", - "epoch: 25 step: 1, loss is 1.0944640636444092\n", - "epoch: 25 step: 2, loss is 1.1537492275238037\n", - "epoch: 25 step: 3, loss is 1.0278011560440063\n", - "epoch: 25 step: 4, loss is 1.0667750835418701\n", - "epoch: 25 step: 5, loss is 1.0738204717636108\n", - "epoch: 25 step: 6, loss is 1.0765408277511597\n", - "epoch: 25 step: 7, loss is 1.1048355102539062\n", - "epoch: 25 step: 8, loss is 1.0568161010742188\n", - "epoch: 25 step: 9, loss is 1.065824270248413\n", - "epoch: 25 step: 10, loss is 1.1337116956710815\n", - "epoch: 25 step: 11, loss is 1.0800821781158447\n", - "epoch: 25 step: 12, loss is 1.0967190265655518\n", - "epoch: 25 step: 13, loss is 1.097056269645691\n", - "epoch: 25 step: 14, loss is 1.0970062017440796\n", - "epoch: 25 step: 15, loss is 1.130616307258606\n", - "epoch: 25 step: 16, loss is 1.0671025514602661\n", - "epoch: 25 step: 17, loss is 0.995775043964386\n", - "epoch: 25 step: 18, loss is 1.0410147905349731\n", - "epoch: 25 step: 19, loss is 1.0764706134796143\n", - "epoch: 25 step: 20, loss is 1.086004376411438\n", - "epoch: 25 step: 21, loss is 1.176888346672058\n", - "epoch: 25 step: 22, loss is 1.1183147430419922\n", - "epoch: 25 step: 23, loss is 1.0882411003112793\n", - "epoch: 25 step: 24, loss is 1.1074618101119995\n", - "epoch: 25 step: 25, loss is 1.048006296157837\n", - "epoch: 25 step: 26, loss is 1.0748313665390015\n", - "epoch: 25 step: 27, loss is 0.9693728685379028\n", - "epoch: 25 step: 28, loss is 1.1275076866149902\n", - "epoch: 25 step: 29, loss is 1.00841224193573\n", - "epoch: 25 step: 30, loss is 1.0752159357070923\n", - "epoch: 25 step: 31, loss is 1.0895309448242188\n", - "epoch: 25 step: 32, loss is 1.0762090682983398\n", - "epoch: 25 step: 33, loss is 1.0111603736877441\n", - "epoch: 25 step: 34, loss is 1.07645583152771\n", - "epoch: 25 step: 35, loss is 1.150286316871643\n", - "epoch: 25 step: 36, loss is 1.0705536603927612\n", - "epoch: 25 step: 37, loss is 1.0057810544967651\n", - "epoch: 25 step: 38, loss is 0.9676169157028198\n", - "epoch: 25 step: 39, loss is 1.1130061149597168\n", - "epoch: 25 step: 40, loss is 1.109873652458191\n", - "epoch: 25 step: 41, loss is 1.090269684791565\n", - "epoch: 25 step: 42, loss is 1.0881240367889404\n", - "epoch: 25 step: 43, loss is 1.1059993505477905\n", - "epoch: 25 step: 44, loss is 1.137759804725647\n", - "epoch: 25 step: 45, loss is 1.131312608718872\n", - "epoch: 25 step: 46, loss is 1.0295906066894531\n", - "epoch: 25 step: 47, loss is 1.0685251951217651\n", - "epoch: 25 step: 48, loss is 1.1225147247314453\n", - "epoch: 25 step: 49, loss is 1.0938369035720825\n", - "epoch: 25 step: 50, loss is 1.1687977313995361\n", - "epoch: 25 step: 51, loss is 1.0433377027511597\n", - "epoch: 25 step: 52, loss is 1.0630183219909668\n", - "epoch: 25 step: 53, loss is 1.106493353843689\n", - "epoch: 25 step: 54, loss is 1.1200652122497559\n", - "epoch: 25 step: 55, loss is 1.0635850429534912\n", - "epoch: 25 step: 56, loss is 1.1189876794815063\n", - "epoch: 25 step: 57, loss is 1.0621880292892456\n", - "epoch: 25 step: 58, loss is 1.05171537399292\n", - "epoch: 25 step: 59, loss is 1.1661138534545898\n", - "epoch: 25 step: 60, loss is 1.106707215309143\n", - "epoch: 25 step: 61, loss is 1.061164140701294\n", - "epoch: 25 step: 62, loss is 1.1553099155426025\n", - "epoch: 25 step: 63, loss is 1.0260666608810425\n", - "epoch: 25 step: 64, loss is 1.132649540901184\n", - "epoch: 25 step: 65, loss is 1.0889328718185425\n", - "epoch: 25 step: 66, loss is 
1.1044869422912598\n", - "epoch: 25 step: 67, loss is 1.1112422943115234\n", - "epoch: 25 step: 68, loss is 1.0697600841522217\n", - "epoch: 25 step: 69, loss is 1.0266914367675781\n", - "epoch: 25 step: 70, loss is 1.1367233991622925\n", - "epoch: 25 step: 71, loss is 1.1762535572052002\n", - "epoch: 25 step: 72, loss is 0.9579718112945557\n", - "epoch: 25 step: 73, loss is 1.080369472503662\n", - "epoch: 25 step: 74, loss is 1.0514192581176758\n", - "epoch: 25 step: 75, loss is 1.0466524362564087\n", - "epoch: 25 step: 76, loss is 1.0832782983779907\n", - "epoch: 25 step: 77, loss is 1.0952484607696533\n", - "epoch: 25 step: 78, loss is 1.0719014406204224\n", - "epoch: 25 step: 79, loss is 0.999049186706543\n", - "epoch: 25 step: 80, loss is 1.0770364999771118\n", - "epoch: 25 step: 81, loss is 1.0887103080749512\n", - "epoch: 25 step: 82, loss is 1.1109684705734253\n", - "epoch: 25 step: 83, loss is 1.0503671169281006\n", - "epoch: 25 step: 84, loss is 1.1708521842956543\n", - "epoch: 25 step: 85, loss is 1.105607271194458\n", - "epoch: 25 step: 86, loss is 1.1338499784469604\n", - "epoch: 25 step: 87, loss is 1.1106376647949219\n", - "epoch: 25 step: 88, loss is 1.0791434049606323\n", - "epoch: 25 step: 89, loss is 1.0062893629074097\n", - "epoch: 25 step: 90, loss is 1.019977331161499\n", - "epoch: 25 step: 91, loss is 1.0760611295700073\n", - "epoch: 25 step: 92, loss is 1.0790257453918457\n", - "epoch: 25 step: 93, loss is 1.0402240753173828\n", - "epoch: 25 step: 94, loss is 1.1342862844467163\n", - "epoch: 25 step: 95, loss is 1.0273258686065674\n", - "epoch: 25 step: 96, loss is 1.1025688648223877\n", - "epoch: 25 step: 97, loss is 1.1238374710083008\n", - "epoch: 25 step: 98, loss is 1.0744726657867432\n", - "epoch: 25 step: 99, loss is 1.1032700538635254\n", - "epoch: 25 step: 100, loss is 1.137713074684143\n", - "epoch: 25 step: 101, loss is 1.0966110229492188\n", - "epoch: 25 step: 102, loss is 1.1041685342788696\n", - "epoch: 25 step: 103, loss is 1.0489667654037476\n", - "epoch: 25 step: 104, loss is 1.0010182857513428\n", - "epoch: 25 step: 105, loss is 1.0343749523162842\n", - "epoch: 25 step: 106, loss is 1.0772194862365723\n", - "epoch: 25 step: 107, loss is 0.999282956123352\n", - "epoch: 25 step: 108, loss is 1.1229469776153564\n", - "epoch: 25 step: 109, loss is 1.0480936765670776\n", - "epoch: 25 step: 110, loss is 1.0306779146194458\n", - "epoch: 25 step: 111, loss is 1.0049049854278564\n", - "epoch: 25 step: 112, loss is 1.0112063884735107\n", - "epoch: 25 step: 113, loss is 1.0822912454605103\n", - "epoch: 25 step: 114, loss is 1.0411224365234375\n", - "epoch: 25 step: 115, loss is 1.0773202180862427\n", - "epoch: 25 step: 116, loss is 1.0551668405532837\n", - "epoch: 25 step: 117, loss is 1.1168681383132935\n", - "epoch: 25 step: 118, loss is 0.9704387187957764\n", - "epoch: 25 step: 119, loss is 1.134149432182312\n", - "epoch: 25 step: 120, loss is 0.9871140718460083\n", - "epoch: 25 step: 121, loss is 1.0210661888122559\n", - "epoch: 25 step: 122, loss is 1.1468297243118286\n", - "epoch: 25 step: 123, loss is 1.1028860807418823\n", - "epoch: 25 step: 124, loss is 1.1045527458190918\n", - "epoch: 25 step: 125, loss is 1.0534635782241821\n", - "epoch: 25 step: 126, loss is 1.0983967781066895\n", - "epoch: 25 step: 127, loss is 1.0373344421386719\n", - "epoch: 25 step: 128, loss is 1.1219136714935303\n", - "epoch: 25 step: 129, loss is 1.068048357963562\n", - "epoch: 25 step: 130, loss is 1.091484785079956\n", - "epoch: 25 step: 131, loss is 1.032631278038025\n", 
- "epoch: 25 step: 132, loss is 1.1128851175308228\n", - "epoch: 25 step: 133, loss is 1.087246060371399\n", - "epoch: 25 step: 134, loss is 1.124280333518982\n", - "epoch: 25 step: 135, loss is 1.1236577033996582\n", - "epoch: 25 step: 136, loss is 1.0672986507415771\n", - "epoch: 25 step: 137, loss is 1.0534167289733887\n", - "epoch: 25 step: 138, loss is 1.1626495122909546\n", - "epoch: 25 step: 139, loss is 1.0787688493728638\n", - "epoch: 25 step: 140, loss is 1.0058670043945312\n", - "epoch: 25 step: 141, loss is 1.118138074874878\n", - "epoch: 25 step: 142, loss is 1.1579453945159912\n", - "epoch: 25 step: 143, loss is 0.9916603565216064\n", - "epoch: 25 step: 144, loss is 1.0520808696746826\n", - "epoch: 25 step: 145, loss is 1.0431550741195679\n", - "epoch: 25 step: 146, loss is 1.1541956663131714\n", - "epoch: 25 step: 147, loss is 1.105986475944519\n", - "epoch: 25 step: 148, loss is 1.0831660032272339\n", - "epoch: 25 step: 149, loss is 1.1066980361938477\n", - "epoch: 25 step: 150, loss is 1.0333201885223389\n", - "epoch: 25 step: 151, loss is 1.0604230165481567\n", - "epoch: 25 step: 152, loss is 1.0225627422332764\n", - "epoch: 25 step: 153, loss is 1.0901498794555664\n", - "epoch: 25 step: 154, loss is 1.0770379304885864\n", - "epoch: 25 step: 155, loss is 1.0785655975341797\n", - "epoch: 25 step: 156, loss is 1.092533826828003\n", - "epoch: 25 step: 157, loss is 1.080465316772461\n", - "epoch: 25 step: 158, loss is 1.0412288904190063\n", - "epoch: 25 step: 159, loss is 1.1183334589004517\n", - "epoch: 25 step: 160, loss is 1.1458343267440796\n", - "epoch: 25 step: 161, loss is 1.0706539154052734\n", - "epoch: 25 step: 162, loss is 1.157791018486023\n", - "epoch: 25 step: 163, loss is 1.092441201210022\n", - "epoch: 25 step: 164, loss is 1.073870062828064\n", - "epoch: 25 step: 165, loss is 1.0526149272918701\n", - "epoch: 25 step: 166, loss is 1.1032319068908691\n", - "epoch: 25 step: 167, loss is 1.0222210884094238\n", - "epoch: 25 step: 168, loss is 1.135607361793518\n", - "epoch: 25 step: 169, loss is 1.1079177856445312\n", - "epoch: 25 step: 170, loss is 1.1704673767089844\n", - "epoch: 25 step: 171, loss is 1.1687424182891846\n", - "epoch: 25 step: 172, loss is 1.0616486072540283\n", - "epoch: 25 step: 173, loss is 1.079866647720337\n", - "epoch: 25 step: 174, loss is 1.057521104812622\n", - "epoch: 25 step: 175, loss is 1.0851926803588867\n", - "epoch: 25 step: 176, loss is 1.0408588647842407\n", - "epoch: 25 step: 177, loss is 1.158246636390686\n", - "epoch: 25 step: 178, loss is 1.0870460271835327\n", - "epoch: 25 step: 179, loss is 1.0948772430419922\n", - "epoch: 25 step: 180, loss is 0.9931520223617554\n", - "epoch: 25 step: 181, loss is 1.0503507852554321\n", - "epoch: 25 step: 182, loss is 1.0508701801300049\n", - "epoch: 25 step: 183, loss is 1.0698341131210327\n", - "epoch: 25 step: 184, loss is 1.082878828048706\n", - "epoch: 25 step: 185, loss is 1.141692876815796\n", - "epoch: 25 step: 186, loss is 1.0640296936035156\n", - "epoch: 25 step: 187, loss is 1.0724351406097412\n", - "epoch: 25 step: 188, loss is 1.1006282567977905\n", - "epoch: 25 step: 189, loss is 1.093940019607544\n", - "epoch: 25 step: 190, loss is 1.0338797569274902\n", - "epoch: 25 step: 191, loss is 1.0126454830169678\n", - "epoch: 25 step: 192, loss is 1.049782395362854\n", - "epoch: 25 step: 193, loss is 1.0432065725326538\n", - "epoch: 25 step: 194, loss is 1.0758951902389526\n", - "epoch: 25 step: 195, loss is 1.0382393598556519\n", - "Train epoch time: 99232.283 ms, per step time: 
508.884 ms\n", - "epoch: 26 step: 1, loss is 1.0517640113830566\n", - "epoch: 26 step: 2, loss is 1.034106731414795\n", - "epoch: 26 step: 3, loss is 1.1050300598144531\n", - "epoch: 26 step: 4, loss is 1.0493968725204468\n", - "epoch: 26 step: 5, loss is 1.0649911165237427\n", - "epoch: 26 step: 6, loss is 1.105018973350525\n", - "epoch: 26 step: 7, loss is 1.0444979667663574\n", - "epoch: 26 step: 8, loss is 1.1349263191223145\n", - "epoch: 26 step: 9, loss is 1.0790820121765137\n", - "epoch: 26 step: 10, loss is 1.0510661602020264\n", - "epoch: 26 step: 11, loss is 1.1816296577453613\n", - "epoch: 26 step: 12, loss is 1.0029466152191162\n", - "epoch: 26 step: 13, loss is 1.0195821523666382\n", - "epoch: 26 step: 14, loss is 1.0374330282211304\n", - "epoch: 26 step: 15, loss is 1.079158902168274\n", - "epoch: 26 step: 16, loss is 1.0664851665496826\n", - "epoch: 26 step: 17, loss is 1.0550463199615479\n", - "epoch: 26 step: 18, loss is 1.0381886959075928\n", - "epoch: 26 step: 19, loss is 1.0533243417739868\n", - "epoch: 26 step: 20, loss is 1.0075640678405762\n", - "epoch: 26 step: 21, loss is 1.0217363834381104\n", - "epoch: 26 step: 22, loss is 1.0725574493408203\n", - "epoch: 26 step: 23, loss is 1.0504868030548096\n", - "epoch: 26 step: 24, loss is 1.011362075805664\n", - "epoch: 26 step: 25, loss is 1.0163311958312988\n", - "epoch: 26 step: 26, loss is 1.0692439079284668\n", - "epoch: 26 step: 27, loss is 1.1008000373840332\n", - "epoch: 26 step: 28, loss is 1.0959250926971436\n", - "epoch: 26 step: 29, loss is 1.0228633880615234\n", - "epoch: 26 step: 30, loss is 1.0511887073516846\n", - "epoch: 26 step: 31, loss is 1.0608770847320557\n", - "epoch: 26 step: 32, loss is 1.0528080463409424\n", - "epoch: 26 step: 33, loss is 1.140068769454956\n", - "epoch: 26 step: 34, loss is 1.0786501169204712\n", - "epoch: 26 step: 35, loss is 1.0421595573425293\n", - "epoch: 26 step: 36, loss is 1.0266063213348389\n", - "epoch: 26 step: 37, loss is 1.0157393217086792\n", - "epoch: 26 step: 38, loss is 0.9573328495025635\n", - "epoch: 26 step: 39, loss is 1.0755560398101807\n", - "epoch: 26 step: 40, loss is 1.1283622980117798\n", - "epoch: 26 step: 41, loss is 1.1111503839492798\n", - "epoch: 26 step: 42, loss is 0.9759970307350159\n", - "epoch: 26 step: 43, loss is 1.0019840002059937\n", - "epoch: 26 step: 44, loss is 1.0777231454849243\n", - "epoch: 26 step: 45, loss is 1.0543546676635742\n", - "epoch: 26 step: 46, loss is 1.092705249786377\n", - "epoch: 26 step: 47, loss is 1.0773663520812988\n", - "epoch: 26 step: 48, loss is 1.0804762840270996\n", - "epoch: 26 step: 49, loss is 0.9640929698944092\n", - "epoch: 26 step: 50, loss is 1.1404153108596802\n", - "epoch: 26 step: 51, loss is 1.1303207874298096\n", - "epoch: 26 step: 52, loss is 1.0307775735855103\n", - "epoch: 26 step: 53, loss is 1.0385438203811646\n", - "epoch: 26 step: 54, loss is 1.020154595375061\n", - "epoch: 26 step: 55, loss is 1.0177607536315918\n", - "epoch: 26 step: 56, loss is 1.1031110286712646\n", - "epoch: 26 step: 57, loss is 1.0564035177230835\n", - "epoch: 26 step: 58, loss is 1.103069543838501\n", - "epoch: 26 step: 59, loss is 1.0753121376037598\n", - "epoch: 26 step: 60, loss is 1.086449384689331\n", - "epoch: 26 step: 61, loss is 1.093638300895691\n", - "epoch: 26 step: 62, loss is 1.0846304893493652\n", - "epoch: 26 step: 63, loss is 1.0204472541809082\n", - "epoch: 26 step: 64, loss is 1.0205191373825073\n", - "epoch: 26 step: 65, loss is 1.0733025074005127\n", - "epoch: 26 step: 66, loss is 
1.0562231540679932\n", - "epoch: 26 step: 67, loss is 1.0363019704818726\n", - "epoch: 26 step: 68, loss is 1.0184295177459717\n", - "epoch: 26 step: 69, loss is 1.1273438930511475\n", - "epoch: 26 step: 70, loss is 1.063193678855896\n", - "epoch: 26 step: 71, loss is 1.092448115348816\n", - "epoch: 26 step: 72, loss is 1.0181409120559692\n", - "epoch: 26 step: 73, loss is 1.1541016101837158\n", - "epoch: 26 step: 74, loss is 1.0397893190383911\n", - "epoch: 26 step: 75, loss is 1.0326511859893799\n", - "epoch: 26 step: 76, loss is 1.1474494934082031\n", - "epoch: 26 step: 77, loss is 1.1063835620880127\n", - "epoch: 26 step: 78, loss is 1.1577624082565308\n", - "epoch: 26 step: 79, loss is 1.1082344055175781\n", - "epoch: 26 step: 80, loss is 1.0959268808364868\n", - "epoch: 26 step: 81, loss is 1.0803630352020264\n", - "epoch: 26 step: 82, loss is 1.042537808418274\n", - "epoch: 26 step: 83, loss is 1.0456452369689941\n", - "epoch: 26 step: 84, loss is 1.0787311792373657\n", - "epoch: 26 step: 85, loss is 1.1113595962524414\n", - "epoch: 26 step: 86, loss is 1.0774568319320679\n", - "epoch: 26 step: 87, loss is 1.05318284034729\n", - "epoch: 26 step: 88, loss is 1.121375322341919\n", - "epoch: 26 step: 89, loss is 1.0215983390808105\n", - "epoch: 26 step: 90, loss is 0.9743614196777344\n", - "epoch: 26 step: 91, loss is 1.1169620752334595\n", - "epoch: 26 step: 92, loss is 1.050586223602295\n", - "epoch: 26 step: 93, loss is 1.048630714416504\n", - "epoch: 26 step: 94, loss is 1.0937471389770508\n", - "epoch: 26 step: 95, loss is 1.026048183441162\n", - "epoch: 26 step: 96, loss is 1.0549015998840332\n", - "epoch: 26 step: 97, loss is 1.0495948791503906\n", - "epoch: 26 step: 98, loss is 1.0430347919464111\n", - "epoch: 26 step: 99, loss is 1.045041561126709\n", - "epoch: 26 step: 100, loss is 0.9819204807281494\n", - "epoch: 26 step: 101, loss is 1.0283970832824707\n", - "epoch: 26 step: 102, loss is 1.0337718725204468\n", - "epoch: 26 step: 103, loss is 1.094991683959961\n", - "epoch: 26 step: 104, loss is 1.064126968383789\n", - "epoch: 26 step: 105, loss is 1.0757126808166504\n", - "epoch: 26 step: 106, loss is 1.02644944190979\n", - "epoch: 26 step: 107, loss is 0.9997298121452332\n", - "epoch: 26 step: 108, loss is 1.055686116218567\n", - "epoch: 26 step: 109, loss is 1.0563688278198242\n", - "epoch: 26 step: 110, loss is 1.0874462127685547\n", - "epoch: 26 step: 111, loss is 1.0292081832885742\n", - "epoch: 26 step: 112, loss is 1.1482970714569092\n", - "epoch: 26 step: 113, loss is 1.0572491884231567\n", - "epoch: 26 step: 114, loss is 1.0562753677368164\n", - "epoch: 26 step: 115, loss is 1.0584640502929688\n", - "epoch: 26 step: 116, loss is 1.0785645246505737\n", - "epoch: 26 step: 117, loss is 1.0729360580444336\n", - "epoch: 26 step: 118, loss is 0.9676029086112976\n", - "epoch: 26 step: 119, loss is 1.0841299295425415\n", - "epoch: 26 step: 120, loss is 1.072570562362671\n", - "epoch: 26 step: 121, loss is 1.1914576292037964\n", - "epoch: 26 step: 122, loss is 0.9901759028434753\n", - "epoch: 26 step: 123, loss is 1.0641156435012817\n", - "epoch: 26 step: 124, loss is 0.9963059425354004\n", - "epoch: 26 step: 125, loss is 0.985388994216919\n", - "epoch: 26 step: 126, loss is 1.1377520561218262\n", - "epoch: 26 step: 127, loss is 1.0856072902679443\n", - "epoch: 26 step: 128, loss is 1.0533015727996826\n", - "epoch: 26 step: 129, loss is 1.015711784362793\n", - "epoch: 26 step: 130, loss is 1.0190331935882568\n", - "epoch: 26 step: 131, loss is 1.1201162338256836\n", - 
"epoch: 26 step: 132, loss is 1.0186271667480469\n", - "epoch: 26 step: 133, loss is 1.0069947242736816\n", - "epoch: 26 step: 134, loss is 1.0293490886688232\n", - "epoch: 26 step: 135, loss is 1.0155060291290283\n", - "epoch: 26 step: 136, loss is 1.0905357599258423\n", - "epoch: 26 step: 137, loss is 1.0570062398910522\n", - "epoch: 26 step: 138, loss is 1.1058920621871948\n", - "epoch: 26 step: 139, loss is 1.0636885166168213\n", - "epoch: 26 step: 140, loss is 1.0073214769363403\n", - "epoch: 26 step: 141, loss is 1.0989006757736206\n", - "epoch: 26 step: 142, loss is 1.0409773588180542\n", - "epoch: 26 step: 143, loss is 1.1236248016357422\n", - "epoch: 26 step: 144, loss is 1.1278859376907349\n", - "epoch: 26 step: 145, loss is 1.127524971961975\n", - "epoch: 26 step: 146, loss is 1.0904924869537354\n", - "epoch: 26 step: 147, loss is 1.0627973079681396\n", - "epoch: 26 step: 148, loss is 1.025049090385437\n", - "epoch: 26 step: 149, loss is 1.144707202911377\n", - "epoch: 26 step: 150, loss is 0.9949439764022827\n", - "epoch: 26 step: 151, loss is 1.0586516857147217\n", - "epoch: 26 step: 152, loss is 1.1403663158416748\n", - "epoch: 26 step: 153, loss is 1.013765811920166\n", - "epoch: 26 step: 154, loss is 1.0705132484436035\n", - "epoch: 26 step: 155, loss is 1.0457738637924194\n", - "epoch: 26 step: 156, loss is 1.1553254127502441\n", - "epoch: 26 step: 157, loss is 1.1338465213775635\n", - "epoch: 26 step: 158, loss is 1.0925546884536743\n", - "epoch: 26 step: 159, loss is 1.039358139038086\n", - "epoch: 26 step: 160, loss is 1.0264304876327515\n", - "epoch: 26 step: 161, loss is 1.0180381536483765\n", - "epoch: 26 step: 162, loss is 1.0640860795974731\n", - "epoch: 26 step: 163, loss is 1.1145057678222656\n", - "epoch: 26 step: 164, loss is 1.052668571472168\n", - "epoch: 26 step: 165, loss is 1.0262118577957153\n", - "epoch: 26 step: 166, loss is 1.0479339361190796\n", - "epoch: 26 step: 167, loss is 1.0635230541229248\n", - "epoch: 26 step: 168, loss is 1.0503597259521484\n", - "epoch: 26 step: 169, loss is 1.0509954690933228\n", - "epoch: 26 step: 170, loss is 1.0316325426101685\n", - "epoch: 26 step: 171, loss is 1.0291749238967896\n", - "epoch: 26 step: 172, loss is 0.9605768918991089\n", - "epoch: 26 step: 173, loss is 1.0807660818099976\n", - "epoch: 26 step: 174, loss is 1.0707502365112305\n", - "epoch: 26 step: 175, loss is 1.0711973905563354\n", - "epoch: 26 step: 176, loss is 1.0418204069137573\n", - "epoch: 26 step: 177, loss is 1.1052236557006836\n", - "epoch: 26 step: 178, loss is 1.0412814617156982\n", - "epoch: 26 step: 179, loss is 1.115617275238037\n", - "epoch: 26 step: 180, loss is 1.015880823135376\n", - "epoch: 26 step: 181, loss is 1.1110204458236694\n", - "epoch: 26 step: 182, loss is 1.1573785543441772\n", - "epoch: 26 step: 183, loss is 1.0637462139129639\n", - "epoch: 26 step: 184, loss is 1.077017068862915\n", - "epoch: 26 step: 185, loss is 0.9998891949653625\n", - "epoch: 26 step: 186, loss is 1.0074355602264404\n", - "epoch: 26 step: 187, loss is 1.10640287399292\n", - "epoch: 26 step: 188, loss is 0.9725328087806702\n", - "epoch: 26 step: 189, loss is 1.0363848209381104\n", - "epoch: 26 step: 190, loss is 1.0673935413360596\n", - "epoch: 26 step: 191, loss is 1.0101263523101807\n", - "epoch: 26 step: 192, loss is 1.0690515041351318\n", - "epoch: 26 step: 193, loss is 1.0714102983474731\n", - "epoch: 26 step: 194, loss is 0.9500989317893982\n", - "epoch: 26 step: 195, loss is 1.0447680950164795\n", - "Train epoch time: 95797.186 ms, per step 
time: 491.268 ms\n", - "epoch: 27 step: 1, loss is 1.031938076019287\n", - "epoch: 27 step: 2, loss is 1.0668877363204956\n", - "epoch: 27 step: 3, loss is 0.9860163331031799\n", - "epoch: 27 step: 4, loss is 1.0331521034240723\n", - "epoch: 27 step: 5, loss is 1.060064435005188\n", - "epoch: 27 step: 6, loss is 1.0411007404327393\n", - "epoch: 27 step: 7, loss is 1.0935925245285034\n", - "epoch: 27 step: 8, loss is 1.0122017860412598\n", - "epoch: 27 step: 9, loss is 0.9699509143829346\n", - "epoch: 27 step: 10, loss is 1.0239531993865967\n", - "epoch: 27 step: 11, loss is 1.0220377445220947\n", - "epoch: 27 step: 12, loss is 1.0477886199951172\n", - "epoch: 27 step: 13, loss is 1.059556245803833\n", - "epoch: 27 step: 14, loss is 1.021897554397583\n", - "epoch: 27 step: 15, loss is 1.0930849313735962\n", - "epoch: 27 step: 16, loss is 1.0485190153121948\n", - "epoch: 27 step: 17, loss is 0.998543381690979\n", - "epoch: 27 step: 18, loss is 1.0500601530075073\n", - "epoch: 27 step: 19, loss is 1.1116565465927124\n", - "epoch: 27 step: 20, loss is 1.0387561321258545\n", - "epoch: 27 step: 21, loss is 1.0739219188690186\n", - "epoch: 27 step: 22, loss is 1.0172758102416992\n", - "epoch: 27 step: 23, loss is 1.0142052173614502\n", - "epoch: 27 step: 24, loss is 1.0664150714874268\n", - "epoch: 27 step: 25, loss is 1.0585476160049438\n", - "epoch: 27 step: 26, loss is 1.0972603559494019\n", - "epoch: 27 step: 27, loss is 1.1184229850769043\n", - "epoch: 27 step: 28, loss is 0.9801948070526123\n", - "epoch: 27 step: 29, loss is 1.0599758625030518\n", - "epoch: 27 step: 30, loss is 1.0749592781066895\n", - "epoch: 27 step: 31, loss is 1.012206792831421\n", - "epoch: 27 step: 32, loss is 1.0299081802368164\n", - "epoch: 27 step: 33, loss is 1.0116221904754639\n", - "epoch: 27 step: 34, loss is 1.0666142702102661\n", - "epoch: 27 step: 35, loss is 0.9941093921661377\n", - "epoch: 27 step: 36, loss is 1.0543272495269775\n", - "epoch: 27 step: 37, loss is 1.0386252403259277\n", - "epoch: 27 step: 38, loss is 1.0291391611099243\n", - "epoch: 27 step: 39, loss is 1.0993064641952515\n", - "epoch: 27 step: 40, loss is 1.0329136848449707\n", - "epoch: 27 step: 41, loss is 1.0471508502960205\n", - "epoch: 27 step: 42, loss is 0.9570472836494446\n", - "epoch: 27 step: 43, loss is 1.1126043796539307\n", - "epoch: 27 step: 44, loss is 1.0687880516052246\n", - "epoch: 27 step: 45, loss is 1.0175225734710693\n", - "epoch: 27 step: 46, loss is 1.0748958587646484\n", - "epoch: 27 step: 47, loss is 1.036515712738037\n", - "epoch: 27 step: 48, loss is 0.9884911775588989\n", - "epoch: 27 step: 49, loss is 1.0220438241958618\n", - "epoch: 27 step: 50, loss is 1.0583467483520508\n", - "epoch: 27 step: 51, loss is 1.0183724164962769\n", - "epoch: 27 step: 52, loss is 1.0568006038665771\n", - "epoch: 27 step: 53, loss is 1.0342612266540527\n", - "epoch: 27 step: 54, loss is 1.0179940462112427\n", - "epoch: 27 step: 55, loss is 1.0346248149871826\n", - "epoch: 27 step: 56, loss is 1.0310218334197998\n", - "epoch: 27 step: 57, loss is 1.0720340013504028\n", - "epoch: 27 step: 58, loss is 1.1268504858016968\n", - "epoch: 27 step: 59, loss is 1.0168635845184326\n", - "epoch: 27 step: 60, loss is 1.0409904718399048\n", - "epoch: 27 step: 61, loss is 1.0128271579742432\n", - "epoch: 27 step: 62, loss is 1.0853266716003418\n", - "epoch: 27 step: 63, loss is 0.9940942525863647\n", - "epoch: 27 step: 64, loss is 1.0802412033081055\n", - "epoch: 27 step: 65, loss is 1.0525023937225342\n", - "epoch: 27 step: 66, loss is 
1.0741641521453857\n", - "epoch: 27 step: 67, loss is 1.0447280406951904\n", - "epoch: 27 step: 68, loss is 1.0534757375717163\n", - "epoch: 27 step: 69, loss is 1.0584118366241455\n", - "epoch: 27 step: 70, loss is 1.0397610664367676\n", - "epoch: 27 step: 71, loss is 1.027557373046875\n", - "epoch: 27 step: 72, loss is 1.07358980178833\n", - "epoch: 27 step: 73, loss is 1.0978827476501465\n", - "epoch: 27 step: 74, loss is 1.0359325408935547\n", - "epoch: 27 step: 75, loss is 1.1403580904006958\n", - "epoch: 27 step: 76, loss is 1.0549849271774292\n", - "epoch: 27 step: 77, loss is 1.0214375257492065\n", - "epoch: 27 step: 78, loss is 0.9793615341186523\n", - "epoch: 27 step: 79, loss is 0.9985888004302979\n", - "epoch: 27 step: 80, loss is 1.0429975986480713\n", - "epoch: 27 step: 81, loss is 1.0607985258102417\n", - "epoch: 27 step: 82, loss is 1.0195157527923584\n", - "epoch: 27 step: 83, loss is 1.032827377319336\n", - "epoch: 27 step: 84, loss is 1.0506346225738525\n", - "epoch: 27 step: 85, loss is 1.0550663471221924\n", - "epoch: 27 step: 86, loss is 1.014336347579956\n", - "epoch: 27 step: 87, loss is 1.024290680885315\n", - "epoch: 27 step: 88, loss is 1.053837776184082\n", - "epoch: 27 step: 89, loss is 1.1035176515579224\n", - "epoch: 27 step: 90, loss is 1.0343066453933716\n", - "epoch: 27 step: 91, loss is 1.0723637342453003\n", - "epoch: 27 step: 92, loss is 1.046098232269287\n", - "epoch: 27 step: 93, loss is 1.1159708499908447\n", - "epoch: 27 step: 94, loss is 1.1092207431793213\n", - "epoch: 27 step: 95, loss is 1.028820276260376\n", - "epoch: 27 step: 96, loss is 0.9988867044448853\n", - "epoch: 27 step: 97, loss is 1.0653789043426514\n", - "epoch: 27 step: 98, loss is 1.021823525428772\n", - "epoch: 27 step: 99, loss is 1.1179784536361694\n", - "epoch: 27 step: 100, loss is 0.9874042272567749\n", - "epoch: 27 step: 101, loss is 1.0595815181732178\n", - "epoch: 27 step: 102, loss is 1.009967565536499\n", - "epoch: 27 step: 103, loss is 1.0396479368209839\n", - "epoch: 27 step: 104, loss is 1.1013263463974\n", - "epoch: 27 step: 105, loss is 1.05772864818573\n", - "epoch: 27 step: 106, loss is 1.0566017627716064\n", - "epoch: 27 step: 107, loss is 1.0697084665298462\n", - "epoch: 27 step: 108, loss is 1.1415010690689087\n", - "epoch: 27 step: 109, loss is 1.092660903930664\n", - "epoch: 27 step: 110, loss is 1.0666115283966064\n", - "epoch: 27 step: 111, loss is 0.9677072763442993\n", - "epoch: 27 step: 112, loss is 0.9994451999664307\n", - "epoch: 27 step: 113, loss is 1.0586915016174316\n", - "epoch: 27 step: 114, loss is 1.028842806816101\n", - "epoch: 27 step: 115, loss is 1.028630018234253\n", - "epoch: 27 step: 116, loss is 0.9947841167449951\n", - "epoch: 27 step: 117, loss is 1.0322678089141846\n", - "epoch: 27 step: 118, loss is 1.1226840019226074\n", - "epoch: 27 step: 119, loss is 1.0896713733673096\n", - "epoch: 27 step: 120, loss is 1.1376898288726807\n", - "epoch: 27 step: 121, loss is 0.9801294803619385\n", - "epoch: 27 step: 122, loss is 1.063185214996338\n", - "epoch: 27 step: 123, loss is 1.0831260681152344\n", - "epoch: 27 step: 124, loss is 1.030491828918457\n", - "epoch: 27 step: 125, loss is 0.9727450013160706\n", - "epoch: 27 step: 126, loss is 0.9762457609176636\n", - "epoch: 27 step: 127, loss is 1.01838219165802\n", - "epoch: 27 step: 128, loss is 1.0679218769073486\n", - "epoch: 27 step: 129, loss is 1.0443129539489746\n", - "epoch: 27 step: 130, loss is 1.0435552597045898\n", - "epoch: 27 step: 131, loss is 0.9975802898406982\n", - "epoch: 27 
step: 132, loss is 0.9979383945465088\n", - "epoch: 27 step: 133, loss is 1.0742639303207397\n", - "epoch: 27 step: 134, loss is 1.0108975172042847\n", - "epoch: 27 step: 135, loss is 0.9972212314605713\n", - "epoch: 27 step: 136, loss is 0.9738519787788391\n", - "epoch: 27 step: 137, loss is 1.0242283344268799\n", - "epoch: 27 step: 138, loss is 1.031661033630371\n", - "epoch: 27 step: 139, loss is 1.0496327877044678\n", - "epoch: 27 step: 140, loss is 0.9991055727005005\n", - "epoch: 27 step: 141, loss is 1.050097107887268\n", - "epoch: 27 step: 142, loss is 1.1076414585113525\n", - "epoch: 27 step: 143, loss is 1.0060323476791382\n", - "epoch: 27 step: 144, loss is 1.009609341621399\n", - "epoch: 27 step: 145, loss is 1.0796658992767334\n", - "epoch: 27 step: 146, loss is 1.1502337455749512\n", - "epoch: 27 step: 147, loss is 1.1472514867782593\n", - "epoch: 27 step: 148, loss is 1.029049277305603\n", - "epoch: 27 step: 149, loss is 1.0642590522766113\n", - "epoch: 27 step: 150, loss is 1.123306155204773\n", - "epoch: 27 step: 151, loss is 1.0015729665756226\n", - "epoch: 27 step: 152, loss is 1.0918514728546143\n", - "epoch: 27 step: 153, loss is 1.1430081129074097\n", - "epoch: 27 step: 154, loss is 1.0776640176773071\n", - "epoch: 27 step: 155, loss is 0.9962928295135498\n", - "epoch: 27 step: 156, loss is 1.065542221069336\n", - "epoch: 27 step: 157, loss is 0.974758505821228\n", - "epoch: 27 step: 158, loss is 1.0509774684906006\n", - "epoch: 27 step: 159, loss is 1.0239585638046265\n", - "epoch: 27 step: 160, loss is 1.0769890546798706\n", - "epoch: 27 step: 161, loss is 1.085533857345581\n", - "epoch: 27 step: 162, loss is 1.130881667137146\n", - "epoch: 27 step: 163, loss is 1.0892539024353027\n", - "epoch: 27 step: 164, loss is 1.0556637048721313\n", - "epoch: 27 step: 165, loss is 1.0475176572799683\n", - "epoch: 27 step: 166, loss is 0.9629400968551636\n", - "epoch: 27 step: 167, loss is 1.0016729831695557\n", - "epoch: 27 step: 168, loss is 1.0096464157104492\n", - "epoch: 27 step: 169, loss is 1.0475547313690186\n", - "epoch: 27 step: 170, loss is 1.0397958755493164\n", - "epoch: 27 step: 171, loss is 1.0928940773010254\n", - "epoch: 27 step: 172, loss is 1.0203711986541748\n", - "epoch: 27 step: 173, loss is 1.1169898509979248\n", - "epoch: 27 step: 174, loss is 1.0201783180236816\n", - "epoch: 27 step: 175, loss is 1.051028847694397\n", - "epoch: 27 step: 176, loss is 1.0660400390625\n", - "epoch: 27 step: 177, loss is 1.073883056640625\n", - "epoch: 27 step: 178, loss is 1.0735760927200317\n", - "epoch: 27 step: 179, loss is 1.0890427827835083\n", - "epoch: 27 step: 180, loss is 1.0405343770980835\n", - "epoch: 27 step: 181, loss is 1.0182738304138184\n", - "epoch: 27 step: 182, loss is 1.1307508945465088\n", - "epoch: 27 step: 183, loss is 1.092827558517456\n", - "epoch: 27 step: 184, loss is 1.081020712852478\n", - "epoch: 27 step: 185, loss is 1.046826958656311\n", - "epoch: 27 step: 186, loss is 1.1096196174621582\n", - "epoch: 27 step: 187, loss is 1.065929889678955\n", - "epoch: 27 step: 188, loss is 1.066022515296936\n", - "epoch: 27 step: 189, loss is 1.0617411136627197\n", - "epoch: 27 step: 190, loss is 1.0043805837631226\n", - "epoch: 27 step: 191, loss is 1.0108695030212402\n", - "epoch: 27 step: 192, loss is 1.0749719142913818\n", - "epoch: 27 step: 193, loss is 1.0734087228775024\n", - "epoch: 27 step: 194, loss is 1.1229863166809082\n", - "epoch: 27 step: 195, loss is 1.0395349264144897\n", - "Train epoch time: 94002.492 ms, per step time: 482.064 ms\n", - 
"epoch: 28 step: 1, loss is 1.0224573612213135\n", - "epoch: 28 step: 2, loss is 1.0593703985214233\n", - "epoch: 28 step: 3, loss is 1.0230135917663574\n", - "epoch: 28 step: 4, loss is 1.0694102048873901\n", - "epoch: 28 step: 5, loss is 1.1540521383285522\n", - "epoch: 28 step: 6, loss is 1.0666083097457886\n", - "epoch: 28 step: 7, loss is 1.0635886192321777\n", - "epoch: 28 step: 8, loss is 0.9830084443092346\n", - "epoch: 28 step: 9, loss is 1.0175553560256958\n", - "epoch: 28 step: 10, loss is 0.9548657536506653\n", - "epoch: 28 step: 11, loss is 1.08663809299469\n", - "epoch: 28 step: 12, loss is 1.076303482055664\n", - "epoch: 28 step: 13, loss is 0.9986739158630371\n", - "epoch: 28 step: 14, loss is 1.0483555793762207\n", - "epoch: 28 step: 15, loss is 1.0853404998779297\n", - "epoch: 28 step: 16, loss is 0.9979759454727173\n", - "epoch: 28 step: 17, loss is 0.9494763612747192\n", - "epoch: 28 step: 18, loss is 1.0613871812820435\n", - "epoch: 28 step: 19, loss is 1.085707187652588\n", - "epoch: 28 step: 20, loss is 1.054244041442871\n", - "epoch: 28 step: 21, loss is 1.0656378269195557\n", - "epoch: 28 step: 22, loss is 1.0790541172027588\n", - "epoch: 28 step: 23, loss is 1.0171475410461426\n", - "epoch: 28 step: 24, loss is 0.9682968854904175\n", - "epoch: 28 step: 25, loss is 1.0082801580429077\n", - "epoch: 28 step: 26, loss is 1.0530638694763184\n", - "epoch: 28 step: 27, loss is 1.0116281509399414\n", - "epoch: 28 step: 28, loss is 1.042952299118042\n", - "epoch: 28 step: 29, loss is 1.0154204368591309\n", - "epoch: 28 step: 30, loss is 0.9193597435951233\n", - "epoch: 28 step: 31, loss is 1.0131980180740356\n", - "epoch: 28 step: 32, loss is 1.0264443159103394\n", - "epoch: 28 step: 33, loss is 1.0679888725280762\n", - "epoch: 28 step: 34, loss is 1.0144927501678467\n", - "epoch: 28 step: 35, loss is 1.1293317079544067\n", - "epoch: 28 step: 36, loss is 1.0371441841125488\n", - "epoch: 28 step: 37, loss is 0.9944879412651062\n", - "epoch: 28 step: 38, loss is 1.056294560432434\n", - "epoch: 28 step: 39, loss is 0.995818018913269\n", - "epoch: 28 step: 40, loss is 1.031392216682434\n", - "epoch: 28 step: 41, loss is 1.1294634342193604\n", - "epoch: 28 step: 42, loss is 1.034539818763733\n", - "epoch: 28 step: 43, loss is 1.0686429738998413\n", - "epoch: 28 step: 44, loss is 1.011942982673645\n", - "epoch: 28 step: 45, loss is 1.0349501371383667\n", - "epoch: 28 step: 46, loss is 1.0007424354553223\n", - "epoch: 28 step: 47, loss is 0.9678168296813965\n", - "epoch: 28 step: 48, loss is 1.0319151878356934\n", - "epoch: 28 step: 49, loss is 1.0856274366378784\n", - "epoch: 28 step: 50, loss is 1.073692798614502\n", - "epoch: 28 step: 51, loss is 1.056383490562439\n", - "epoch: 28 step: 52, loss is 1.0075013637542725\n", - "epoch: 28 step: 53, loss is 1.0419431924819946\n", - "epoch: 28 step: 54, loss is 0.9617053270339966\n", - "epoch: 28 step: 55, loss is 1.0600147247314453\n", - "epoch: 28 step: 56, loss is 1.0252337455749512\n", - "epoch: 28 step: 57, loss is 0.9948336482048035\n", - "epoch: 28 step: 58, loss is 1.0613398551940918\n", - "epoch: 28 step: 59, loss is 1.0164568424224854\n", - "epoch: 28 step: 60, loss is 1.04026460647583\n", - "epoch: 28 step: 61, loss is 0.9468145370483398\n", - "epoch: 28 step: 62, loss is 1.0781135559082031\n", - "epoch: 28 step: 63, loss is 1.1466772556304932\n", - "epoch: 28 step: 64, loss is 0.9822742938995361\n", - "epoch: 28 step: 65, loss is 1.011114478111267\n", - "epoch: 28 step: 66, loss is 1.037644863128662\n", - "epoch: 28 step: 
67, loss is 1.114798665046692\n", - "epoch: 28 step: 68, loss is 1.0587235689163208\n", - "epoch: 28 step: 69, loss is 1.0594000816345215\n", - "epoch: 28 step: 70, loss is 1.028313159942627\n", - "epoch: 28 step: 71, loss is 0.9560521841049194\n", - "epoch: 28 step: 72, loss is 1.067679762840271\n", - "epoch: 28 step: 73, loss is 1.0169572830200195\n", - "epoch: 28 step: 74, loss is 1.074602723121643\n", - "epoch: 28 step: 75, loss is 1.0533905029296875\n", - "epoch: 28 step: 76, loss is 1.0343904495239258\n", - "epoch: 28 step: 77, loss is 1.0418795347213745\n", - "epoch: 28 step: 78, loss is 0.9515565037727356\n", - "epoch: 28 step: 79, loss is 0.9629123210906982\n", - "epoch: 28 step: 80, loss is 0.9854238033294678\n", - "epoch: 28 step: 81, loss is 0.9541388750076294\n", - "epoch: 28 step: 82, loss is 1.0120108127593994\n", - "epoch: 28 step: 83, loss is 0.9861869812011719\n", - "epoch: 28 step: 84, loss is 1.1027255058288574\n", - "epoch: 28 step: 85, loss is 1.0651925802230835\n", - "epoch: 28 step: 86, loss is 1.0357418060302734\n", - "epoch: 28 step: 87, loss is 1.034630298614502\n", - "epoch: 28 step: 88, loss is 0.9883968830108643\n", - "epoch: 28 step: 89, loss is 1.07198166847229\n", - "epoch: 28 step: 90, loss is 1.0301835536956787\n", - "epoch: 28 step: 91, loss is 1.0371626615524292\n", - "epoch: 28 step: 92, loss is 1.0124101638793945\n", - "epoch: 28 step: 93, loss is 1.028367280960083\n", - "epoch: 28 step: 94, loss is 1.0403815507888794\n", - "epoch: 28 step: 95, loss is 1.011099934577942\n", - "epoch: 28 step: 96, loss is 1.0704402923583984\n", - "epoch: 28 step: 97, loss is 1.0807019472122192\n", - "epoch: 28 step: 98, loss is 1.0018218755722046\n", - "epoch: 28 step: 99, loss is 1.0705413818359375\n", - "epoch: 28 step: 100, loss is 1.0765767097473145\n", - "epoch: 28 step: 101, loss is 0.9827833771705627\n", - "epoch: 28 step: 102, loss is 1.0617852210998535\n", - "epoch: 28 step: 103, loss is 1.0776385068893433\n", - "epoch: 28 step: 104, loss is 1.0300766229629517\n", - "epoch: 28 step: 105, loss is 0.9887511134147644\n", - "epoch: 28 step: 106, loss is 0.9741336703300476\n", - "epoch: 28 step: 107, loss is 1.0468299388885498\n", - "epoch: 28 step: 108, loss is 0.9974187016487122\n", - "epoch: 28 step: 109, loss is 1.0514752864837646\n", - "epoch: 28 step: 110, loss is 1.013406753540039\n", - "epoch: 28 step: 111, loss is 1.0026271343231201\n", - "epoch: 28 step: 112, loss is 1.06780207157135\n", - "epoch: 28 step: 113, loss is 1.040790319442749\n", - "epoch: 28 step: 114, loss is 1.023992657661438\n", - "epoch: 28 step: 115, loss is 1.0324180126190186\n", - "epoch: 28 step: 116, loss is 0.9735078811645508\n", - "epoch: 28 step: 117, loss is 1.080316424369812\n", - "epoch: 28 step: 118, loss is 1.0619475841522217\n", - "epoch: 28 step: 119, loss is 1.0990608930587769\n", - "epoch: 28 step: 120, loss is 1.0390394926071167\n", - "epoch: 28 step: 121, loss is 1.0258711576461792\n", - "epoch: 28 step: 122, loss is 1.0874381065368652\n", - "epoch: 28 step: 123, loss is 1.047053575515747\n", - "epoch: 28 step: 124, loss is 1.0236473083496094\n", - "epoch: 28 step: 125, loss is 1.050206184387207\n", - "epoch: 28 step: 126, loss is 1.1069732904434204\n", - "epoch: 28 step: 127, loss is 1.0576605796813965\n", - "epoch: 28 step: 128, loss is 1.0175132751464844\n", - "epoch: 28 step: 129, loss is 0.987375020980835\n", - "epoch: 28 step: 130, loss is 1.0320420265197754\n", - "epoch: 28 step: 131, loss is 0.9382754564285278\n", - "epoch: 28 step: 132, loss is 
1.026329755783081\n", - "epoch: 28 step: 133, loss is 1.0330300331115723\n", - "epoch: 28 step: 134, loss is 0.9924443364143372\n", - "epoch: 28 step: 135, loss is 1.1003553867340088\n", - "epoch: 28 step: 136, loss is 0.9750040769577026\n", - "epoch: 28 step: 137, loss is 1.0107910633087158\n", - "epoch: 28 step: 138, loss is 0.9466937780380249\n", - "epoch: 28 step: 139, loss is 1.050576090812683\n", - "epoch: 28 step: 140, loss is 1.0581032037734985\n", - "epoch: 28 step: 141, loss is 1.0400702953338623\n", - "epoch: 28 step: 142, loss is 1.0018815994262695\n", - "epoch: 28 step: 143, loss is 1.027859091758728\n", - "epoch: 28 step: 144, loss is 1.0933524370193481\n", - "epoch: 28 step: 145, loss is 0.937736988067627\n", - "epoch: 28 step: 146, loss is 1.0411815643310547\n", - "epoch: 28 step: 147, loss is 1.032793641090393\n", - "epoch: 28 step: 148, loss is 1.008480429649353\n", - "epoch: 28 step: 149, loss is 1.0352234840393066\n", - "epoch: 28 step: 150, loss is 1.0340938568115234\n", - "epoch: 28 step: 151, loss is 1.0069482326507568\n", - "epoch: 28 step: 152, loss is 1.1067968606948853\n", - "epoch: 28 step: 153, loss is 1.085538625717163\n", - "epoch: 28 step: 154, loss is 1.0101405382156372\n", - "epoch: 28 step: 155, loss is 1.076969861984253\n", - "epoch: 28 step: 156, loss is 1.0146191120147705\n", - "epoch: 28 step: 157, loss is 1.0597333908081055\n", - "epoch: 28 step: 158, loss is 0.9432191848754883\n", - "epoch: 28 step: 159, loss is 1.0018564462661743\n", - "epoch: 28 step: 160, loss is 0.9409213066101074\n", - "epoch: 28 step: 161, loss is 1.031328797340393\n", - "epoch: 28 step: 162, loss is 1.0136876106262207\n", - "epoch: 28 step: 163, loss is 0.9601335525512695\n", - "epoch: 28 step: 164, loss is 1.0553933382034302\n", - "epoch: 28 step: 165, loss is 1.0746946334838867\n", - "epoch: 28 step: 166, loss is 1.0633537769317627\n", - "epoch: 28 step: 167, loss is 1.036028504371643\n", - "epoch: 28 step: 168, loss is 1.080066204071045\n", - "epoch: 28 step: 169, loss is 0.9953891038894653\n", - "epoch: 28 step: 170, loss is 1.0530810356140137\n", - "epoch: 28 step: 171, loss is 1.0139000415802002\n", - "epoch: 28 step: 172, loss is 1.0373109579086304\n", - "epoch: 28 step: 173, loss is 0.9882632493972778\n", - "epoch: 28 step: 174, loss is 1.0238440036773682\n", - "epoch: 28 step: 175, loss is 1.0995649099349976\n", - "epoch: 28 step: 176, loss is 1.0362210273742676\n", - "epoch: 28 step: 177, loss is 1.0361888408660889\n", - "epoch: 28 step: 178, loss is 0.9864203333854675\n", - "epoch: 28 step: 179, loss is 1.0440664291381836\n", - "epoch: 28 step: 180, loss is 1.0287063121795654\n", - "epoch: 28 step: 181, loss is 1.000828742980957\n", - "epoch: 28 step: 182, loss is 1.011562705039978\n", - "epoch: 28 step: 183, loss is 1.0436192750930786\n", - "epoch: 28 step: 184, loss is 0.9986384510993958\n", - "epoch: 28 step: 185, loss is 1.014258861541748\n", - "epoch: 28 step: 186, loss is 1.016108512878418\n", - "epoch: 28 step: 187, loss is 1.049915075302124\n", - "epoch: 28 step: 188, loss is 1.0017075538635254\n", - "epoch: 28 step: 189, loss is 1.1211209297180176\n", - "epoch: 28 step: 190, loss is 1.0556925535202026\n", - "epoch: 28 step: 191, loss is 0.9730790853500366\n", - "epoch: 28 step: 192, loss is 1.1281460523605347\n", - "epoch: 28 step: 193, loss is 0.9577211141586304\n", - "epoch: 28 step: 194, loss is 1.0304639339447021\n", - "epoch: 28 step: 195, loss is 0.9898112416267395\n", - "Train epoch time: 103366.918 ms, per step time: 530.087 ms\n", - "epoch: 29 
step: 1, loss is 1.0468782186508179\n", - "epoch: 29 step: 2, loss is 1.1114580631256104\n", - "epoch: 29 step: 3, loss is 0.9974887371063232\n", - "epoch: 29 step: 4, loss is 1.0087288618087769\n", - "epoch: 29 step: 5, loss is 1.0508573055267334\n", - "epoch: 29 step: 6, loss is 1.0497238636016846\n", - "epoch: 29 step: 7, loss is 1.0038063526153564\n", - "epoch: 29 step: 8, loss is 1.0369718074798584\n", - "epoch: 29 step: 9, loss is 1.0110067129135132\n", - "epoch: 29 step: 10, loss is 0.9671221971511841\n", - "epoch: 29 step: 11, loss is 0.9799602031707764\n", - "epoch: 29 step: 12, loss is 1.0456115007400513\n", - "epoch: 29 step: 13, loss is 1.0065470933914185\n", - "epoch: 29 step: 14, loss is 1.0598971843719482\n", - "epoch: 29 step: 15, loss is 1.0293662548065186\n", - "epoch: 29 step: 16, loss is 1.006454348564148\n", - "epoch: 29 step: 17, loss is 1.0159281492233276\n", - "epoch: 29 step: 18, loss is 0.982795238494873\n", - "epoch: 29 step: 19, loss is 0.9600620865821838\n", - "epoch: 29 step: 20, loss is 0.9918646812438965\n", - "epoch: 29 step: 21, loss is 1.088813304901123\n", - "epoch: 29 step: 22, loss is 0.9976871013641357\n", - "epoch: 29 step: 23, loss is 1.0144503116607666\n", - "epoch: 29 step: 24, loss is 0.9566434025764465\n", - "epoch: 29 step: 25, loss is 1.1109318733215332\n", - "epoch: 29 step: 26, loss is 1.0574815273284912\n", - "epoch: 29 step: 27, loss is 0.9989632368087769\n", - "epoch: 29 step: 28, loss is 1.028912901878357\n", - "epoch: 29 step: 29, loss is 0.98712158203125\n", - "epoch: 29 step: 30, loss is 1.0440857410430908\n", - "epoch: 29 step: 31, loss is 1.0611263513565063\n", - "epoch: 29 step: 32, loss is 1.023278832435608\n", - "epoch: 29 step: 33, loss is 1.0410950183868408\n", - "epoch: 29 step: 34, loss is 1.0239065885543823\n", - "epoch: 29 step: 35, loss is 1.0066754817962646\n", - "epoch: 29 step: 36, loss is 0.97225022315979\n", - "epoch: 29 step: 37, loss is 1.0251888036727905\n", - "epoch: 29 step: 38, loss is 0.95610511302948\n", - "epoch: 29 step: 39, loss is 0.9716606140136719\n", - "epoch: 29 step: 40, loss is 1.0700957775115967\n", - "epoch: 29 step: 41, loss is 1.1364301443099976\n", - "epoch: 29 step: 42, loss is 0.9981712102890015\n", - "epoch: 29 step: 43, loss is 0.9684491157531738\n", - "epoch: 29 step: 44, loss is 1.0047703981399536\n", - "epoch: 29 step: 45, loss is 0.9766335487365723\n", - "epoch: 29 step: 46, loss is 0.932518482208252\n", - "epoch: 29 step: 47, loss is 1.0822333097457886\n", - "epoch: 29 step: 48, loss is 1.0352964401245117\n", - "epoch: 29 step: 49, loss is 0.9819753170013428\n", - "epoch: 29 step: 50, loss is 1.0501552820205688\n", - "epoch: 29 step: 51, loss is 1.0353647470474243\n", - "epoch: 29 step: 52, loss is 0.9929361343383789\n", - "epoch: 29 step: 53, loss is 1.0061821937561035\n", - "epoch: 29 step: 54, loss is 1.1126571893692017\n", - "epoch: 29 step: 55, loss is 1.0520261526107788\n", - "epoch: 29 step: 56, loss is 0.9950947761535645\n", - "epoch: 29 step: 57, loss is 0.9885305762290955\n", - "epoch: 29 step: 58, loss is 1.0212897062301636\n", - "epoch: 29 step: 59, loss is 1.034184455871582\n", - "epoch: 29 step: 60, loss is 1.111081838607788\n", - "epoch: 29 step: 61, loss is 1.0557329654693604\n", - "epoch: 29 step: 62, loss is 1.0087419748306274\n", - "epoch: 29 step: 63, loss is 1.0626472234725952\n", - "epoch: 29 step: 64, loss is 1.0498045682907104\n", - "epoch: 29 step: 65, loss is 1.0589845180511475\n", - "epoch: 29 step: 66, loss is 0.9372965097427368\n", - "epoch: 29 step: 67, 
loss is 0.9701070189476013\n", - "epoch: 29 step: 68, loss is 0.9552212953567505\n", - "epoch: 29 step: 69, loss is 1.0222499370574951\n", - "epoch: 29 step: 70, loss is 1.0246927738189697\n", - "epoch: 29 step: 71, loss is 1.0318398475646973\n", - "epoch: 29 step: 72, loss is 1.0820567607879639\n", - "epoch: 29 step: 73, loss is 1.0294506549835205\n", - "epoch: 29 step: 74, loss is 0.9955751895904541\n", - "epoch: 29 step: 75, loss is 1.0224685668945312\n", - "epoch: 29 step: 76, loss is 1.0156131982803345\n", - "epoch: 29 step: 77, loss is 1.0381712913513184\n", - "epoch: 29 step: 78, loss is 0.9809007048606873\n", - "epoch: 29 step: 79, loss is 0.9801890850067139\n", - "epoch: 29 step: 80, loss is 1.0247673988342285\n", - "epoch: 29 step: 81, loss is 1.0815829038619995\n", - "epoch: 29 step: 82, loss is 1.0255171060562134\n", - "epoch: 29 step: 83, loss is 1.03031587600708\n", - "epoch: 29 step: 84, loss is 1.0341095924377441\n", - "epoch: 29 step: 85, loss is 1.0145692825317383\n", - "epoch: 29 step: 86, loss is 1.036602258682251\n", - "epoch: 29 step: 87, loss is 1.0225090980529785\n", - "epoch: 29 step: 88, loss is 1.0101361274719238\n", - "epoch: 29 step: 89, loss is 0.9694252014160156\n", - "epoch: 29 step: 90, loss is 1.0625033378601074\n", - "epoch: 29 step: 91, loss is 1.0637246370315552\n", - "epoch: 29 step: 92, loss is 1.0185613632202148\n", - "epoch: 29 step: 93, loss is 1.0635919570922852\n", - "epoch: 29 step: 94, loss is 0.9301564693450928\n", - "epoch: 29 step: 95, loss is 0.9594549536705017\n", - "epoch: 29 step: 96, loss is 1.0666102170944214\n", - "epoch: 29 step: 97, loss is 0.9226194620132446\n", - "epoch: 29 step: 98, loss is 1.116838812828064\n", - "epoch: 29 step: 99, loss is 1.0751320123672485\n", - "epoch: 29 step: 100, loss is 1.0321955680847168\n", - "epoch: 29 step: 101, loss is 1.0163720846176147\n", - "epoch: 29 step: 102, loss is 1.0581685304641724\n", - "epoch: 29 step: 103, loss is 1.0705480575561523\n", - "epoch: 29 step: 104, loss is 1.0577776432037354\n", - "epoch: 29 step: 105, loss is 0.9995126128196716\n", - "epoch: 29 step: 106, loss is 1.0135539770126343\n", - "epoch: 29 step: 107, loss is 1.0571461915969849\n", - "epoch: 29 step: 108, loss is 0.9922029972076416\n", - "epoch: 29 step: 109, loss is 1.026610255241394\n", - "epoch: 29 step: 110, loss is 1.0091841220855713\n", - "epoch: 29 step: 111, loss is 1.0650935173034668\n", - "epoch: 29 step: 112, loss is 1.0530184507369995\n", - "epoch: 29 step: 113, loss is 1.13038969039917\n", - "epoch: 29 step: 114, loss is 1.147161602973938\n", - "epoch: 29 step: 115, loss is 1.0174176692962646\n", - "epoch: 29 step: 116, loss is 1.057715892791748\n", - "epoch: 29 step: 117, loss is 0.9989342093467712\n", - "epoch: 29 step: 118, loss is 1.023415446281433\n", - "epoch: 29 step: 119, loss is 0.9874597787857056\n", - "epoch: 29 step: 120, loss is 0.9887452125549316\n", - "epoch: 29 step: 121, loss is 0.9960627555847168\n", - "epoch: 29 step: 122, loss is 0.9821981191635132\n", - "epoch: 29 step: 123, loss is 1.0175116062164307\n", - "epoch: 29 step: 124, loss is 1.0257840156555176\n", - "epoch: 29 step: 125, loss is 1.043405294418335\n", - "epoch: 29 step: 126, loss is 1.0201870203018188\n", - "epoch: 29 step: 127, loss is 0.9205362796783447\n", - "epoch: 29 step: 128, loss is 0.9780855774879456\n", - "epoch: 29 step: 129, loss is 1.005314826965332\n", - "epoch: 29 step: 130, loss is 1.0223867893218994\n", - "epoch: 29 step: 131, loss is 0.9808804988861084\n", - "epoch: 29 step: 132, loss is 
0.9813784956932068\n", - "epoch: 29 step: 133, loss is 0.9696794748306274\n", - "epoch: 29 step: 134, loss is 0.9754354953765869\n", - "epoch: 29 step: 135, loss is 0.974109947681427\n", - "epoch: 29 step: 136, loss is 1.0608673095703125\n", - "epoch: 29 step: 137, loss is 0.9931883811950684\n", - "epoch: 29 step: 138, loss is 1.0107691287994385\n", - "epoch: 29 step: 139, loss is 0.9858195781707764\n", - "epoch: 29 step: 140, loss is 0.9933394193649292\n", - "epoch: 29 step: 141, loss is 1.0132172107696533\n", - "epoch: 29 step: 142, loss is 1.0576889514923096\n", - "epoch: 29 step: 143, loss is 1.0245912075042725\n", - "epoch: 29 step: 144, loss is 0.9679259061813354\n", - "epoch: 29 step: 145, loss is 1.016594409942627\n", - "epoch: 29 step: 146, loss is 1.0025317668914795\n", - "epoch: 29 step: 147, loss is 1.1219913959503174\n", - "epoch: 29 step: 148, loss is 1.0062578916549683\n", - "epoch: 29 step: 149, loss is 0.97377610206604\n", - "epoch: 29 step: 150, loss is 0.9701975584030151\n", - "epoch: 29 step: 151, loss is 1.0079452991485596\n", - "epoch: 29 step: 152, loss is 1.004092812538147\n", - "epoch: 29 step: 153, loss is 0.9536013007164001\n", - "epoch: 29 step: 154, loss is 1.0270766019821167\n", - "epoch: 29 step: 155, loss is 1.0396745204925537\n", - "epoch: 29 step: 156, loss is 1.0492552518844604\n", - "epoch: 29 step: 157, loss is 0.9613434076309204\n", - "epoch: 29 step: 158, loss is 0.9539674520492554\n", - "epoch: 29 step: 159, loss is 1.02286958694458\n", - "epoch: 29 step: 160, loss is 0.9810786843299866\n", - "epoch: 29 step: 161, loss is 1.0697016716003418\n", - "epoch: 29 step: 162, loss is 1.0303078889846802\n", - "epoch: 29 step: 163, loss is 0.9976806640625\n", - "epoch: 29 step: 164, loss is 0.9207897186279297\n", - "epoch: 29 step: 165, loss is 1.004880428314209\n", - "epoch: 29 step: 166, loss is 1.1212025880813599\n", - "epoch: 29 step: 167, loss is 0.9979523420333862\n", - "epoch: 29 step: 168, loss is 1.0089640617370605\n", - "epoch: 29 step: 169, loss is 1.0139131546020508\n", - "epoch: 29 step: 170, loss is 1.0088622570037842\n", - "epoch: 29 step: 171, loss is 1.0422072410583496\n", - "epoch: 29 step: 172, loss is 1.0290746688842773\n", - "epoch: 29 step: 173, loss is 1.001901626586914\n", - "epoch: 29 step: 174, loss is 1.006287693977356\n", - "epoch: 29 step: 175, loss is 0.9630842208862305\n", - "epoch: 29 step: 176, loss is 0.9964714646339417\n", - "epoch: 29 step: 177, loss is 0.97801673412323\n", - "epoch: 29 step: 178, loss is 1.006201982498169\n", - "epoch: 29 step: 179, loss is 1.0179517269134521\n", - "epoch: 29 step: 180, loss is 0.9447048902511597\n", - "epoch: 29 step: 181, loss is 1.0067222118377686\n", - "epoch: 29 step: 182, loss is 1.024523377418518\n", - "epoch: 29 step: 183, loss is 1.054722785949707\n", - "epoch: 29 step: 184, loss is 1.0778229236602783\n", - "epoch: 29 step: 185, loss is 1.0047988891601562\n", - "epoch: 29 step: 186, loss is 0.974716067314148\n", - "epoch: 29 step: 187, loss is 1.012957215309143\n", - "epoch: 29 step: 188, loss is 0.9775729179382324\n", - "epoch: 29 step: 189, loss is 0.9560549855232239\n", - "epoch: 29 step: 190, loss is 1.0296050310134888\n", - "epoch: 29 step: 191, loss is 0.9596776962280273\n", - "epoch: 29 step: 192, loss is 1.0023820400238037\n", - "epoch: 29 step: 193, loss is 1.0386229753494263\n", - "epoch: 29 step: 194, loss is 1.0490339994430542\n", - "epoch: 29 step: 195, loss is 1.0020995140075684\n", - "Train epoch time: 105074.946 ms, per step time: 538.846 ms\n", - "epoch: 30 step: 
1, loss is 1.0172988176345825\n", - "epoch: 30 step: 2, loss is 0.9978479146957397\n", - "epoch: 30 step: 3, loss is 0.9232980608940125\n", - "epoch: 30 step: 4, loss is 1.024475336074829\n", - "epoch: 30 step: 5, loss is 1.0558017492294312\n", - "epoch: 30 step: 6, loss is 1.0460145473480225\n", - "epoch: 30 step: 7, loss is 1.0038814544677734\n", - "epoch: 30 step: 8, loss is 0.9660797119140625\n", - "epoch: 30 step: 9, loss is 0.9223390817642212\n", - "epoch: 30 step: 10, loss is 1.0001401901245117\n", - "epoch: 30 step: 11, loss is 1.0292820930480957\n", - "epoch: 30 step: 12, loss is 0.9943158626556396\n", - "epoch: 30 step: 13, loss is 0.9613432288169861\n", - "epoch: 30 step: 14, loss is 1.0332069396972656\n", - "epoch: 30 step: 15, loss is 0.9474068880081177\n", - "epoch: 30 step: 16, loss is 1.000193476676941\n", - "epoch: 30 step: 17, loss is 0.9948122501373291\n", - "epoch: 30 step: 18, loss is 0.9685449600219727\n", - "epoch: 30 step: 19, loss is 0.9701645970344543\n", - "epoch: 30 step: 20, loss is 1.0843373537063599\n", - "epoch: 30 step: 21, loss is 1.0845476388931274\n", - "epoch: 30 step: 22, loss is 1.0493748188018799\n", - "epoch: 30 step: 23, loss is 0.9608330726623535\n", - "epoch: 30 step: 24, loss is 0.9862468242645264\n", - "epoch: 30 step: 25, loss is 0.9543552398681641\n", - "epoch: 30 step: 26, loss is 1.023703932762146\n", - "epoch: 30 step: 27, loss is 0.949988842010498\n", - "epoch: 30 step: 28, loss is 1.0160926580429077\n", - "epoch: 30 step: 29, loss is 0.989760160446167\n", - "epoch: 30 step: 30, loss is 0.9852887988090515\n", - "epoch: 30 step: 31, loss is 0.9792947173118591\n", - "epoch: 30 step: 32, loss is 1.006422758102417\n", - "epoch: 30 step: 33, loss is 0.9817548990249634\n", - "epoch: 30 step: 34, loss is 1.0385549068450928\n", - "epoch: 30 step: 35, loss is 1.0086264610290527\n", - "epoch: 30 step: 36, loss is 0.9951794147491455\n", - "epoch: 30 step: 37, loss is 1.0878492593765259\n", - "epoch: 30 step: 38, loss is 0.9851903915405273\n", - "epoch: 30 step: 39, loss is 1.0581046342849731\n", - "epoch: 30 step: 40, loss is 1.0027531385421753\n", - "epoch: 30 step: 41, loss is 0.989090085029602\n", - "epoch: 30 step: 42, loss is 1.0432558059692383\n", - "epoch: 30 step: 43, loss is 1.0111894607543945\n", - "epoch: 30 step: 44, loss is 1.0049055814743042\n", - "epoch: 30 step: 45, loss is 1.0024769306182861\n", - "epoch: 30 step: 46, loss is 0.9750916957855225\n", - "epoch: 30 step: 47, loss is 0.9988186955451965\n", - "epoch: 30 step: 48, loss is 1.0124378204345703\n", - "epoch: 30 step: 49, loss is 1.0005998611450195\n", - "epoch: 30 step: 50, loss is 0.9383172988891602\n", - "epoch: 30 step: 51, loss is 1.0558233261108398\n", - "epoch: 30 step: 52, loss is 0.9960495233535767\n", - "epoch: 30 step: 53, loss is 1.0060667991638184\n", - "epoch: 30 step: 54, loss is 1.120069980621338\n", - "epoch: 30 step: 55, loss is 0.9867266416549683\n", - "epoch: 30 step: 56, loss is 1.096801519393921\n", - "epoch: 30 step: 57, loss is 0.958141565322876\n", - "epoch: 30 step: 58, loss is 0.9496285319328308\n", - "epoch: 30 step: 59, loss is 0.9993870258331299\n", - "epoch: 30 step: 60, loss is 0.9899442195892334\n", - "epoch: 30 step: 61, loss is 0.9559552073478699\n", - "epoch: 30 step: 62, loss is 0.9925178289413452\n", - "epoch: 30 step: 63, loss is 1.0151617527008057\n", - "epoch: 30 step: 64, loss is 0.9675788283348083\n", - "epoch: 30 step: 65, loss is 0.995648980140686\n", - "epoch: 30 step: 66, loss is 1.0137782096862793\n", - "epoch: 30 step: 67, loss 
is 0.951514482498169\n", - "epoch: 30 step: 68, loss is 1.0109165906906128\n", - "epoch: 30 step: 69, loss is 0.9793285131454468\n", - "epoch: 30 step: 70, loss is 1.0325415134429932\n", - "epoch: 30 step: 71, loss is 1.1178629398345947\n", - "epoch: 30 step: 72, loss is 1.0602610111236572\n", - "epoch: 30 step: 73, loss is 1.019181728363037\n", - "epoch: 30 step: 74, loss is 0.9899566173553467\n", - "epoch: 30 step: 75, loss is 1.0226943492889404\n", - "epoch: 30 step: 76, loss is 1.005814552307129\n", - "epoch: 30 step: 77, loss is 0.9855093955993652\n", - "epoch: 30 step: 78, loss is 0.9942237138748169\n", - "epoch: 30 step: 79, loss is 1.0322569608688354\n", - "epoch: 30 step: 80, loss is 0.9907108545303345\n", - "epoch: 30 step: 81, loss is 0.9839298725128174\n", - "epoch: 30 step: 82, loss is 0.9938308000564575\n", - "epoch: 30 step: 83, loss is 0.9922456741333008\n", - "epoch: 30 step: 84, loss is 1.0004441738128662\n", - "epoch: 30 step: 85, loss is 0.958836019039154\n", - "epoch: 30 step: 86, loss is 1.018061876296997\n", - "epoch: 30 step: 87, loss is 1.022291898727417\n", - "epoch: 30 step: 88, loss is 0.9958430528640747\n", - "epoch: 30 step: 89, loss is 0.9830487966537476\n", - "epoch: 30 step: 90, loss is 0.9965581893920898\n", - "epoch: 30 step: 91, loss is 1.053378701210022\n", - "epoch: 30 step: 92, loss is 1.0521657466888428\n", - "epoch: 30 step: 93, loss is 1.1420154571533203\n", - "epoch: 30 step: 94, loss is 0.9486621022224426\n", - "epoch: 30 step: 95, loss is 1.0100445747375488\n", - "epoch: 30 step: 96, loss is 0.978706955909729\n", - "epoch: 30 step: 97, loss is 0.9665364623069763\n", - "epoch: 30 step: 98, loss is 0.9955102205276489\n", - "epoch: 30 step: 99, loss is 1.0059199333190918\n", - "epoch: 30 step: 100, loss is 0.906088650226593\n", - "epoch: 30 step: 101, loss is 1.1209841966629028\n", - "epoch: 30 step: 102, loss is 1.0130696296691895\n", - "epoch: 30 step: 103, loss is 0.9959266185760498\n", - "epoch: 30 step: 104, loss is 1.0754750967025757\n", - "epoch: 30 step: 105, loss is 0.9853157997131348\n", - "epoch: 30 step: 106, loss is 0.9844763278961182\n", - "epoch: 30 step: 107, loss is 1.031693935394287\n", - "epoch: 30 step: 108, loss is 1.0734367370605469\n", - "epoch: 30 step: 109, loss is 1.0848641395568848\n", - "epoch: 30 step: 110, loss is 1.0457128286361694\n", - "epoch: 30 step: 111, loss is 0.967503011226654\n", - "epoch: 30 step: 112, loss is 1.005852460861206\n", - "epoch: 30 step: 113, loss is 0.9747357368469238\n", - "epoch: 30 step: 114, loss is 0.9861372709274292\n", - "epoch: 30 step: 115, loss is 0.9562469720840454\n", - "epoch: 30 step: 116, loss is 0.9760577082633972\n", - "epoch: 30 step: 117, loss is 1.0128352642059326\n", - "epoch: 30 step: 118, loss is 1.079046607017517\n", - "epoch: 30 step: 119, loss is 1.0906426906585693\n", - "epoch: 30 step: 120, loss is 0.9780118465423584\n", - "epoch: 30 step: 121, loss is 1.0054688453674316\n", - "epoch: 30 step: 122, loss is 1.0219289064407349\n", - "epoch: 30 step: 123, loss is 1.065068244934082\n", - "epoch: 30 step: 124, loss is 1.0039622783660889\n", - "epoch: 30 step: 125, loss is 1.0498899221420288\n", - "epoch: 30 step: 126, loss is 1.0741740465164185\n", - "epoch: 30 step: 127, loss is 1.038702130317688\n", - "epoch: 30 step: 128, loss is 0.9786372184753418\n", - "epoch: 30 step: 129, loss is 0.9668365716934204\n", - "epoch: 30 step: 130, loss is 1.0485410690307617\n", - "epoch: 30 step: 131, loss is 0.9999215602874756\n", - "epoch: 30 step: 132, loss is 1.0152955055236816\n", 
- "epoch: 30 step: 133, loss is 1.0984938144683838\n", - "epoch: 30 step: 134, loss is 0.9925455451011658\n", - "epoch: 30 step: 135, loss is 0.9964651465415955\n", - "epoch: 30 step: 136, loss is 1.0162088871002197\n", - "epoch: 30 step: 137, loss is 0.9848556518554688\n", - "epoch: 30 step: 138, loss is 0.989091157913208\n", - "epoch: 30 step: 139, loss is 0.9646084308624268\n", - "epoch: 30 step: 140, loss is 0.9500323534011841\n", - "epoch: 30 step: 141, loss is 0.9863404631614685\n", - "epoch: 30 step: 142, loss is 1.040480375289917\n", - "epoch: 30 step: 143, loss is 0.9839382767677307\n", - "epoch: 30 step: 144, loss is 1.041243076324463\n", - "epoch: 30 step: 145, loss is 1.0417735576629639\n", - "epoch: 30 step: 146, loss is 0.981896162033081\n", - "epoch: 30 step: 147, loss is 1.0009806156158447\n", - "epoch: 30 step: 148, loss is 1.0087807178497314\n", - "epoch: 30 step: 149, loss is 1.0064959526062012\n", - "epoch: 30 step: 150, loss is 1.019162654876709\n", - "epoch: 30 step: 151, loss is 1.0246968269348145\n", - "epoch: 30 step: 152, loss is 0.9720010161399841\n", - "epoch: 30 step: 153, loss is 0.9670285582542419\n", - "epoch: 30 step: 154, loss is 0.9997091889381409\n", - "epoch: 30 step: 155, loss is 0.9936108589172363\n", - "epoch: 30 step: 156, loss is 1.0761771202087402\n", - "epoch: 30 step: 157, loss is 1.0168907642364502\n", - "epoch: 30 step: 158, loss is 0.989546537399292\n", - "epoch: 30 step: 159, loss is 0.980770468711853\n", - "epoch: 30 step: 160, loss is 1.0305657386779785\n", - "epoch: 30 step: 161, loss is 1.096156120300293\n", - "epoch: 30 step: 162, loss is 1.0098059177398682\n", - "epoch: 30 step: 163, loss is 1.0632041692733765\n", - "epoch: 30 step: 164, loss is 1.0073914527893066\n", - "epoch: 30 step: 165, loss is 0.9745742082595825\n", - "epoch: 30 step: 166, loss is 1.001832127571106\n", - "epoch: 30 step: 167, loss is 0.970117449760437\n", - "epoch: 30 step: 168, loss is 1.0338062047958374\n", - "epoch: 30 step: 169, loss is 0.9741246104240417\n", - "epoch: 30 step: 170, loss is 0.9390353560447693\n", - "epoch: 30 step: 171, loss is 0.9777118563652039\n", - "epoch: 30 step: 172, loss is 0.9743375778198242\n", - "epoch: 30 step: 173, loss is 0.9726730585098267\n", - "epoch: 30 step: 174, loss is 0.9522745609283447\n", - "epoch: 30 step: 175, loss is 1.091088056564331\n", - "epoch: 30 step: 176, loss is 1.0843918323516846\n", - "epoch: 30 step: 177, loss is 0.9116156101226807\n", - "epoch: 30 step: 178, loss is 0.9912928342819214\n", - "epoch: 30 step: 179, loss is 1.0160400867462158\n", - "epoch: 30 step: 180, loss is 0.9484577178955078\n", - "epoch: 30 step: 181, loss is 1.0433039665222168\n", - "epoch: 30 step: 182, loss is 0.9789596796035767\n", - "epoch: 30 step: 183, loss is 0.9463712573051453\n", - "epoch: 30 step: 184, loss is 1.0079463720321655\n", - "epoch: 30 step: 185, loss is 1.0893367528915405\n", - "epoch: 30 step: 186, loss is 1.0241955518722534\n", - "epoch: 30 step: 187, loss is 1.0297592878341675\n", - "epoch: 30 step: 188, loss is 0.9507424831390381\n", - "epoch: 30 step: 189, loss is 1.0168551206588745\n", - "epoch: 30 step: 190, loss is 1.0782127380371094\n", - "epoch: 30 step: 191, loss is 0.9931322336196899\n", - "epoch: 30 step: 192, loss is 0.9793469905853271\n", - "epoch: 30 step: 193, loss is 0.9780886173248291\n", - "epoch: 30 step: 194, loss is 0.9820125102996826\n", - "epoch: 30 step: 195, loss is 0.9826734066009521\n", - "Train epoch time: 103353.865 ms, per step time: 530.020 ms\n", - "epoch: 31 step: 1, loss is 
0.9838765859603882\n", - "epoch: 31 step: 2, loss is 0.9676800966262817\n", - "epoch: 31 step: 3, loss is 1.0030035972595215\n", - "epoch: 31 step: 4, loss is 1.0132195949554443\n", - "epoch: 31 step: 5, loss is 0.9448614716529846\n", - "epoch: 31 step: 6, loss is 0.9858396053314209\n", - "epoch: 31 step: 7, loss is 1.032362699508667\n", - "epoch: 31 step: 8, loss is 0.9368622303009033\n", - "epoch: 31 step: 9, loss is 1.0144985914230347\n", - "epoch: 31 step: 10, loss is 0.9951866865158081\n", - "epoch: 31 step: 11, loss is 0.9697293639183044\n", - "epoch: 31 step: 12, loss is 0.9011222124099731\n", - "epoch: 31 step: 13, loss is 0.9866967797279358\n", - "epoch: 31 step: 14, loss is 1.0454449653625488\n", - "epoch: 31 step: 15, loss is 0.9706379175186157\n", - "epoch: 31 step: 16, loss is 0.971888542175293\n", - "epoch: 31 step: 17, loss is 1.0615586042404175\n", - "epoch: 31 step: 18, loss is 0.9128535389900208\n", - "epoch: 31 step: 19, loss is 0.9617781639099121\n", - "epoch: 31 step: 20, loss is 0.9871184229850769\n", - "epoch: 31 step: 21, loss is 0.975204348564148\n", - "epoch: 31 step: 22, loss is 1.041196584701538\n", - "epoch: 31 step: 23, loss is 0.9909880757331848\n", - "epoch: 31 step: 24, loss is 1.058127760887146\n", - "epoch: 31 step: 25, loss is 0.9973526000976562\n", - "epoch: 31 step: 26, loss is 0.9670200347900391\n", - "epoch: 31 step: 27, loss is 0.9696930646896362\n", - "epoch: 31 step: 28, loss is 0.985702633857727\n", - "epoch: 31 step: 29, loss is 0.9834610223770142\n", - "epoch: 31 step: 30, loss is 0.9319219589233398\n", - "epoch: 31 step: 31, loss is 1.0667709112167358\n", - "epoch: 31 step: 32, loss is 0.9856438636779785\n", - "epoch: 31 step: 33, loss is 0.9819819927215576\n", - "epoch: 31 step: 34, loss is 0.9654231667518616\n", - "epoch: 31 step: 35, loss is 1.0442924499511719\n", - "epoch: 31 step: 36, loss is 0.9580166935920715\n", - "epoch: 31 step: 37, loss is 0.9236143827438354\n", - "epoch: 31 step: 38, loss is 0.9605481624603271\n", - "epoch: 31 step: 39, loss is 0.9396443367004395\n", - "epoch: 31 step: 40, loss is 0.9862281084060669\n", - "epoch: 31 step: 41, loss is 1.0652704238891602\n", - "epoch: 31 step: 42, loss is 0.9577304124832153\n", - "epoch: 31 step: 43, loss is 0.9629460573196411\n", - "epoch: 31 step: 44, loss is 1.001137137413025\n", - "epoch: 31 step: 45, loss is 0.9882875680923462\n", - "epoch: 31 step: 46, loss is 1.020888328552246\n", - "epoch: 31 step: 47, loss is 1.0081356763839722\n", - "epoch: 31 step: 48, loss is 1.0572824478149414\n", - "epoch: 31 step: 49, loss is 1.0539854764938354\n", - "epoch: 31 step: 50, loss is 1.033347487449646\n", - "epoch: 31 step: 51, loss is 0.9682328701019287\n", - "epoch: 31 step: 52, loss is 1.0219882726669312\n", - "epoch: 31 step: 53, loss is 0.9995028376579285\n", - "epoch: 31 step: 54, loss is 1.0134258270263672\n", - "epoch: 31 step: 55, loss is 0.9565858244895935\n", - "epoch: 31 step: 56, loss is 0.9971498250961304\n", - "epoch: 31 step: 57, loss is 0.99558424949646\n", - "epoch: 31 step: 58, loss is 0.9927089214324951\n", - "epoch: 31 step: 59, loss is 1.0176172256469727\n", - "epoch: 31 step: 60, loss is 0.998472273349762\n", - "epoch: 31 step: 61, loss is 0.9748069643974304\n", - "epoch: 31 step: 62, loss is 1.0151033401489258\n", - "epoch: 31 step: 63, loss is 1.0012116432189941\n", - "epoch: 31 step: 64, loss is 1.0647461414337158\n", - "epoch: 31 step: 65, loss is 0.9739506244659424\n", - "epoch: 31 step: 66, loss is 0.9405533075332642\n", - "epoch: 31 step: 67, loss is 
0.9269188642501831\n",
- "epoch: 31 step: 68, loss is 0.984155535697937\n",
- ... (epoch 31, steps 69-194 elided; loss fluctuates between roughly 0.90 and 1.09) ...
- "epoch: 31 step: 195, loss is 0.9720735549926758\n",
- "Train epoch time: 118272.225 ms, per step time: 606.524 ms\n",
- "epoch: 32 step: 1, loss is 0.9967446327209473\n",
- ... (epoch 32, steps 2-194 elided) ...
- "epoch: 32 step: 195, loss is 1.0259497165679932\n",
- "Train epoch time: 113439.024 ms, per step time: 581.739 ms\n",
- "epoch: 33 step: 1, loss is 0.972176730632782\n",
- ... (epoch 33, steps 2-194 elided) ...
- "epoch: 33 step: 195, loss is 0.9999237656593323\n",
- "Train epoch time: 105732.515 ms, per step time: 542.218 ms\n",
- "epoch: 34 step: 1, loss is 0.9014315009117126\n",
- ... (epoch 34, steps 2-194 elided) ...
- "epoch: 34 step: 195, loss is 0.8824705481529236\n",
- "Train epoch time: 103377.454 ms, per step time: 530.141 ms\n",
- "epoch: 35 step: 1, loss is 0.9321742057800293\n",
- ... (epoch 35, steps 2-194 elided) ...
- "epoch: 35 step: 195, loss is 0.913625955581665\n",
- "Train epoch time: 101863.854 ms, per step time: 522.379 ms\n",
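Note that the printed per step time is simply the epoch wall time divided by the fixed 195 steps per epoch: for epoch 33, 105732.515 ms / 195 ≈ 542.218 ms, matching the summary line. The drop in epoch time over this stretch (from about 118 s at epoch 31 to about 102 s by epoch 35) therefore reflects falling per-step latency alone, while the loss level declines only gradually, from roughly 1.0 in epoch 31 to around 0.9 by epoch 35, as is typical this late in training.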
- "epoch: 36 step: 1, loss is 0.9511114358901978\n",
- ... (epoch 36, steps 2-194 elided) ...
- "epoch: 36 step: 195, loss is 0.8563429117202759\n",
- "Train epoch time: 108856.610 ms, per step time: 558.239 ms\n",
- "epoch: 37 step: 1, loss is 0.9095693826675415\n",
- ... (epoch 37, steps 2-131 elided) ...
- "epoch: 37 step: 132, loss is
0.9334031343460083\n", - "epoch: 37 step: 133, loss is 0.9084657430648804\n", - "epoch: 37 step: 134, loss is 0.9120792746543884\n", - "epoch: 37 step: 135, loss is 0.9041306972503662\n", - "epoch: 37 step: 136, loss is 0.9069194793701172\n", - "epoch: 37 step: 137, loss is 0.8848313093185425\n", - "epoch: 37 step: 138, loss is 0.9334437847137451\n", - "epoch: 37 step: 139, loss is 0.9345941543579102\n", - "epoch: 37 step: 140, loss is 0.8847604990005493\n", - "epoch: 37 step: 141, loss is 0.9489641785621643\n", - "epoch: 37 step: 142, loss is 0.9251959323883057\n", - "epoch: 37 step: 143, loss is 0.9147162437438965\n", - "epoch: 37 step: 144, loss is 0.8704555034637451\n", - "epoch: 37 step: 145, loss is 0.9113723039627075\n", - "epoch: 37 step: 146, loss is 0.9305154085159302\n", - "epoch: 37 step: 147, loss is 0.9621871709823608\n", - "epoch: 37 step: 148, loss is 0.9747596979141235\n", - "epoch: 37 step: 149, loss is 0.9222574234008789\n", - "epoch: 37 step: 150, loss is 0.9117982387542725\n", - "epoch: 37 step: 151, loss is 0.932183027267456\n", - "epoch: 37 step: 152, loss is 0.9411630630493164\n", - "epoch: 37 step: 153, loss is 0.8826325535774231\n", - "epoch: 37 step: 154, loss is 0.8335169553756714\n", - "epoch: 37 step: 155, loss is 0.879940390586853\n", - "epoch: 37 step: 156, loss is 0.9017760157585144\n", - "epoch: 37 step: 157, loss is 0.9525506496429443\n", - "epoch: 37 step: 158, loss is 0.9248814582824707\n", - "epoch: 37 step: 159, loss is 0.9696816205978394\n", - "epoch: 37 step: 160, loss is 0.9233083128929138\n", - "epoch: 37 step: 161, loss is 0.8645829558372498\n", - "epoch: 37 step: 162, loss is 0.8845789432525635\n", - "epoch: 37 step: 163, loss is 0.9117435216903687\n", - "epoch: 37 step: 164, loss is 0.9252498149871826\n", - "epoch: 37 step: 165, loss is 0.9188104867935181\n", - "epoch: 37 step: 166, loss is 0.945136547088623\n", - "epoch: 37 step: 167, loss is 0.9285410642623901\n", - "epoch: 37 step: 168, loss is 0.9242152571678162\n", - "epoch: 37 step: 169, loss is 0.8969123363494873\n", - "epoch: 37 step: 170, loss is 0.8918085098266602\n", - "epoch: 37 step: 171, loss is 0.9288882613182068\n", - "epoch: 37 step: 172, loss is 0.856635332107544\n", - "epoch: 37 step: 173, loss is 1.0146739482879639\n", - "epoch: 37 step: 174, loss is 0.9491211175918579\n", - "epoch: 37 step: 175, loss is 0.9513903260231018\n", - "epoch: 37 step: 176, loss is 0.888302743434906\n", - "epoch: 37 step: 177, loss is 0.9066162109375\n", - "epoch: 37 step: 178, loss is 0.9833958148956299\n", - "epoch: 37 step: 179, loss is 0.8735227584838867\n", - "epoch: 37 step: 180, loss is 0.8965466022491455\n", - "epoch: 37 step: 181, loss is 0.95020991563797\n", - "epoch: 37 step: 182, loss is 0.9149634838104248\n", - "epoch: 37 step: 183, loss is 0.9083482027053833\n", - "epoch: 37 step: 184, loss is 0.9381321668624878\n", - "epoch: 37 step: 185, loss is 0.8965525031089783\n", - "epoch: 37 step: 186, loss is 0.939672589302063\n", - "epoch: 37 step: 187, loss is 0.9746996164321899\n", - "epoch: 37 step: 188, loss is 0.8830556273460388\n", - "epoch: 37 step: 189, loss is 0.8871192932128906\n", - "epoch: 37 step: 190, loss is 0.9128410816192627\n", - "epoch: 37 step: 191, loss is 0.9288946390151978\n", - "epoch: 37 step: 192, loss is 0.9417617917060852\n", - "epoch: 37 step: 193, loss is 0.9260659217834473\n", - "epoch: 37 step: 194, loss is 0.8889296054840088\n", - "epoch: 37 step: 195, loss is 0.9149488210678101\n", - "Train epoch time: 107045.336 ms, per step time: 548.950 ms\n", - "epoch: 
38 step: 1, loss is 0.947847843170166\n", - "epoch: 38 step: 2, loss is 0.9036130309104919\n", - "epoch: 38 step: 3, loss is 0.9203881025314331\n", - "epoch: 38 step: 4, loss is 0.9586063623428345\n", - "epoch: 38 step: 5, loss is 0.8763056993484497\n", - "epoch: 38 step: 6, loss is 0.9351648092269897\n", - "epoch: 38 step: 7, loss is 0.8595874309539795\n", - "epoch: 38 step: 8, loss is 0.8193209171295166\n", - "epoch: 38 step: 9, loss is 0.9091926217079163\n", - "epoch: 38 step: 10, loss is 0.9313424825668335\n", - "epoch: 38 step: 11, loss is 0.834667980670929\n", - "epoch: 38 step: 12, loss is 0.9018085598945618\n", - "epoch: 38 step: 13, loss is 0.9431716203689575\n", - "epoch: 38 step: 14, loss is 0.8693892955780029\n", - "epoch: 38 step: 15, loss is 0.8874005079269409\n", - "epoch: 38 step: 16, loss is 0.9221977591514587\n", - "epoch: 38 step: 17, loss is 0.8751132488250732\n", - "epoch: 38 step: 18, loss is 0.9085422158241272\n", - "epoch: 38 step: 19, loss is 0.9187201261520386\n", - "epoch: 38 step: 20, loss is 0.8881237506866455\n", - "epoch: 38 step: 21, loss is 0.9519144296646118\n", - "epoch: 38 step: 22, loss is 0.8437051177024841\n", - "epoch: 38 step: 23, loss is 0.8905713558197021\n", - "epoch: 38 step: 24, loss is 0.9040060639381409\n", - "epoch: 38 step: 25, loss is 0.867322564125061\n", - "epoch: 38 step: 26, loss is 0.8954888582229614\n", - "epoch: 38 step: 27, loss is 0.8696068525314331\n", - "epoch: 38 step: 28, loss is 0.9193589687347412\n", - "epoch: 38 step: 29, loss is 0.941120982170105\n", - "epoch: 38 step: 30, loss is 0.9101242423057556\n", - "epoch: 38 step: 31, loss is 0.8599692583084106\n", - "epoch: 38 step: 32, loss is 0.9321194291114807\n", - "epoch: 38 step: 33, loss is 0.8576308488845825\n", - "epoch: 38 step: 34, loss is 0.8774663209915161\n", - "epoch: 38 step: 35, loss is 0.9292721152305603\n", - "epoch: 38 step: 36, loss is 0.8979128003120422\n", - "epoch: 38 step: 37, loss is 0.9194566011428833\n", - "epoch: 38 step: 38, loss is 0.9248204827308655\n", - "epoch: 38 step: 39, loss is 0.9248582124710083\n", - "epoch: 38 step: 40, loss is 0.8664373159408569\n", - "epoch: 38 step: 41, loss is 0.9020886421203613\n", - "epoch: 38 step: 42, loss is 0.860755205154419\n", - "epoch: 38 step: 43, loss is 0.9270052313804626\n", - "epoch: 38 step: 44, loss is 0.8871346116065979\n", - "epoch: 38 step: 45, loss is 0.8886119723320007\n", - "epoch: 38 step: 46, loss is 0.8505973815917969\n", - "epoch: 38 step: 47, loss is 0.8892645835876465\n", - "epoch: 38 step: 48, loss is 0.86323082447052\n", - "epoch: 38 step: 49, loss is 0.9010361433029175\n", - "epoch: 38 step: 50, loss is 0.843924880027771\n", - "epoch: 38 step: 51, loss is 0.8683522343635559\n", - "epoch: 38 step: 52, loss is 0.8686975240707397\n", - "epoch: 38 step: 53, loss is 0.9005516767501831\n", - "epoch: 38 step: 54, loss is 0.9049634337425232\n", - "epoch: 38 step: 55, loss is 0.9569082856178284\n", - "epoch: 38 step: 56, loss is 0.8220974206924438\n", - "epoch: 38 step: 57, loss is 0.9223048686981201\n", - "epoch: 38 step: 58, loss is 0.9032471179962158\n", - "epoch: 38 step: 59, loss is 0.8873544931411743\n", - "epoch: 38 step: 60, loss is 0.8571626543998718\n", - "epoch: 38 step: 61, loss is 0.8502246737480164\n", - "epoch: 38 step: 62, loss is 0.8953334093093872\n", - "epoch: 38 step: 63, loss is 0.914796769618988\n", - "epoch: 38 step: 64, loss is 0.870197594165802\n", - "epoch: 38 step: 65, loss is 0.9589412212371826\n", - "epoch: 38 step: 66, loss is 0.8871922492980957\n", - "epoch: 38 step: 
67, loss is 0.9411270618438721\n", - "epoch: 38 step: 68, loss is 0.94203782081604\n", - "epoch: 38 step: 69, loss is 0.9277908802032471\n", - "epoch: 38 step: 70, loss is 0.9426909685134888\n", - "epoch: 38 step: 71, loss is 0.8083962202072144\n", - "epoch: 38 step: 72, loss is 0.9525151252746582\n", - "epoch: 38 step: 73, loss is 0.8909167051315308\n", - "epoch: 38 step: 74, loss is 0.9480742812156677\n", - "epoch: 38 step: 75, loss is 0.949823260307312\n", - "epoch: 38 step: 76, loss is 0.8933626413345337\n", - "epoch: 38 step: 77, loss is 0.9051081538200378\n", - "epoch: 38 step: 78, loss is 0.9268642663955688\n", - "epoch: 38 step: 79, loss is 0.8704352378845215\n", - "epoch: 38 step: 80, loss is 0.870558500289917\n", - "epoch: 38 step: 81, loss is 0.9185476899147034\n", - "epoch: 38 step: 82, loss is 0.9534422755241394\n", - "epoch: 38 step: 83, loss is 0.9293580055236816\n", - "epoch: 38 step: 84, loss is 0.8900469541549683\n", - "epoch: 38 step: 85, loss is 0.8934859037399292\n", - "epoch: 38 step: 86, loss is 0.8404178619384766\n", - "epoch: 38 step: 87, loss is 0.8620786666870117\n", - "epoch: 38 step: 88, loss is 0.9202302098274231\n", - "epoch: 38 step: 89, loss is 0.9322894811630249\n", - "epoch: 38 step: 90, loss is 0.9785972833633423\n", - "epoch: 38 step: 91, loss is 0.9096835255622864\n", - "epoch: 38 step: 92, loss is 0.9075576066970825\n", - "epoch: 38 step: 93, loss is 0.8538344502449036\n", - "epoch: 38 step: 94, loss is 0.9607840776443481\n", - "epoch: 38 step: 95, loss is 0.8489265441894531\n", - "epoch: 38 step: 96, loss is 0.8865064978599548\n", - "epoch: 38 step: 97, loss is 0.8839545249938965\n", - "epoch: 38 step: 98, loss is 0.8950871229171753\n", - "epoch: 38 step: 99, loss is 0.8523801565170288\n", - "epoch: 38 step: 100, loss is 0.91000896692276\n", - "epoch: 38 step: 101, loss is 0.8992164134979248\n", - "epoch: 38 step: 102, loss is 0.8698062896728516\n", - "epoch: 38 step: 103, loss is 0.8935478925704956\n", - "epoch: 38 step: 104, loss is 0.8873968124389648\n", - "epoch: 38 step: 105, loss is 0.8724759221076965\n", - "epoch: 38 step: 106, loss is 0.8088217973709106\n", - "epoch: 38 step: 107, loss is 0.8925939798355103\n", - "epoch: 38 step: 108, loss is 0.8947848677635193\n", - "epoch: 38 step: 109, loss is 0.8637481331825256\n", - "epoch: 38 step: 110, loss is 0.9278912544250488\n", - "epoch: 38 step: 111, loss is 0.8799326419830322\n", - "epoch: 38 step: 112, loss is 0.8360252380371094\n", - "epoch: 38 step: 113, loss is 0.9224178791046143\n", - "epoch: 38 step: 114, loss is 0.869588315486908\n", - "epoch: 38 step: 115, loss is 0.9459971189498901\n", - "epoch: 38 step: 116, loss is 0.8672783374786377\n", - "epoch: 38 step: 117, loss is 0.9090829491615295\n", - "epoch: 38 step: 118, loss is 0.912348747253418\n", - "epoch: 38 step: 119, loss is 0.9361091256141663\n", - "epoch: 38 step: 120, loss is 0.9081732034683228\n", - "epoch: 38 step: 121, loss is 0.8662216663360596\n", - "epoch: 38 step: 122, loss is 0.9041109085083008\n", - "epoch: 38 step: 123, loss is 0.8519059419631958\n", - "epoch: 38 step: 124, loss is 0.9198229312896729\n", - "epoch: 38 step: 125, loss is 0.8815104961395264\n", - "epoch: 38 step: 126, loss is 0.9001454710960388\n", - "epoch: 38 step: 127, loss is 0.846196174621582\n", - "epoch: 38 step: 128, loss is 0.9075549244880676\n", - "epoch: 38 step: 129, loss is 0.8884953260421753\n", - "epoch: 38 step: 130, loss is 0.927135705947876\n", - "epoch: 38 step: 131, loss is 0.903639554977417\n", - "epoch: 38 step: 132, loss is 
0.845154881477356\n", - "epoch: 38 step: 133, loss is 0.9414812922477722\n", - "epoch: 38 step: 134, loss is 0.9627749919891357\n", - "epoch: 38 step: 135, loss is 0.955998957157135\n", - "epoch: 38 step: 136, loss is 0.9396650791168213\n", - "epoch: 38 step: 137, loss is 0.8558158874511719\n", - "epoch: 38 step: 138, loss is 0.8120497465133667\n", - "epoch: 38 step: 139, loss is 0.8849430084228516\n", - "epoch: 38 step: 140, loss is 0.8937509059906006\n", - "epoch: 38 step: 141, loss is 0.9384996891021729\n", - "epoch: 38 step: 142, loss is 0.9275363087654114\n", - "epoch: 38 step: 143, loss is 0.8800232410430908\n", - "epoch: 38 step: 144, loss is 0.8448448181152344\n", - "epoch: 38 step: 145, loss is 0.9709151387214661\n", - "epoch: 38 step: 146, loss is 0.9415827393531799\n", - "epoch: 38 step: 147, loss is 0.9436516761779785\n", - "epoch: 38 step: 148, loss is 0.8596976399421692\n", - "epoch: 38 step: 149, loss is 0.8585245609283447\n", - "epoch: 38 step: 150, loss is 0.8733130097389221\n", - "epoch: 38 step: 151, loss is 0.8337608575820923\n", - "epoch: 38 step: 152, loss is 0.8533034324645996\n", - "epoch: 38 step: 153, loss is 0.880772590637207\n", - "epoch: 38 step: 154, loss is 0.8903640508651733\n", - "epoch: 38 step: 155, loss is 0.9234060049057007\n", - "epoch: 38 step: 156, loss is 0.9079392552375793\n", - "epoch: 38 step: 157, loss is 0.8524980545043945\n", - "epoch: 38 step: 158, loss is 0.8755015134811401\n", - "epoch: 38 step: 159, loss is 0.9232819080352783\n", - "epoch: 38 step: 160, loss is 0.9043335318565369\n", - "epoch: 38 step: 161, loss is 0.8350585699081421\n", - "epoch: 38 step: 162, loss is 0.9647220373153687\n", - "epoch: 38 step: 163, loss is 0.9131554365158081\n", - "epoch: 38 step: 164, loss is 0.8664733171463013\n", - "epoch: 38 step: 165, loss is 0.9109227061271667\n", - "epoch: 38 step: 166, loss is 0.876860499382019\n", - "epoch: 38 step: 167, loss is 0.9606586694717407\n", - "epoch: 38 step: 168, loss is 0.900281548500061\n", - "epoch: 38 step: 169, loss is 0.869691014289856\n", - "epoch: 38 step: 170, loss is 0.8674145936965942\n", - "epoch: 38 step: 171, loss is 0.990574300289154\n", - "epoch: 38 step: 172, loss is 0.8615909814834595\n", - "epoch: 38 step: 173, loss is 0.8992669582366943\n", - "epoch: 38 step: 174, loss is 0.9122153520584106\n", - "epoch: 38 step: 175, loss is 0.9545267820358276\n", - "epoch: 38 step: 176, loss is 0.8446837663650513\n", - "epoch: 38 step: 177, loss is 0.8934757709503174\n", - "epoch: 38 step: 178, loss is 0.8827760815620422\n", - "epoch: 38 step: 179, loss is 0.9718413352966309\n", - "epoch: 38 step: 180, loss is 0.8779993653297424\n", - "epoch: 38 step: 181, loss is 0.8366880416870117\n", - "epoch: 38 step: 182, loss is 0.8814271688461304\n", - "epoch: 38 step: 183, loss is 0.9178680181503296\n", - "epoch: 38 step: 184, loss is 0.9376236796379089\n", - "epoch: 38 step: 185, loss is 0.9903892278671265\n", - "epoch: 38 step: 186, loss is 0.9410778880119324\n", - "epoch: 38 step: 187, loss is 0.8669420480728149\n", - "epoch: 38 step: 188, loss is 0.9013009071350098\n", - "epoch: 38 step: 189, loss is 0.9106388688087463\n", - "epoch: 38 step: 190, loss is 0.9394299983978271\n", - "epoch: 38 step: 191, loss is 0.8580970764160156\n", - "epoch: 38 step: 192, loss is 0.9121018648147583\n", - "epoch: 38 step: 193, loss is 0.9768623113632202\n", - "epoch: 38 step: 194, loss is 0.8774149417877197\n", - "epoch: 38 step: 195, loss is 0.9149966239929199\n", - "Train epoch time: 103473.682 ms, per step time: 530.634 ms\n", - 
"epoch: 39 step: 1, loss is 0.8645210266113281\n", - "epoch: 39 step: 2, loss is 0.8713115453720093\n", - "epoch: 39 step: 3, loss is 0.9012945890426636\n", - "epoch: 39 step: 4, loss is 0.9290074706077576\n", - "epoch: 39 step: 5, loss is 0.8034458160400391\n", - "epoch: 39 step: 6, loss is 0.8567114472389221\n", - "epoch: 39 step: 7, loss is 0.8933814764022827\n", - "epoch: 39 step: 8, loss is 0.8883970379829407\n", - "epoch: 39 step: 9, loss is 0.8710887432098389\n", - "epoch: 39 step: 10, loss is 0.8551831841468811\n", - "epoch: 39 step: 11, loss is 0.8131341934204102\n", - "epoch: 39 step: 12, loss is 0.8605275750160217\n", - "epoch: 39 step: 13, loss is 0.876175045967102\n", - "epoch: 39 step: 14, loss is 0.8787935972213745\n", - "epoch: 39 step: 15, loss is 0.8959838151931763\n", - "epoch: 39 step: 16, loss is 0.9015324115753174\n", - "epoch: 39 step: 17, loss is 0.8644980192184448\n", - "epoch: 39 step: 18, loss is 0.8720925450325012\n", - "epoch: 39 step: 19, loss is 0.9267992973327637\n", - "epoch: 39 step: 20, loss is 0.8648815155029297\n", - "epoch: 39 step: 21, loss is 0.8689542412757874\n", - "epoch: 39 step: 22, loss is 0.8740376234054565\n", - "epoch: 39 step: 23, loss is 0.8852134346961975\n", - "epoch: 39 step: 24, loss is 0.8897039890289307\n", - "epoch: 39 step: 25, loss is 0.9486417770385742\n", - "epoch: 39 step: 26, loss is 0.877617359161377\n", - "epoch: 39 step: 27, loss is 0.883049726486206\n", - "epoch: 39 step: 28, loss is 0.8394241333007812\n", - "epoch: 39 step: 29, loss is 0.8442683219909668\n", - "epoch: 39 step: 30, loss is 0.9370942711830139\n", - "epoch: 39 step: 31, loss is 0.8539810180664062\n", - "epoch: 39 step: 32, loss is 0.9304419755935669\n", - "epoch: 39 step: 33, loss is 0.8801469206809998\n", - "epoch: 39 step: 34, loss is 0.8570430278778076\n", - "epoch: 39 step: 35, loss is 0.868891716003418\n", - "epoch: 39 step: 36, loss is 0.9339327812194824\n", - "epoch: 39 step: 37, loss is 0.850347101688385\n", - "epoch: 39 step: 38, loss is 0.9007909297943115\n", - "epoch: 39 step: 39, loss is 0.9255921244621277\n", - "epoch: 39 step: 40, loss is 0.8538328409194946\n", - "epoch: 39 step: 41, loss is 0.8878750801086426\n", - "epoch: 39 step: 42, loss is 0.817216157913208\n", - "epoch: 39 step: 43, loss is 0.8102796077728271\n", - "epoch: 39 step: 44, loss is 0.8087482452392578\n", - "epoch: 39 step: 45, loss is 0.9019378423690796\n", - "epoch: 39 step: 46, loss is 0.9162269830703735\n", - "epoch: 39 step: 47, loss is 0.8876538276672363\n", - "epoch: 39 step: 48, loss is 0.9473833441734314\n", - "epoch: 39 step: 49, loss is 0.8457210659980774\n", - "epoch: 39 step: 50, loss is 0.8684884309768677\n", - "epoch: 39 step: 51, loss is 0.8881251811981201\n", - "epoch: 39 step: 52, loss is 0.8807567358016968\n", - "epoch: 39 step: 53, loss is 0.8468639850616455\n", - "epoch: 39 step: 54, loss is 0.9167661070823669\n", - "epoch: 39 step: 55, loss is 0.905544102191925\n", - "epoch: 39 step: 56, loss is 0.8458351492881775\n", - "epoch: 39 step: 57, loss is 0.8897790908813477\n", - "epoch: 39 step: 58, loss is 0.8934326171875\n", - "epoch: 39 step: 59, loss is 0.8582804203033447\n", - "epoch: 39 step: 60, loss is 0.8497823476791382\n", - "epoch: 39 step: 61, loss is 0.8500289916992188\n", - "epoch: 39 step: 62, loss is 0.8354147672653198\n", - "epoch: 39 step: 63, loss is 0.8814513683319092\n", - "epoch: 39 step: 64, loss is 0.9449700713157654\n", - "epoch: 39 step: 65, loss is 0.8670333623886108\n", - "epoch: 39 step: 66, loss is 0.8952335119247437\n", - "epoch: 
39 step: 67, loss is 0.9354361891746521\n", - "epoch: 39 step: 68, loss is 0.8359534740447998\n", - "epoch: 39 step: 69, loss is 0.8228244781494141\n", - "epoch: 39 step: 70, loss is 0.9506558179855347\n", - "epoch: 39 step: 71, loss is 0.9115484356880188\n", - "epoch: 39 step: 72, loss is 0.8385354280471802\n", - "epoch: 39 step: 73, loss is 0.8614368438720703\n", - "epoch: 39 step: 74, loss is 0.9093254804611206\n", - "epoch: 39 step: 75, loss is 0.8994498252868652\n", - "epoch: 39 step: 76, loss is 0.8284345269203186\n", - "epoch: 39 step: 77, loss is 0.8665653467178345\n", - "epoch: 39 step: 78, loss is 0.9472671747207642\n", - "epoch: 39 step: 79, loss is 0.8840035796165466\n", - "epoch: 39 step: 80, loss is 0.8521105051040649\n", - "epoch: 39 step: 81, loss is 0.896456241607666\n", - "epoch: 39 step: 82, loss is 0.8790334463119507\n", - "epoch: 39 step: 83, loss is 0.8255720138549805\n", - "epoch: 39 step: 84, loss is 0.8690876960754395\n", - "epoch: 39 step: 85, loss is 0.9144492149353027\n", - "epoch: 39 step: 86, loss is 0.9540635347366333\n", - "epoch: 39 step: 87, loss is 0.8606998920440674\n", - "epoch: 39 step: 88, loss is 0.8322345018386841\n", - "epoch: 39 step: 89, loss is 0.8087448477745056\n", - "epoch: 39 step: 90, loss is 0.9061346054077148\n", - "epoch: 39 step: 91, loss is 0.8814910650253296\n", - "epoch: 39 step: 92, loss is 0.9104195237159729\n", - "epoch: 39 step: 93, loss is 0.806530237197876\n", - "epoch: 39 step: 94, loss is 0.8854644894599915\n", - "epoch: 39 step: 95, loss is 0.9003764390945435\n", - "epoch: 39 step: 96, loss is 0.9119036197662354\n", - "epoch: 39 step: 97, loss is 0.8539155721664429\n", - "epoch: 39 step: 98, loss is 0.966382622718811\n", - "epoch: 39 step: 99, loss is 0.9003664255142212\n", - "epoch: 39 step: 100, loss is 0.874913215637207\n", - "epoch: 39 step: 101, loss is 0.8580418229103088\n", - "epoch: 39 step: 102, loss is 0.878940999507904\n", - "epoch: 39 step: 103, loss is 0.8316534757614136\n", - "epoch: 39 step: 104, loss is 0.9011969566345215\n", - "epoch: 39 step: 105, loss is 0.8512711524963379\n", - "epoch: 39 step: 106, loss is 0.9250340461730957\n", - "epoch: 39 step: 107, loss is 0.8459631204605103\n", - "epoch: 39 step: 108, loss is 0.8526691198348999\n", - "epoch: 39 step: 109, loss is 0.8753492832183838\n", - "epoch: 39 step: 110, loss is 0.8582953214645386\n", - "epoch: 39 step: 111, loss is 0.8176575899124146\n", - "epoch: 39 step: 112, loss is 0.8801053166389465\n", - "epoch: 39 step: 113, loss is 0.8328306674957275\n", - "epoch: 39 step: 114, loss is 0.9638676643371582\n", - "epoch: 39 step: 115, loss is 0.9101030826568604\n", - "epoch: 39 step: 116, loss is 0.8906930088996887\n", - "epoch: 39 step: 117, loss is 0.8933236598968506\n", - "epoch: 39 step: 118, loss is 0.8549261093139648\n", - "epoch: 39 step: 119, loss is 0.8882817029953003\n", - "epoch: 39 step: 120, loss is 0.8730419874191284\n", - "epoch: 39 step: 121, loss is 0.8845840692520142\n", - "epoch: 39 step: 122, loss is 0.9062395095825195\n", - "epoch: 39 step: 123, loss is 0.9084655046463013\n", - "epoch: 39 step: 124, loss is 0.8796606063842773\n", - "epoch: 39 step: 125, loss is 0.9037388563156128\n", - "epoch: 39 step: 126, loss is 0.8589247465133667\n", - "epoch: 39 step: 127, loss is 0.8514553308486938\n", - "epoch: 39 step: 128, loss is 0.8845252990722656\n", - "epoch: 39 step: 129, loss is 0.8582234382629395\n", - "epoch: 39 step: 130, loss is 0.8410537838935852\n", - "epoch: 39 step: 131, loss is 0.9522879123687744\n", - "epoch: 39 step: 132, 
loss is 0.9310232996940613\n", - "epoch: 39 step: 133, loss is 0.8724864721298218\n", - "epoch: 39 step: 134, loss is 0.8914147615432739\n", - "epoch: 39 step: 135, loss is 0.8486027717590332\n", - "epoch: 39 step: 136, loss is 0.8653729557991028\n", - "epoch: 39 step: 137, loss is 0.8387752175331116\n", - "epoch: 39 step: 138, loss is 0.9586123824119568\n", - "epoch: 39 step: 139, loss is 0.9123362898826599\n", - "epoch: 39 step: 140, loss is 0.9376376867294312\n", - "epoch: 39 step: 141, loss is 0.8917154669761658\n", - "epoch: 39 step: 142, loss is 0.85279381275177\n", - "epoch: 39 step: 143, loss is 0.8488315939903259\n", - "epoch: 39 step: 144, loss is 0.8892083168029785\n", - "epoch: 39 step: 145, loss is 0.8788743615150452\n", - "epoch: 39 step: 146, loss is 0.9256724715232849\n", - "epoch: 39 step: 147, loss is 0.8653937578201294\n", - "epoch: 39 step: 148, loss is 0.9412431120872498\n", - "epoch: 39 step: 149, loss is 0.9005237221717834\n", - "epoch: 39 step: 150, loss is 0.8837241530418396\n", - "epoch: 39 step: 151, loss is 0.9084970951080322\n", - "epoch: 39 step: 152, loss is 0.933533251285553\n", - "epoch: 39 step: 153, loss is 0.8947114944458008\n", - "epoch: 39 step: 154, loss is 0.8302844166755676\n", - "epoch: 39 step: 155, loss is 0.8581252098083496\n", - "epoch: 39 step: 156, loss is 0.8931020498275757\n", - "epoch: 39 step: 157, loss is 0.8387654423713684\n", - "epoch: 39 step: 158, loss is 0.8659118413925171\n", - "epoch: 39 step: 159, loss is 0.9112050533294678\n", - "epoch: 39 step: 160, loss is 0.8774600028991699\n", - "epoch: 39 step: 161, loss is 0.9333910942077637\n", - "epoch: 39 step: 162, loss is 0.807689368724823\n", - "epoch: 39 step: 163, loss is 0.8876700401306152\n", - "epoch: 39 step: 164, loss is 0.9277569055557251\n", - "epoch: 39 step: 165, loss is 0.8392190933227539\n", - "epoch: 39 step: 166, loss is 0.9012026190757751\n", - "epoch: 39 step: 167, loss is 0.8890141248703003\n", - "epoch: 39 step: 168, loss is 0.8833734393119812\n", - "epoch: 39 step: 169, loss is 0.9492493271827698\n", - "epoch: 39 step: 170, loss is 0.908758282661438\n", - "epoch: 39 step: 171, loss is 0.9744986891746521\n", - "epoch: 39 step: 172, loss is 0.8908541202545166\n", - "epoch: 39 step: 173, loss is 0.89945387840271\n", - "epoch: 39 step: 174, loss is 0.8402144908905029\n", - "epoch: 39 step: 175, loss is 0.9068995714187622\n", - "epoch: 39 step: 176, loss is 0.8955084085464478\n", - "epoch: 39 step: 177, loss is 0.8400992155075073\n", - "epoch: 39 step: 178, loss is 0.9114003777503967\n", - "epoch: 39 step: 179, loss is 0.9817430377006531\n", - "epoch: 39 step: 180, loss is 0.8850396871566772\n", - "epoch: 39 step: 181, loss is 0.8664795160293579\n", - "epoch: 39 step: 182, loss is 0.8601346015930176\n", - "epoch: 39 step: 183, loss is 0.9466091394424438\n", - "epoch: 39 step: 184, loss is 0.8960914611816406\n", - "epoch: 39 step: 185, loss is 0.9314067959785461\n", - "epoch: 39 step: 186, loss is 0.9052322506904602\n", - "epoch: 39 step: 187, loss is 0.9361927509307861\n", - "epoch: 39 step: 188, loss is 0.8847562670707703\n", - "epoch: 39 step: 189, loss is 0.9764125347137451\n", - "epoch: 39 step: 190, loss is 0.8880001902580261\n", - "epoch: 39 step: 191, loss is 0.8959442377090454\n", - "epoch: 39 step: 192, loss is 0.8160576820373535\n", - "epoch: 39 step: 193, loss is 0.8572779297828674\n", - "epoch: 39 step: 194, loss is 0.870726466178894\n", - "epoch: 39 step: 195, loss is 0.9026192426681519\n", - "Train epoch time: 104825.320 ms, per step time: 537.566 ms\n", 
- "epoch: 40 step: 1, loss is 0.8851234912872314\n", - "epoch: 40 step: 2, loss is 0.8387653827667236\n", - "epoch: 40 step: 3, loss is 0.8830152750015259\n", - "epoch: 40 step: 4, loss is 0.8329616785049438\n", - "epoch: 40 step: 5, loss is 0.8500375747680664\n", - "epoch: 40 step: 6, loss is 0.8733106851577759\n", - "epoch: 40 step: 7, loss is 0.8880319595336914\n", - "epoch: 40 step: 8, loss is 0.8699027299880981\n", - "epoch: 40 step: 9, loss is 0.9004031419754028\n", - "epoch: 40 step: 10, loss is 0.8964098691940308\n", - "epoch: 40 step: 11, loss is 0.899456262588501\n", - "epoch: 40 step: 12, loss is 0.8893042802810669\n", - "epoch: 40 step: 13, loss is 0.8464663028717041\n", - "epoch: 40 step: 14, loss is 0.8593541979789734\n", - "epoch: 40 step: 15, loss is 0.904763400554657\n", - "epoch: 40 step: 16, loss is 0.8689329624176025\n", - "epoch: 40 step: 17, loss is 0.8954069018363953\n", - "epoch: 40 step: 18, loss is 0.8645673990249634\n", - "epoch: 40 step: 19, loss is 0.8800634145736694\n", - "epoch: 40 step: 20, loss is 0.9918292760848999\n", - "epoch: 40 step: 21, loss is 0.8588066101074219\n", - "epoch: 40 step: 22, loss is 0.821401834487915\n", - "epoch: 40 step: 23, loss is 0.8418498039245605\n", - "epoch: 40 step: 24, loss is 0.9312494397163391\n", - "epoch: 40 step: 25, loss is 0.8298090100288391\n", - "epoch: 40 step: 26, loss is 0.8266054391860962\n", - "epoch: 40 step: 27, loss is 0.8716622591018677\n", - "epoch: 40 step: 28, loss is 0.8610857725143433\n", - "epoch: 40 step: 29, loss is 0.8384156227111816\n", - "epoch: 40 step: 30, loss is 0.8949705362319946\n", - "epoch: 40 step: 31, loss is 0.9197995662689209\n", - "epoch: 40 step: 32, loss is 0.8539305925369263\n", - "epoch: 40 step: 33, loss is 0.9472203254699707\n", - "epoch: 40 step: 34, loss is 0.8451520204544067\n", - "epoch: 40 step: 35, loss is 0.8517283797264099\n", - "epoch: 40 step: 36, loss is 0.80213463306427\n", - "epoch: 40 step: 37, loss is 0.8429053425788879\n", - "epoch: 40 step: 38, loss is 0.8992063999176025\n", - "epoch: 40 step: 39, loss is 0.8799428939819336\n", - "epoch: 40 step: 40, loss is 0.859926700592041\n", - "epoch: 40 step: 41, loss is 0.8412423133850098\n", - "epoch: 40 step: 42, loss is 0.9253013730049133\n", - "epoch: 40 step: 43, loss is 0.833931028842926\n", - "epoch: 40 step: 44, loss is 0.8247307538986206\n", - "epoch: 40 step: 45, loss is 0.9064934253692627\n", - "epoch: 40 step: 46, loss is 0.8344542384147644\n", - "epoch: 40 step: 47, loss is 0.8230990171432495\n", - "epoch: 40 step: 48, loss is 0.8442429304122925\n", - "epoch: 40 step: 49, loss is 0.9023276567459106\n", - "epoch: 40 step: 50, loss is 0.8159782290458679\n", - "epoch: 40 step: 51, loss is 0.7803122401237488\n", - "epoch: 40 step: 52, loss is 0.899043619632721\n", - "epoch: 40 step: 53, loss is 0.8936437368392944\n", - "epoch: 40 step: 54, loss is 0.8918968439102173\n", - "epoch: 40 step: 55, loss is 0.8967644572257996\n", - "epoch: 40 step: 56, loss is 0.8742477893829346\n", - "epoch: 40 step: 57, loss is 0.8631412982940674\n", - "epoch: 40 step: 58, loss is 0.8607645034790039\n", - "epoch: 40 step: 59, loss is 0.9121779799461365\n", - "epoch: 40 step: 60, loss is 0.8896794319152832\n", - "epoch: 40 step: 61, loss is 0.9428945183753967\n", - "epoch: 40 step: 62, loss is 0.883753776550293\n", - "epoch: 40 step: 63, loss is 0.9169254899024963\n", - "epoch: 40 step: 64, loss is 0.9192215204238892\n", - "epoch: 40 step: 65, loss is 0.8570241928100586\n", - "epoch: 40 step: 66, loss is 0.8706960678100586\n", - 
"epoch: 40 step: 67, loss is 0.9097570180892944\n", - "epoch: 40 step: 68, loss is 0.8814102411270142\n", - "epoch: 40 step: 69, loss is 0.8036127686500549\n", - "epoch: 40 step: 70, loss is 0.9094939231872559\n", - "epoch: 40 step: 71, loss is 0.8332775831222534\n", - "epoch: 40 step: 72, loss is 0.9041279554367065\n", - "epoch: 40 step: 73, loss is 0.8735212087631226\n", - "epoch: 40 step: 74, loss is 0.825016975402832\n", - "epoch: 40 step: 75, loss is 0.9236082434654236\n", - "epoch: 40 step: 76, loss is 0.8707889318466187\n", - "epoch: 40 step: 77, loss is 0.9323348999023438\n", - "epoch: 40 step: 78, loss is 0.9359569549560547\n", - "epoch: 40 step: 79, loss is 0.8720421195030212\n", - "epoch: 40 step: 80, loss is 0.9097332954406738\n", - "epoch: 40 step: 81, loss is 0.8492221832275391\n", - "epoch: 40 step: 82, loss is 0.8324311971664429\n", - "epoch: 40 step: 83, loss is 0.8753854632377625\n", - "epoch: 40 step: 84, loss is 0.8173515796661377\n", - "epoch: 40 step: 85, loss is 0.8949519395828247\n", - "epoch: 40 step: 86, loss is 0.8931065797805786\n", - "epoch: 40 step: 87, loss is 0.8364624977111816\n", - "epoch: 40 step: 88, loss is 0.8630421757698059\n", - "epoch: 40 step: 89, loss is 0.8318980932235718\n", - "epoch: 40 step: 90, loss is 0.8778986930847168\n", - "epoch: 40 step: 91, loss is 0.8972828388214111\n", - "epoch: 40 step: 92, loss is 0.8785991072654724\n", - "epoch: 40 step: 93, loss is 0.907089352607727\n", - "epoch: 40 step: 94, loss is 0.8727007508277893\n", - "epoch: 40 step: 95, loss is 0.9328292608261108\n", - "epoch: 40 step: 96, loss is 0.8604423999786377\n", - "epoch: 40 step: 97, loss is 0.8766709566116333\n", - "epoch: 40 step: 98, loss is 0.9485180377960205\n", - "epoch: 40 step: 99, loss is 0.9245548844337463\n", - "epoch: 40 step: 100, loss is 0.875493049621582\n", - "epoch: 40 step: 101, loss is 0.8908942937850952\n", - "epoch: 40 step: 102, loss is 0.8791377544403076\n", - "epoch: 40 step: 103, loss is 0.8919621706008911\n", - "epoch: 40 step: 104, loss is 0.8836909532546997\n", - "epoch: 40 step: 105, loss is 0.8924317955970764\n", - "epoch: 40 step: 106, loss is 0.9419810175895691\n", - "epoch: 40 step: 107, loss is 0.9120110869407654\n", - "epoch: 40 step: 108, loss is 0.8465275764465332\n", - "epoch: 40 step: 109, loss is 0.8785563707351685\n", - "epoch: 40 step: 110, loss is 0.9054582118988037\n", - "epoch: 40 step: 111, loss is 0.8577967882156372\n", - "epoch: 40 step: 112, loss is 0.8663351535797119\n", - "epoch: 40 step: 113, loss is 0.8312288522720337\n", - "epoch: 40 step: 114, loss is 0.904059648513794\n", - "epoch: 40 step: 115, loss is 0.877544641494751\n", - "epoch: 40 step: 116, loss is 0.8869988918304443\n", - "epoch: 40 step: 117, loss is 0.903724730014801\n", - "epoch: 40 step: 118, loss is 0.8916617035865784\n", - "epoch: 40 step: 119, loss is 0.8024028539657593\n", - "epoch: 40 step: 120, loss is 0.9454323053359985\n", - "epoch: 40 step: 121, loss is 0.8825303316116333\n", - "epoch: 40 step: 122, loss is 0.9104115962982178\n", - "epoch: 40 step: 123, loss is 0.8717929124832153\n", - "epoch: 40 step: 124, loss is 0.8908679485321045\n", - "epoch: 40 step: 125, loss is 0.8974992036819458\n", - "epoch: 40 step: 126, loss is 0.8960497379302979\n", - "epoch: 40 step: 127, loss is 0.8568652868270874\n", - "epoch: 40 step: 128, loss is 0.8469300270080566\n", - "epoch: 40 step: 129, loss is 0.8961756229400635\n", - "epoch: 40 step: 130, loss is 0.8992486000061035\n", - "epoch: 40 step: 131, loss is 0.8704785108566284\n", - "epoch: 40 step: 
132, loss is 0.902604341506958\n", - "epoch: 40 step: 133, loss is 0.8849173784255981\n", - "epoch: 40 step: 134, loss is 0.8849829435348511\n", - "epoch: 40 step: 135, loss is 0.9028277397155762\n", - "epoch: 40 step: 136, loss is 0.9375927448272705\n", - "epoch: 40 step: 137, loss is 0.9122540950775146\n", - "epoch: 40 step: 138, loss is 0.8627878427505493\n", - "epoch: 40 step: 139, loss is 0.9118070602416992\n", - "epoch: 40 step: 140, loss is 0.8616949915885925\n", - "epoch: 40 step: 141, loss is 0.8577849268913269\n", - "epoch: 40 step: 142, loss is 0.9246078729629517\n", - "epoch: 40 step: 143, loss is 0.866975724697113\n", - "epoch: 40 step: 144, loss is 0.8619215488433838\n", - "epoch: 40 step: 145, loss is 0.9103195667266846\n", - "epoch: 40 step: 146, loss is 0.9210906028747559\n", - "epoch: 40 step: 147, loss is 0.8680026531219482\n", - "epoch: 40 step: 148, loss is 0.8251380920410156\n", - "epoch: 40 step: 149, loss is 0.8463174700737\n", - "epoch: 40 step: 150, loss is 0.9118173122406006\n", - "epoch: 40 step: 151, loss is 0.9314653277397156\n", - "epoch: 40 step: 152, loss is 0.9237564206123352\n", - "epoch: 40 step: 153, loss is 0.8302289247512817\n", - "epoch: 40 step: 154, loss is 0.835802435874939\n", - "epoch: 40 step: 155, loss is 0.8791258335113525\n", - "epoch: 40 step: 156, loss is 0.9399016499519348\n", - "epoch: 40 step: 157, loss is 0.8605079054832458\n", - "epoch: 40 step: 158, loss is 0.9036983847618103\n", - "epoch: 40 step: 159, loss is 0.8914352059364319\n", - "epoch: 40 step: 160, loss is 0.9488334655761719\n", - "epoch: 40 step: 161, loss is 0.9103049039840698\n", - "epoch: 40 step: 162, loss is 0.8704754114151001\n", - "epoch: 40 step: 163, loss is 0.8842976093292236\n", - "epoch: 40 step: 164, loss is 0.8546466827392578\n", - "epoch: 40 step: 165, loss is 0.8507143259048462\n", - "epoch: 40 step: 166, loss is 0.8573259711265564\n", - "epoch: 40 step: 167, loss is 0.8737185001373291\n", - "epoch: 40 step: 168, loss is 0.8979017734527588\n", - "epoch: 40 step: 169, loss is 0.8496789932250977\n", - "epoch: 40 step: 170, loss is 0.8904184103012085\n", - "epoch: 40 step: 171, loss is 0.8918924331665039\n", - "epoch: 40 step: 172, loss is 0.8876844644546509\n", - "epoch: 40 step: 173, loss is 0.8999760150909424\n", - "epoch: 40 step: 174, loss is 0.895128071308136\n", - "epoch: 40 step: 175, loss is 0.8505512475967407\n", - "epoch: 40 step: 176, loss is 0.9484850168228149\n", - "epoch: 40 step: 177, loss is 0.930647611618042\n", - "epoch: 40 step: 178, loss is 0.8815802335739136\n", - "epoch: 40 step: 179, loss is 0.8763477802276611\n", - "epoch: 40 step: 180, loss is 0.891345202922821\n", - "epoch: 40 step: 181, loss is 0.8182728290557861\n", - "epoch: 40 step: 182, loss is 0.8512617349624634\n", - "epoch: 40 step: 183, loss is 0.949865460395813\n", - "epoch: 40 step: 184, loss is 0.821356475353241\n", - "epoch: 40 step: 185, loss is 0.8953803181648254\n", - "epoch: 40 step: 186, loss is 0.911920428276062\n", - "epoch: 40 step: 187, loss is 0.9853698015213013\n", - "epoch: 40 step: 188, loss is 0.8613886833190918\n", - "epoch: 40 step: 189, loss is 0.8839408159255981\n", - "epoch: 40 step: 190, loss is 0.8179978728294373\n", - "epoch: 40 step: 191, loss is 0.9102081060409546\n", - "epoch: 40 step: 192, loss is 0.8488866090774536\n", - "epoch: 40 step: 193, loss is 0.8513681888580322\n", - "epoch: 40 step: 194, loss is 0.8531274199485779\n", - "epoch: 40 step: 195, loss is 0.842171311378479\n", - "Train epoch time: 109661.694 ms, per step time: 562.368 ms\n", 
- "epoch: 41 step: 1, loss is 0.8618067502975464\n", - "epoch: 41 step: 2, loss is 0.878968358039856\n", - "epoch: 41 step: 3, loss is 0.859882116317749\n", - "epoch: 41 step: 4, loss is 0.8857802152633667\n", - "epoch: 41 step: 5, loss is 0.8492990136146545\n", - "epoch: 41 step: 6, loss is 0.8154758214950562\n", - "epoch: 41 step: 7, loss is 0.8833035826683044\n", - "epoch: 41 step: 8, loss is 0.8062804937362671\n", - "epoch: 41 step: 9, loss is 0.8593360781669617\n", - "epoch: 41 step: 10, loss is 0.8572665452957153\n", - "epoch: 41 step: 11, loss is 0.8745375275611877\n", - "epoch: 41 step: 12, loss is 0.789219856262207\n", - "epoch: 41 step: 13, loss is 0.8631721138954163\n", - "epoch: 41 step: 14, loss is 0.832859992980957\n", - "epoch: 41 step: 15, loss is 0.8729110956192017\n", - "epoch: 41 step: 16, loss is 0.8440735340118408\n", - "epoch: 41 step: 17, loss is 0.8444580435752869\n", - "epoch: 41 step: 18, loss is 0.903781533241272\n", - "epoch: 41 step: 19, loss is 0.82032310962677\n", - "epoch: 41 step: 20, loss is 0.8752762675285339\n", - "epoch: 41 step: 21, loss is 0.8854423761367798\n", - "epoch: 41 step: 22, loss is 0.8759193420410156\n", - "epoch: 41 step: 23, loss is 0.9510802030563354\n", - "epoch: 41 step: 24, loss is 0.8492807149887085\n", - "epoch: 41 step: 25, loss is 0.999866247177124\n", - "epoch: 41 step: 26, loss is 0.8750281929969788\n", - "epoch: 41 step: 27, loss is 0.886038601398468\n", - "epoch: 41 step: 28, loss is 0.8495041728019714\n", - "epoch: 41 step: 29, loss is 0.9202085733413696\n", - "epoch: 41 step: 30, loss is 0.8238973617553711\n", - "epoch: 41 step: 31, loss is 0.8590933084487915\n", - "epoch: 41 step: 32, loss is 0.8466448783874512\n", - "epoch: 41 step: 33, loss is 0.8425939083099365\n", - "epoch: 41 step: 34, loss is 0.810389518737793\n", - "epoch: 41 step: 35, loss is 0.9089317917823792\n", - "epoch: 41 step: 36, loss is 0.8546086549758911\n", - "epoch: 41 step: 37, loss is 0.8167605400085449\n", - "epoch: 41 step: 38, loss is 0.8202768564224243\n", - "epoch: 41 step: 39, loss is 0.8257353901863098\n", - "epoch: 41 step: 40, loss is 0.8367618322372437\n", - "epoch: 41 step: 41, loss is 0.8686001300811768\n", - "epoch: 41 step: 42, loss is 0.8135358095169067\n", - "epoch: 41 step: 43, loss is 0.8717283010482788\n", - "epoch: 41 step: 44, loss is 0.8612308502197266\n", - "epoch: 41 step: 45, loss is 0.8421732783317566\n", - "epoch: 41 step: 46, loss is 0.8343643546104431\n", - "epoch: 41 step: 47, loss is 0.8678110241889954\n", - "epoch: 41 step: 48, loss is 0.8955135345458984\n", - "epoch: 41 step: 49, loss is 0.8342581391334534\n", - "epoch: 41 step: 50, loss is 0.8171234726905823\n", - "epoch: 41 step: 51, loss is 0.8400593400001526\n", - "epoch: 41 step: 52, loss is 0.8905632495880127\n", - "epoch: 41 step: 53, loss is 0.8648943901062012\n", - "epoch: 41 step: 54, loss is 0.8463799953460693\n", - "epoch: 41 step: 55, loss is 0.9434037804603577\n", - "epoch: 41 step: 56, loss is 0.8762432336807251\n", - "epoch: 41 step: 57, loss is 0.8036152124404907\n", - "epoch: 41 step: 58, loss is 0.8987679481506348\n", - "epoch: 41 step: 59, loss is 0.8129253387451172\n", - "epoch: 41 step: 60, loss is 0.8400129675865173\n", - "epoch: 41 step: 61, loss is 0.8241201043128967\n", - "epoch: 41 step: 62, loss is 0.9228696227073669\n", - "epoch: 41 step: 63, loss is 0.9398709535598755\n", - "epoch: 41 step: 64, loss is 0.9271838665008545\n", - "epoch: 41 step: 65, loss is 0.8938474655151367\n", - "epoch: 41 step: 66, loss is 0.8989920020103455\n", - "epoch: 
41 step: 67, loss is 0.8572782278060913\n", - "epoch: 41 step: 68, loss is 0.8624463081359863\n", - "epoch: 41 step: 69, loss is 0.8770366907119751\n", - "epoch: 41 step: 70, loss is 0.856779932975769\n", - "epoch: 41 step: 71, loss is 0.8851869106292725\n", - "epoch: 41 step: 72, loss is 0.9122754335403442\n", - "epoch: 41 step: 73, loss is 0.9410349130630493\n", - "epoch: 41 step: 74, loss is 0.847427248954773\n", - "epoch: 41 step: 75, loss is 0.9520801305770874\n", - "epoch: 41 step: 76, loss is 0.8619092702865601\n", - "epoch: 41 step: 77, loss is 0.9065008163452148\n", - "epoch: 41 step: 78, loss is 0.8984924554824829\n", - "epoch: 41 step: 79, loss is 0.9368110299110413\n", - "epoch: 41 step: 80, loss is 0.8906735777854919\n", - "epoch: 41 step: 81, loss is 0.8947526812553406\n", - "epoch: 41 step: 82, loss is 0.8208470344543457\n", - "epoch: 41 step: 83, loss is 0.8920199871063232\n", - "epoch: 41 step: 84, loss is 0.8303755521774292\n", - "epoch: 41 step: 85, loss is 0.8802638053894043\n", - "epoch: 41 step: 86, loss is 0.8867618441581726\n", - "epoch: 41 step: 87, loss is 0.8940666317939758\n", - "epoch: 41 step: 88, loss is 0.8446831703186035\n", - "epoch: 41 step: 89, loss is 0.8172906637191772\n", - "epoch: 41 step: 90, loss is 0.8237570524215698\n", - "epoch: 41 step: 91, loss is 0.8549254536628723\n", - "epoch: 41 step: 92, loss is 0.8722318410873413\n", - "epoch: 41 step: 93, loss is 0.8188673257827759\n", - "epoch: 41 step: 94, loss is 0.8566056489944458\n", - "epoch: 41 step: 95, loss is 0.8834047317504883\n", - "epoch: 41 step: 96, loss is 0.9314032196998596\n", - "epoch: 41 step: 97, loss is 0.896205484867096\n", - "epoch: 41 step: 98, loss is 0.8750560283660889\n", - "epoch: 41 step: 99, loss is 0.8571526408195496\n", - "epoch: 41 step: 100, loss is 0.8289397954940796\n", - "epoch: 41 step: 101, loss is 0.8239650130271912\n", - "epoch: 41 step: 102, loss is 0.8106339573860168\n", - "epoch: 41 step: 103, loss is 0.8450162410736084\n", - "epoch: 41 step: 104, loss is 0.8610562086105347\n", - "epoch: 41 step: 105, loss is 0.8680316805839539\n", - "epoch: 41 step: 106, loss is 0.8439693450927734\n", - "epoch: 41 step: 107, loss is 0.8370149731636047\n", - "epoch: 41 step: 108, loss is 0.7954291701316833\n", - "epoch: 41 step: 109, loss is 0.8636958599090576\n", - "epoch: 41 step: 110, loss is 0.9023556113243103\n", - "epoch: 41 step: 111, loss is 0.9075411558151245\n", - "epoch: 41 step: 112, loss is 0.8832035064697266\n", - "epoch: 41 step: 113, loss is 0.8567056655883789\n", - "epoch: 41 step: 114, loss is 0.8847880959510803\n", - "epoch: 41 step: 115, loss is 0.8463848829269409\n", - "epoch: 41 step: 116, loss is 0.9032496213912964\n", - "epoch: 41 step: 117, loss is 0.8129565715789795\n", - "epoch: 41 step: 118, loss is 0.8356386423110962\n", - "epoch: 41 step: 119, loss is 0.845257043838501\n", - "epoch: 41 step: 120, loss is 0.8531070947647095\n", - "epoch: 41 step: 121, loss is 0.8797612190246582\n", - "epoch: 41 step: 122, loss is 0.8757193088531494\n", - "epoch: 41 step: 123, loss is 0.8673896789550781\n", - "epoch: 41 step: 124, loss is 0.8790965676307678\n", - "epoch: 41 step: 125, loss is 0.9141356945037842\n", - "epoch: 41 step: 126, loss is 0.85257887840271\n", - "epoch: 41 step: 127, loss is 0.9249833226203918\n", - "epoch: 41 step: 128, loss is 0.843304455280304\n", - "epoch: 41 step: 129, loss is 0.8694417476654053\n", - "epoch: 41 step: 130, loss is 0.9708907604217529\n", - "epoch: 41 step: 131, loss is 0.8083999156951904\n", - "epoch: 41 step: 132, loss 
is 0.8010560274124146\n", - "epoch: 41 step: 133, loss is 0.8959842920303345\n", - "epoch: 41 step: 134, loss is 0.9247763156890869\n", - "epoch: 41 step: 135, loss is 0.885239839553833\n", - "epoch: 41 step: 136, loss is 0.9220145344734192\n", - "epoch: 41 step: 137, loss is 0.8627980947494507\n", - "epoch: 41 step: 138, loss is 0.8013051152229309\n", - "epoch: 41 step: 139, loss is 0.8849483728408813\n", - "epoch: 41 step: 140, loss is 0.9494187831878662\n", - "epoch: 41 step: 141, loss is 0.8294709920883179\n", - "epoch: 41 step: 142, loss is 0.8639086484909058\n", - "epoch: 41 step: 143, loss is 0.9001109600067139\n", - "epoch: 41 step: 144, loss is 0.8381350040435791\n", - "epoch: 41 step: 145, loss is 0.8885253667831421\n", - "epoch: 41 step: 146, loss is 0.846118152141571\n", - "epoch: 41 step: 147, loss is 0.8563376665115356\n", - "epoch: 41 step: 148, loss is 0.8879783153533936\n", - "epoch: 41 step: 149, loss is 0.877179741859436\n", - "epoch: 41 step: 150, loss is 0.9107189774513245\n", - "epoch: 41 step: 151, loss is 0.9207987785339355\n", - "epoch: 41 step: 152, loss is 0.8938964605331421\n", - "epoch: 41 step: 153, loss is 0.8833598494529724\n", - "epoch: 41 step: 154, loss is 0.8983395099639893\n", - "epoch: 41 step: 155, loss is 0.9261085987091064\n", - "epoch: 41 step: 156, loss is 0.9456740021705627\n", - "epoch: 41 step: 157, loss is 0.9052423238754272\n", - "epoch: 41 step: 158, loss is 0.8480076789855957\n", - "epoch: 41 step: 159, loss is 0.8339735865592957\n", - "epoch: 41 step: 160, loss is 0.8606916666030884\n", - "epoch: 41 step: 161, loss is 0.880690336227417\n", - "epoch: 41 step: 162, loss is 0.8791908025741577\n", - "epoch: 41 step: 163, loss is 0.8408814668655396\n", - "epoch: 41 step: 164, loss is 0.8652082085609436\n", - "epoch: 41 step: 165, loss is 0.8883305191993713\n", - "epoch: 41 step: 166, loss is 0.8621140718460083\n", - "epoch: 41 step: 167, loss is 0.869258463382721\n", - "epoch: 41 step: 168, loss is 0.8321027755737305\n", - "epoch: 41 step: 169, loss is 0.8394078016281128\n", - "epoch: 41 step: 170, loss is 0.8942273855209351\n", - "epoch: 41 step: 171, loss is 0.8461000919342041\n", - "epoch: 41 step: 172, loss is 0.8614010810852051\n", - "epoch: 41 step: 173, loss is 0.8483925461769104\n", - "epoch: 41 step: 174, loss is 0.8731878399848938\n", - "epoch: 41 step: 175, loss is 0.8125861883163452\n", - "epoch: 41 step: 176, loss is 0.7947521209716797\n", - "epoch: 41 step: 177, loss is 0.9045981168746948\n", - "epoch: 41 step: 178, loss is 0.9216893911361694\n", - "epoch: 41 step: 179, loss is 0.9175727367401123\n", - "epoch: 41 step: 180, loss is 0.8301849365234375\n", - "epoch: 41 step: 181, loss is 0.873075008392334\n", - "epoch: 41 step: 182, loss is 0.8370826244354248\n", - "epoch: 41 step: 183, loss is 0.8435347676277161\n", - "epoch: 41 step: 184, loss is 0.9307310581207275\n", - "epoch: 41 step: 185, loss is 0.916243314743042\n", - "epoch: 41 step: 186, loss is 0.8576066493988037\n", - "epoch: 41 step: 187, loss is 0.927433967590332\n", - "epoch: 41 step: 188, loss is 0.8260948061943054\n", - "epoch: 41 step: 189, loss is 0.9360485076904297\n", - "epoch: 41 step: 190, loss is 0.8575282692909241\n", - "epoch: 41 step: 191, loss is 0.8777022361755371\n", - "epoch: 41 step: 192, loss is 0.9341365098953247\n", - "epoch: 41 step: 193, loss is 0.8951187133789062\n", - "epoch: 41 step: 194, loss is 0.9154071807861328\n", - "epoch: 41 step: 195, loss is 0.9125281572341919\n", - "Train epoch time: 101076.283 ms, per step time: 518.340 ms\n", - 
"epoch: 42 step: 1, loss is 0.8089779615402222\n", - "epoch: 42 step: 2, loss is 0.8644869923591614\n", - "epoch: 42 step: 3, loss is 0.8119103312492371\n", - "epoch: 42 step: 4, loss is 0.8407332897186279\n", - "epoch: 42 step: 5, loss is 0.878099799156189\n", - "epoch: 42 step: 6, loss is 0.8315229415893555\n", - "epoch: 42 step: 7, loss is 0.8036289811134338\n", - "epoch: 42 step: 8, loss is 0.8257417678833008\n", - "epoch: 42 step: 9, loss is 0.8769088387489319\n", - "epoch: 42 step: 10, loss is 0.8532992005348206\n", - "epoch: 42 step: 11, loss is 0.8349636793136597\n", - "epoch: 42 step: 12, loss is 0.8228226900100708\n", - "epoch: 42 step: 13, loss is 0.8221296072006226\n", - "epoch: 42 step: 14, loss is 0.8070749044418335\n", - "epoch: 42 step: 15, loss is 0.8150159120559692\n", - "epoch: 42 step: 16, loss is 0.8096538782119751\n", - "epoch: 42 step: 17, loss is 0.7774046063423157\n", - "epoch: 42 step: 18, loss is 0.8697662353515625\n", - "epoch: 42 step: 19, loss is 0.8677603602409363\n", - "epoch: 42 step: 20, loss is 0.8983956575393677\n", - "epoch: 42 step: 21, loss is 0.861465573310852\n", - "epoch: 42 step: 22, loss is 0.8955117464065552\n", - "epoch: 42 step: 23, loss is 0.8678150177001953\n", - "epoch: 42 step: 24, loss is 0.8251582384109497\n", - "epoch: 42 step: 25, loss is 0.8551746606826782\n", - "epoch: 42 step: 26, loss is 0.8440577983856201\n", - "epoch: 42 step: 27, loss is 0.8638566732406616\n", - "epoch: 42 step: 28, loss is 0.8645436763763428\n", - "epoch: 42 step: 29, loss is 0.8609243631362915\n", - "epoch: 42 step: 30, loss is 0.8043273687362671\n", - "epoch: 42 step: 31, loss is 0.8528478145599365\n", - "epoch: 42 step: 32, loss is 0.838059663772583\n", - "epoch: 42 step: 33, loss is 0.9104222655296326\n", - "epoch: 42 step: 34, loss is 0.8785349130630493\n", - "epoch: 42 step: 35, loss is 0.8559966087341309\n", - "epoch: 42 step: 36, loss is 0.8767120838165283\n", - "epoch: 42 step: 37, loss is 0.811253547668457\n", - "epoch: 42 step: 38, loss is 0.8244295120239258\n", - "epoch: 42 step: 39, loss is 0.8432196378707886\n", - "epoch: 42 step: 40, loss is 0.8452286720275879\n", - "epoch: 42 step: 41, loss is 0.8515384197235107\n", - "epoch: 42 step: 42, loss is 0.8795911073684692\n", - "epoch: 42 step: 43, loss is 0.8898458480834961\n", - "epoch: 42 step: 44, loss is 0.8207236528396606\n", - "epoch: 42 step: 45, loss is 0.8984067440032959\n", - "epoch: 42 step: 46, loss is 0.8878769278526306\n", - "epoch: 42 step: 47, loss is 0.8230024576187134\n", - "epoch: 42 step: 48, loss is 0.8531795740127563\n", - "epoch: 42 step: 49, loss is 0.8085325956344604\n", - "epoch: 42 step: 50, loss is 0.8255131244659424\n", - "epoch: 42 step: 51, loss is 0.8654004335403442\n", - "epoch: 42 step: 52, loss is 0.8093442916870117\n", - "epoch: 42 step: 53, loss is 0.8567377328872681\n", - "epoch: 42 step: 54, loss is 0.9493138790130615\n", - "epoch: 42 step: 55, loss is 0.8640762567520142\n", - "epoch: 42 step: 56, loss is 0.8181732892990112\n", - "epoch: 42 step: 57, loss is 0.849233090877533\n", - "epoch: 42 step: 58, loss is 0.8435302376747131\n", - "epoch: 42 step: 59, loss is 0.8764400482177734\n", - "epoch: 42 step: 60, loss is 0.8560871481895447\n", - "epoch: 42 step: 61, loss is 0.8245770931243896\n", - "epoch: 42 step: 62, loss is 0.8457933664321899\n", - "epoch: 42 step: 63, loss is 0.8329942226409912\n", - "epoch: 42 step: 64, loss is 0.8670783042907715\n", - "epoch: 42 step: 65, loss is 0.8092839121818542\n", - "epoch: 42 step: 66, loss is 0.8762713670730591\n", - 
"epoch: 42 step: 67, loss is 0.8721253871917725\n", - "epoch: 42 step: 68, loss is 0.8670223951339722\n", - "epoch: 42 step: 69, loss is 0.8507710695266724\n", - "epoch: 42 step: 70, loss is 0.8344552516937256\n", - "epoch: 42 step: 71, loss is 0.8241802453994751\n", - "epoch: 42 step: 72, loss is 0.8111439347267151\n", - "epoch: 42 step: 73, loss is 0.8438748121261597\n", - "epoch: 42 step: 74, loss is 0.8742424249649048\n", - "epoch: 42 step: 75, loss is 0.8726904392242432\n", - "epoch: 42 step: 76, loss is 0.850218653678894\n", - "epoch: 42 step: 77, loss is 0.8470457792282104\n", - "epoch: 42 step: 78, loss is 0.8624528050422668\n", - "epoch: 42 step: 79, loss is 0.834343433380127\n", - "epoch: 42 step: 80, loss is 0.8489726185798645\n", - "epoch: 42 step: 81, loss is 0.8357803821563721\n", - "epoch: 42 step: 82, loss is 0.8839561939239502\n", - "epoch: 42 step: 83, loss is 0.8739861249923706\n", - "epoch: 42 step: 84, loss is 0.8605704307556152\n", - "epoch: 42 step: 85, loss is 0.8114489912986755\n", - "epoch: 42 step: 86, loss is 0.8859652280807495\n", - "epoch: 42 step: 87, loss is 0.8449142575263977\n", - "epoch: 42 step: 88, loss is 0.830005407333374\n", - "epoch: 42 step: 89, loss is 0.868400514125824\n", - "epoch: 42 step: 90, loss is 0.885135293006897\n", - "epoch: 42 step: 91, loss is 0.8562017679214478\n", - "epoch: 42 step: 92, loss is 0.8647804260253906\n", - "epoch: 42 step: 93, loss is 0.8739472031593323\n", - "epoch: 42 step: 94, loss is 0.8252691030502319\n", - "epoch: 42 step: 95, loss is 0.8628320693969727\n", - "epoch: 42 step: 96, loss is 0.8924381732940674\n", - "epoch: 42 step: 97, loss is 0.8443622589111328\n", - "epoch: 42 step: 98, loss is 0.8117328882217407\n", - "epoch: 42 step: 99, loss is 0.8545185327529907\n", - "epoch: 42 step: 100, loss is 0.8547579050064087\n", - "epoch: 42 step: 101, loss is 0.8376301527023315\n", - "epoch: 42 step: 102, loss is 0.8713126182556152\n", - "epoch: 42 step: 103, loss is 0.8685401678085327\n", - "epoch: 42 step: 104, loss is 0.8923622369766235\n", - "epoch: 42 step: 105, loss is 0.8097153306007385\n", - "epoch: 42 step: 106, loss is 0.8733646273612976\n", - "epoch: 42 step: 107, loss is 0.8296031355857849\n", - "epoch: 42 step: 108, loss is 0.8327970504760742\n", - "epoch: 42 step: 109, loss is 0.8926116228103638\n", - "epoch: 42 step: 110, loss is 0.8671724200248718\n", - "epoch: 42 step: 111, loss is 0.8336038589477539\n", - "epoch: 42 step: 112, loss is 0.9052085876464844\n", - "epoch: 42 step: 113, loss is 0.8344736099243164\n", - "epoch: 42 step: 114, loss is 0.8146159648895264\n", - "epoch: 42 step: 115, loss is 0.8312100172042847\n", - "epoch: 42 step: 116, loss is 0.9117698669433594\n", - "epoch: 42 step: 117, loss is 0.8652361631393433\n", - "epoch: 42 step: 118, loss is 0.8404426574707031\n", - "epoch: 42 step: 119, loss is 0.837387204170227\n", - "epoch: 42 step: 120, loss is 0.9015281200408936\n", - "epoch: 42 step: 121, loss is 0.9084380269050598\n", - "epoch: 42 step: 122, loss is 0.8683476448059082\n", - "epoch: 42 step: 123, loss is 0.8659813404083252\n", - "epoch: 42 step: 124, loss is 0.8215182423591614\n", - "epoch: 42 step: 125, loss is 0.8680877685546875\n", - "epoch: 42 step: 126, loss is 0.7969065308570862\n", - "epoch: 42 step: 127, loss is 0.8423664569854736\n", - "epoch: 42 step: 128, loss is 0.8767988085746765\n", - "epoch: 42 step: 129, loss is 0.8997753858566284\n", - "epoch: 42 step: 130, loss is 0.8332815170288086\n", - "epoch: 42 step: 131, loss is 0.8471370935440063\n", - "epoch: 42 step: 
132, loss is 0.8557276725769043\n", - "epoch: 42 step: 133, loss is 0.8950415849685669\n", - "epoch: 42 step: 134, loss is 0.84333336353302\n", - "epoch: 42 step: 135, loss is 0.8616927266120911\n", - "epoch: 42 step: 136, loss is 0.8685221672058105\n", - "epoch: 42 step: 137, loss is 0.8692147731781006\n", - "epoch: 42 step: 138, loss is 0.8464861512184143\n", - "epoch: 42 step: 139, loss is 0.8693275451660156\n", - "epoch: 42 step: 140, loss is 0.8784058690071106\n", - "epoch: 42 step: 141, loss is 0.8476649522781372\n", - "epoch: 42 step: 142, loss is 0.8064063787460327\n", - "epoch: 42 step: 143, loss is 0.9054412841796875\n", - "epoch: 42 step: 144, loss is 0.8571453094482422\n", - "epoch: 42 step: 145, loss is 0.9321569204330444\n", - "epoch: 42 step: 146, loss is 0.8750998973846436\n", - "epoch: 42 step: 147, loss is 0.907139778137207\n", - "epoch: 42 step: 148, loss is 0.8659070134162903\n", - "epoch: 42 step: 149, loss is 0.8510759472846985\n", - "epoch: 42 step: 150, loss is 0.8502603769302368\n", - "epoch: 42 step: 151, loss is 0.8500804901123047\n", - "epoch: 42 step: 152, loss is 0.8156150579452515\n", - "epoch: 42 step: 153, loss is 0.8733172416687012\n", - "epoch: 42 step: 154, loss is 0.850848913192749\n", - "epoch: 42 step: 155, loss is 0.8791963458061218\n", - "epoch: 42 step: 156, loss is 0.8015874028205872\n", - "epoch: 42 step: 157, loss is 0.8600988388061523\n", - "epoch: 42 step: 158, loss is 0.8457437753677368\n", - "epoch: 42 step: 159, loss is 0.8352266550064087\n", - "epoch: 42 step: 160, loss is 0.9183065891265869\n", - "epoch: 42 step: 161, loss is 0.8726166486740112\n", - "epoch: 42 step: 162, loss is 0.8349156975746155\n", - "epoch: 42 step: 163, loss is 0.8438750505447388\n", - "epoch: 42 step: 164, loss is 0.7972323894500732\n", - "epoch: 42 step: 165, loss is 0.8881043791770935\n", - "epoch: 42 step: 166, loss is 0.7695393562316895\n", - "epoch: 42 step: 167, loss is 0.9118347764015198\n", - "epoch: 42 step: 168, loss is 0.8156653642654419\n", - "epoch: 42 step: 169, loss is 0.8408712148666382\n", - "epoch: 42 step: 170, loss is 0.8774926662445068\n", - "epoch: 42 step: 171, loss is 0.8023203611373901\n", - "epoch: 42 step: 172, loss is 0.8694781064987183\n", - "epoch: 42 step: 173, loss is 0.8406541347503662\n", - "epoch: 42 step: 174, loss is 0.8344810009002686\n", - "epoch: 42 step: 175, loss is 0.8771547079086304\n", - "epoch: 42 step: 176, loss is 0.8694907426834106\n", - "epoch: 42 step: 177, loss is 0.8860297203063965\n", - "epoch: 42 step: 178, loss is 0.8535282611846924\n", - "epoch: 42 step: 179, loss is 0.8784891963005066\n", - "epoch: 42 step: 180, loss is 0.8673667907714844\n", - "epoch: 42 step: 181, loss is 0.885998010635376\n", - "epoch: 42 step: 182, loss is 0.8101425170898438\n", - "epoch: 42 step: 183, loss is 0.8581392765045166\n", - "epoch: 42 step: 184, loss is 0.8515086770057678\n", - "epoch: 42 step: 185, loss is 0.9192506074905396\n", - "epoch: 42 step: 186, loss is 0.813150942325592\n", - "epoch: 42 step: 187, loss is 0.8777002096176147\n", - "epoch: 42 step: 188, loss is 0.8682333827018738\n", - "epoch: 42 step: 189, loss is 0.8502902388572693\n", - "epoch: 42 step: 190, loss is 0.8853510022163391\n", - "epoch: 42 step: 191, loss is 0.8542659282684326\n", - "epoch: 42 step: 192, loss is 0.7935245037078857\n", - "epoch: 42 step: 193, loss is 0.849128007888794\n", - "epoch: 42 step: 194, loss is 0.8718112111091614\n", - "epoch: 42 step: 195, loss is 0.7927903532981873\n", - "Train epoch time: 104393.616 ms, per step time: 535.352 
ms\n", - "epoch: 43 step: 1, loss is 0.8294597864151001\n", - "epoch: 43 step: 2, loss is 0.8970947265625\n", - "epoch: 43 step: 3, loss is 0.8604476451873779\n", - "epoch: 43 step: 4, loss is 0.8271983861923218\n", - "epoch: 43 step: 5, loss is 0.7679990530014038\n", - "epoch: 43 step: 6, loss is 0.814246654510498\n", - "epoch: 43 step: 7, loss is 0.7870493531227112\n", - "epoch: 43 step: 8, loss is 0.7868152856826782\n", - "epoch: 43 step: 9, loss is 0.8285168409347534\n", - "epoch: 43 step: 10, loss is 0.8607795238494873\n", - "epoch: 43 step: 11, loss is 0.8177116513252258\n", - "epoch: 43 step: 12, loss is 0.7858068346977234\n", - "epoch: 43 step: 13, loss is 0.8965579271316528\n", - "epoch: 43 step: 14, loss is 0.8615819215774536\n", - "epoch: 43 step: 15, loss is 0.8327744007110596\n", - "epoch: 43 step: 16, loss is 0.8430631756782532\n", - "epoch: 43 step: 17, loss is 0.8059245944023132\n", - "epoch: 43 step: 18, loss is 0.9019784927368164\n", - "epoch: 43 step: 19, loss is 0.8373185992240906\n", - "epoch: 43 step: 20, loss is 0.8920520544052124\n", - "epoch: 43 step: 21, loss is 0.8596215844154358\n", - "epoch: 43 step: 22, loss is 0.8344764113426208\n", - "epoch: 43 step: 23, loss is 0.8371864557266235\n", - "epoch: 43 step: 24, loss is 0.8886958360671997\n", - "epoch: 43 step: 25, loss is 0.8028319478034973\n", - "epoch: 43 step: 26, loss is 0.8226444721221924\n", - "epoch: 43 step: 27, loss is 0.8300784826278687\n", - "epoch: 43 step: 28, loss is 0.7710871696472168\n", - "epoch: 43 step: 29, loss is 0.8353962898254395\n", - "epoch: 43 step: 30, loss is 0.7986444234848022\n", - "epoch: 43 step: 31, loss is 0.8214825391769409\n", - "epoch: 43 step: 32, loss is 0.7964630126953125\n", - "epoch: 43 step: 33, loss is 0.8764124512672424\n", - "epoch: 43 step: 34, loss is 0.7796955704689026\n", - "epoch: 43 step: 35, loss is 0.8275998830795288\n", - "epoch: 43 step: 36, loss is 0.8654944896697998\n", - "epoch: 43 step: 37, loss is 0.8556229472160339\n", - "epoch: 43 step: 38, loss is 0.8409806489944458\n", - "epoch: 43 step: 39, loss is 0.8394410610198975\n", - "epoch: 43 step: 40, loss is 0.8231090903282166\n", - "epoch: 43 step: 41, loss is 0.8441316485404968\n", - "epoch: 43 step: 42, loss is 0.8980637788772583\n", - "epoch: 43 step: 43, loss is 0.8046373128890991\n", - "epoch: 43 step: 44, loss is 0.8214818835258484\n", - "epoch: 43 step: 45, loss is 0.8548122644424438\n", - "epoch: 43 step: 46, loss is 0.8148654699325562\n", - "epoch: 43 step: 47, loss is 0.8201409578323364\n", - "epoch: 43 step: 48, loss is 0.8499758839607239\n", - "epoch: 43 step: 49, loss is 0.8358092308044434\n", - "epoch: 43 step: 50, loss is 0.8226796388626099\n", - "epoch: 43 step: 51, loss is 0.8605965375900269\n", - "epoch: 43 step: 52, loss is 0.8365210294723511\n", - "epoch: 43 step: 53, loss is 0.759890079498291\n", - "epoch: 43 step: 54, loss is 0.8327456712722778\n", - "epoch: 43 step: 55, loss is 0.8741596341133118\n", - "epoch: 43 step: 56, loss is 0.9026631116867065\n", - "epoch: 43 step: 57, loss is 0.8553040623664856\n", - "epoch: 43 step: 58, loss is 0.9060505628585815\n", - "epoch: 43 step: 59, loss is 0.857209324836731\n", - "epoch: 43 step: 60, loss is 0.7948429584503174\n", - "epoch: 43 step: 61, loss is 0.7962688207626343\n", - "epoch: 43 step: 62, loss is 0.8637653589248657\n", - "epoch: 43 step: 63, loss is 0.8078010082244873\n", - "epoch: 43 step: 64, loss is 0.8711192011833191\n", - "epoch: 43 step: 65, loss is 0.7712925672531128\n", - "epoch: 43 step: 66, loss is 
0.7980602979660034\n", - "epoch: 43 step: 67, loss is 0.8307641744613647\n", - "epoch: 43 step: 68, loss is 0.8084980249404907\n", - "epoch: 43 step: 69, loss is 0.9210799336433411\n", - "epoch: 43 step: 70, loss is 0.8832650184631348\n", - "epoch: 43 step: 71, loss is 0.873507022857666\n", - "epoch: 43 step: 72, loss is 0.8338415026664734\n", - "epoch: 43 step: 73, loss is 0.8717669248580933\n", - "epoch: 43 step: 74, loss is 0.7991918921470642\n", - "epoch: 43 step: 75, loss is 0.840673565864563\n", - "epoch: 43 step: 76, loss is 0.7988643646240234\n", - "epoch: 43 step: 77, loss is 0.8677003383636475\n", - "epoch: 43 step: 78, loss is 0.7734363079071045\n", - "epoch: 43 step: 79, loss is 0.7846817970275879\n", - "epoch: 43 step: 80, loss is 0.7889673709869385\n", - "epoch: 43 step: 81, loss is 0.8030487298965454\n", - "epoch: 43 step: 82, loss is 0.8372325301170349\n", - "epoch: 43 step: 83, loss is 0.7864566445350647\n", - "epoch: 43 step: 84, loss is 0.8571981191635132\n", - "epoch: 43 step: 85, loss is 0.8592720627784729\n", - "epoch: 43 step: 86, loss is 0.834925651550293\n", - "epoch: 43 step: 87, loss is 0.8302255868911743\n", - "epoch: 43 step: 88, loss is 0.806150496006012\n", - "epoch: 43 step: 89, loss is 0.8232361078262329\n", - "epoch: 43 step: 90, loss is 0.8831214904785156\n", - "epoch: 43 step: 91, loss is 0.8150962591171265\n", - "epoch: 43 step: 92, loss is 0.8546246886253357\n", - "epoch: 43 step: 93, loss is 0.8501337170600891\n", - "epoch: 43 step: 94, loss is 0.8255654573440552\n", - "epoch: 43 step: 95, loss is 0.8462241888046265\n", - "epoch: 43 step: 96, loss is 0.8206239938735962\n", - "epoch: 43 step: 97, loss is 0.8344476222991943\n", - "epoch: 43 step: 98, loss is 0.8239794969558716\n", - "epoch: 43 step: 99, loss is 0.8198047876358032\n", - "epoch: 43 step: 100, loss is 0.8275083303451538\n", - "epoch: 43 step: 101, loss is 0.8346121907234192\n", - "epoch: 43 step: 102, loss is 0.9012043476104736\n", - "epoch: 43 step: 103, loss is 0.8496511578559875\n", - "epoch: 43 step: 104, loss is 0.8193620443344116\n", - "epoch: 43 step: 105, loss is 0.8473909497261047\n", - "epoch: 43 step: 106, loss is 0.8486902713775635\n", - "epoch: 43 step: 107, loss is 0.8898446559906006\n", - "epoch: 43 step: 108, loss is 0.7972216606140137\n", - "epoch: 43 step: 109, loss is 0.8071879148483276\n", - "epoch: 43 step: 110, loss is 0.8307573795318604\n", - "epoch: 43 step: 111, loss is 0.8682498931884766\n", - "epoch: 43 step: 112, loss is 0.8632298707962036\n", - "epoch: 43 step: 113, loss is 0.8534456491470337\n", - "epoch: 43 step: 114, loss is 0.8309627771377563\n", - "epoch: 43 step: 115, loss is 0.8819482326507568\n", - "epoch: 43 step: 116, loss is 0.8594818115234375\n", - "epoch: 43 step: 117, loss is 0.8103858232498169\n", - "epoch: 43 step: 118, loss is 0.8514858484268188\n", - "epoch: 43 step: 119, loss is 0.8288900852203369\n", - "epoch: 43 step: 120, loss is 0.8233085870742798\n", - "epoch: 43 step: 121, loss is 0.8544740676879883\n", - "epoch: 43 step: 122, loss is 0.8414467573165894\n", - "epoch: 43 step: 123, loss is 0.8041509389877319\n", - "epoch: 43 step: 124, loss is 0.8492350578308105\n", - "epoch: 43 step: 125, loss is 0.8789879083633423\n", - "epoch: 43 step: 126, loss is 0.8492079377174377\n", - "epoch: 43 step: 127, loss is 0.8372697830200195\n", - "epoch: 43 step: 128, loss is 0.8588001132011414\n", - "epoch: 43 step: 129, loss is 0.8964120149612427\n", - "epoch: 43 step: 130, loss is 0.9201116561889648\n", - "epoch: 43 step: 131, loss is 
0.8233623504638672\n", - "epoch: 43 step: 132, loss is 0.9195619821548462\n", - "epoch: 43 step: 133, loss is 0.8373682498931885\n", - "epoch: 43 step: 134, loss is 0.8882352113723755\n", - "epoch: 43 step: 135, loss is 0.8993968963623047\n", - "epoch: 43 step: 136, loss is 0.8546263575553894\n", - "epoch: 43 step: 137, loss is 0.7936971783638\n", - "epoch: 43 step: 138, loss is 0.8531118631362915\n", - "epoch: 43 step: 139, loss is 0.8156453371047974\n", - "epoch: 43 step: 140, loss is 0.8670303821563721\n", - "epoch: 43 step: 141, loss is 0.8854354023933411\n", - "epoch: 43 step: 142, loss is 0.9623984098434448\n", - "epoch: 43 step: 143, loss is 0.8263896703720093\n", - "epoch: 43 step: 144, loss is 0.8571993112564087\n", - "epoch: 43 step: 145, loss is 0.8731168508529663\n", - "epoch: 43 step: 146, loss is 0.8626257181167603\n", - "epoch: 43 step: 147, loss is 0.8558142185211182\n", - "epoch: 43 step: 148, loss is 0.8371372818946838\n", - "epoch: 43 step: 149, loss is 0.9152108430862427\n", - "epoch: 43 step: 150, loss is 0.8388125896453857\n", - "epoch: 43 step: 151, loss is 0.8111461997032166\n", - "epoch: 43 step: 152, loss is 0.8510836362838745\n", - "epoch: 43 step: 153, loss is 0.8323482275009155\n", - "epoch: 43 step: 154, loss is 0.8657610416412354\n", - "epoch: 43 step: 155, loss is 0.8141871094703674\n", - "epoch: 43 step: 156, loss is 0.8288489580154419\n", - "epoch: 43 step: 157, loss is 0.8409020900726318\n", - "epoch: 43 step: 158, loss is 0.9119324684143066\n", - "epoch: 43 step: 159, loss is 0.838637113571167\n", - "epoch: 43 step: 160, loss is 0.8470293283462524\n", - "epoch: 43 step: 161, loss is 0.9204859733581543\n", - "epoch: 43 step: 162, loss is 0.8028141260147095\n", - "epoch: 43 step: 163, loss is 0.888090193271637\n", - "epoch: 43 step: 164, loss is 0.908576488494873\n", - "epoch: 43 step: 165, loss is 0.8349628448486328\n", - "epoch: 43 step: 166, loss is 0.8642609715461731\n", - "epoch: 43 step: 167, loss is 0.8975907564163208\n", - "epoch: 43 step: 168, loss is 0.8330146670341492\n", - "epoch: 43 step: 169, loss is 0.8396173119544983\n", - "epoch: 43 step: 170, loss is 0.8458009362220764\n", - "epoch: 43 step: 171, loss is 0.8068820238113403\n", - "epoch: 43 step: 172, loss is 0.8205819129943848\n", - "epoch: 43 step: 173, loss is 0.8182896375656128\n", - "epoch: 43 step: 174, loss is 0.8649743795394897\n", - "epoch: 43 step: 175, loss is 0.8158557415008545\n", - "epoch: 43 step: 176, loss is 0.8410488367080688\n", - "epoch: 43 step: 177, loss is 0.8997222185134888\n", - "epoch: 43 step: 178, loss is 0.7878800630569458\n", - "epoch: 43 step: 179, loss is 0.8774538040161133\n", - "epoch: 43 step: 180, loss is 0.8969354629516602\n", - "epoch: 43 step: 181, loss is 0.8670445680618286\n", - "epoch: 43 step: 182, loss is 0.8310904502868652\n", - "epoch: 43 step: 183, loss is 0.8269047737121582\n", - "epoch: 43 step: 184, loss is 0.8661604523658752\n", - "epoch: 43 step: 185, loss is 0.8083995580673218\n", - "epoch: 43 step: 186, loss is 0.8497345447540283\n", - "epoch: 43 step: 187, loss is 0.7652710676193237\n", - "epoch: 43 step: 188, loss is 0.8509536981582642\n", - "epoch: 43 step: 189, loss is 0.7898473739624023\n", - "epoch: 43 step: 190, loss is 0.8304072618484497\n", - "epoch: 43 step: 191, loss is 0.8282390832901001\n", - "epoch: 43 step: 192, loss is 0.8815032243728638\n", - "epoch: 43 step: 193, loss is 0.8743302822113037\n", - "epoch: 43 step: 194, loss is 0.8324047327041626\n", - "epoch: 43 step: 195, loss is 0.8523470163345337\n", - "Train epoch 
time: 103265.899 ms, per step time: 529.569 ms\n", - "epoch: 44 step: 1, loss is 0.8453022241592407\n", - "epoch: 44 step: 2, loss is 0.7907478213310242\n", - "epoch: 44 step: 3, loss is 0.8016879558563232\n", - "epoch: 44 step: 4, loss is 0.8263792991638184\n", - "epoch: 44 step: 5, loss is 0.7857260704040527\n", - "epoch: 44 step: 6, loss is 0.8573659658432007\n", - "epoch: 44 step: 7, loss is 0.8057029247283936\n", - "epoch: 44 step: 8, loss is 0.8325988054275513\n", - "epoch: 44 step: 9, loss is 0.8090107440948486\n", - "epoch: 44 step: 10, loss is 0.8518710732460022\n", - "epoch: 44 step: 11, loss is 0.7683022022247314\n", - "epoch: 44 step: 12, loss is 0.8241764307022095\n", - "epoch: 44 step: 13, loss is 0.8171102404594421\n", - "epoch: 44 step: 14, loss is 0.7979844808578491\n", - "epoch: 44 step: 15, loss is 0.8109622001647949\n", - "epoch: 44 step: 16, loss is 0.8302081823348999\n", - "epoch: 44 step: 17, loss is 0.886076807975769\n", - "epoch: 44 step: 18, loss is 0.8752480745315552\n", - "epoch: 44 step: 19, loss is 0.826755166053772\n", - "epoch: 44 step: 20, loss is 0.9024949669837952\n", - "epoch: 44 step: 21, loss is 0.8768868446350098\n", - "epoch: 44 step: 22, loss is 0.7816983461380005\n", - "epoch: 44 step: 23, loss is 0.8135099411010742\n", - "epoch: 44 step: 24, loss is 0.7959494590759277\n", - "epoch: 44 step: 25, loss is 0.8701900243759155\n", - "epoch: 44 step: 26, loss is 0.8962787389755249\n", - "epoch: 44 step: 27, loss is 0.8122208118438721\n", - "epoch: 44 step: 28, loss is 0.8318886756896973\n", - "epoch: 44 step: 29, loss is 0.8071174621582031\n", - "epoch: 44 step: 30, loss is 0.7998003959655762\n", - "epoch: 44 step: 31, loss is 0.8052625060081482\n", - "epoch: 44 step: 32, loss is 0.8384972810745239\n", - "epoch: 44 step: 33, loss is 0.8433929681777954\n", - "epoch: 44 step: 34, loss is 0.8473092317581177\n", - "epoch: 44 step: 35, loss is 0.7479420900344849\n", - "epoch: 44 step: 36, loss is 0.8552576303482056\n", - "epoch: 44 step: 37, loss is 0.8296571969985962\n", - "epoch: 44 step: 38, loss is 0.8225266337394714\n", - "epoch: 44 step: 39, loss is 0.8398158550262451\n", - "epoch: 44 step: 40, loss is 0.810224175453186\n", - "epoch: 44 step: 41, loss is 0.8907671570777893\n", - "epoch: 44 step: 42, loss is 0.8192901015281677\n", - "epoch: 44 step: 43, loss is 0.8924587965011597\n", - "epoch: 44 step: 44, loss is 0.8292673826217651\n", - "epoch: 44 step: 45, loss is 0.771535336971283\n", - "epoch: 44 step: 46, loss is 0.836926281452179\n", - "epoch: 44 step: 47, loss is 0.7787238359451294\n", - "epoch: 44 step: 48, loss is 0.8849904537200928\n", - "epoch: 44 step: 49, loss is 0.7986758351325989\n", - "epoch: 44 step: 50, loss is 0.8151330947875977\n", - "epoch: 44 step: 51, loss is 0.827986478805542\n", - "epoch: 44 step: 52, loss is 0.8659918904304504\n", - "epoch: 44 step: 53, loss is 0.8116614818572998\n", - "epoch: 44 step: 54, loss is 0.7926613092422485\n", - "epoch: 44 step: 55, loss is 0.7859510183334351\n", - "epoch: 44 step: 56, loss is 0.8311823606491089\n", - "epoch: 44 step: 57, loss is 0.8313575387001038\n", - "epoch: 44 step: 58, loss is 0.8747384548187256\n", - "epoch: 44 step: 59, loss is 0.8423038721084595\n", - "epoch: 44 step: 60, loss is 0.7769363522529602\n", - "epoch: 44 step: 61, loss is 0.8134062886238098\n", - "epoch: 44 step: 62, loss is 0.7958183884620667\n", - "epoch: 44 step: 63, loss is 0.8659431338310242\n", - "epoch: 44 step: 64, loss is 0.8552248477935791\n", - "epoch: 44 step: 65, loss is 0.8095966577529907\n", - 
"epoch: 44 step: 66, loss is 0.8902565240859985\n", - "epoch: 44 step: 67, loss is 0.8342934846878052\n", - "epoch: 44 step: 68, loss is 0.8365081548690796\n", - "epoch: 44 step: 69, loss is 0.7752741575241089\n", - "epoch: 44 step: 70, loss is 0.8244410157203674\n", - "epoch: 44 step: 71, loss is 0.8486750721931458\n", - "epoch: 44 step: 72, loss is 0.8318091630935669\n", - "epoch: 44 step: 73, loss is 0.8393651247024536\n", - "epoch: 44 step: 74, loss is 0.8115312457084656\n", - "epoch: 44 step: 75, loss is 0.8164188861846924\n", - "epoch: 44 step: 76, loss is 0.8508647680282593\n", - "epoch: 44 step: 77, loss is 0.8699759840965271\n", - "epoch: 44 step: 78, loss is 0.8322898149490356\n", - "epoch: 44 step: 79, loss is 0.8376330137252808\n", - "epoch: 44 step: 80, loss is 0.8160139322280884\n", - "epoch: 44 step: 81, loss is 0.8363475799560547\n", - "epoch: 44 step: 82, loss is 0.8166965246200562\n", - "epoch: 44 step: 83, loss is 0.8335794806480408\n", - "epoch: 44 step: 84, loss is 0.8391317129135132\n", - "epoch: 44 step: 85, loss is 0.7605409622192383\n", - "epoch: 44 step: 86, loss is 0.8214500546455383\n", - "epoch: 44 step: 87, loss is 0.9058740139007568\n", - "epoch: 44 step: 88, loss is 0.8651093244552612\n", - "epoch: 44 step: 89, loss is 0.8033688068389893\n", - "epoch: 44 step: 90, loss is 0.8479012250900269\n", - "epoch: 44 step: 91, loss is 0.8389463424682617\n", - "epoch: 44 step: 92, loss is 0.7884944677352905\n", - "epoch: 44 step: 93, loss is 0.833466649055481\n", - "epoch: 44 step: 94, loss is 0.8223673701286316\n", - "epoch: 44 step: 95, loss is 0.8585664629936218\n", - "epoch: 44 step: 96, loss is 0.8613909482955933\n", - "epoch: 44 step: 97, loss is 0.8217692375183105\n", - "epoch: 44 step: 98, loss is 0.77707439661026\n", - "epoch: 44 step: 99, loss is 0.8251650929450989\n", - "epoch: 44 step: 100, loss is 0.8254645466804504\n", - "epoch: 44 step: 101, loss is 0.801839292049408\n", - "epoch: 44 step: 102, loss is 0.8903477191925049\n", - "epoch: 44 step: 103, loss is 0.846129298210144\n", - "epoch: 44 step: 104, loss is 0.8039761781692505\n", - "epoch: 44 step: 105, loss is 0.8283898234367371\n", - "epoch: 44 step: 106, loss is 0.8672746419906616\n", - "epoch: 44 step: 107, loss is 0.839009165763855\n", - "epoch: 44 step: 108, loss is 0.7480974197387695\n", - "epoch: 44 step: 109, loss is 0.8659617304801941\n", - "epoch: 44 step: 110, loss is 0.9020228981971741\n", - "epoch: 44 step: 111, loss is 0.843647837638855\n", - "epoch: 44 step: 112, loss is 0.7947038412094116\n", - "epoch: 44 step: 113, loss is 0.8495566844940186\n", - "epoch: 44 step: 114, loss is 0.8038637638092041\n", - "epoch: 44 step: 115, loss is 0.8976993560791016\n", - "epoch: 44 step: 116, loss is 0.8217252492904663\n", - "epoch: 44 step: 117, loss is 0.8445823788642883\n", - "epoch: 44 step: 118, loss is 0.848206639289856\n", - "epoch: 44 step: 119, loss is 0.8971410393714905\n", - "epoch: 44 step: 120, loss is 0.7785561084747314\n", - "epoch: 44 step: 121, loss is 0.8077627420425415\n", - "epoch: 44 step: 122, loss is 0.8520123362541199\n", - "epoch: 44 step: 123, loss is 0.7765213251113892\n", - "epoch: 44 step: 124, loss is 0.8805909752845764\n", - "epoch: 44 step: 125, loss is 0.7757835388183594\n", - "epoch: 44 step: 126, loss is 0.8509600758552551\n", - "epoch: 44 step: 127, loss is 0.8228942155838013\n", - "epoch: 44 step: 128, loss is 0.8111655712127686\n", - "epoch: 44 step: 129, loss is 0.8631141781806946\n", - "epoch: 44 step: 130, loss is 0.8184478282928467\n", - "epoch: 44 step: 131, 
loss is 0.7894569039344788\n", - "epoch: 44 step: 132, loss is 0.8719875812530518\n", - "epoch: 44 step: 133, loss is 0.8447509407997131\n", - "epoch: 44 step: 134, loss is 0.8809040784835815\n", - "epoch: 44 step: 135, loss is 0.8311432600021362\n", - "epoch: 44 step: 136, loss is 0.8442236185073853\n", - "epoch: 44 step: 137, loss is 0.7761156558990479\n", - "epoch: 44 step: 138, loss is 0.8501737117767334\n", - "epoch: 44 step: 139, loss is 0.8985190391540527\n", - "epoch: 44 step: 140, loss is 0.8687268495559692\n", - "epoch: 44 step: 141, loss is 0.8174731731414795\n", - "epoch: 44 step: 142, loss is 0.832199215888977\n", - "epoch: 44 step: 143, loss is 0.8185088634490967\n", - "epoch: 44 step: 144, loss is 0.7955659627914429\n", - "epoch: 44 step: 145, loss is 0.8968285322189331\n", - "epoch: 44 step: 146, loss is 0.8759627342224121\n", - "epoch: 44 step: 147, loss is 0.867445707321167\n", - "epoch: 44 step: 148, loss is 0.8908854722976685\n", - "epoch: 44 step: 149, loss is 0.8264645338058472\n", - "epoch: 44 step: 150, loss is 0.7830431461334229\n", - "epoch: 44 step: 151, loss is 0.8348606824874878\n", - "epoch: 44 step: 152, loss is 0.8119888305664062\n", - "epoch: 44 step: 153, loss is 0.8212461471557617\n", - "epoch: 44 step: 154, loss is 0.9734259843826294\n", - "epoch: 44 step: 155, loss is 0.8453748822212219\n", - "epoch: 44 step: 156, loss is 0.88047856092453\n", - "epoch: 44 step: 157, loss is 0.8310037851333618\n", - "epoch: 44 step: 158, loss is 0.8443740606307983\n", - "epoch: 44 step: 159, loss is 0.7672539949417114\n", - "epoch: 44 step: 160, loss is 0.8437290191650391\n", - "epoch: 44 step: 161, loss is 0.7949817776679993\n", - "epoch: 44 step: 162, loss is 0.845282256603241\n", - "epoch: 44 step: 163, loss is 0.7958941459655762\n", - "epoch: 44 step: 164, loss is 0.8234926462173462\n", - "epoch: 44 step: 165, loss is 0.8065224885940552\n", - "epoch: 44 step: 166, loss is 0.8482771515846252\n", - "epoch: 44 step: 167, loss is 0.8111386299133301\n", - "epoch: 44 step: 168, loss is 0.776309609413147\n", - "epoch: 44 step: 169, loss is 0.8399442434310913\n", - "epoch: 44 step: 170, loss is 0.8336600065231323\n", - "epoch: 44 step: 171, loss is 0.9151747226715088\n", - "epoch: 44 step: 172, loss is 0.8099561929702759\n", - "epoch: 44 step: 173, loss is 0.807794451713562\n", - "epoch: 44 step: 174, loss is 0.8684530258178711\n", - "epoch: 44 step: 175, loss is 0.8516542911529541\n", - "epoch: 44 step: 176, loss is 0.8287756443023682\n", - "epoch: 44 step: 177, loss is 0.8382424116134644\n", - "epoch: 44 step: 178, loss is 0.7728651165962219\n", - "epoch: 44 step: 179, loss is 0.8073314428329468\n", - "epoch: 44 step: 180, loss is 0.848498523235321\n", - "epoch: 44 step: 181, loss is 0.9014191627502441\n", - "epoch: 44 step: 182, loss is 0.8897876739501953\n", - "epoch: 44 step: 183, loss is 0.8118309378623962\n", - "epoch: 44 step: 184, loss is 0.7635383605957031\n", - "epoch: 44 step: 185, loss is 0.8467649221420288\n", - "epoch: 44 step: 186, loss is 0.8409745693206787\n", - "epoch: 44 step: 187, loss is 0.7921956777572632\n", - "epoch: 44 step: 188, loss is 0.8431222438812256\n", - "epoch: 44 step: 189, loss is 0.8319634199142456\n", - "epoch: 44 step: 190, loss is 0.8678156137466431\n", - "epoch: 44 step: 191, loss is 0.8332831859588623\n", - "epoch: 44 step: 192, loss is 0.8472182154655457\n", - "epoch: 44 step: 193, loss is 0.8923056125640869\n", - "epoch: 44 step: 194, loss is 0.8984596133232117\n", - "epoch: 44 step: 195, loss is 0.8531150221824646\n", - "Train 
epoch time: 105868.401 ms, per step time: 542.915 ms\n", - "epoch: 45 step: 1, loss is 0.797378420829773\n", - "epoch: 45 step: 2, loss is 0.8434414863586426\n", - "epoch: 45 step: 3, loss is 0.8462778329849243\n", - "epoch: 45 step: 4, loss is 0.8658885955810547\n", - "epoch: 45 step: 5, loss is 0.8098921775817871\n", - "epoch: 45 step: 6, loss is 0.8026620149612427\n", - "epoch: 45 step: 7, loss is 0.8996759653091431\n", - "epoch: 45 step: 8, loss is 0.8386918306350708\n", - "epoch: 45 step: 9, loss is 0.8338483572006226\n", - "epoch: 45 step: 10, loss is 0.9263638257980347\n", - "epoch: 45 step: 11, loss is 0.8632202744483948\n", - "epoch: 45 step: 12, loss is 0.8471955060958862\n", - "epoch: 45 step: 13, loss is 0.8372879028320312\n", - "epoch: 45 step: 14, loss is 0.8255120515823364\n", - "epoch: 45 step: 15, loss is 0.8737128973007202\n", - "epoch: 45 step: 16, loss is 0.8354191780090332\n", - "epoch: 45 step: 17, loss is 0.7987959384918213\n", - "epoch: 45 step: 18, loss is 0.8505175113677979\n", - "epoch: 45 step: 19, loss is 0.8368593454360962\n", - "epoch: 45 step: 20, loss is 0.784697413444519\n", - "epoch: 45 step: 21, loss is 0.8348579406738281\n", - "epoch: 45 step: 22, loss is 0.8364343643188477\n", - "epoch: 45 step: 23, loss is 0.8612481355667114\n", - "epoch: 45 step: 24, loss is 0.7814860343933105\n", - "epoch: 45 step: 25, loss is 0.8684799671173096\n", - "epoch: 45 step: 26, loss is 0.8997472524642944\n", - "epoch: 45 step: 27, loss is 0.8619316816329956\n", - "epoch: 45 step: 28, loss is 0.817363977432251\n", - "epoch: 45 step: 29, loss is 0.7749216556549072\n", - "epoch: 45 step: 30, loss is 0.8594024181365967\n", - "epoch: 45 step: 31, loss is 0.8051797747612\n", - "epoch: 45 step: 32, loss is 0.808334231376648\n", - "epoch: 45 step: 33, loss is 0.816953718662262\n", - "epoch: 45 step: 34, loss is 0.8255509734153748\n", - "epoch: 45 step: 35, loss is 0.8560952544212341\n", - "epoch: 45 step: 36, loss is 0.85727459192276\n", - "epoch: 45 step: 37, loss is 0.7978469133377075\n", - "epoch: 45 step: 38, loss is 0.827349066734314\n", - "epoch: 45 step: 39, loss is 0.812595546245575\n", - "epoch: 45 step: 40, loss is 0.8315937519073486\n", - "epoch: 45 step: 41, loss is 0.8432806730270386\n", - "epoch: 45 step: 42, loss is 0.8510632514953613\n", - "epoch: 45 step: 43, loss is 0.8693996667861938\n", - "epoch: 45 step: 44, loss is 0.8253210783004761\n", - "epoch: 45 step: 45, loss is 0.8483833074569702\n", - "epoch: 45 step: 46, loss is 0.8250950574874878\n", - "epoch: 45 step: 47, loss is 0.81036776304245\n", - "epoch: 45 step: 48, loss is 0.7913225889205933\n", - "epoch: 45 step: 49, loss is 0.849341630935669\n", - "epoch: 45 step: 50, loss is 0.8297770023345947\n", - "epoch: 45 step: 51, loss is 0.820899248123169\n", - "epoch: 45 step: 52, loss is 0.7853142619132996\n", - "epoch: 45 step: 53, loss is 0.8117645382881165\n", - "epoch: 45 step: 54, loss is 0.7955257296562195\n", - "epoch: 45 step: 55, loss is 0.8264058828353882\n", - "epoch: 45 step: 56, loss is 0.7929840087890625\n", - "epoch: 45 step: 57, loss is 0.7925326824188232\n", - "epoch: 45 step: 58, loss is 0.8115711808204651\n", - "epoch: 45 step: 59, loss is 0.884651780128479\n", - "epoch: 45 step: 60, loss is 0.7932153940200806\n", - "epoch: 45 step: 61, loss is 0.8263003826141357\n", - "epoch: 45 step: 62, loss is 0.8249435424804688\n", - "epoch: 45 step: 63, loss is 0.824442446231842\n", - "epoch: 45 step: 64, loss is 0.8158690929412842\n", - "epoch: 45 step: 65, loss is 0.7852048277854919\n", - "epoch: 45 
step: 66, loss is 0.7816163301467896\n", - "epoch: 45 step: 67, loss is 0.8764089941978455\n", - "epoch: 45 step: 68, loss is 0.8593099117279053\n", - "epoch: 45 step: 69, loss is 0.866447925567627\n", - "epoch: 45 step: 70, loss is 0.8498247861862183\n", - "epoch: 45 step: 71, loss is 0.8391462564468384\n", - "epoch: 45 step: 72, loss is 0.8566574454307556\n", - "epoch: 45 step: 73, loss is 0.8557162284851074\n", - "epoch: 45 step: 74, loss is 0.8707220554351807\n", - "epoch: 45 step: 75, loss is 0.8360041379928589\n", - "epoch: 45 step: 76, loss is 0.8407829999923706\n", - "epoch: 45 step: 77, loss is 0.8129657506942749\n", - "epoch: 45 step: 78, loss is 0.8092179894447327\n", - "epoch: 45 step: 79, loss is 0.8521150350570679\n", - "epoch: 45 step: 80, loss is 0.8713109493255615\n", - "epoch: 45 step: 81, loss is 0.8957943916320801\n", - "epoch: 45 step: 82, loss is 0.8533645868301392\n", - "epoch: 45 step: 83, loss is 0.912147045135498\n", - "epoch: 45 step: 84, loss is 0.7805302143096924\n", - "epoch: 45 step: 85, loss is 0.8388156890869141\n", - "epoch: 45 step: 86, loss is 0.8268339037895203\n", - "epoch: 45 step: 87, loss is 0.9514163732528687\n", - "epoch: 45 step: 88, loss is 0.829018235206604\n", - "epoch: 45 step: 89, loss is 0.8328359127044678\n", - "epoch: 45 step: 90, loss is 0.7594138383865356\n", - "epoch: 45 step: 91, loss is 0.8835211992263794\n", - "epoch: 45 step: 92, loss is 0.7978073358535767\n", - "epoch: 45 step: 93, loss is 0.7841440439224243\n", - "epoch: 45 step: 94, loss is 0.8134328722953796\n", - "epoch: 45 step: 95, loss is 0.8328547477722168\n", - "epoch: 45 step: 96, loss is 0.8723311424255371\n", - "epoch: 45 step: 97, loss is 0.8953766822814941\n", - "epoch: 45 step: 98, loss is 0.782135546207428\n", - "epoch: 45 step: 99, loss is 0.8980734348297119\n", - "epoch: 45 step: 100, loss is 0.8931200504302979\n", - "epoch: 45 step: 101, loss is 0.8560555577278137\n", - "epoch: 45 step: 102, loss is 0.8412013053894043\n", - "epoch: 45 step: 103, loss is 0.7824513912200928\n", - "epoch: 45 step: 104, loss is 0.818356990814209\n", - "epoch: 45 step: 105, loss is 0.8363431692123413\n", - "epoch: 45 step: 106, loss is 0.8344317078590393\n", - "epoch: 45 step: 107, loss is 0.8848496675491333\n", - "epoch: 45 step: 108, loss is 0.8350567817687988\n", - "epoch: 45 step: 109, loss is 0.8348894119262695\n", - "epoch: 45 step: 110, loss is 0.8175472617149353\n", - "epoch: 45 step: 111, loss is 0.8738548159599304\n", - "epoch: 45 step: 112, loss is 0.8716980218887329\n", - "epoch: 45 step: 113, loss is 0.8722313642501831\n", - "epoch: 45 step: 114, loss is 0.827862024307251\n", - "epoch: 45 step: 115, loss is 0.8458893299102783\n", - "epoch: 45 step: 116, loss is 0.8171547651290894\n", - "epoch: 45 step: 117, loss is 0.8268876075744629\n", - "epoch: 45 step: 118, loss is 0.8999385833740234\n", - "epoch: 45 step: 119, loss is 0.7990801334381104\n", - "epoch: 45 step: 120, loss is 0.8432843685150146\n", - "epoch: 45 step: 121, loss is 0.7690677642822266\n", - "epoch: 45 step: 122, loss is 0.7851119041442871\n", - "epoch: 45 step: 123, loss is 0.7990196943283081\n", - "epoch: 45 step: 124, loss is 0.8134939670562744\n", - "epoch: 45 step: 125, loss is 0.8478057384490967\n", - "epoch: 45 step: 126, loss is 0.835658073425293\n", - "epoch: 45 step: 127, loss is 0.8786047697067261\n", - "epoch: 45 step: 128, loss is 0.853824257850647\n", - "epoch: 45 step: 129, loss is 0.7791602611541748\n", - "epoch: 45 step: 130, loss is 0.8465268611907959\n", - "epoch: 45 step: 131, loss is 
0.817635178565979\n", - "epoch: 45 step: 132, loss is 0.8957144618034363\n", - "epoch: 45 step: 133, loss is 0.7893065214157104\n", - "epoch: 45 step: 134, loss is 0.8525814414024353\n", - "epoch: 45 step: 135, loss is 0.9079052209854126\n", - "epoch: 45 step: 136, loss is 0.8197199702262878\n", - "epoch: 45 step: 137, loss is 0.8526945114135742\n", - "epoch: 45 step: 138, loss is 0.8059414625167847\n", - "epoch: 45 step: 139, loss is 0.9117308855056763\n", - "epoch: 45 step: 140, loss is 0.8042845726013184\n", - "epoch: 45 step: 141, loss is 0.7515856623649597\n", - "epoch: 45 step: 142, loss is 0.8385648727416992\n", - "epoch: 45 step: 143, loss is 0.8327391147613525\n", - "epoch: 45 step: 144, loss is 0.8003973364830017\n", - "epoch: 45 step: 145, loss is 0.8098223209381104\n", - "epoch: 45 step: 146, loss is 0.8295820951461792\n", - "epoch: 45 step: 147, loss is 0.8350188732147217\n", - "epoch: 45 step: 148, loss is 0.8676146268844604\n", - "epoch: 45 step: 149, loss is 0.9002435207366943\n", - "epoch: 45 step: 150, loss is 0.8670786619186401\n", - "epoch: 45 step: 151, loss is 0.8624051809310913\n", - "epoch: 45 step: 152, loss is 0.8451559543609619\n", - "epoch: 45 step: 153, loss is 0.8103901147842407\n", - "epoch: 45 step: 154, loss is 0.878807783126831\n", - "epoch: 45 step: 155, loss is 0.8267253041267395\n", - "epoch: 45 step: 156, loss is 0.8838249444961548\n", - "epoch: 45 step: 157, loss is 0.8725638389587402\n", - "epoch: 45 step: 158, loss is 0.8776443004608154\n", - "epoch: 45 step: 159, loss is 0.8134714365005493\n", - "epoch: 45 step: 160, loss is 0.8230506181716919\n", - "epoch: 45 step: 161, loss is 0.8746716976165771\n", - "epoch: 45 step: 162, loss is 0.8307143449783325\n", - "epoch: 45 step: 163, loss is 0.8589231967926025\n", - "epoch: 45 step: 164, loss is 0.8498011827468872\n", - "epoch: 45 step: 165, loss is 0.832342267036438\n", - "epoch: 45 step: 166, loss is 0.7678655385971069\n", - "epoch: 45 step: 167, loss is 0.8398581743240356\n", - "epoch: 45 step: 168, loss is 0.8458168506622314\n", - "epoch: 45 step: 169, loss is 0.7884650230407715\n", - "epoch: 45 step: 170, loss is 0.8584935069084167\n", - "epoch: 45 step: 171, loss is 0.8416811227798462\n", - "epoch: 45 step: 172, loss is 0.8532075881958008\n", - "epoch: 45 step: 173, loss is 0.814327597618103\n", - "epoch: 45 step: 174, loss is 0.8263550996780396\n", - "epoch: 45 step: 175, loss is 0.8444576263427734\n", - "epoch: 45 step: 176, loss is 0.8330240249633789\n", - "epoch: 45 step: 177, loss is 0.818058967590332\n", - "epoch: 45 step: 178, loss is 0.8668172955513\n", - "epoch: 45 step: 179, loss is 0.8916217088699341\n", - "epoch: 45 step: 180, loss is 0.8691087365150452\n", - "epoch: 45 step: 181, loss is 0.8513957262039185\n", - "epoch: 45 step: 182, loss is 0.7577865123748779\n", - "epoch: 45 step: 183, loss is 0.8647889494895935\n", - "epoch: 45 step: 184, loss is 0.8602781295776367\n", - "epoch: 45 step: 185, loss is 0.8313045501708984\n", - "epoch: 45 step: 186, loss is 0.8350428342819214\n", - "epoch: 45 step: 187, loss is 0.9122365713119507\n", - "epoch: 45 step: 188, loss is 0.889366090297699\n", - "epoch: 45 step: 189, loss is 0.8846367597579956\n", - "epoch: 45 step: 190, loss is 0.8638669848442078\n", - "epoch: 45 step: 191, loss is 0.8321487903594971\n", - "epoch: 45 step: 192, loss is 0.8331589698791504\n", - "epoch: 45 step: 193, loss is 0.8638803958892822\n", - "epoch: 45 step: 194, loss is 0.8489837646484375\n", - "epoch: 45 step: 195, loss is 0.8395015001296997\n", - "Train epoch time: 
114752.062 ms, per step time: 588.472 ms\n", - "epoch: 46 step: 1, loss is 0.8241428136825562\n", - "epoch: 46 step: 2, loss is 0.8198268413543701\n", - "epoch: 46 step: 3, loss is 0.8129709362983704\n", - "epoch: 46 step: 4, loss is 0.8201661109924316\n", - "epoch: 46 step: 5, loss is 0.8342757225036621\n", - "epoch: 46 step: 6, loss is 0.8319631814956665\n", - "epoch: 46 step: 7, loss is 0.881583571434021\n", - "epoch: 46 step: 8, loss is 0.8755643367767334\n", - "epoch: 46 step: 9, loss is 0.835669755935669\n", - "epoch: 46 step: 10, loss is 0.8311923146247864\n", - "epoch: 46 step: 11, loss is 0.8169000148773193\n", - "epoch: 46 step: 12, loss is 0.7972627878189087\n", - "epoch: 46 step: 13, loss is 0.831824004650116\n", - "epoch: 46 step: 14, loss is 0.8250946998596191\n", - "epoch: 46 step: 15, loss is 0.8427072763442993\n", - "epoch: 46 step: 16, loss is 0.8215987682342529\n", - "epoch: 46 step: 17, loss is 0.8460427522659302\n", - "epoch: 46 step: 18, loss is 0.7887213230133057\n", - "epoch: 46 step: 19, loss is 0.7746330499649048\n", - "epoch: 46 step: 20, loss is 0.7785488367080688\n", - "epoch: 46 step: 21, loss is 0.8420257568359375\n", - "epoch: 46 step: 22, loss is 0.8168672323226929\n", - "epoch: 46 step: 23, loss is 0.8602473139762878\n", - "epoch: 46 step: 24, loss is 0.8273557424545288\n", - "epoch: 46 step: 25, loss is 0.7976227402687073\n", - "epoch: 46 step: 26, loss is 0.8435715436935425\n", - "epoch: 46 step: 27, loss is 0.7444930076599121\n", - "epoch: 46 step: 28, loss is 0.7639228105545044\n", - "epoch: 46 step: 29, loss is 0.7700619697570801\n", - "epoch: 46 step: 30, loss is 0.7767044305801392\n", - "epoch: 46 step: 31, loss is 0.8928658962249756\n", - "epoch: 46 step: 32, loss is 0.8092775344848633\n", - "epoch: 46 step: 33, loss is 0.817742645740509\n", - "epoch: 46 step: 34, loss is 0.7949438691139221\n", - "epoch: 46 step: 35, loss is 0.7891939878463745\n", - "epoch: 46 step: 36, loss is 0.8418062925338745\n", - "epoch: 46 step: 37, loss is 0.8259104490280151\n", - "epoch: 46 step: 38, loss is 0.822014331817627\n", - "epoch: 46 step: 39, loss is 0.8292301297187805\n", - "epoch: 46 step: 40, loss is 0.8203307390213013\n", - "epoch: 46 step: 41, loss is 0.8033885955810547\n", - "epoch: 46 step: 42, loss is 0.7611885666847229\n", - "epoch: 46 step: 43, loss is 0.790657103061676\n", - "epoch: 46 step: 44, loss is 0.8673189878463745\n", - "epoch: 46 step: 45, loss is 0.8155452609062195\n", - "epoch: 46 step: 46, loss is 0.7793391942977905\n", - "epoch: 46 step: 47, loss is 0.8320930600166321\n", - "epoch: 46 step: 48, loss is 0.83961421251297\n", - "epoch: 46 step: 49, loss is 0.8510888814926147\n", - "epoch: 46 step: 50, loss is 0.8384056091308594\n", - "epoch: 46 step: 51, loss is 0.7854084372520447\n", - "epoch: 46 step: 52, loss is 0.8357799053192139\n", - "epoch: 46 step: 53, loss is 0.8482872247695923\n", - "epoch: 46 step: 54, loss is 0.8354528546333313\n", - "epoch: 46 step: 55, loss is 0.8168013095855713\n", - "epoch: 46 step: 56, loss is 0.8158823251724243\n", - "epoch: 46 step: 57, loss is 0.8118408918380737\n", - "epoch: 46 step: 58, loss is 0.7957957983016968\n", - "epoch: 46 step: 59, loss is 0.8076456785202026\n", - "epoch: 46 step: 60, loss is 0.8619415760040283\n", - "epoch: 46 step: 61, loss is 0.7997012138366699\n", - "epoch: 46 step: 62, loss is 0.8699153661727905\n", - "epoch: 46 step: 63, loss is 0.8166255354881287\n", - "epoch: 46 step: 64, loss is 0.7943860292434692\n", - "epoch: 46 step: 65, loss is 0.8302253484725952\n", - "epoch: 46 
step: 66, loss is 0.8435865640640259\n", - "epoch: 46 step: 67, loss is 0.8516188859939575\n", - "epoch: 46 step: 68, loss is 0.814971923828125\n", - "epoch: 46 step: 69, loss is 0.8109402656555176\n", - "epoch: 46 step: 70, loss is 0.7735173106193542\n", - "epoch: 46 step: 71, loss is 0.761288046836853\n", - "epoch: 46 step: 72, loss is 0.7574002742767334\n", - "epoch: 46 step: 73, loss is 0.7842967510223389\n", - "epoch: 46 step: 74, loss is 0.8089268207550049\n", - "epoch: 46 step: 75, loss is 0.8627971410751343\n", - "epoch: 46 step: 76, loss is 0.8655576705932617\n", - "epoch: 46 step: 77, loss is 0.7533696889877319\n", - "epoch: 46 step: 78, loss is 0.8612561225891113\n", - "epoch: 46 step: 79, loss is 0.7615969181060791\n", - "epoch: 46 step: 80, loss is 0.8821630477905273\n", - "epoch: 46 step: 81, loss is 0.7837828397750854\n", - "epoch: 46 step: 82, loss is 0.7969744801521301\n", - "epoch: 46 step: 83, loss is 0.7795194387435913\n", - "epoch: 46 step: 84, loss is 0.8275541067123413\n", - "epoch: 46 step: 85, loss is 0.7891772985458374\n", - "epoch: 46 step: 86, loss is 0.8247214555740356\n", - "epoch: 46 step: 87, loss is 0.865561842918396\n", - "epoch: 46 step: 88, loss is 0.7688478231430054\n", - "epoch: 46 step: 89, loss is 0.802191972732544\n", - "epoch: 46 step: 90, loss is 0.8483977317810059\n", - "epoch: 46 step: 91, loss is 0.8808273673057556\n", - "epoch: 46 step: 92, loss is 0.826119601726532\n", - "epoch: 46 step: 93, loss is 0.8446128368377686\n", - "epoch: 46 step: 94, loss is 0.7945296168327332\n", - "epoch: 46 step: 95, loss is 0.8301156759262085\n", - "epoch: 46 step: 96, loss is 0.8345024585723877\n", - "epoch: 46 step: 97, loss is 0.8405659198760986\n", - "epoch: 46 step: 98, loss is 0.8060035109519958\n", - "epoch: 46 step: 99, loss is 0.8336965441703796\n", - "epoch: 46 step: 100, loss is 0.8295565843582153\n", - "epoch: 46 step: 101, loss is 0.8482775688171387\n", - "epoch: 46 step: 102, loss is 0.8425776958465576\n", - "epoch: 46 step: 103, loss is 0.7583234906196594\n", - "epoch: 46 step: 104, loss is 0.8163668513298035\n", - "epoch: 46 step: 105, loss is 0.7805321216583252\n", - "epoch: 46 step: 106, loss is 0.87967449426651\n", - "epoch: 46 step: 107, loss is 0.7966701984405518\n", - "epoch: 46 step: 108, loss is 0.8330563306808472\n", - "epoch: 46 step: 109, loss is 0.8039921522140503\n", - "epoch: 46 step: 110, loss is 0.825942873954773\n", - "epoch: 46 step: 111, loss is 0.8071377277374268\n", - "epoch: 46 step: 112, loss is 0.8056633472442627\n", - "epoch: 46 step: 113, loss is 0.8058628439903259\n", - "epoch: 46 step: 114, loss is 0.8430216312408447\n", - "epoch: 46 step: 115, loss is 0.835838794708252\n", - "epoch: 46 step: 116, loss is 0.9042809009552002\n", - "epoch: 46 step: 117, loss is 0.8359941244125366\n", - "epoch: 46 step: 118, loss is 0.8480358719825745\n", - "epoch: 46 step: 119, loss is 0.7780492901802063\n", - "epoch: 46 step: 120, loss is 0.7874987125396729\n", - "epoch: 46 step: 121, loss is 0.8154670000076294\n", - "epoch: 46 step: 122, loss is 0.8486837148666382\n", - "epoch: 46 step: 123, loss is 0.8206446170806885\n", - "epoch: 46 step: 124, loss is 0.7668036818504333\n", - "epoch: 46 step: 125, loss is 0.8983955383300781\n", - "epoch: 46 step: 126, loss is 0.8270624876022339\n", - "epoch: 46 step: 127, loss is 0.8639649152755737\n", - "epoch: 46 step: 128, loss is 0.8789225816726685\n", - "epoch: 46 step: 129, loss is 0.8909988403320312\n", - "epoch: 46 step: 130, loss is 0.7931268215179443\n", - "epoch: 46 step: 131, loss is 
0.7770794034004211\n", - "epoch: 46 step: 132, loss is 0.8237321972846985\n", - "epoch: 46 step: 133, loss is 0.7815769910812378\n", - "epoch: 46 step: 134, loss is 0.8146185278892517\n", - "epoch: 46 step: 135, loss is 0.8281830549240112\n", - "epoch: 46 step: 136, loss is 0.780537486076355\n", - "epoch: 46 step: 137, loss is 0.8505522608757019\n", - "epoch: 46 step: 138, loss is 0.8142297267913818\n", - "epoch: 46 step: 139, loss is 0.8095492124557495\n", - "epoch: 46 step: 140, loss is 0.7869609594345093\n", - "epoch: 46 step: 141, loss is 0.77549147605896\n", - "epoch: 46 step: 142, loss is 0.8502395153045654\n", - "epoch: 46 step: 143, loss is 0.8290643095970154\n", - "epoch: 46 step: 144, loss is 0.8618435859680176\n", - "epoch: 46 step: 145, loss is 0.8417675495147705\n", - "epoch: 46 step: 146, loss is 0.8477214574813843\n", - "epoch: 46 step: 147, loss is 0.835898756980896\n", - "epoch: 46 step: 148, loss is 0.829328715801239\n", - "epoch: 46 step: 149, loss is 0.841103196144104\n", - "epoch: 46 step: 150, loss is 0.8395781517028809\n", - "epoch: 46 step: 151, loss is 0.8519724607467651\n", - "epoch: 46 step: 152, loss is 0.8676567673683167\n", - "epoch: 46 step: 153, loss is 0.7732604146003723\n", - "epoch: 46 step: 154, loss is 0.8139134645462036\n", - "epoch: 46 step: 155, loss is 0.7969661951065063\n", - "epoch: 46 step: 156, loss is 0.800744354724884\n", - "epoch: 46 step: 157, loss is 0.8469538688659668\n", - "epoch: 46 step: 158, loss is 0.917851448059082\n", - "epoch: 46 step: 159, loss is 0.8033638000488281\n", - "epoch: 46 step: 160, loss is 0.7875678539276123\n", - "epoch: 46 step: 161, loss is 0.7906544208526611\n", - "epoch: 46 step: 162, loss is 0.8086165189743042\n", - "epoch: 46 step: 163, loss is 0.7941075563430786\n", - "epoch: 46 step: 164, loss is 0.8072389960289001\n", - "epoch: 46 step: 165, loss is 0.8204647302627563\n", - "epoch: 46 step: 166, loss is 0.8305543661117554\n", - "epoch: 46 step: 167, loss is 0.8164989948272705\n", - "epoch: 46 step: 168, loss is 0.8476718664169312\n", - "epoch: 46 step: 169, loss is 0.8033667802810669\n", - "epoch: 46 step: 170, loss is 0.7796691060066223\n", - "epoch: 46 step: 171, loss is 0.8332198858261108\n", - "epoch: 46 step: 172, loss is 0.8674505949020386\n", - "epoch: 46 step: 173, loss is 0.8543125987052917\n", - "epoch: 46 step: 174, loss is 0.7897580862045288\n", - "epoch: 46 step: 175, loss is 0.7825151681900024\n", - "epoch: 46 step: 176, loss is 0.7727033495903015\n", - "epoch: 46 step: 177, loss is 0.8895996809005737\n", - "epoch: 46 step: 178, loss is 0.8177825212478638\n", - "epoch: 46 step: 179, loss is 0.8484655618667603\n", - "epoch: 46 step: 180, loss is 0.8740428686141968\n", - "epoch: 46 step: 181, loss is 0.8240212202072144\n", - "epoch: 46 step: 182, loss is 0.8600034713745117\n", - "epoch: 46 step: 183, loss is 0.8790847659111023\n", - "epoch: 46 step: 184, loss is 0.8532842397689819\n", - "epoch: 46 step: 185, loss is 0.8142194151878357\n", - "epoch: 46 step: 186, loss is 0.7789756655693054\n", - "epoch: 46 step: 187, loss is 0.7541089057922363\n", - "epoch: 46 step: 188, loss is 0.8007475137710571\n", - "epoch: 46 step: 189, loss is 0.8353444337844849\n", - "epoch: 46 step: 190, loss is 0.8356935381889343\n", - "epoch: 46 step: 191, loss is 0.8688722848892212\n", - "epoch: 46 step: 192, loss is 0.8129825592041016\n", - "epoch: 46 step: 193, loss is 0.8500024676322937\n", - "epoch: 46 step: 194, loss is 0.8285856246948242\n", - "epoch: 46 step: 195, loss is 0.8185120820999146\n", - "Train epoch 
time: 111264.102 ms, per step time: 570.585 ms\n", - "epoch: 47 step: 1, loss is 0.741552472114563\n", - "epoch: 47 step: 2, loss is 0.7869369983673096\n", - "epoch: 47 step: 3, loss is 0.7670413255691528\n", - "epoch: 47 step: 4, loss is 0.786548912525177\n", - "epoch: 47 step: 5, loss is 0.783910870552063\n", - "epoch: 47 step: 6, loss is 0.7992252707481384\n", - "epoch: 47 step: 7, loss is 0.833053469657898\n", - "epoch: 47 step: 8, loss is 0.8502484560012817\n", - "epoch: 47 step: 9, loss is 0.7930958867073059\n", - "epoch: 47 step: 10, loss is 0.7828489542007446\n", - "epoch: 47 step: 11, loss is 0.7718036770820618\n", - "epoch: 47 step: 12, loss is 0.8859713077545166\n", - "epoch: 47 step: 13, loss is 0.8423645496368408\n", - "epoch: 47 step: 14, loss is 0.8093716502189636\n", - "epoch: 47 step: 15, loss is 0.7629830837249756\n", - "epoch: 47 step: 16, loss is 0.7908875942230225\n", - "epoch: 47 step: 17, loss is 0.8388998508453369\n", - "epoch: 47 step: 18, loss is 0.8233922123908997\n", - "epoch: 47 step: 19, loss is 0.7927170991897583\n", - "epoch: 47 step: 20, loss is 0.7859585285186768\n", - "epoch: 47 step: 21, loss is 0.753327488899231\n", - "epoch: 47 step: 22, loss is 0.7685935497283936\n", - "epoch: 47 step: 23, loss is 0.781694769859314\n", - "epoch: 47 step: 24, loss is 0.804739236831665\n", - "epoch: 47 step: 25, loss is 0.8146156668663025\n", - "epoch: 47 step: 26, loss is 0.8600995540618896\n", - "epoch: 47 step: 27, loss is 0.8308267593383789\n", - "epoch: 47 step: 28, loss is 0.8322280645370483\n", - "epoch: 47 step: 29, loss is 0.7741639018058777\n", - "epoch: 47 step: 30, loss is 0.8213191032409668\n", - "epoch: 47 step: 31, loss is 0.7831569314002991\n", - "epoch: 47 step: 32, loss is 0.7681152820587158\n", - "epoch: 47 step: 33, loss is 0.824280321598053\n", - "epoch: 47 step: 34, loss is 0.8093822002410889\n", - "epoch: 47 step: 35, loss is 0.830852746963501\n", - "epoch: 47 step: 36, loss is 0.826646089553833\n", - "epoch: 47 step: 37, loss is 0.7619863152503967\n", - "epoch: 47 step: 38, loss is 0.8023829460144043\n", - "epoch: 47 step: 39, loss is 0.8096007108688354\n", - "epoch: 47 step: 40, loss is 0.7708150148391724\n", - "epoch: 47 step: 41, loss is 0.7820541858673096\n", - "epoch: 47 step: 42, loss is 0.8145972490310669\n", - "epoch: 47 step: 43, loss is 0.7995203733444214\n", - "epoch: 47 step: 44, loss is 0.8055315017700195\n", - "epoch: 47 step: 45, loss is 0.7588891983032227\n", - "epoch: 47 step: 46, loss is 0.8128566145896912\n", - "epoch: 47 step: 47, loss is 0.7844560146331787\n", - "epoch: 47 step: 48, loss is 0.7878127098083496\n", - "epoch: 47 step: 49, loss is 0.7575433254241943\n", - "epoch: 47 step: 50, loss is 0.7710021734237671\n", - "epoch: 47 step: 51, loss is 0.7984703779220581\n", - "epoch: 47 step: 52, loss is 0.8164563775062561\n", - "epoch: 47 step: 53, loss is 0.7953324317932129\n", - "epoch: 47 step: 54, loss is 0.763611376285553\n", - "epoch: 47 step: 55, loss is 0.8312032222747803\n", - "epoch: 47 step: 56, loss is 0.8389297723770142\n", - "epoch: 47 step: 57, loss is 0.7681288719177246\n", - "epoch: 47 step: 58, loss is 0.7997048497200012\n", - "epoch: 47 step: 59, loss is 0.7894763946533203\n", - "epoch: 47 step: 60, loss is 0.8447983264923096\n", - "epoch: 47 step: 61, loss is 0.7990909814834595\n", - "epoch: 47 step: 62, loss is 0.8265284299850464\n", - "epoch: 47 step: 63, loss is 0.7807724475860596\n", - "epoch: 47 step: 64, loss is 0.8012140393257141\n", - "epoch: 47 step: 65, loss is 0.847800612449646\n", - "epoch: 47 
step: 66, loss is 0.8159493207931519\n", - "epoch: 47 step: 67, loss is 0.7848581075668335\n", - "epoch: 47 step: 68, loss is 0.8169198036193848\n", - "epoch: 47 step: 69, loss is 0.8213446140289307\n", - "epoch: 47 step: 70, loss is 0.7913756370544434\n", - "epoch: 47 step: 71, loss is 0.8364079594612122\n", - "epoch: 47 step: 72, loss is 0.8319885730743408\n", - "epoch: 47 step: 73, loss is 0.7899740934371948\n", - "epoch: 47 step: 74, loss is 0.7718700170516968\n", - "epoch: 47 step: 75, loss is 0.7948117852210999\n", - "epoch: 47 step: 76, loss is 0.8256717324256897\n", - "epoch: 47 step: 77, loss is 0.7930840253829956\n", - "epoch: 47 step: 78, loss is 0.8517683744430542\n", - "epoch: 47 step: 79, loss is 0.8413327932357788\n", - "epoch: 47 step: 80, loss is 0.8484016060829163\n", - "epoch: 47 step: 81, loss is 0.8393490314483643\n", - "epoch: 47 step: 82, loss is 0.8126164674758911\n", - "epoch: 47 step: 83, loss is 0.8387781381607056\n", - "epoch: 47 step: 84, loss is 0.7694931030273438\n", - "epoch: 47 step: 85, loss is 0.7488237619400024\n", - "epoch: 47 step: 86, loss is 0.8365889191627502\n", - "epoch: 47 step: 87, loss is 0.8496346473693848\n", - "epoch: 47 step: 88, loss is 0.8019750118255615\n", - "epoch: 47 step: 89, loss is 0.8621324896812439\n", - "epoch: 47 step: 90, loss is 0.7956498861312866\n", - "epoch: 47 step: 91, loss is 0.8049017190933228\n", - "epoch: 47 step: 92, loss is 0.8429933190345764\n", - "epoch: 47 step: 93, loss is 0.8153611421585083\n", - "epoch: 47 step: 94, loss is 0.8012315630912781\n", - "epoch: 47 step: 95, loss is 0.7995126247406006\n", - "epoch: 47 step: 96, loss is 0.9002833366394043\n", - "epoch: 47 step: 97, loss is 0.8137738704681396\n", - "epoch: 47 step: 98, loss is 0.8340169191360474\n", - "epoch: 47 step: 99, loss is 0.8763140439987183\n", - "epoch: 47 step: 100, loss is 0.8086587190628052\n", - "epoch: 47 step: 101, loss is 0.8279298543930054\n", - "epoch: 47 step: 102, loss is 0.8052487969398499\n", - "epoch: 47 step: 103, loss is 0.7765262126922607\n", - "epoch: 47 step: 104, loss is 0.8090825080871582\n", - "epoch: 47 step: 105, loss is 0.834517240524292\n", - "epoch: 47 step: 106, loss is 0.8045772910118103\n", - "epoch: 47 step: 107, loss is 0.7957711219787598\n", - "epoch: 47 step: 108, loss is 0.8495107293128967\n", - "epoch: 47 step: 109, loss is 0.8085159659385681\n", - "epoch: 47 step: 110, loss is 0.7983773350715637\n", - "epoch: 47 step: 111, loss is 0.7545939683914185\n", - "epoch: 47 step: 112, loss is 0.8663461804389954\n", - "epoch: 47 step: 113, loss is 0.8471894860267639\n", - "epoch: 47 step: 114, loss is 0.8013104796409607\n", - "epoch: 47 step: 115, loss is 0.799373984336853\n", - "epoch: 47 step: 116, loss is 0.7965735197067261\n", - "epoch: 47 step: 117, loss is 0.8104082345962524\n", - "epoch: 47 step: 118, loss is 0.8343092203140259\n", - "epoch: 47 step: 119, loss is 0.8118151426315308\n", - "epoch: 47 step: 120, loss is 0.8478751182556152\n", - "epoch: 47 step: 121, loss is 0.8315154314041138\n", - "epoch: 47 step: 122, loss is 0.8308762907981873\n", - "epoch: 47 step: 123, loss is 0.8416837453842163\n", - "epoch: 47 step: 124, loss is 0.8177800178527832\n", - "epoch: 47 step: 125, loss is 0.8980522155761719\n", - "epoch: 47 step: 126, loss is 0.7803118228912354\n", - "epoch: 47 step: 127, loss is 0.8042851686477661\n", - "epoch: 47 step: 128, loss is 0.8006830215454102\n", - "epoch: 47 step: 129, loss is 0.8073627948760986\n", - "epoch: 47 step: 130, loss is 0.8098573684692383\n", - "epoch: 47 step: 131, loss 
is 0.7812566161155701\n", - "epoch: 47 step: 132, loss is 0.8760423064231873\n", - "epoch: 47 step: 133, loss is 0.8098917007446289\n", - "epoch: 47 step: 134, loss is 0.7880294322967529\n", - "epoch: 47 step: 135, loss is 0.8070981502532959\n", - "epoch: 47 step: 136, loss is 0.8566524982452393\n", - "epoch: 47 step: 137, loss is 0.8180603981018066\n", - "epoch: 47 step: 138, loss is 0.8448362350463867\n", - "epoch: 47 step: 139, loss is 0.8345110416412354\n", - "epoch: 47 step: 140, loss is 0.817063570022583\n", - "epoch: 47 step: 141, loss is 0.8985965847969055\n", - "epoch: 47 step: 142, loss is 0.8288454413414001\n", - "epoch: 47 step: 143, loss is 0.7888994812965393\n", - "epoch: 47 step: 144, loss is 0.7951934337615967\n", - "epoch: 47 step: 145, loss is 0.8227888345718384\n", - "epoch: 47 step: 146, loss is 0.7408844232559204\n", - "epoch: 47 step: 147, loss is 0.8490642309188843\n", - "epoch: 47 step: 148, loss is 0.803614616394043\n", - "epoch: 47 step: 149, loss is 0.8783717155456543\n", - "epoch: 47 step: 150, loss is 0.7888124585151672\n", - "epoch: 47 step: 151, loss is 0.7704468965530396\n", - "epoch: 47 step: 152, loss is 0.8534597158432007\n", - "epoch: 47 step: 153, loss is 0.7592794299125671\n", - "epoch: 47 step: 154, loss is 0.850061297416687\n", - "epoch: 47 step: 155, loss is 0.8231570720672607\n", - "epoch: 47 step: 156, loss is 0.795661449432373\n", - "epoch: 47 step: 157, loss is 0.7892980575561523\n", - "epoch: 47 step: 158, loss is 0.773695707321167\n", - "epoch: 47 step: 159, loss is 0.850645899772644\n", - "epoch: 47 step: 160, loss is 0.797680139541626\n", - "epoch: 47 step: 161, loss is 0.7559200525283813\n", - "epoch: 47 step: 162, loss is 0.8073853850364685\n", - "epoch: 47 step: 163, loss is 0.7921529412269592\n", - "epoch: 47 step: 164, loss is 0.8541891574859619\n", - "epoch: 47 step: 165, loss is 0.863169252872467\n", - "epoch: 47 step: 166, loss is 0.8390312790870667\n", - "epoch: 47 step: 167, loss is 0.8027635812759399\n", - "epoch: 47 step: 168, loss is 0.7743616700172424\n", - "epoch: 47 step: 169, loss is 0.782721996307373\n", - "epoch: 47 step: 170, loss is 0.7950056791305542\n", - "epoch: 47 step: 171, loss is 0.8015437126159668\n", - "epoch: 47 step: 172, loss is 0.7673452496528625\n", - "epoch: 47 step: 173, loss is 0.9026637673377991\n", - "epoch: 47 step: 174, loss is 0.7537099123001099\n", - "epoch: 47 step: 175, loss is 0.7923679351806641\n", - "epoch: 47 step: 176, loss is 0.7599753141403198\n", - "epoch: 47 step: 177, loss is 0.8260632157325745\n", - "epoch: 47 step: 178, loss is 0.834107518196106\n", - "epoch: 47 step: 179, loss is 0.8202983140945435\n", - "epoch: 47 step: 180, loss is 0.8250386118888855\n", - "epoch: 47 step: 181, loss is 0.775850772857666\n", - "epoch: 47 step: 182, loss is 0.8187068700790405\n", - "epoch: 47 step: 183, loss is 0.8338702321052551\n", - "epoch: 47 step: 184, loss is 0.8149653673171997\n", - "epoch: 47 step: 185, loss is 0.788771390914917\n", - "epoch: 47 step: 186, loss is 0.8771972060203552\n", - "epoch: 47 step: 187, loss is 0.7369555234909058\n", - "epoch: 47 step: 188, loss is 0.8002363443374634\n", - "epoch: 47 step: 189, loss is 0.7842756509780884\n", - "epoch: 47 step: 190, loss is 0.7500882148742676\n", - "epoch: 47 step: 191, loss is 0.7606333494186401\n", - "epoch: 47 step: 192, loss is 0.8267337083816528\n", - "epoch: 47 step: 193, loss is 0.8365844488143921\n", - "epoch: 47 step: 194, loss is 0.8187090158462524\n", - "epoch: 47 step: 195, loss is 0.794952929019928\n", - "Train epoch time: 
105697.900 ms, per step time: 542.041 ms\n", - "epoch: 48 step: 1, loss is 0.8252642154693604\n", - "epoch: 48 step: 2, loss is 0.7846331000328064\n", - "epoch: 48 step: 3, loss is 0.7627409100532532\n", - "epoch: 48 step: 4, loss is 0.7874749898910522\n", - "epoch: 48 step: 5, loss is 0.7671093940734863\n", - "epoch: 48 step: 6, loss is 0.7325209975242615\n", - "epoch: 48 step: 7, loss is 0.8216731548309326\n", - "epoch: 48 step: 8, loss is 0.8236619234085083\n", - "epoch: 48 step: 9, loss is 0.7570247650146484\n", - "epoch: 48 step: 10, loss is 0.7512523531913757\n", - "epoch: 48 step: 11, loss is 0.7422149181365967\n", - "epoch: 48 step: 12, loss is 0.7971491813659668\n", - "epoch: 48 step: 13, loss is 0.7920883297920227\n", - "epoch: 48 step: 14, loss is 0.8135398626327515\n", - "epoch: 48 step: 15, loss is 0.7504023313522339\n", - "epoch: 48 step: 16, loss is 0.7809324264526367\n", - "epoch: 48 step: 17, loss is 0.8135817050933838\n", - "epoch: 48 step: 18, loss is 0.7787463068962097\n", - "epoch: 48 step: 19, loss is 0.7101670503616333\n", - "epoch: 48 step: 20, loss is 0.7973510026931763\n", - "epoch: 48 step: 21, loss is 0.7758889198303223\n", - "epoch: 48 step: 22, loss is 0.7448439598083496\n", - "epoch: 48 step: 23, loss is 0.8042567372322083\n", - "epoch: 48 step: 24, loss is 0.7750134468078613\n", - "epoch: 48 step: 25, loss is 0.7835952639579773\n", - "epoch: 48 step: 26, loss is 0.8140444755554199\n", - "epoch: 48 step: 27, loss is 0.8123120665550232\n", - "epoch: 48 step: 28, loss is 0.7777227759361267\n", - "epoch: 48 step: 29, loss is 0.791978657245636\n", - "epoch: 48 step: 30, loss is 0.7723613977432251\n", - "epoch: 48 step: 31, loss is 0.770689845085144\n", - "epoch: 48 step: 32, loss is 0.7872849702835083\n", - "epoch: 48 step: 33, loss is 0.8119267821311951\n", - "epoch: 48 step: 34, loss is 0.8017038106918335\n", - "epoch: 48 step: 35, loss is 0.8050541877746582\n", - "epoch: 48 step: 36, loss is 0.7787994146347046\n", - "epoch: 48 step: 37, loss is 0.7455751895904541\n", - "epoch: 48 step: 38, loss is 0.8420274257659912\n", - "epoch: 48 step: 39, loss is 0.7522145509719849\n", - "epoch: 48 step: 40, loss is 0.7932430505752563\n", - "epoch: 48 step: 41, loss is 0.7832766175270081\n", - "epoch: 48 step: 42, loss is 0.8193838596343994\n", - "epoch: 48 step: 43, loss is 0.7878310680389404\n", - "epoch: 48 step: 44, loss is 0.7560722827911377\n", - "epoch: 48 step: 45, loss is 0.754957914352417\n", - "epoch: 48 step: 46, loss is 0.834762454032898\n", - "epoch: 48 step: 47, loss is 0.8015562295913696\n", - "epoch: 48 step: 48, loss is 0.7719260454177856\n", - "epoch: 48 step: 49, loss is 0.7946747541427612\n", - "epoch: 48 step: 50, loss is 0.7476240396499634\n", - "epoch: 48 step: 51, loss is 0.7706553339958191\n", - "epoch: 48 step: 52, loss is 0.7806861400604248\n", - "epoch: 48 step: 53, loss is 0.7911163568496704\n", - "epoch: 48 step: 54, loss is 0.7617675065994263\n", - "epoch: 48 step: 55, loss is 0.7908929586410522\n", - "epoch: 48 step: 56, loss is 0.8148800134658813\n", - "epoch: 48 step: 57, loss is 0.8009278774261475\n", - "epoch: 48 step: 58, loss is 0.7801589965820312\n", - "epoch: 48 step: 59, loss is 0.7940460443496704\n", - "epoch: 48 step: 60, loss is 0.7626351714134216\n", - "epoch: 48 step: 61, loss is 0.7989853620529175\n", - "epoch: 48 step: 62, loss is 0.7799445390701294\n", - "epoch: 48 step: 63, loss is 0.7848852872848511\n", - "epoch: 48 step: 64, loss is 0.7382599711418152\n", - "epoch: 48 step: 65, loss is 0.775367021560669\n", - "epoch: 48 
step: 66, loss is 0.8026669025421143\n", - "epoch: 48 step: 67, loss is 0.7997349500656128\n", - "epoch: 48 step: 68, loss is 0.8699275255203247\n", - "epoch: 48 step: 69, loss is 0.781948447227478\n", - "epoch: 48 step: 70, loss is 0.8102300763130188\n", - "epoch: 48 step: 71, loss is 0.8048820495605469\n", - "epoch: 48 step: 72, loss is 0.8408288955688477\n", - "epoch: 48 step: 73, loss is 0.8065498471260071\n", - "epoch: 48 step: 74, loss is 0.7943682670593262\n", - "epoch: 48 step: 75, loss is 0.8081449270248413\n", - "epoch: 48 step: 76, loss is 0.7582502365112305\n", - "epoch: 48 step: 77, loss is 0.7979034185409546\n", - "epoch: 48 step: 78, loss is 0.7846277356147766\n", - "epoch: 48 step: 79, loss is 0.7651919722557068\n", - "epoch: 48 step: 80, loss is 0.774290919303894\n", - "epoch: 48 step: 81, loss is 0.7797107696533203\n", - "epoch: 48 step: 82, loss is 0.79066002368927\n", - "epoch: 48 step: 83, loss is 0.788832426071167\n", - "epoch: 48 step: 84, loss is 0.7935502529144287\n", - "epoch: 48 step: 85, loss is 0.8166543245315552\n", - "epoch: 48 step: 86, loss is 0.810423731803894\n", - "epoch: 48 step: 87, loss is 0.802788257598877\n", - "epoch: 48 step: 88, loss is 0.806795597076416\n", - "epoch: 48 step: 89, loss is 0.7810733318328857\n", - "epoch: 48 step: 90, loss is 0.8100214004516602\n", - "epoch: 48 step: 91, loss is 0.8145033121109009\n", - "epoch: 48 step: 92, loss is 0.7571572661399841\n", - "epoch: 48 step: 93, loss is 0.8138021230697632\n", - "epoch: 48 step: 94, loss is 0.7865696549415588\n", - "epoch: 48 step: 95, loss is 0.7822436094284058\n", - "epoch: 48 step: 96, loss is 0.8231836557388306\n", - "epoch: 48 step: 97, loss is 0.8345212936401367\n", - "epoch: 48 step: 98, loss is 0.7939504981040955\n", - "epoch: 48 step: 99, loss is 0.7969664335250854\n", - "epoch: 48 step: 100, loss is 0.8337759971618652\n", - "epoch: 48 step: 101, loss is 0.7979365587234497\n", - "epoch: 48 step: 102, loss is 0.8203608989715576\n", - "epoch: 48 step: 103, loss is 0.8300102949142456\n", - "epoch: 48 step: 104, loss is 0.7686358690261841\n", - "epoch: 48 step: 105, loss is 0.8536856174468994\n", - "epoch: 48 step: 106, loss is 0.8485356569290161\n", - "epoch: 48 step: 107, loss is 0.7963102459907532\n", - "epoch: 48 step: 108, loss is 0.7817773222923279\n", - "epoch: 48 step: 109, loss is 0.7513248920440674\n", - "epoch: 48 step: 110, loss is 0.8003104329109192\n", - "epoch: 48 step: 111, loss is 0.7924702167510986\n", - "epoch: 48 step: 112, loss is 0.8472844362258911\n", - "epoch: 48 step: 113, loss is 0.7717529535293579\n", - "epoch: 48 step: 114, loss is 0.8512321710586548\n", - "epoch: 48 step: 115, loss is 0.826546311378479\n", - "epoch: 48 step: 116, loss is 0.7678852081298828\n", - "epoch: 48 step: 117, loss is 0.8522099256515503\n", - "epoch: 48 step: 118, loss is 0.8004857301712036\n", - "epoch: 48 step: 119, loss is 0.7410879731178284\n", - "epoch: 48 step: 120, loss is 0.7845205664634705\n", - "epoch: 48 step: 121, loss is 0.778408408164978\n", - "epoch: 48 step: 122, loss is 0.867497980594635\n", - "epoch: 48 step: 123, loss is 0.771753191947937\n", - "epoch: 48 step: 124, loss is 0.7742478251457214\n", - "epoch: 48 step: 125, loss is 0.8009547591209412\n", - "epoch: 48 step: 126, loss is 0.76050865650177\n", - "epoch: 48 step: 127, loss is 0.8306655883789062\n", - "epoch: 48 step: 128, loss is 0.7917838096618652\n", - "epoch: 48 step: 129, loss is 0.8127344846725464\n", - "epoch: 48 step: 130, loss is 0.821445882320404\n", - "epoch: 48 step: 131, loss is 
0.823959231376648\n", - "epoch: 48 step: 132, loss is 0.8723894357681274\n", - "epoch: 48 step: 133, loss is 0.8379552364349365\n", - "epoch: 48 step: 134, loss is 0.8205667734146118\n", - "epoch: 48 step: 135, loss is 0.7530902624130249\n", - "epoch: 48 step: 136, loss is 0.7990720272064209\n", - "epoch: 48 step: 137, loss is 0.7934009432792664\n", - "epoch: 48 step: 138, loss is 0.8206827640533447\n", - "epoch: 48 step: 139, loss is 0.8163573741912842\n", - "epoch: 48 step: 140, loss is 0.838467001914978\n", - "epoch: 48 step: 141, loss is 0.8516031503677368\n", - "epoch: 48 step: 142, loss is 0.7780231237411499\n", - "epoch: 48 step: 143, loss is 0.7560743093490601\n", - "epoch: 48 step: 144, loss is 0.8164941072463989\n", - "epoch: 48 step: 145, loss is 0.8088443875312805\n", - "epoch: 48 step: 146, loss is 0.7626193761825562\n", - "epoch: 48 step: 147, loss is 0.776249885559082\n", - "epoch: 48 step: 148, loss is 0.7884475588798523\n", - "epoch: 48 step: 149, loss is 0.7949196100234985\n", - "epoch: 48 step: 150, loss is 0.839379072189331\n", - "epoch: 48 step: 151, loss is 0.8444326519966125\n", - "epoch: 48 step: 152, loss is 0.794615626335144\n", - "epoch: 48 step: 153, loss is 0.8015128374099731\n", - "epoch: 48 step: 154, loss is 0.8122559785842896\n", - "epoch: 48 step: 155, loss is 0.7777997255325317\n", - "epoch: 48 step: 156, loss is 0.7934497594833374\n", - "epoch: 48 step: 157, loss is 0.7735564112663269\n", - "epoch: 48 step: 158, loss is 0.8185248374938965\n", - "epoch: 48 step: 159, loss is 0.8267778158187866\n", - "epoch: 48 step: 160, loss is 0.7741972208023071\n", - "epoch: 48 step: 161, loss is 0.8254348039627075\n", - "epoch: 48 step: 162, loss is 0.8229646682739258\n", - "epoch: 48 step: 163, loss is 0.8060611486434937\n", - "epoch: 48 step: 164, loss is 0.8187951445579529\n", - "epoch: 48 step: 165, loss is 0.7873944044113159\n", - "epoch: 48 step: 166, loss is 0.82916659116745\n", - "epoch: 48 step: 167, loss is 0.8049300312995911\n", - "epoch: 48 step: 168, loss is 0.8202552795410156\n", - "epoch: 48 step: 169, loss is 0.7610690593719482\n", - "epoch: 48 step: 170, loss is 0.7952737808227539\n", - "epoch: 48 step: 171, loss is 0.8008180856704712\n", - "epoch: 48 step: 172, loss is 0.7652289867401123\n", - "epoch: 48 step: 173, loss is 0.8047548532485962\n", - "epoch: 48 step: 174, loss is 0.7387188673019409\n", - "epoch: 48 step: 175, loss is 0.812306821346283\n", - "epoch: 48 step: 176, loss is 0.7933809757232666\n", - "epoch: 48 step: 177, loss is 0.8117033243179321\n", - "epoch: 48 step: 178, loss is 0.8230520486831665\n", - "epoch: 48 step: 179, loss is 0.8238486051559448\n", - "epoch: 48 step: 180, loss is 0.793167233467102\n", - "epoch: 48 step: 181, loss is 0.7981551289558411\n", - "epoch: 48 step: 182, loss is 0.7721530795097351\n", - "epoch: 48 step: 183, loss is 0.8216645121574402\n", - "epoch: 48 step: 184, loss is 0.8020264506340027\n", - "epoch: 48 step: 185, loss is 0.8105981349945068\n", - "epoch: 48 step: 186, loss is 0.7390283346176147\n", - "epoch: 48 step: 187, loss is 0.8725963830947876\n", - "epoch: 48 step: 188, loss is 0.7743350863456726\n", - "epoch: 48 step: 189, loss is 0.8417801856994629\n", - "epoch: 48 step: 190, loss is 0.8000257015228271\n", - "epoch: 48 step: 191, loss is 0.7969749569892883\n", - "epoch: 48 step: 192, loss is 0.8321455717086792\n", - "epoch: 48 step: 193, loss is 0.8362094163894653\n", - "epoch: 48 step: 194, loss is 0.7986562252044678\n", - "epoch: 48 step: 195, loss is 0.7951048612594604\n", - "Train epoch time: 
105944.219 ms, per step time: 543.304 ms\n", - "epoch: 49 step: 1, loss is 0.7474855184555054\n", - "epoch: 49 step: 2, loss is 0.7554413080215454\n", - "epoch: 49 step: 3, loss is 0.7626525163650513\n", - "epoch: 49 step: 4, loss is 0.7984957098960876\n", - "epoch: 49 step: 5, loss is 0.7519217133522034\n", - "epoch: 49 step: 6, loss is 0.7466658353805542\n", - "epoch: 49 step: 7, loss is 0.7688969373703003\n", - "epoch: 49 step: 8, loss is 0.7932398915290833\n", - "epoch: 49 step: 9, loss is 0.7910264730453491\n", - "epoch: 49 step: 10, loss is 0.7722512483596802\n", - "epoch: 49 step: 11, loss is 0.7694035768508911\n", - "epoch: 49 step: 12, loss is 0.7862279415130615\n", - "epoch: 49 step: 13, loss is 0.7968816757202148\n", - "epoch: 49 step: 14, loss is 0.7573424577713013\n", - "epoch: 49 step: 15, loss is 0.7916443943977356\n", - "epoch: 49 step: 16, loss is 0.7331437468528748\n", - "epoch: 49 step: 17, loss is 0.7709574103355408\n", - "epoch: 49 step: 18, loss is 0.7906079292297363\n", - "epoch: 49 step: 19, loss is 0.7682799696922302\n", - "epoch: 49 step: 20, loss is 0.8010478019714355\n", - "epoch: 49 step: 21, loss is 0.773858368396759\n", - "epoch: 49 step: 22, loss is 0.7452703714370728\n", - "epoch: 49 step: 23, loss is 0.8269109129905701\n", - "epoch: 49 step: 24, loss is 0.7589935064315796\n", - "epoch: 49 step: 25, loss is 0.79229736328125\n", - "epoch: 49 step: 26, loss is 0.7595837116241455\n", - "epoch: 49 step: 27, loss is 0.7790958881378174\n", - "epoch: 49 step: 28, loss is 0.8197448253631592\n", - "epoch: 49 step: 29, loss is 0.7496641278266907\n", - "epoch: 49 step: 30, loss is 0.8050153255462646\n", - "epoch: 49 step: 31, loss is 0.8073294162750244\n", - "epoch: 49 step: 32, loss is 0.7981759309768677\n", - "epoch: 49 step: 33, loss is 0.7715888023376465\n", - "epoch: 49 step: 34, loss is 0.7878152132034302\n", - "epoch: 49 step: 35, loss is 0.7643104791641235\n", - "epoch: 49 step: 36, loss is 0.7838542461395264\n", - "epoch: 49 step: 37, loss is 0.7966660261154175\n", - "epoch: 49 step: 38, loss is 0.7848427295684814\n", - "epoch: 49 step: 39, loss is 0.7410179972648621\n", - "epoch: 49 step: 40, loss is 0.7847524881362915\n", - "epoch: 49 step: 41, loss is 0.7892673015594482\n", - "epoch: 49 step: 42, loss is 0.7674024105072021\n", - "epoch: 49 step: 43, loss is 0.7687370777130127\n", - "epoch: 49 step: 44, loss is 0.7798919081687927\n", - "epoch: 49 step: 45, loss is 0.7786242365837097\n", - "epoch: 49 step: 46, loss is 0.8034029603004456\n", - "epoch: 49 step: 47, loss is 0.8314440846443176\n", - "epoch: 49 step: 48, loss is 0.7291785478591919\n", - "epoch: 49 step: 49, loss is 0.7359766960144043\n", - "epoch: 49 step: 50, loss is 0.7356237173080444\n", - "epoch: 49 step: 51, loss is 0.7716754674911499\n", - "epoch: 49 step: 52, loss is 0.7936745285987854\n", - "epoch: 49 step: 53, loss is 0.8178622722625732\n", - "epoch: 49 step: 54, loss is 0.7448766231536865\n", - "epoch: 49 step: 55, loss is 0.7648531198501587\n", - "epoch: 49 step: 56, loss is 0.7474914193153381\n", - "epoch: 49 step: 57, loss is 0.8393149375915527\n", - "epoch: 49 step: 58, loss is 0.7431994676589966\n", - "epoch: 49 step: 59, loss is 0.7995795011520386\n", - "epoch: 49 step: 60, loss is 0.7663533687591553\n", - "epoch: 49 step: 61, loss is 0.7868015170097351\n", - "epoch: 49 step: 62, loss is 0.8144644498825073\n", - "epoch: 49 step: 63, loss is 0.7292894124984741\n", - "epoch: 49 step: 64, loss is 0.7749843001365662\n", - "epoch: 49 step: 65, loss is 0.7751369476318359\n", - "epoch: 
49 step: 66, loss is 0.7851287126541138\n", - "epoch: 49 step: 67, loss is 0.7768024206161499\n", - "epoch: 49 step: 68, loss is 0.7923818826675415\n", - "epoch: 49 step: 69, loss is 0.7657811641693115\n", - "epoch: 49 step: 70, loss is 0.7764246463775635\n", - "epoch: 49 step: 71, loss is 0.7763193249702454\n", - "epoch: 49 step: 72, loss is 0.7649475336074829\n", - "epoch: 49 step: 73, loss is 0.7994956970214844\n", - "epoch: 49 step: 74, loss is 0.7806899547576904\n", - "epoch: 49 step: 75, loss is 0.8170067071914673\n", - "epoch: 49 step: 76, loss is 0.8295549154281616\n", - "epoch: 49 step: 77, loss is 0.7846189737319946\n", - "epoch: 49 step: 78, loss is 0.8127977848052979\n", - "epoch: 49 step: 79, loss is 0.7666932344436646\n", - "epoch: 49 step: 80, loss is 0.8099918365478516\n", - "epoch: 49 step: 81, loss is 0.7591879367828369\n", - "epoch: 49 step: 82, loss is 0.8109831809997559\n", - "epoch: 49 step: 83, loss is 0.795573890209198\n", - "epoch: 49 step: 84, loss is 0.8494633436203003\n", - "epoch: 49 step: 85, loss is 0.8163701295852661\n", - "epoch: 49 step: 86, loss is 0.8191530704498291\n", - "epoch: 49 step: 87, loss is 0.7566444277763367\n", - "epoch: 49 step: 88, loss is 0.7639744877815247\n", - "epoch: 49 step: 89, loss is 0.8149380683898926\n", - "epoch: 49 step: 90, loss is 0.7706125974655151\n", - "epoch: 49 step: 91, loss is 0.813895583152771\n", - "epoch: 49 step: 92, loss is 0.8559594750404358\n", - "epoch: 49 step: 93, loss is 0.8276405930519104\n", - "epoch: 49 step: 94, loss is 0.8072350025177002\n", - "epoch: 49 step: 95, loss is 0.8047367334365845\n", - "epoch: 49 step: 96, loss is 0.7500251531600952\n", - "epoch: 49 step: 97, loss is 0.7780362367630005\n", - "epoch: 49 step: 98, loss is 0.765553891658783\n", - "epoch: 49 step: 99, loss is 0.7548193335533142\n", - "epoch: 49 step: 100, loss is 0.7291454076766968\n", - "epoch: 49 step: 101, loss is 0.825963020324707\n", - "epoch: 49 step: 102, loss is 0.8251543045043945\n", - "epoch: 49 step: 103, loss is 0.7915137410163879\n", - "epoch: 49 step: 104, loss is 0.7403810024261475\n", - "epoch: 49 step: 105, loss is 0.7919790744781494\n", - "epoch: 49 step: 106, loss is 0.7666395902633667\n", - "epoch: 49 step: 107, loss is 0.8282239437103271\n", - "epoch: 49 step: 108, loss is 0.8074923753738403\n", - "epoch: 49 step: 109, loss is 0.8358535766601562\n", - "epoch: 49 step: 110, loss is 0.8195568323135376\n", - "epoch: 49 step: 111, loss is 0.8383195996284485\n", - "epoch: 49 step: 112, loss is 0.7909601330757141\n", - "epoch: 49 step: 113, loss is 0.7730705738067627\n", - "epoch: 49 step: 114, loss is 0.8017328977584839\n", - "epoch: 49 step: 115, loss is 0.7961600422859192\n", - "epoch: 49 step: 116, loss is 0.7793943881988525\n", - "epoch: 49 step: 117, loss is 0.8248316049575806\n", - "epoch: 49 step: 118, loss is 0.7936378717422485\n", - "epoch: 49 step: 119, loss is 0.8629387617111206\n", - "epoch: 49 step: 120, loss is 0.8050061464309692\n", - "epoch: 49 step: 121, loss is 0.8020141124725342\n", - "epoch: 49 step: 122, loss is 0.7881861925125122\n", - "epoch: 49 step: 123, loss is 0.7941316366195679\n", - "epoch: 49 step: 124, loss is 0.7949413657188416\n", - "epoch: 49 step: 125, loss is 0.752636194229126\n", - "epoch: 49 step: 126, loss is 0.7947443723678589\n", - "epoch: 49 step: 127, loss is 0.7749506235122681\n", - "epoch: 49 step: 128, loss is 0.8439511060714722\n", - "epoch: 49 step: 129, loss is 0.749794602394104\n", - "epoch: 49 step: 130, loss is 0.8645422458648682\n", - "epoch: 49 step: 131, loss 
is 0.7738044261932373\n", - "epoch: 49 step: 132, loss is 0.7980045676231384\n", - "epoch: 49 step: 133, loss is 0.8118914365768433\n", - "epoch: 49 step: 134, loss is 0.8130654096603394\n", - "epoch: 49 step: 135, loss is 0.7552171945571899\n", - "epoch: 49 step: 136, loss is 0.8086338043212891\n", - "epoch: 49 step: 137, loss is 0.7789323329925537\n", - "epoch: 49 step: 138, loss is 0.87713623046875\n", - "epoch: 49 step: 139, loss is 0.788098931312561\n", - "epoch: 49 step: 140, loss is 0.7871702909469604\n", - "epoch: 49 step: 141, loss is 0.7906486392021179\n", - "epoch: 49 step: 142, loss is 0.7599376440048218\n", - "epoch: 49 step: 143, loss is 0.7541152834892273\n", - "epoch: 49 step: 144, loss is 0.7779464721679688\n", - "epoch: 49 step: 145, loss is 0.8201999664306641\n", - "epoch: 49 step: 146, loss is 0.8153160810470581\n", - "epoch: 49 step: 147, loss is 0.8511422872543335\n", - "epoch: 49 step: 148, loss is 0.7591521739959717\n", - "epoch: 49 step: 149, loss is 0.7936468720436096\n", - "epoch: 49 step: 150, loss is 0.7542546987533569\n", - "epoch: 49 step: 151, loss is 0.787798285484314\n", - "epoch: 49 step: 152, loss is 0.7628979682922363\n", - "epoch: 49 step: 153, loss is 0.8427466750144958\n", - "epoch: 49 step: 154, loss is 0.7713915705680847\n", - "epoch: 49 step: 155, loss is 0.8218947649002075\n", - "epoch: 49 step: 156, loss is 0.7897173762321472\n", - "epoch: 49 step: 157, loss is 0.7629675269126892\n", - "epoch: 49 step: 158, loss is 0.787702202796936\n", - "epoch: 49 step: 159, loss is 0.8200827836990356\n", - "epoch: 49 step: 160, loss is 0.821740448474884\n", - "epoch: 49 step: 161, loss is 0.7424212694168091\n", - "epoch: 49 step: 162, loss is 0.8110157251358032\n", - "epoch: 49 step: 163, loss is 0.7846983075141907\n", - "epoch: 49 step: 164, loss is 0.8021287322044373\n", - "epoch: 49 step: 165, loss is 0.7915834188461304\n", - "epoch: 49 step: 166, loss is 0.8582480549812317\n", - "epoch: 49 step: 167, loss is 0.835322380065918\n", - "epoch: 49 step: 168, loss is 0.7697802782058716\n", - "epoch: 49 step: 169, loss is 0.7804192304611206\n", - "epoch: 49 step: 170, loss is 0.790429949760437\n", - "epoch: 49 step: 171, loss is 0.7891809940338135\n", - "epoch: 49 step: 172, loss is 0.805351197719574\n", - "epoch: 49 step: 173, loss is 0.804185152053833\n", - "epoch: 49 step: 174, loss is 0.7713534832000732\n", - "epoch: 49 step: 175, loss is 0.8341310024261475\n", - "epoch: 49 step: 176, loss is 0.8138383626937866\n", - "epoch: 49 step: 177, loss is 0.7346597909927368\n", - "epoch: 49 step: 178, loss is 0.790809154510498\n", - "epoch: 49 step: 179, loss is 0.8158509135246277\n", - "epoch: 49 step: 180, loss is 0.7741419076919556\n", - "epoch: 49 step: 181, loss is 0.8178799152374268\n", - "epoch: 49 step: 182, loss is 0.7746651768684387\n", - "epoch: 49 step: 183, loss is 0.7272830605506897\n", - "epoch: 49 step: 184, loss is 0.7682174444198608\n", - "epoch: 49 step: 185, loss is 0.7859704494476318\n", - "epoch: 49 step: 186, loss is 0.7947738170623779\n", - "epoch: 49 step: 187, loss is 0.8164531588554382\n", - "epoch: 49 step: 188, loss is 0.8137848377227783\n", - "epoch: 49 step: 189, loss is 0.7600142955780029\n", - "epoch: 49 step: 190, loss is 0.7909482717514038\n", - "epoch: 49 step: 191, loss is 0.7888385057449341\n", - "epoch: 49 step: 192, loss is 0.7625305652618408\n", - "epoch: 49 step: 193, loss is 0.8482733368873596\n", - "epoch: 49 step: 194, loss is 0.7858377695083618\n", - "epoch: 49 step: 195, loss is 0.7894809246063232\n", - "Train epoch 
time: 105988.249 ms, per step time: 543.529 ms\n", - "epoch: 50 step: 1, loss is 0.7802066802978516\n", - "epoch: 50 step: 2, loss is 0.8158935308456421\n", - "epoch: 50 step: 3, loss is 0.7644997835159302\n", - "epoch: 50 step: 4, loss is 0.7955084443092346\n", - "epoch: 50 step: 5, loss is 0.7448071241378784\n", - "epoch: 50 step: 6, loss is 0.7629739046096802\n", - "epoch: 50 step: 7, loss is 0.7969454526901245\n", - "epoch: 50 step: 8, loss is 0.7565621733665466\n", - "epoch: 50 step: 9, loss is 0.7852126359939575\n", - "epoch: 50 step: 10, loss is 0.8052510023117065\n", - "epoch: 50 step: 11, loss is 0.7628653049468994\n", - "epoch: 50 step: 12, loss is 0.7715508937835693\n", - "epoch: 50 step: 13, loss is 0.7526652812957764\n", - "epoch: 50 step: 14, loss is 0.7467508316040039\n", - "epoch: 50 step: 15, loss is 0.7383567690849304\n", - "epoch: 50 step: 16, loss is 0.6938237547874451\n", - "epoch: 50 step: 17, loss is 0.7300999164581299\n", - "epoch: 50 step: 18, loss is 0.7443526983261108\n", - "epoch: 50 step: 19, loss is 0.7856084108352661\n", - "epoch: 50 step: 20, loss is 0.7771008014678955\n", - "epoch: 50 step: 21, loss is 0.7702484726905823\n", - "epoch: 50 step: 22, loss is 0.766179084777832\n", - "epoch: 50 step: 23, loss is 0.7603368759155273\n", - "epoch: 50 step: 24, loss is 0.8527711629867554\n", - "epoch: 50 step: 25, loss is 0.7504369020462036\n", - "epoch: 50 step: 26, loss is 0.7563770413398743\n", - "epoch: 50 step: 27, loss is 0.8033452033996582\n", - "epoch: 50 step: 28, loss is 0.7848401665687561\n", - "epoch: 50 step: 29, loss is 0.7955296039581299\n", - "epoch: 50 step: 30, loss is 0.7846652269363403\n", - "epoch: 50 step: 31, loss is 0.8232280015945435\n", - "epoch: 50 step: 32, loss is 0.7746000289916992\n", - "epoch: 50 step: 33, loss is 0.7539740204811096\n", - "epoch: 50 step: 34, loss is 0.7519662380218506\n", - "epoch: 50 step: 35, loss is 0.7946498990058899\n", - "epoch: 50 step: 36, loss is 0.7660290002822876\n", - "epoch: 50 step: 37, loss is 0.7935041189193726\n", - "epoch: 50 step: 38, loss is 0.7729010581970215\n", - "epoch: 50 step: 39, loss is 0.7572159171104431\n", - "epoch: 50 step: 40, loss is 0.7402242422103882\n", - "epoch: 50 step: 41, loss is 0.7734825611114502\n", - "epoch: 50 step: 42, loss is 0.7434002161026001\n", - "epoch: 50 step: 43, loss is 0.7792245149612427\n", - "epoch: 50 step: 44, loss is 0.7526705265045166\n", - "epoch: 50 step: 45, loss is 0.7489557266235352\n", - "epoch: 50 step: 46, loss is 0.823969841003418\n", - "epoch: 50 step: 47, loss is 0.7713980078697205\n", - "epoch: 50 step: 48, loss is 0.7927367687225342\n", - "epoch: 50 step: 49, loss is 0.7652636766433716\n", - "epoch: 50 step: 50, loss is 0.7508813738822937\n", - "epoch: 50 step: 51, loss is 0.7696555256843567\n", - "epoch: 50 step: 52, loss is 0.8084716200828552\n", - "epoch: 50 step: 53, loss is 0.7491806149482727\n", - "epoch: 50 step: 54, loss is 0.758628249168396\n", - "epoch: 50 step: 55, loss is 0.7774040102958679\n", - "epoch: 50 step: 56, loss is 0.757311224937439\n", - "epoch: 50 step: 57, loss is 0.7130249738693237\n", - "epoch: 50 step: 58, loss is 0.7959308624267578\n", - "epoch: 50 step: 59, loss is 0.7907053232192993\n", - "epoch: 50 step: 60, loss is 0.7715981006622314\n", - "epoch: 50 step: 61, loss is 0.7525125741958618\n", - "epoch: 50 step: 62, loss is 0.7828436493873596\n", - "epoch: 50 step: 63, loss is 0.7406209707260132\n", - "epoch: 50 step: 64, loss is 0.8018389344215393\n", - "epoch: 50 step: 65, loss is 0.7422670722007751\n", - 
"epoch: 50 step: 66, loss is 0.773597240447998\n", - "epoch: 50 step: 67, loss is 0.7702914476394653\n", - "epoch: 50 step: 68, loss is 0.7826281785964966\n", - "epoch: 50 step: 69, loss is 0.7465894222259521\n", - "epoch: 50 step: 70, loss is 0.7769197225570679\n", - "epoch: 50 step: 71, loss is 0.7516205310821533\n", - "epoch: 50 step: 72, loss is 0.786666989326477\n", - "epoch: 50 step: 73, loss is 0.786081850528717\n", - "epoch: 50 step: 74, loss is 0.7419713735580444\n", - "epoch: 50 step: 75, loss is 0.7913058996200562\n", - "epoch: 50 step: 76, loss is 0.7525515556335449\n", - "epoch: 50 step: 77, loss is 0.8237977027893066\n", - "epoch: 50 step: 78, loss is 0.710051953792572\n", - "epoch: 50 step: 79, loss is 0.7696235775947571\n", - "epoch: 50 step: 80, loss is 0.7754746079444885\n", - "epoch: 50 step: 81, loss is 0.8062629699707031\n", - "epoch: 50 step: 82, loss is 0.7503039240837097\n", - "epoch: 50 step: 83, loss is 0.8364999890327454\n", - "epoch: 50 step: 84, loss is 0.7519097328186035\n", - "epoch: 50 step: 85, loss is 0.8097488284111023\n", - "epoch: 50 step: 86, loss is 0.7733085751533508\n", - "epoch: 50 step: 87, loss is 0.7943763732910156\n", - "epoch: 50 step: 88, loss is 0.8017860651016235\n", - "epoch: 50 step: 89, loss is 0.7503619194030762\n", - "epoch: 50 step: 90, loss is 0.7689992189407349\n", - "epoch: 50 step: 91, loss is 0.8445016741752625\n", - "epoch: 50 step: 92, loss is 0.7536423802375793\n", - "epoch: 50 step: 93, loss is 0.7786149978637695\n", - "epoch: 50 step: 94, loss is 0.8036640882492065\n", - "epoch: 50 step: 95, loss is 0.755380392074585\n", - "epoch: 50 step: 96, loss is 0.7683913707733154\n", - "epoch: 50 step: 97, loss is 0.7809499502182007\n", - "epoch: 50 step: 98, loss is 0.743462324142456\n", - "epoch: 50 step: 99, loss is 0.7978581190109253\n", - "epoch: 50 step: 100, loss is 0.8087695240974426\n", - "epoch: 50 step: 101, loss is 0.7854674458503723\n", - "epoch: 50 step: 102, loss is 0.8454350233078003\n", - "epoch: 50 step: 103, loss is 0.8055614233016968\n", - "epoch: 50 step: 104, loss is 0.7752905488014221\n", - "epoch: 50 step: 105, loss is 0.8072637319564819\n", - "epoch: 50 step: 106, loss is 0.7843447327613831\n", - "epoch: 50 step: 107, loss is 0.7667314410209656\n", - "epoch: 50 step: 108, loss is 0.8206599950790405\n", - "epoch: 50 step: 109, loss is 0.7495514750480652\n", - "epoch: 50 step: 110, loss is 0.7722309231758118\n", - "epoch: 50 step: 111, loss is 0.7730912566184998\n", - "epoch: 50 step: 112, loss is 0.7769516706466675\n", - "epoch: 50 step: 113, loss is 0.7311548590660095\n", - "epoch: 50 step: 114, loss is 0.7196918725967407\n", - "epoch: 50 step: 115, loss is 0.8131150007247925\n", - "epoch: 50 step: 116, loss is 0.8293939828872681\n", - "epoch: 50 step: 117, loss is 0.844794511795044\n", - "epoch: 50 step: 118, loss is 0.8097021579742432\n", - "epoch: 50 step: 119, loss is 0.8067548274993896\n", - "epoch: 50 step: 120, loss is 0.7451876401901245\n", - "epoch: 50 step: 121, loss is 0.7942838668823242\n", - "epoch: 50 step: 122, loss is 0.7816265821456909\n", - "epoch: 50 step: 123, loss is 0.7714554071426392\n", - "epoch: 50 step: 124, loss is 0.7801728248596191\n", - "epoch: 50 step: 125, loss is 0.7892995476722717\n", - "epoch: 50 step: 126, loss is 0.8033266067504883\n", - "epoch: 50 step: 127, loss is 0.7724478840827942\n", - "epoch: 50 step: 128, loss is 0.7689555883407593\n", - "epoch: 50 step: 129, loss is 0.7624392509460449\n", - "epoch: 50 step: 130, loss is 0.7530295848846436\n", - "epoch: 50 step: 
131, loss is 0.7497451305389404\n", - "epoch: 50 step: 132, loss is 0.7675462961196899\n", - "epoch: 50 step: 133, loss is 0.7926802635192871\n", - "epoch: 50 step: 134, loss is 0.8272131681442261\n", - "epoch: 50 step: 135, loss is 0.8109605312347412\n", - "epoch: 50 step: 136, loss is 0.8057304620742798\n", - "epoch: 50 step: 137, loss is 0.7566056251525879\n", - "epoch: 50 step: 138, loss is 0.8100849390029907\n", - "epoch: 50 step: 139, loss is 0.7956655621528625\n", - "epoch: 50 step: 140, loss is 0.8203774690628052\n", - "epoch: 50 step: 141, loss is 0.7864224910736084\n", - "epoch: 50 step: 142, loss is 0.7455155849456787\n", - "epoch: 50 step: 143, loss is 0.7382572889328003\n", - "epoch: 50 step: 144, loss is 0.7661005258560181\n", - "epoch: 50 step: 145, loss is 0.8068943023681641\n", - "epoch: 50 step: 146, loss is 0.7878588438034058\n", - "epoch: 50 step: 147, loss is 0.8080874681472778\n", - "epoch: 50 step: 148, loss is 0.776960015296936\n", - "epoch: 50 step: 149, loss is 0.7400027513504028\n", - "epoch: 50 step: 150, loss is 0.7906967401504517\n", - "epoch: 50 step: 151, loss is 0.7190502882003784\n", - "epoch: 50 step: 152, loss is 0.7657128572463989\n", - "epoch: 50 step: 153, loss is 0.7764486074447632\n", - "epoch: 50 step: 154, loss is 0.829918384552002\n", - "epoch: 50 step: 155, loss is 0.7433205842971802\n", - "epoch: 50 step: 156, loss is 0.797990083694458\n", - "epoch: 50 step: 157, loss is 0.7626293897628784\n", - "epoch: 50 step: 158, loss is 0.7843010425567627\n", - "epoch: 50 step: 159, loss is 0.7543965578079224\n", - "epoch: 50 step: 160, loss is 0.7702991962432861\n", - "epoch: 50 step: 161, loss is 0.7387254238128662\n", - "epoch: 50 step: 162, loss is 0.8245499134063721\n", - "epoch: 50 step: 163, loss is 0.8047354221343994\n", - "epoch: 50 step: 164, loss is 0.7772183418273926\n", - "epoch: 50 step: 165, loss is 0.8162798881530762\n", - "epoch: 50 step: 166, loss is 0.7937183380126953\n", - "epoch: 50 step: 167, loss is 0.8447754383087158\n", - "epoch: 50 step: 168, loss is 0.7309650182723999\n", - "epoch: 50 step: 169, loss is 0.7304731011390686\n", - "epoch: 50 step: 170, loss is 0.8367864489555359\n", - "epoch: 50 step: 171, loss is 0.7436604499816895\n", - "epoch: 50 step: 172, loss is 0.8774688243865967\n", - "epoch: 50 step: 173, loss is 0.775653600692749\n", - "epoch: 50 step: 174, loss is 0.7849935293197632\n", - "epoch: 50 step: 175, loss is 0.755415678024292\n", - "epoch: 50 step: 176, loss is 0.7601606845855713\n", - "epoch: 50 step: 177, loss is 0.7827877402305603\n", - "epoch: 50 step: 178, loss is 0.785349428653717\n", - "epoch: 50 step: 179, loss is 0.7730883359909058\n", - "epoch: 50 step: 180, loss is 0.7766386270523071\n", - "epoch: 50 step: 181, loss is 0.7792547941207886\n", - "epoch: 50 step: 182, loss is 0.7630850672721863\n", - "epoch: 50 step: 183, loss is 0.7395979166030884\n", - "epoch: 50 step: 184, loss is 0.8013859987258911\n", - "epoch: 50 step: 185, loss is 0.8058763742446899\n", - "epoch: 50 step: 186, loss is 0.8001610040664673\n", - "epoch: 50 step: 187, loss is 0.7663059234619141\n", - "epoch: 50 step: 188, loss is 0.769565224647522\n", - "epoch: 50 step: 189, loss is 0.7952616810798645\n", - "epoch: 50 step: 190, loss is 0.8387209177017212\n", - "epoch: 50 step: 191, loss is 0.7682342529296875\n", - "epoch: 50 step: 192, loss is 0.772983729839325\n", - "epoch: 50 step: 193, loss is 0.7586737275123596\n", - "epoch: 50 step: 194, loss is 0.738025963306427\n", - "epoch: 50 step: 195, loss is 0.7450219392776489\n", - 
"Train epoch time: 104200.438 ms, per step time: 534.361 ms\n", - "epoch: 51 step: 1, loss is 0.7127658724784851\n", - "epoch: 51 step: 2, loss is 0.7525131702423096\n", - "epoch: 51 step: 3, loss is 0.7650803327560425\n", - "epoch: 51 step: 4, loss is 0.7396030426025391\n", - "epoch: 51 step: 5, loss is 0.7432870268821716\n", - "epoch: 51 step: 6, loss is 0.7518507242202759\n", - "epoch: 51 step: 7, loss is 0.7713302373886108\n", - "epoch: 51 step: 8, loss is 0.7250156402587891\n", - "epoch: 51 step: 9, loss is 0.7508498430252075\n", - "epoch: 51 step: 10, loss is 0.7378017902374268\n", - "epoch: 51 step: 11, loss is 0.7797620296478271\n", - "epoch: 51 step: 12, loss is 0.8774091005325317\n", - "epoch: 51 step: 13, loss is 0.7689456939697266\n", - "epoch: 51 step: 14, loss is 0.748863935470581\n", - "epoch: 51 step: 15, loss is 0.7871088981628418\n", - "epoch: 51 step: 16, loss is 0.7642532587051392\n", - "epoch: 51 step: 17, loss is 0.7468241453170776\n", - "epoch: 51 step: 18, loss is 0.7388325929641724\n", - "epoch: 51 step: 19, loss is 0.7790994048118591\n", - "epoch: 51 step: 20, loss is 0.7604823112487793\n", - "epoch: 51 step: 21, loss is 0.8115692138671875\n", - "epoch: 51 step: 22, loss is 0.7392019629478455\n", - "epoch: 51 step: 23, loss is 0.746444582939148\n", - "epoch: 51 step: 24, loss is 0.7668007612228394\n", - "epoch: 51 step: 25, loss is 0.7720839977264404\n", - "epoch: 51 step: 26, loss is 0.7280883193016052\n", - "epoch: 51 step: 27, loss is 0.769680380821228\n", - "epoch: 51 step: 28, loss is 0.7526214122772217\n", - "epoch: 51 step: 29, loss is 0.7811195850372314\n", - "epoch: 51 step: 30, loss is 0.7703922986984253\n", - "epoch: 51 step: 31, loss is 0.7603752613067627\n", - "epoch: 51 step: 32, loss is 0.7431195378303528\n", - "epoch: 51 step: 33, loss is 0.7410872578620911\n", - "epoch: 51 step: 34, loss is 0.749547004699707\n", - "epoch: 51 step: 35, loss is 0.7891870737075806\n", - "epoch: 51 step: 36, loss is 0.7648804187774658\n", - "epoch: 51 step: 37, loss is 0.8084181547164917\n", - "epoch: 51 step: 38, loss is 0.7439834475517273\n", - "epoch: 51 step: 39, loss is 0.74545818567276\n", - "epoch: 51 step: 40, loss is 0.7496793270111084\n", - "epoch: 51 step: 41, loss is 0.7657278776168823\n", - "epoch: 51 step: 42, loss is 0.7244853973388672\n", - "epoch: 51 step: 43, loss is 0.7605078220367432\n", - "epoch: 51 step: 44, loss is 0.7777740955352783\n", - "epoch: 51 step: 45, loss is 0.7483956813812256\n", - "epoch: 51 step: 46, loss is 0.8061268925666809\n", - "epoch: 51 step: 47, loss is 0.7469605207443237\n", - "epoch: 51 step: 48, loss is 0.7621632218360901\n", - "epoch: 51 step: 49, loss is 0.7748622894287109\n", - "epoch: 51 step: 50, loss is 0.782288134098053\n", - "epoch: 51 step: 51, loss is 0.7466800212860107\n", - "epoch: 51 step: 52, loss is 0.7720746994018555\n", - "epoch: 51 step: 53, loss is 0.8019874095916748\n", - "epoch: 51 step: 54, loss is 0.7637181878089905\n", - "epoch: 51 step: 55, loss is 0.7649544477462769\n", - "epoch: 51 step: 56, loss is 0.8419444561004639\n", - "epoch: 51 step: 57, loss is 0.7405215501785278\n", - "epoch: 51 step: 58, loss is 0.7835460901260376\n", - "epoch: 51 step: 59, loss is 0.7488666772842407\n", - "epoch: 51 step: 60, loss is 0.7705710530281067\n", - "epoch: 51 step: 61, loss is 0.8128464818000793\n", - "epoch: 51 step: 62, loss is 0.7886109352111816\n", - "epoch: 51 step: 63, loss is 0.797203779220581\n", - "epoch: 51 step: 64, loss is 0.7576704025268555\n", - "epoch: 51 step: 65, loss is 
0.7174893021583557\n", - "epoch: 51 step: 66, loss is 0.7788109183311462\n", - "epoch: 51 step: 67, loss is 0.7986986637115479\n", - "epoch: 51 step: 68, loss is 0.7810406684875488\n", - "epoch: 51 step: 69, loss is 0.7389135956764221\n", - "epoch: 51 step: 70, loss is 0.7036351561546326\n", - "epoch: 51 step: 71, loss is 0.798374354839325\n", - "epoch: 51 step: 72, loss is 0.7760833501815796\n", - "epoch: 51 step: 73, loss is 0.7864766120910645\n", - "epoch: 51 step: 74, loss is 0.7776474952697754\n", - "epoch: 51 step: 75, loss is 0.7284588813781738\n", - "epoch: 51 step: 76, loss is 0.7835901975631714\n", - "epoch: 51 step: 77, loss is 0.8169816136360168\n", - "epoch: 51 step: 78, loss is 0.7176551222801208\n", - "epoch: 51 step: 79, loss is 0.7850744724273682\n", - "epoch: 51 step: 80, loss is 0.8125274777412415\n", - "epoch: 51 step: 81, loss is 0.7326189875602722\n", - "epoch: 51 step: 82, loss is 0.7908948659896851\n", - "epoch: 51 step: 83, loss is 0.7501875758171082\n", - "epoch: 51 step: 84, loss is 0.7747730016708374\n", - "epoch: 51 step: 85, loss is 0.7624555230140686\n", - "epoch: 51 step: 86, loss is 0.7588902711868286\n", - "epoch: 51 step: 87, loss is 0.7848199605941772\n", - "epoch: 51 step: 88, loss is 0.7944551706314087\n", - "epoch: 51 step: 89, loss is 0.7779529094696045\n", - "epoch: 51 step: 90, loss is 0.7726327776908875\n", - "epoch: 51 step: 91, loss is 0.7434247732162476\n", - "epoch: 51 step: 92, loss is 0.753853440284729\n", - "epoch: 51 step: 93, loss is 0.7727560997009277\n", - "epoch: 51 step: 94, loss is 0.8118842840194702\n", - "epoch: 51 step: 95, loss is 0.708669900894165\n", - "epoch: 51 step: 96, loss is 0.7835653424263\n", - "epoch: 51 step: 97, loss is 0.7835577726364136\n", - "epoch: 51 step: 98, loss is 0.7509108781814575\n", - "epoch: 51 step: 99, loss is 0.7754564881324768\n", - "epoch: 51 step: 100, loss is 0.8136333227157593\n", - "epoch: 51 step: 101, loss is 0.7317966818809509\n", - "epoch: 51 step: 102, loss is 0.7739089727401733\n", - "epoch: 51 step: 103, loss is 0.7911791801452637\n", - "epoch: 51 step: 104, loss is 0.7707613110542297\n", - "epoch: 51 step: 105, loss is 0.8094954490661621\n", - "epoch: 51 step: 106, loss is 0.7213757038116455\n", - "epoch: 51 step: 107, loss is 0.7850294709205627\n", - "epoch: 51 step: 108, loss is 0.8264412879943848\n", - "epoch: 51 step: 109, loss is 0.7776114344596863\n", - "epoch: 51 step: 110, loss is 0.7412658333778381\n", - "epoch: 51 step: 111, loss is 0.7742754220962524\n", - "epoch: 51 step: 112, loss is 0.8313534259796143\n", - "epoch: 51 step: 113, loss is 0.7464664578437805\n", - "epoch: 51 step: 114, loss is 0.802405595779419\n", - "epoch: 51 step: 115, loss is 0.7652677297592163\n", - "epoch: 51 step: 116, loss is 0.7501987218856812\n", - "epoch: 51 step: 117, loss is 0.7817049026489258\n", - "epoch: 51 step: 118, loss is 0.7619132995605469\n", - "epoch: 51 step: 119, loss is 0.761806845664978\n", - "epoch: 51 step: 120, loss is 0.8153101205825806\n", - "epoch: 51 step: 121, loss is 0.738982081413269\n", - "epoch: 51 step: 122, loss is 0.7805833220481873\n", - "epoch: 51 step: 123, loss is 0.7610634565353394\n", - "epoch: 51 step: 124, loss is 0.7832593321800232\n", - "epoch: 51 step: 125, loss is 0.7634925246238708\n", - "epoch: 51 step: 126, loss is 0.7431166172027588\n", - "epoch: 51 step: 127, loss is 0.7651649117469788\n", - "epoch: 51 step: 128, loss is 0.8076580166816711\n", - "epoch: 51 step: 129, loss is 0.7732875347137451\n", - "epoch: 51 step: 130, loss is 0.7844958901405334\n", 
- "epoch: 51 step: 131, loss is 0.750457763671875\n", - "epoch: 51 step: 132, loss is 0.7433779239654541\n", - "epoch: 51 step: 133, loss is 0.7661430239677429\n", - "epoch: 51 step: 134, loss is 0.7537803053855896\n", - "epoch: 51 step: 135, loss is 0.7477529048919678\n", - "epoch: 51 step: 136, loss is 0.8021606206893921\n", - "epoch: 51 step: 137, loss is 0.7597280740737915\n", - "epoch: 51 step: 138, loss is 0.747283935546875\n", - "epoch: 51 step: 139, loss is 0.7839834690093994\n", - "epoch: 51 step: 140, loss is 0.7966628074645996\n", - "epoch: 51 step: 141, loss is 0.8159871101379395\n", - "epoch: 51 step: 142, loss is 0.783802330493927\n", - "epoch: 51 step: 143, loss is 0.7079719305038452\n", - "epoch: 51 step: 144, loss is 0.7701084017753601\n", - "epoch: 51 step: 145, loss is 0.7423214912414551\n", - "epoch: 51 step: 146, loss is 0.7296830415725708\n", - "epoch: 51 step: 147, loss is 0.7885416746139526\n", - "epoch: 51 step: 148, loss is 0.7704241275787354\n", - "epoch: 51 step: 149, loss is 0.7362068891525269\n", - "epoch: 51 step: 150, loss is 0.7220796346664429\n", - "epoch: 51 step: 151, loss is 0.7163593769073486\n", - "epoch: 51 step: 152, loss is 0.7799816727638245\n", - "epoch: 51 step: 153, loss is 0.8441452980041504\n", - "epoch: 51 step: 154, loss is 0.7525169849395752\n", - "epoch: 51 step: 155, loss is 0.7980644106864929\n", - "epoch: 51 step: 156, loss is 0.7558544874191284\n", - "epoch: 51 step: 157, loss is 0.7281650900840759\n", - "epoch: 51 step: 158, loss is 0.776863157749176\n", - "epoch: 51 step: 159, loss is 0.7516960501670837\n", - "epoch: 51 step: 160, loss is 0.7773051857948303\n", - "epoch: 51 step: 161, loss is 0.7648395299911499\n", - "epoch: 51 step: 162, loss is 0.8116007447242737\n", - "epoch: 51 step: 163, loss is 0.7319633960723877\n", - "epoch: 51 step: 164, loss is 0.7862023115158081\n", - "epoch: 51 step: 165, loss is 0.7634849548339844\n", - "epoch: 51 step: 166, loss is 0.7841250896453857\n", - "epoch: 51 step: 167, loss is 0.8194866180419922\n", - "epoch: 51 step: 168, loss is 0.768380343914032\n", - "epoch: 51 step: 169, loss is 0.8089847564697266\n", - "epoch: 51 step: 170, loss is 0.7286646366119385\n", - "epoch: 51 step: 171, loss is 0.7401659488677979\n", - "epoch: 51 step: 172, loss is 0.7706607580184937\n", - "epoch: 51 step: 173, loss is 0.7353518009185791\n", - "epoch: 51 step: 174, loss is 0.7514238357543945\n", - "epoch: 51 step: 175, loss is 0.7638115882873535\n", - "epoch: 51 step: 176, loss is 0.7566566467285156\n", - "epoch: 51 step: 177, loss is 0.7689902782440186\n", - "epoch: 51 step: 178, loss is 0.762540876865387\n", - "epoch: 51 step: 179, loss is 0.8596534132957458\n", - "epoch: 51 step: 180, loss is 0.761650562286377\n", - "epoch: 51 step: 181, loss is 0.820151686668396\n", - "epoch: 51 step: 182, loss is 0.7691572904586792\n", - "epoch: 51 step: 183, loss is 0.7595763206481934\n", - "epoch: 51 step: 184, loss is 0.7404129505157471\n", - "epoch: 51 step: 185, loss is 0.7587924599647522\n", - "epoch: 51 step: 186, loss is 0.772550642490387\n", - "epoch: 51 step: 187, loss is 0.761353611946106\n", - "epoch: 51 step: 188, loss is 0.7576625943183899\n", - "epoch: 51 step: 189, loss is 0.8490115404129028\n", - "epoch: 51 step: 190, loss is 0.7551292181015015\n", - "epoch: 51 step: 191, loss is 0.7356715798377991\n", - "epoch: 51 step: 192, loss is 0.7816735506057739\n", - "epoch: 51 step: 193, loss is 0.7799474596977234\n", - "epoch: 51 step: 194, loss is 0.7315225005149841\n", - "epoch: 51 step: 195, loss is 
0.8464182615280151\n", - "Train epoch time: 109124.733 ms, per step time: 559.614 ms\n", - "epoch: 52 step: 1, loss is 0.7237507104873657\n", - "epoch: 52 step: 2, loss is 0.7205532789230347\n", - "epoch: 52 step: 3, loss is 0.8071622252464294\n", - "epoch: 52 step: 4, loss is 0.741265594959259\n", - "epoch: 52 step: 5, loss is 0.7746148109436035\n", - "epoch: 52 step: 6, loss is 0.7444890737533569\n", - "epoch: 52 step: 7, loss is 0.7293844223022461\n", - "epoch: 52 step: 8, loss is 0.7480308413505554\n", - "epoch: 52 step: 9, loss is 0.7517484426498413\n", - "epoch: 52 step: 10, loss is 0.726035475730896\n", - "epoch: 52 step: 11, loss is 0.778565526008606\n", - "epoch: 52 step: 12, loss is 0.718412458896637\n", - "epoch: 52 step: 13, loss is 0.7599987387657166\n", - "epoch: 52 step: 14, loss is 0.7636425495147705\n", - "epoch: 52 step: 15, loss is 0.7087388038635254\n", - "epoch: 52 step: 16, loss is 0.7806265950202942\n", - "epoch: 52 step: 17, loss is 0.7335832118988037\n", - "epoch: 52 step: 18, loss is 0.7266861200332642\n", - "epoch: 52 step: 19, loss is 0.7294358611106873\n", - "epoch: 52 step: 20, loss is 0.7528926134109497\n", - "epoch: 52 step: 21, loss is 0.7243163585662842\n", - "epoch: 52 step: 22, loss is 0.7174777984619141\n", - "epoch: 52 step: 23, loss is 0.7128415107727051\n", - "epoch: 52 step: 24, loss is 0.7517701387405396\n", - "epoch: 52 step: 25, loss is 0.7569383978843689\n", - "epoch: 52 step: 26, loss is 0.7723703384399414\n", - "epoch: 52 step: 27, loss is 0.7549576759338379\n", - "epoch: 52 step: 28, loss is 0.7578855752944946\n", - "epoch: 52 step: 29, loss is 0.814480185508728\n", - "epoch: 52 step: 30, loss is 0.7381181716918945\n", - "epoch: 52 step: 31, loss is 0.7099692821502686\n", - "epoch: 52 step: 32, loss is 0.7362306714057922\n", - "epoch: 52 step: 33, loss is 0.7346323132514954\n", - "epoch: 52 step: 34, loss is 0.7914421558380127\n", - "epoch: 52 step: 35, loss is 0.7378039956092834\n", - "epoch: 52 step: 36, loss is 0.7863876223564148\n", - "epoch: 52 step: 37, loss is 0.7144011855125427\n", - "epoch: 52 step: 38, loss is 0.7149479985237122\n", - "epoch: 52 step: 39, loss is 0.7533828020095825\n", - "epoch: 52 step: 40, loss is 0.7164735794067383\n", - "epoch: 52 step: 41, loss is 0.7641370296478271\n", - "epoch: 52 step: 42, loss is 0.7775751352310181\n", - "epoch: 52 step: 43, loss is 0.7493149042129517\n", - "epoch: 52 step: 44, loss is 0.729965329170227\n", - "epoch: 52 step: 45, loss is 0.713408350944519\n", - "epoch: 52 step: 46, loss is 0.7059417963027954\n", - "epoch: 52 step: 47, loss is 0.7202169299125671\n", - "epoch: 52 step: 48, loss is 0.7584760785102844\n", - "epoch: 52 step: 49, loss is 0.7468349933624268\n", - "epoch: 52 step: 50, loss is 0.7310805320739746\n", - "epoch: 52 step: 51, loss is 0.72611403465271\n", - "epoch: 52 step: 52, loss is 0.7282006740570068\n", - "epoch: 52 step: 53, loss is 0.7801165580749512\n", - "epoch: 52 step: 54, loss is 0.7690773010253906\n", - "epoch: 52 step: 55, loss is 0.7466640472412109\n", - "epoch: 52 step: 56, loss is 0.7575427293777466\n", - "epoch: 52 step: 57, loss is 0.7473092079162598\n", - "epoch: 52 step: 58, loss is 0.7716274261474609\n", - "epoch: 52 step: 59, loss is 0.7699079513549805\n", - "epoch: 52 step: 60, loss is 0.7655402421951294\n", - "epoch: 52 step: 61, loss is 0.7438780069351196\n", - "epoch: 52 step: 62, loss is 0.7624803781509399\n", - "epoch: 52 step: 63, loss is 0.7648270130157471\n", - "epoch: 52 step: 64, loss is 0.7593474388122559\n", - "epoch: 52 step: 65, loss 
is 0.7308676242828369\n", - "epoch: 52 step: 66, loss is 0.7604267597198486\n", - "epoch: 52 step: 67, loss is 0.7411351203918457\n", - "epoch: 52 step: 68, loss is 0.7379381656646729\n", - "epoch: 52 step: 69, loss is 0.7681666612625122\n", - "epoch: 52 step: 70, loss is 0.788827657699585\n", - "epoch: 52 step: 71, loss is 0.7026586532592773\n", - "epoch: 52 step: 72, loss is 0.7977089881896973\n", - "epoch: 52 step: 73, loss is 0.7567089796066284\n", - "epoch: 52 step: 74, loss is 0.7211205959320068\n", - "epoch: 52 step: 75, loss is 0.769801139831543\n", - "epoch: 52 step: 76, loss is 0.7555128335952759\n", - "epoch: 52 step: 77, loss is 0.7387851476669312\n", - "epoch: 52 step: 78, loss is 0.7286485433578491\n", - "epoch: 52 step: 79, loss is 0.7403643131256104\n", - "epoch: 52 step: 80, loss is 0.7305927276611328\n", - "epoch: 52 step: 81, loss is 0.7350622415542603\n", - "epoch: 52 step: 82, loss is 0.753395676612854\n", - "epoch: 52 step: 83, loss is 0.7631814479827881\n", - "epoch: 52 step: 84, loss is 0.7930261492729187\n", - "epoch: 52 step: 85, loss is 0.7467182874679565\n", - "epoch: 52 step: 86, loss is 0.7452768087387085\n", - "epoch: 52 step: 87, loss is 0.7621490955352783\n", - "epoch: 52 step: 88, loss is 0.799854040145874\n", - "epoch: 52 step: 89, loss is 0.7455629110336304\n", - "epoch: 52 step: 90, loss is 0.8005139827728271\n", - "epoch: 52 step: 91, loss is 0.7672126889228821\n", - "epoch: 52 step: 92, loss is 0.7397708892822266\n", - "epoch: 52 step: 93, loss is 0.7564449310302734\n", - "epoch: 52 step: 94, loss is 0.8015995621681213\n", - "epoch: 52 step: 95, loss is 0.7834384441375732\n", - "epoch: 52 step: 96, loss is 0.7492989301681519\n", - "epoch: 52 step: 97, loss is 0.7958290576934814\n", - "epoch: 52 step: 98, loss is 0.7990648150444031\n", - "epoch: 52 step: 99, loss is 0.8202942609786987\n", - "epoch: 52 step: 100, loss is 0.7845600247383118\n", - "epoch: 52 step: 101, loss is 0.7387982606887817\n", - "epoch: 52 step: 102, loss is 0.768934965133667\n", - "epoch: 52 step: 103, loss is 0.763335108757019\n", - "epoch: 52 step: 104, loss is 0.7692549228668213\n", - "epoch: 52 step: 105, loss is 0.7130553722381592\n", - "epoch: 52 step: 106, loss is 0.8097378015518188\n", - "epoch: 52 step: 107, loss is 0.8266847729682922\n", - "epoch: 52 step: 108, loss is 0.8026797771453857\n", - "epoch: 52 step: 109, loss is 0.7485285997390747\n", - "epoch: 52 step: 110, loss is 0.788648784160614\n", - "epoch: 52 step: 111, loss is 0.7271437644958496\n", - "epoch: 52 step: 112, loss is 0.7399160861968994\n", - "epoch: 52 step: 113, loss is 0.7678052186965942\n", - "epoch: 52 step: 114, loss is 0.777267575263977\n", - "epoch: 52 step: 115, loss is 0.8275370001792908\n", - "epoch: 52 step: 116, loss is 0.7496453523635864\n", - "epoch: 52 step: 117, loss is 0.7804430723190308\n", - "epoch: 52 step: 118, loss is 0.7585937976837158\n", - "epoch: 52 step: 119, loss is 0.7894551753997803\n", - "epoch: 52 step: 120, loss is 0.73940110206604\n", - "epoch: 52 step: 121, loss is 0.7449439764022827\n", - "epoch: 52 step: 122, loss is 0.7929477095603943\n", - "epoch: 52 step: 123, loss is 0.7425300478935242\n", - "epoch: 52 step: 124, loss is 0.8194437026977539\n", - "epoch: 52 step: 125, loss is 0.7628844380378723\n", - "epoch: 52 step: 126, loss is 0.7711507081985474\n", - "epoch: 52 step: 127, loss is 0.7821146249771118\n", - "epoch: 52 step: 128, loss is 0.7881607413291931\n", - "epoch: 52 step: 129, loss is 0.7612645626068115\n", - "epoch: 52 step: 130, loss is 
0.7549643516540527\n", - "epoch: 52 step: 131, loss is 0.8177905082702637\n", - "epoch: 52 step: 132, loss is 0.809404194355011\n", - "epoch: 52 step: 133, loss is 0.7244337797164917\n", - "epoch: 52 step: 134, loss is 0.7772903442382812\n", - "epoch: 52 step: 135, loss is 0.8083624839782715\n", - "epoch: 52 step: 136, loss is 0.7536703944206238\n", - "epoch: 52 step: 137, loss is 0.7519947290420532\n", - "epoch: 52 step: 138, loss is 0.7664327621459961\n", - "epoch: 52 step: 139, loss is 0.7935937643051147\n", - "epoch: 52 step: 140, loss is 0.7848483324050903\n", - "epoch: 52 step: 141, loss is 0.7091739773750305\n", - "epoch: 52 step: 142, loss is 0.7917645573616028\n", - "epoch: 52 step: 143, loss is 0.7789062261581421\n", - "epoch: 52 step: 144, loss is 0.7541940808296204\n", - "epoch: 52 step: 145, loss is 0.7768668532371521\n", - "epoch: 52 step: 146, loss is 0.7654355764389038\n", - "epoch: 52 step: 147, loss is 0.7951533794403076\n", - "epoch: 52 step: 148, loss is 0.772946834564209\n", - "epoch: 52 step: 149, loss is 0.7534056901931763\n", - "epoch: 52 step: 150, loss is 0.7249675393104553\n", - "epoch: 52 step: 151, loss is 0.7385531663894653\n", - "epoch: 52 step: 152, loss is 0.7756916284561157\n", - "epoch: 52 step: 153, loss is 0.7675639390945435\n", - "epoch: 52 step: 154, loss is 0.7033215761184692\n", - "epoch: 52 step: 155, loss is 0.795699417591095\n", - "epoch: 52 step: 156, loss is 0.81412672996521\n", - "epoch: 52 step: 157, loss is 0.7636189460754395\n", - "epoch: 52 step: 158, loss is 0.793228268623352\n", - "epoch: 52 step: 159, loss is 0.7766045331954956\n", - "epoch: 52 step: 160, loss is 0.766132116317749\n", - "epoch: 52 step: 161, loss is 0.7687993049621582\n", - "epoch: 52 step: 162, loss is 0.781251072883606\n", - "epoch: 52 step: 163, loss is 0.8399323225021362\n", - "epoch: 52 step: 164, loss is 0.742940366268158\n", - "epoch: 52 step: 165, loss is 0.8020011186599731\n", - "epoch: 52 step: 166, loss is 0.7560544013977051\n", - "epoch: 52 step: 167, loss is 0.7643052339553833\n", - "epoch: 52 step: 168, loss is 0.7376540899276733\n", - "epoch: 52 step: 169, loss is 0.7556971311569214\n", - "epoch: 52 step: 170, loss is 0.7848159670829773\n", - "epoch: 52 step: 171, loss is 0.719372034072876\n", - "epoch: 52 step: 172, loss is 0.7569981217384338\n", - "epoch: 52 step: 173, loss is 0.7398255467414856\n", - "epoch: 52 step: 174, loss is 0.7781835198402405\n", - "epoch: 52 step: 175, loss is 0.750700056552887\n", - "epoch: 52 step: 176, loss is 0.7795656323432922\n", - "epoch: 52 step: 177, loss is 0.7786036133766174\n", - "epoch: 52 step: 178, loss is 0.8127820491790771\n", - "epoch: 52 step: 179, loss is 0.8024686574935913\n", - "epoch: 52 step: 180, loss is 0.7406350374221802\n", - "epoch: 52 step: 181, loss is 0.8004850745201111\n", - "epoch: 52 step: 182, loss is 0.7902394533157349\n", - "epoch: 52 step: 183, loss is 0.7333025932312012\n", - "epoch: 52 step: 184, loss is 0.8184046745300293\n", - "epoch: 52 step: 185, loss is 0.7648668885231018\n", - "epoch: 52 step: 186, loss is 0.746066153049469\n", - "epoch: 52 step: 187, loss is 0.7810167074203491\n", - "epoch: 52 step: 188, loss is 0.8257111310958862\n", - "epoch: 52 step: 189, loss is 0.8096011281013489\n", - "epoch: 52 step: 190, loss is 0.766956090927124\n", - "epoch: 52 step: 191, loss is 0.7744662761688232\n", - "epoch: 52 step: 192, loss is 0.7553519010543823\n", - "epoch: 52 step: 193, loss is 0.8144704103469849\n", - "epoch: 52 step: 194, loss is 0.7532559633255005\n", - "epoch: 52 step: 195, 
loss is 0.8063337206840515\n", - "Train epoch time: 108694.718 ms, per step time: 557.409 ms\n", - "epoch: 53 step: 1, loss is 0.7500209808349609\n", - "epoch: 53 step: 2, loss is 0.7467270493507385\n", - "epoch: 53 step: 3, loss is 0.75927734375\n", - "epoch: 53 step: 4, loss is 0.7659368515014648\n", - "epoch: 53 step: 5, loss is 0.7466236352920532\n", - "epoch: 53 step: 6, loss is 0.75738525390625\n", - "epoch: 53 step: 7, loss is 0.6892359852790833\n", - "epoch: 53 step: 8, loss is 0.7542592287063599\n", - "epoch: 53 step: 9, loss is 0.7519485950469971\n", - "epoch: 53 step: 10, loss is 0.7212932109832764\n", - "epoch: 53 step: 11, loss is 0.7168842554092407\n", - "epoch: 53 step: 12, loss is 0.7416212558746338\n", - "epoch: 53 step: 13, loss is 0.7490566372871399\n", - "epoch: 53 step: 14, loss is 0.7451211214065552\n", - "epoch: 53 step: 15, loss is 0.7452759742736816\n", - "epoch: 53 step: 16, loss is 0.7458174228668213\n", - "epoch: 53 step: 17, loss is 0.7457907199859619\n", - "epoch: 53 step: 18, loss is 0.7160977125167847\n", - "epoch: 53 step: 19, loss is 0.7072196006774902\n", - "epoch: 53 step: 20, loss is 0.7525694966316223\n", - "epoch: 53 step: 21, loss is 0.761174201965332\n", - "epoch: 53 step: 22, loss is 0.7387212514877319\n", - "epoch: 53 step: 23, loss is 0.7277069091796875\n", - "epoch: 53 step: 24, loss is 0.7699098587036133\n", - "epoch: 53 step: 25, loss is 0.7390406131744385\n", - "epoch: 53 step: 26, loss is 0.7414827346801758\n", - "epoch: 53 step: 27, loss is 0.7728409767150879\n", - "epoch: 53 step: 28, loss is 0.7129389047622681\n", - "epoch: 53 step: 29, loss is 0.7603026628494263\n", - "epoch: 53 step: 30, loss is 0.713713526725769\n", - "epoch: 53 step: 31, loss is 0.7287588119506836\n", - "epoch: 53 step: 32, loss is 0.8140697479248047\n", - "epoch: 53 step: 33, loss is 0.7393417954444885\n", - "epoch: 53 step: 34, loss is 0.737850546836853\n", - "epoch: 53 step: 35, loss is 0.7361228466033936\n", - "epoch: 53 step: 36, loss is 0.749570906162262\n", - "epoch: 53 step: 37, loss is 0.7559493780136108\n", - "epoch: 53 step: 38, loss is 0.7264422178268433\n", - "epoch: 53 step: 39, loss is 0.7156509160995483\n", - "epoch: 53 step: 40, loss is 0.7469439506530762\n", - "epoch: 53 step: 41, loss is 0.750981330871582\n", - "epoch: 53 step: 42, loss is 0.7791249752044678\n", - "epoch: 53 step: 43, loss is 0.779373288154602\n", - "epoch: 53 step: 44, loss is 0.710938572883606\n", - "epoch: 53 step: 45, loss is 0.7452494502067566\n", - "epoch: 53 step: 46, loss is 0.7114530205726624\n", - "epoch: 53 step: 47, loss is 0.7834138870239258\n", - "epoch: 53 step: 48, loss is 0.7266097068786621\n", - "epoch: 53 step: 49, loss is 0.7368125319480896\n", - "epoch: 53 step: 50, loss is 0.77834552526474\n", - "epoch: 53 step: 51, loss is 0.8421415686607361\n", - "epoch: 53 step: 52, loss is 0.7480530142784119\n", - "epoch: 53 step: 53, loss is 0.7690051794052124\n", - "epoch: 53 step: 54, loss is 0.7389063835144043\n", - "epoch: 53 step: 55, loss is 0.7824430465698242\n", - "epoch: 53 step: 56, loss is 0.7503558397293091\n", - "epoch: 53 step: 57, loss is 0.8129368424415588\n", - "epoch: 53 step: 58, loss is 0.7774947881698608\n", - "epoch: 53 step: 59, loss is 0.7319918870925903\n", - "epoch: 53 step: 60, loss is 0.7464447617530823\n", - "epoch: 53 step: 61, loss is 0.7171962261199951\n", - "epoch: 53 step: 62, loss is 0.7247670888900757\n", - "epoch: 53 step: 63, loss is 0.7707715630531311\n", - "epoch: 53 step: 64, loss is 0.787301242351532\n", - "epoch: 53 step: 65, loss 
is 0.7492901086807251\n", - "epoch: 53 step: 66, loss is 0.725997269153595\n", - "epoch: 53 step: 67, loss is 0.748086154460907\n", - "epoch: 53 step: 68, loss is 0.7362990975379944\n", - "epoch: 53 step: 69, loss is 0.7885322570800781\n", - "epoch: 53 step: 70, loss is 0.7696475982666016\n", - "epoch: 53 step: 71, loss is 0.7926758527755737\n", - "epoch: 53 step: 72, loss is 0.787043571472168\n", - "epoch: 53 step: 73, loss is 0.7333463430404663\n", - "epoch: 53 step: 74, loss is 0.7788268327713013\n", - "epoch: 53 step: 75, loss is 0.7439805269241333\n", - "epoch: 53 step: 76, loss is 0.7463969588279724\n", - "epoch: 53 step: 77, loss is 0.7507326602935791\n", - "epoch: 53 step: 78, loss is 0.7579741477966309\n", - "epoch: 53 step: 79, loss is 0.7475249767303467\n", - "epoch: 53 step: 80, loss is 0.7456820011138916\n", - "epoch: 53 step: 81, loss is 0.7280092835426331\n", - "epoch: 53 step: 82, loss is 0.7140530347824097\n", - "epoch: 53 step: 83, loss is 0.7463144659996033\n", - "epoch: 53 step: 84, loss is 0.7169508934020996\n", - "epoch: 53 step: 85, loss is 0.7434486150741577\n", - "epoch: 53 step: 86, loss is 0.7139694690704346\n", - "epoch: 53 step: 87, loss is 0.7232010364532471\n", - "epoch: 53 step: 88, loss is 0.7720944881439209\n", - "epoch: 53 step: 89, loss is 0.7757639288902283\n", - "epoch: 53 step: 90, loss is 0.7018445730209351\n", - "epoch: 53 step: 91, loss is 0.7927099466323853\n", - "epoch: 53 step: 92, loss is 0.7350521087646484\n", - "epoch: 53 step: 93, loss is 0.7524237036705017\n", - "epoch: 53 step: 94, loss is 0.7354158163070679\n", - "epoch: 53 step: 95, loss is 0.7233775854110718\n", - "epoch: 53 step: 96, loss is 0.745295524597168\n", - "epoch: 53 step: 97, loss is 0.7490715980529785\n", - "epoch: 53 step: 98, loss is 0.7399924993515015\n", - "epoch: 53 step: 99, loss is 0.7267537117004395\n", - "epoch: 53 step: 100, loss is 0.7782707214355469\n", - "epoch: 53 step: 101, loss is 0.770723283290863\n", - "epoch: 53 step: 102, loss is 0.7729467749595642\n", - "epoch: 53 step: 103, loss is 0.7696723341941833\n", - "epoch: 53 step: 104, loss is 0.7105939388275146\n", - "epoch: 53 step: 105, loss is 0.7165085077285767\n", - "epoch: 53 step: 106, loss is 0.7689510583877563\n", - "epoch: 53 step: 107, loss is 0.7937767505645752\n", - "epoch: 53 step: 108, loss is 0.7529879808425903\n", - "epoch: 53 step: 109, loss is 0.7714258432388306\n", - "epoch: 53 step: 110, loss is 0.824413537979126\n", - "epoch: 53 step: 111, loss is 0.7632749080657959\n", - "epoch: 53 step: 112, loss is 0.7888000011444092\n", - "epoch: 53 step: 113, loss is 0.8106561899185181\n", - "epoch: 53 step: 114, loss is 0.7466120719909668\n", - "epoch: 53 step: 115, loss is 0.751723051071167\n", - "epoch: 53 step: 116, loss is 0.735780656337738\n", - "epoch: 53 step: 117, loss is 0.8043343424797058\n", - "epoch: 53 step: 118, loss is 0.743895947933197\n", - "epoch: 53 step: 119, loss is 0.73650723695755\n", - "epoch: 53 step: 120, loss is 0.709223747253418\n", - "epoch: 53 step: 121, loss is 0.7556557655334473\n", - "epoch: 53 step: 122, loss is 0.728112518787384\n", - "epoch: 53 step: 123, loss is 0.7267941236495972\n", - "epoch: 53 step: 124, loss is 0.7535509467124939\n", - "epoch: 53 step: 125, loss is 0.7727494239807129\n", - "epoch: 53 step: 126, loss is 0.7751871347427368\n", - "epoch: 53 step: 127, loss is 0.8032330870628357\n", - "epoch: 53 step: 128, loss is 0.7599368691444397\n", - "epoch: 53 step: 129, loss is 0.8154371976852417\n", - "epoch: 53 step: 130, loss is 0.7534074783325195\n", 
- "epoch: 53 step: 131, loss is 0.7141103148460388\n", - "epoch: 53 step: 132, loss is 0.7176271677017212\n", - "epoch: 53 step: 133, loss is 0.7566998600959778\n", - "epoch: 53 step: 134, loss is 0.7405641078948975\n", - "epoch: 53 step: 135, loss is 0.7781600952148438\n", - "epoch: 53 step: 136, loss is 0.7720069885253906\n", - "epoch: 53 step: 137, loss is 0.719613790512085\n", - "epoch: 53 step: 138, loss is 0.7210479974746704\n", - "epoch: 53 step: 139, loss is 0.808986246585846\n", - "epoch: 53 step: 140, loss is 0.788841962814331\n", - "epoch: 53 step: 141, loss is 0.7690080404281616\n", - "epoch: 53 step: 142, loss is 0.8168197870254517\n", - "epoch: 53 step: 143, loss is 0.7603106498718262\n", - "epoch: 53 step: 144, loss is 0.740598201751709\n", - "epoch: 53 step: 145, loss is 0.7679888010025024\n", - "epoch: 53 step: 146, loss is 0.7148758172988892\n", - "epoch: 53 step: 147, loss is 0.7595497369766235\n", - "epoch: 53 step: 148, loss is 0.7679424285888672\n", - "epoch: 53 step: 149, loss is 0.8097102642059326\n", - "epoch: 53 step: 150, loss is 0.7641118764877319\n", - "epoch: 53 step: 151, loss is 0.7704870104789734\n", - "epoch: 53 step: 152, loss is 0.7517968416213989\n", - "epoch: 53 step: 153, loss is 0.8034548759460449\n", - "epoch: 53 step: 154, loss is 0.7359227538108826\n", - "epoch: 53 step: 155, loss is 0.781670331954956\n", - "epoch: 53 step: 156, loss is 0.7458946704864502\n", - "epoch: 53 step: 157, loss is 0.7879830002784729\n", - "epoch: 53 step: 158, loss is 0.7548004388809204\n", - "epoch: 53 step: 159, loss is 0.7684754133224487\n", - "epoch: 53 step: 160, loss is 0.8089704513549805\n", - "epoch: 53 step: 161, loss is 0.7638421058654785\n", - "epoch: 53 step: 162, loss is 0.7455356121063232\n", - "epoch: 53 step: 163, loss is 0.7628594040870667\n", - "epoch: 53 step: 164, loss is 0.767367959022522\n", - "epoch: 53 step: 165, loss is 0.7827467918395996\n", - "epoch: 53 step: 166, loss is 0.7819128036499023\n", - "epoch: 53 step: 167, loss is 0.7710360288619995\n", - "epoch: 53 step: 168, loss is 0.7814900279045105\n", - "epoch: 53 step: 169, loss is 0.7141121625900269\n", - "epoch: 53 step: 170, loss is 0.7374953031539917\n", - "epoch: 53 step: 171, loss is 0.7819555997848511\n", - "epoch: 53 step: 172, loss is 0.7101056575775146\n", - "epoch: 53 step: 173, loss is 0.772534966468811\n", - "epoch: 53 step: 174, loss is 0.8179224729537964\n", - "epoch: 53 step: 175, loss is 0.728415846824646\n", - "epoch: 53 step: 176, loss is 0.7604031562805176\n", - "epoch: 53 step: 177, loss is 0.7305001020431519\n", - "epoch: 53 step: 178, loss is 0.7464556097984314\n", - "epoch: 53 step: 179, loss is 0.779193639755249\n", - "epoch: 53 step: 180, loss is 0.7490922808647156\n", - "epoch: 53 step: 181, loss is 0.7686058282852173\n", - "epoch: 53 step: 182, loss is 0.7863794565200806\n", - "epoch: 53 step: 183, loss is 0.7736130952835083\n", - "epoch: 53 step: 184, loss is 0.7619942426681519\n", - "epoch: 53 step: 185, loss is 0.7964603900909424\n", - "epoch: 53 step: 186, loss is 0.7618675231933594\n", - "epoch: 53 step: 187, loss is 0.7607605457305908\n", - "epoch: 53 step: 188, loss is 0.7574695944786072\n", - "epoch: 53 step: 189, loss is 0.8425401449203491\n", - "epoch: 53 step: 190, loss is 0.7625440359115601\n", - "epoch: 53 step: 191, loss is 0.7400980591773987\n", - "epoch: 53 step: 192, loss is 0.7028496265411377\n", - "epoch: 53 step: 193, loss is 0.7878526449203491\n", - "epoch: 53 step: 194, loss is 0.7359610199928284\n", - "epoch: 53 step: 195, loss is 
0.7948102951049805\n", - "Train epoch time: 110477.204 ms, per step time: 566.550 ms\n", - "epoch: 54 step: 1, loss is 0.7541799545288086\n", - "epoch: 54 step: 2, loss is 0.7337019443511963\n", - "epoch: 54 step: 3, loss is 0.7299398183822632\n", - "epoch: 54 step: 4, loss is 0.7510303258895874\n", - "epoch: 54 step: 5, loss is 0.7197021245956421\n", - "epoch: 54 step: 6, loss is 0.7509708404541016\n", - "epoch: 54 step: 7, loss is 0.7345867156982422\n", - "epoch: 54 step: 8, loss is 0.725814700126648\n", - "epoch: 54 step: 9, loss is 0.7599236369132996\n", - "epoch: 54 step: 10, loss is 0.7361415028572083\n", - "epoch: 54 step: 11, loss is 0.7698123455047607\n", - "epoch: 54 step: 12, loss is 0.7233853340148926\n", - "epoch: 54 step: 13, loss is 0.7654253244400024\n", - "epoch: 54 step: 14, loss is 0.7545778751373291\n", - "epoch: 54 step: 15, loss is 0.7837658524513245\n", - "epoch: 54 step: 16, loss is 0.7558947801589966\n", - "epoch: 54 step: 17, loss is 0.7312799692153931\n", - "epoch: 54 step: 18, loss is 0.718626856803894\n", - "epoch: 54 step: 19, loss is 0.7159774303436279\n", - "epoch: 54 step: 20, loss is 0.7009605169296265\n", - "epoch: 54 step: 21, loss is 0.7420499324798584\n", - "epoch: 54 step: 22, loss is 0.7563588619232178\n", - "epoch: 54 step: 23, loss is 0.7516261339187622\n", - "epoch: 54 step: 24, loss is 0.7275060415267944\n", - "epoch: 54 step: 25, loss is 0.6901177763938904\n", - "epoch: 54 step: 26, loss is 0.7574909925460815\n", - "epoch: 54 step: 27, loss is 0.8043515682220459\n", - "epoch: 54 step: 28, loss is 0.7236243486404419\n", - "epoch: 54 step: 29, loss is 0.7325580716133118\n", - "epoch: 54 step: 30, loss is 0.7318954467773438\n", - "epoch: 54 step: 31, loss is 0.7882870435714722\n", - "epoch: 54 step: 32, loss is 0.7557847499847412\n", - "epoch: 54 step: 33, loss is 0.723382830619812\n", - "epoch: 54 step: 34, loss is 0.7487095594406128\n", - "epoch: 54 step: 35, loss is 0.7597826719284058\n", - "epoch: 54 step: 36, loss is 0.7986350059509277\n", - "epoch: 54 step: 37, loss is 0.7197121381759644\n", - "epoch: 54 step: 38, loss is 0.7237682342529297\n", - "epoch: 54 step: 39, loss is 0.6924357414245605\n", - "epoch: 54 step: 40, loss is 0.7305189967155457\n", - "epoch: 54 step: 41, loss is 0.7423522472381592\n", - "epoch: 54 step: 42, loss is 0.7294976711273193\n", - "epoch: 54 step: 43, loss is 0.7265321016311646\n", - "epoch: 54 step: 44, loss is 0.7738445997238159\n", - "epoch: 54 step: 45, loss is 0.7417761087417603\n", - "epoch: 54 step: 46, loss is 0.7337515950202942\n", - "epoch: 54 step: 47, loss is 0.7158221006393433\n", - "epoch: 54 step: 48, loss is 0.7415804266929626\n", - "epoch: 54 step: 49, loss is 0.7707773447036743\n", - "epoch: 54 step: 50, loss is 0.7458369731903076\n", - "epoch: 54 step: 51, loss is 0.7439395189285278\n", - "epoch: 54 step: 52, loss is 0.7504356503486633\n", - "epoch: 54 step: 53, loss is 0.7271573543548584\n", - "epoch: 54 step: 54, loss is 0.7263691425323486\n", - "epoch: 54 step: 55, loss is 0.7322994470596313\n", - "epoch: 54 step: 56, loss is 0.731638491153717\n", - "epoch: 54 step: 57, loss is 0.7607872486114502\n", - "epoch: 54 step: 58, loss is 0.7385143637657166\n", - "epoch: 54 step: 59, loss is 0.7565065622329712\n", - "epoch: 54 step: 60, loss is 0.7223849296569824\n", - "epoch: 54 step: 61, loss is 0.7290220260620117\n", - "epoch: 54 step: 62, loss is 0.7081509828567505\n", - "epoch: 54 step: 63, loss is 0.7626541256904602\n", - "epoch: 54 step: 64, loss is 0.7654244899749756\n", - "epoch: 54 step: 65, 
loss is 0.7653444409370422\n", - "epoch: 54 step: 66, loss is 0.760387659072876\n", - "epoch: 54 step: 67, loss is 0.7472813725471497\n", - "epoch: 54 step: 68, loss is 0.7141896486282349\n", - "epoch: 54 step: 69, loss is 0.746383786201477\n", - "epoch: 54 step: 70, loss is 0.7493718862533569\n", - "epoch: 54 step: 71, loss is 0.7681102752685547\n", - "epoch: 54 step: 72, loss is 0.7416931986808777\n", - "epoch: 54 step: 73, loss is 0.7065954208374023\n", - "epoch: 54 step: 74, loss is 0.7480291128158569\n", - "epoch: 54 step: 75, loss is 0.7770567536354065\n", - "epoch: 54 step: 76, loss is 0.7952818870544434\n", - "epoch: 54 step: 77, loss is 0.7983008623123169\n", - "epoch: 54 step: 78, loss is 0.7558801174163818\n", - "epoch: 54 step: 79, loss is 0.7203941345214844\n", - "epoch: 54 step: 80, loss is 0.7246007919311523\n", - "epoch: 54 step: 81, loss is 0.7673553228378296\n", - "epoch: 54 step: 82, loss is 0.7812671661376953\n", - "epoch: 54 step: 83, loss is 0.7313001751899719\n", - "epoch: 54 step: 84, loss is 0.7276346683502197\n", - "epoch: 54 step: 85, loss is 0.7108947038650513\n", - "epoch: 54 step: 86, loss is 0.7738375663757324\n", - "epoch: 54 step: 87, loss is 0.7338845729827881\n", - "epoch: 54 step: 88, loss is 0.814232587814331\n", - "epoch: 54 step: 89, loss is 0.7387315630912781\n", - "epoch: 54 step: 90, loss is 0.7484081983566284\n", - "epoch: 54 step: 91, loss is 0.7564253211021423\n", - "epoch: 54 step: 92, loss is 0.7670139074325562\n", - "epoch: 54 step: 93, loss is 0.755821704864502\n", - "epoch: 54 step: 94, loss is 0.7833334803581238\n", - "epoch: 54 step: 95, loss is 0.7665554285049438\n", - "epoch: 54 step: 96, loss is 0.7675753235816956\n", - "epoch: 54 step: 97, loss is 0.7377117276191711\n", - "epoch: 54 step: 98, loss is 0.7363862991333008\n", - "epoch: 54 step: 99, loss is 0.7812254428863525\n", - "epoch: 54 step: 100, loss is 0.7952500581741333\n", - "epoch: 54 step: 101, loss is 0.7169637680053711\n", - "epoch: 54 step: 102, loss is 0.7211380004882812\n", - "epoch: 54 step: 103, loss is 0.7168134450912476\n", - "epoch: 54 step: 104, loss is 0.7754094004631042\n", - "epoch: 54 step: 105, loss is 0.7297463417053223\n", - "epoch: 54 step: 106, loss is 0.7191413640975952\n", - "epoch: 54 step: 107, loss is 0.7724271416664124\n", - "epoch: 54 step: 108, loss is 0.7343404293060303\n", - "epoch: 54 step: 109, loss is 0.7264528274536133\n", - "epoch: 54 step: 110, loss is 0.7644981145858765\n", - "epoch: 54 step: 111, loss is 0.790616512298584\n", - "epoch: 54 step: 112, loss is 0.776673436164856\n", - "epoch: 54 step: 113, loss is 0.7522897720336914\n", - "epoch: 54 step: 114, loss is 0.6739146709442139\n", - "epoch: 54 step: 115, loss is 0.7514182329177856\n", - "epoch: 54 step: 116, loss is 0.7457122802734375\n", - "epoch: 54 step: 117, loss is 0.7181903123855591\n", - "epoch: 54 step: 118, loss is 0.7044118642807007\n", - "epoch: 54 step: 119, loss is 0.7699830532073975\n", - "epoch: 54 step: 120, loss is 0.7391380071640015\n", - "epoch: 54 step: 121, loss is 0.7953115701675415\n", - "epoch: 54 step: 122, loss is 0.7866383194923401\n", - "epoch: 54 step: 123, loss is 0.7408630847930908\n", - "epoch: 54 step: 124, loss is 0.783271074295044\n", - "epoch: 54 step: 125, loss is 0.7475206851959229\n", - "epoch: 54 step: 126, loss is 0.738511323928833\n", - "epoch: 54 step: 127, loss is 0.7573363184928894\n", - "epoch: 54 step: 128, loss is 0.755993127822876\n", - "epoch: 54 step: 129, loss is 0.7700057029724121\n", - "epoch: 54 step: 130, loss is 
0.7661925554275513\n", - "epoch: 54 step: 131, loss is 0.7119203209877014\n", - "epoch: 54 step: 132, loss is 0.7729599475860596\n", - "epoch: 54 step: 133, loss is 0.7674583196640015\n", - "epoch: 54 step: 134, loss is 0.7573413848876953\n", - "epoch: 54 step: 135, loss is 0.7746201157569885\n", - "epoch: 54 step: 136, loss is 0.7438004016876221\n", - "epoch: 54 step: 137, loss is 0.7321425080299377\n", - "epoch: 54 step: 138, loss is 0.7572171688079834\n", - "epoch: 54 step: 139, loss is 0.7575472593307495\n", - "epoch: 54 step: 140, loss is 0.7502453327178955\n", - "epoch: 54 step: 141, loss is 0.7555367946624756\n", - "epoch: 54 step: 142, loss is 0.7287209033966064\n", - "epoch: 54 step: 143, loss is 0.7316848039627075\n", - "epoch: 54 step: 144, loss is 0.748663604259491\n", - "epoch: 54 step: 145, loss is 0.6767247915267944\n", - "epoch: 54 step: 146, loss is 0.7249683141708374\n", - "epoch: 54 step: 147, loss is 0.7552636861801147\n", - "epoch: 54 step: 148, loss is 0.7152411937713623\n", - "epoch: 54 step: 149, loss is 0.6876088976860046\n", - "epoch: 54 step: 150, loss is 0.7704192996025085\n", - "epoch: 54 step: 151, loss is 0.7437937259674072\n", - "epoch: 54 step: 152, loss is 0.7584824562072754\n", - "epoch: 54 step: 153, loss is 0.7430762052536011\n", - "epoch: 54 step: 154, loss is 0.7273309230804443\n", - "epoch: 54 step: 155, loss is 0.7476761341094971\n", - "epoch: 54 step: 156, loss is 0.7825077772140503\n", - "epoch: 54 step: 157, loss is 0.7951003313064575\n", - "epoch: 54 step: 158, loss is 0.7270292043685913\n", - "epoch: 54 step: 159, loss is 0.7613527774810791\n", - "epoch: 54 step: 160, loss is 0.7755423784255981\n", - "epoch: 54 step: 161, loss is 0.7348408699035645\n", - "epoch: 54 step: 162, loss is 0.7177440524101257\n", - "epoch: 54 step: 163, loss is 0.7837967872619629\n", - "epoch: 54 step: 164, loss is 0.76091468334198\n", - "epoch: 54 step: 165, loss is 0.7560702562332153\n", - "epoch: 54 step: 166, loss is 0.7678524255752563\n", - "epoch: 54 step: 167, loss is 0.7723098993301392\n", - "epoch: 54 step: 168, loss is 0.7421074509620667\n", - "epoch: 54 step: 169, loss is 0.7724562883377075\n", - "epoch: 54 step: 170, loss is 0.7641831636428833\n", - "epoch: 54 step: 171, loss is 0.7909409999847412\n", - "epoch: 54 step: 172, loss is 0.7446756362915039\n", - "epoch: 54 step: 173, loss is 0.727199375629425\n", - "epoch: 54 step: 174, loss is 0.8046799898147583\n", - "epoch: 54 step: 175, loss is 0.7564865350723267\n", - "epoch: 54 step: 176, loss is 0.7449790835380554\n", - "epoch: 54 step: 177, loss is 0.7786581516265869\n", - "epoch: 54 step: 178, loss is 0.7472392320632935\n", - "epoch: 54 step: 179, loss is 0.7383459806442261\n", - "epoch: 54 step: 180, loss is 0.7299227714538574\n", - "epoch: 54 step: 181, loss is 0.7417009472846985\n", - "epoch: 54 step: 182, loss is 0.7536630034446716\n", - "epoch: 54 step: 183, loss is 0.8001237511634827\n", - "epoch: 54 step: 184, loss is 0.7870405912399292\n", - "epoch: 54 step: 185, loss is 0.7591056823730469\n", - "epoch: 54 step: 186, loss is 0.7271779775619507\n", - "epoch: 54 step: 187, loss is 0.7362672686576843\n", - "epoch: 54 step: 188, loss is 0.7715362310409546\n", - "epoch: 54 step: 189, loss is 0.7378365993499756\n", - "epoch: 54 step: 190, loss is 0.685612142086029\n", - "epoch: 54 step: 191, loss is 0.7172574996948242\n", - "epoch: 54 step: 192, loss is 0.776557207107544\n", - "epoch: 54 step: 193, loss is 0.7957172989845276\n", - "epoch: 54 step: 194, loss is 0.7354265451431274\n", - "epoch: 54 
step: 195, loss is 0.7489856481552124\n", - "Train epoch time: 112734.780 ms, per step time: 578.127 ms\n", - "epoch: 55 step: 1, loss is 0.744275689125061\n", - "epoch: 55 step: 2, loss is 0.7038910388946533\n", - "epoch: 55 step: 3, loss is 0.6882286667823792\n", - "epoch: 55 step: 4, loss is 0.7866321206092834\n", - "epoch: 55 step: 5, loss is 0.7007147669792175\n", - "epoch: 55 step: 6, loss is 0.7042255401611328\n", - "epoch: 55 step: 7, loss is 0.7378193736076355\n", - "epoch: 55 step: 8, loss is 0.7591078281402588\n", - "epoch: 55 step: 9, loss is 0.7172237634658813\n", - "epoch: 55 step: 10, loss is 0.7517695426940918\n", - "epoch: 55 step: 11, loss is 0.7702610492706299\n", - "epoch: 55 step: 12, loss is 0.7580333352088928\n", - "epoch: 55 step: 13, loss is 0.7520982623100281\n", - "epoch: 55 step: 14, loss is 0.7355461716651917\n", - "epoch: 55 step: 15, loss is 0.7338203191757202\n", - "epoch: 55 step: 16, loss is 0.7597132921218872\n", - "epoch: 55 step: 17, loss is 0.7511581182479858\n", - "epoch: 55 step: 18, loss is 0.7844325304031372\n", - "epoch: 55 step: 19, loss is 0.7535659074783325\n", - "epoch: 55 step: 20, loss is 0.7130852937698364\n", - "epoch: 55 step: 21, loss is 0.7166057825088501\n", - "epoch: 55 step: 22, loss is 0.7339085340499878\n", - "epoch: 55 step: 23, loss is 0.677399754524231\n", - "epoch: 55 step: 24, loss is 0.7447442412376404\n", - "epoch: 55 step: 25, loss is 0.6997517347335815\n", - "epoch: 55 step: 26, loss is 0.7391203045845032\n", - "epoch: 55 step: 27, loss is 0.773114800453186\n", - "epoch: 55 step: 28, loss is 0.7541046142578125\n", - "epoch: 55 step: 29, loss is 0.7303310632705688\n", - "epoch: 55 step: 30, loss is 0.7417986392974854\n", - "epoch: 55 step: 31, loss is 0.6984444856643677\n", - "epoch: 55 step: 32, loss is 0.7298721671104431\n", - "epoch: 55 step: 33, loss is 0.7427204847335815\n", - "epoch: 55 step: 34, loss is 0.7060085535049438\n", - "epoch: 55 step: 35, loss is 0.7307683229446411\n", - "epoch: 55 step: 36, loss is 0.7617698907852173\n", - "epoch: 55 step: 37, loss is 0.6857370138168335\n", - "epoch: 55 step: 38, loss is 0.7156643867492676\n", - "epoch: 55 step: 39, loss is 0.7336837649345398\n", - "epoch: 55 step: 40, loss is 0.7335735559463501\n", - "epoch: 55 step: 41, loss is 0.7490066885948181\n", - "epoch: 55 step: 42, loss is 0.7337225675582886\n", - "epoch: 55 step: 43, loss is 0.7216358780860901\n", - "epoch: 55 step: 44, loss is 0.7226842641830444\n", - "epoch: 55 step: 45, loss is 0.7380735874176025\n", - "epoch: 55 step: 46, loss is 0.8030295372009277\n", - "epoch: 55 step: 47, loss is 0.750422477722168\n", - "epoch: 55 step: 48, loss is 0.7096849679946899\n", - "epoch: 55 step: 49, loss is 0.7561691999435425\n", - "epoch: 55 step: 50, loss is 0.7495482563972473\n", - "epoch: 55 step: 51, loss is 0.7428330183029175\n", - "epoch: 55 step: 52, loss is 0.7190721035003662\n", - "epoch: 55 step: 53, loss is 0.745968222618103\n", - "epoch: 55 step: 54, loss is 0.7170397043228149\n", - "epoch: 55 step: 55, loss is 0.7815959453582764\n", - "epoch: 55 step: 56, loss is 0.7834920883178711\n", - "epoch: 55 step: 57, loss is 0.7659237384796143\n", - "epoch: 55 step: 58, loss is 0.7417773604393005\n", - "epoch: 55 step: 59, loss is 0.757443904876709\n", - "epoch: 55 step: 60, loss is 0.6910176277160645\n", - "epoch: 55 step: 61, loss is 0.7310269474983215\n", - "epoch: 55 step: 62, loss is 0.7495406866073608\n", - "epoch: 55 step: 63, loss is 0.7034529447555542\n", - "epoch: 55 step: 64, loss is 0.723935067653656\n", - 
"epoch: 55 step: 65, loss is 0.7491556406021118\n", - "epoch: 55 step: 66, loss is 0.6997479200363159\n", - "epoch: 55 step: 67, loss is 0.7111839056015015\n", - "epoch: 55 step: 68, loss is 0.7310456037521362\n", - "epoch: 55 step: 69, loss is 0.7571691274642944\n", - "epoch: 55 step: 70, loss is 0.7079789638519287\n", - "epoch: 55 step: 71, loss is 0.7275900840759277\n", - "epoch: 55 step: 72, loss is 0.7407675981521606\n", - "epoch: 55 step: 73, loss is 0.7183035016059875\n", - "epoch: 55 step: 74, loss is 0.7393774390220642\n", - "epoch: 55 step: 75, loss is 0.712228536605835\n", - "epoch: 55 step: 76, loss is 0.775634765625\n", - "epoch: 55 step: 77, loss is 0.7496112585067749\n", - "epoch: 55 step: 78, loss is 0.749141275882721\n", - "epoch: 55 step: 79, loss is 0.746747612953186\n", - "epoch: 55 step: 80, loss is 0.70037841796875\n", - "epoch: 55 step: 81, loss is 0.7143192291259766\n", - "epoch: 55 step: 82, loss is 0.750150203704834\n", - "epoch: 55 step: 83, loss is 0.7737506628036499\n", - "epoch: 55 step: 84, loss is 0.7171932458877563\n", - "epoch: 55 step: 85, loss is 0.7532806396484375\n", - "epoch: 55 step: 86, loss is 0.7723277807235718\n", - "epoch: 55 step: 87, loss is 0.7289742231369019\n", - "epoch: 55 step: 88, loss is 0.7107532620429993\n", - "epoch: 55 step: 89, loss is 0.7122771739959717\n", - "epoch: 55 step: 90, loss is 0.7294716238975525\n", - "epoch: 55 step: 91, loss is 0.7449286580085754\n", - "epoch: 55 step: 92, loss is 0.7512769103050232\n", - "epoch: 55 step: 93, loss is 0.6922203302383423\n", - "epoch: 55 step: 94, loss is 0.7573877573013306\n", - "epoch: 55 step: 95, loss is 0.7619054317474365\n", - "epoch: 55 step: 96, loss is 0.7789785861968994\n", - "epoch: 55 step: 97, loss is 0.7423627376556396\n", - "epoch: 55 step: 98, loss is 0.7708364725112915\n", - "epoch: 55 step: 99, loss is 0.7475329637527466\n", - "epoch: 55 step: 100, loss is 0.6854932308197021\n", - "epoch: 55 step: 101, loss is 0.6960785388946533\n", - "epoch: 55 step: 102, loss is 0.7220172882080078\n", - "epoch: 55 step: 103, loss is 0.7396911978721619\n", - "epoch: 55 step: 104, loss is 0.7481573224067688\n", - "epoch: 55 step: 105, loss is 0.7013437747955322\n", - "epoch: 55 step: 106, loss is 0.7170718908309937\n", - "epoch: 55 step: 107, loss is 0.7182402610778809\n", - "epoch: 55 step: 108, loss is 0.7258366346359253\n", - "epoch: 55 step: 109, loss is 0.7338352203369141\n", - "epoch: 55 step: 110, loss is 0.7683756947517395\n", - "epoch: 55 step: 111, loss is 0.7408244609832764\n", - "epoch: 55 step: 112, loss is 0.7270534038543701\n", - "epoch: 55 step: 113, loss is 0.742049515247345\n", - "epoch: 55 step: 114, loss is 0.72972571849823\n", - "epoch: 55 step: 115, loss is 0.7034549117088318\n", - "epoch: 55 step: 116, loss is 0.7003132104873657\n", - "epoch: 55 step: 117, loss is 0.722968339920044\n", - "epoch: 55 step: 118, loss is 0.7479698657989502\n", - "epoch: 55 step: 119, loss is 0.7531453967094421\n", - "epoch: 55 step: 120, loss is 0.727893054485321\n", - "epoch: 55 step: 121, loss is 0.7445448637008667\n", - "epoch: 55 step: 122, loss is 0.7122910022735596\n", - "epoch: 55 step: 123, loss is 0.7443070411682129\n", - "epoch: 55 step: 124, loss is 0.7298433780670166\n", - "epoch: 55 step: 125, loss is 0.7234458923339844\n", - "epoch: 55 step: 126, loss is 0.8009555339813232\n", - "epoch: 55 step: 127, loss is 0.7044442892074585\n", - "epoch: 55 step: 128, loss is 0.7753439545631409\n", - "epoch: 55 step: 129, loss is 0.6885216236114502\n", - "epoch: 55 step: 130, loss is 
0.7310723066329956\n", - "epoch: 55 step: 131, loss is 0.7252342700958252\n", - "epoch: 55 step: 132, loss is 0.7497819662094116\n", - "epoch: 55 step: 133, loss is 0.7822544574737549\n", - "epoch: 55 step: 134, loss is 0.7890368700027466\n", - "epoch: 55 step: 135, loss is 0.7503283023834229\n", - "epoch: 55 step: 136, loss is 0.7194048166275024\n", - "epoch: 55 step: 137, loss is 0.6863012909889221\n", - "epoch: 55 step: 138, loss is 0.7515264749526978\n", - "epoch: 55 step: 139, loss is 0.7841272354125977\n", - "epoch: 55 step: 140, loss is 0.7787648439407349\n", - "epoch: 55 step: 141, loss is 0.7027889490127563\n", - "epoch: 55 step: 142, loss is 0.7284849882125854\n", - "epoch: 55 step: 143, loss is 0.7433218359947205\n", - "epoch: 55 step: 144, loss is 0.7432136535644531\n", - "epoch: 55 step: 145, loss is 0.7335867881774902\n", - "epoch: 55 step: 146, loss is 0.7527831792831421\n", - "epoch: 55 step: 147, loss is 0.7496621012687683\n", - "epoch: 55 step: 148, loss is 0.7479175329208374\n", - "epoch: 55 step: 149, loss is 0.7389644384384155\n", - "epoch: 55 step: 150, loss is 0.7516632676124573\n", - "epoch: 55 step: 151, loss is 0.7238610982894897\n", - "epoch: 55 step: 152, loss is 0.736897349357605\n", - "epoch: 55 step: 153, loss is 0.7705087661743164\n", - "epoch: 55 step: 154, loss is 0.7149750590324402\n", - "epoch: 55 step: 155, loss is 0.7272302508354187\n", - "epoch: 55 step: 156, loss is 0.7419195175170898\n", - "epoch: 55 step: 157, loss is 0.7760586142539978\n", - "epoch: 55 step: 158, loss is 0.7338032126426697\n", - "epoch: 55 step: 159, loss is 0.7698612213134766\n", - "epoch: 55 step: 160, loss is 0.7154054641723633\n", - "epoch: 55 step: 161, loss is 0.7244197726249695\n", - "epoch: 55 step: 162, loss is 0.7362625598907471\n", - "epoch: 55 step: 163, loss is 0.72906893491745\n", - "epoch: 55 step: 164, loss is 0.7270680069923401\n", - "epoch: 55 step: 165, loss is 0.7562347054481506\n", - "epoch: 55 step: 166, loss is 0.7544159889221191\n", - "epoch: 55 step: 167, loss is 0.789046049118042\n", - "epoch: 55 step: 168, loss is 0.7636502385139465\n", - "epoch: 55 step: 169, loss is 0.8119328618049622\n", - "epoch: 55 step: 170, loss is 0.743886411190033\n", - "epoch: 55 step: 171, loss is 0.7024474143981934\n", - "epoch: 55 step: 172, loss is 0.7490172386169434\n", - "epoch: 55 step: 173, loss is 0.7480899691581726\n", - "epoch: 55 step: 174, loss is 0.7499377727508545\n", - "epoch: 55 step: 175, loss is 0.7528517246246338\n", - "epoch: 55 step: 176, loss is 0.7139164805412292\n", - "epoch: 55 step: 177, loss is 0.7293939590454102\n", - "epoch: 55 step: 178, loss is 0.7589470148086548\n", - "epoch: 55 step: 179, loss is 0.7250729203224182\n", - "epoch: 55 step: 180, loss is 0.8480008244514465\n", - "epoch: 55 step: 181, loss is 0.7570416331291199\n", - "epoch: 55 step: 182, loss is 0.7542234659194946\n", - "epoch: 55 step: 183, loss is 0.7431902289390564\n", - "epoch: 55 step: 184, loss is 0.7533789873123169\n", - "epoch: 55 step: 185, loss is 0.7397176027297974\n", - "epoch: 55 step: 186, loss is 0.753251314163208\n", - "epoch: 55 step: 187, loss is 0.7259076833724976\n", - "epoch: 55 step: 188, loss is 0.7928569912910461\n", - "epoch: 55 step: 189, loss is 0.7570458650588989\n", - "epoch: 55 step: 190, loss is 0.790752649307251\n", - "epoch: 55 step: 191, loss is 0.763931393623352\n", - "epoch: 55 step: 192, loss is 0.7105399370193481\n", - "epoch: 55 step: 193, loss is 0.783504843711853\n", - "epoch: 55 step: 194, loss is 0.7484074831008911\n", - "epoch: 55 step: 
195, loss is 0.7609691619873047\n", - "Train epoch time: 112069.891 ms, per step time: 574.717 ms\n", - "total time:1h 39m 16s\n", + ".........epoch: 1 step: 1, loss is 2.638883113861084\n", + "epoch: 1 step: 2, loss is 2.573518753051758\n", + "epoch: 1 step: 3, loss is 2.4405431747436523\n", + "epoch: 1 step: 4, loss is 2.3872592449188232\n", + "epoch: 1 step: 5, loss is 2.420213460922241\n", + "epoch: 1 step: 6, loss is 2.4159908294677734\n", + "epoch: 1 step: 7, loss is 2.4326491355895996\n", + "epoch: 1 step: 8, loss is 2.548156261444092\n", + "epoch: 1 step: 9, loss is 2.4759747982025146\n", + "epoch: 1 step: 10, loss is 2.30470609664917\n", + "epoch: 1 step: 11, loss is 2.295321464538574\n", + "epoch: 1 step: 12, loss is 2.345783233642578\n", + "epoch: 1 step: 13, loss is 2.405390977859497\n", + "epoch: 1 step: 14, loss is 2.4544620513916016\n", + "epoch: 1 step: 15, loss is 2.4000041484832764\n", + "epoch: 1 step: 16, loss is 2.310173511505127\n", + "epoch: 1 step: 17, loss is 2.2907655239105225\n", + "epoch: 1 step: 18, loss is 2.3175344467163086\n", + "epoch: 1 step: 19, loss is 2.2723848819732666\n", + "epoch: 1 step: 20, loss is 2.303256034851074\n", + "epoch: 1 step: 21, loss is 2.3760530948638916\n", + "epoch: 1 step: 22, loss is 2.320138931274414\n", + "epoch: 1 step: 23, loss is 2.2911341190338135\n", + "epoch: 1 step: 24, loss is 2.220949411392212\n", + "epoch: 1 step: 25, loss is 2.2211709022521973\n", + "epoch: 1 step: 26, loss is 2.20534610748291\n", + "epoch: 1 step: 27, loss is 2.336505174636841\n", + "epoch: 1 step: 28, loss is 2.328484535217285\n", + "epoch: 1 step: 29, loss is 2.266464948654175\n", + "epoch: 1 step: 30, loss is 2.2147138118743896\n", + "epoch: 1 step: 31, loss is 2.194572925567627\n", + "epoch: 1 step: 32, loss is 2.2162346839904785\n", + "epoch: 1 step: 33, loss is 2.230374336242676\n", + "epoch: 1 step: 34, loss is 2.23865008354187\n", + "epoch: 1 step: 35, loss is 2.216477394104004\n", + "epoch: 1 step: 36, loss is 2.1522412300109863\n", + "epoch: 1 step: 37, loss is 2.144378662109375\n", + "epoch: 1 step: 38, loss is 2.1931869983673096\n", + "epoch: 1 step: 39, loss is 2.1908183097839355\n", + "epoch: 1 step: 40, loss is 2.140573263168335\n", + "epoch: 1 step: 41, loss is 2.2353622913360596\n", + "epoch: 1 step: 42, loss is 2.2642080783843994\n", + "epoch: 1 step: 43, loss is 2.151273012161255\n", + "epoch: 1 step: 44, loss is 2.136136054992676\n", + "epoch: 1 step: 45, loss is 2.1131815910339355\n", + "epoch: 1 step: 46, loss is 2.0992860794067383\n", + "epoch: 1 step: 47, loss is 2.1418492794036865\n", + "epoch: 1 step: 48, loss is 2.1934714317321777\n", + "epoch: 1 step: 49, loss is 2.1348917484283447\n", + "epoch: 1 step: 50, loss is 2.0952866077423096\n", + "epoch: 1 step: 51, loss is 2.0843687057495117\n", + "epoch: 1 step: 52, loss is 2.0794713497161865\n", + "epoch: 1 step: 53, loss is 2.093808650970459\n", + "epoch: 1 step: 54, loss is 2.1402297019958496\n", + "epoch: 1 step: 55, loss is 2.1106529235839844\n", + "epoch: 1 step: 56, loss is 2.039935827255249\n", + "epoch: 1 step: 57, loss is 2.0566112995147705\n", + "epoch: 1 step: 58, loss is 1.981400966644287\n", + "epoch: 1 step: 59, loss is 2.0986363887786865\n", + "epoch: 1 step: 60, loss is 2.0696754455566406\n", + "epoch: 1 step: 61, loss is 2.075943946838379\n", + "epoch: 1 step: 62, loss is 2.0449602603912354\n", + "epoch: 1 step: 63, loss is 2.05940318107605\n", + "epoch: 1 step: 64, loss is 1.9994714260101318\n", + "epoch: 1 step: 65, loss is 2.019737482070923\n", + "epoch: 1 
step: 66, loss is 2.031968832015991\n", + "epoch: 1 step: 67, loss is 2.012155055999756\n", + "epoch: 1 step: 68, loss is 2.037416696548462\n", + "epoch: 1 step: 69, loss is 2.0915579795837402\n", + "epoch: 1 step: 70, loss is 2.0728042125701904\n", + "epoch: 1 step: 71, loss is 1.9910438060760498\n", + "epoch: 1 step: 72, loss is 2.0037577152252197\n", + "epoch: 1 step: 73, loss is 1.9950460195541382\n", + "epoch: 1 step: 74, loss is 2.1307425498962402\n", + "epoch: 1 step: 75, loss is 2.048360824584961\n", + "epoch: 1 step: 76, loss is 1.9907093048095703\n", + "epoch: 1 step: 77, loss is 2.0013222694396973\n", + "epoch: 1 step: 78, loss is 2.0078232288360596\n", + "epoch: 1 step: 79, loss is 2.016446828842163\n", + "epoch: 1 step: 80, loss is 2.0288758277893066\n", + "epoch: 1 step: 81, loss is 2.043100118637085\n", + "epoch: 1 step: 82, loss is 1.9567327499389648\n", + "epoch: 1 step: 83, loss is 2.0198161602020264\n", + "epoch: 1 step: 84, loss is 1.9604343175888062\n", + "epoch: 1 step: 85, loss is 2.006375312805176\n", + "epoch: 1 step: 86, loss is 1.9785192012786865\n", + "epoch: 1 step: 87, loss is 2.0788588523864746\n", + "epoch: 1 step: 88, loss is 2.1227529048919678\n", + "epoch: 1 step: 89, loss is 2.081730604171753\n", + "epoch: 1 step: 90, loss is 1.9872316122055054\n", + "epoch: 1 step: 91, loss is 1.9781984090805054\n", + "epoch: 1 step: 92, loss is 1.9763184785842896\n", + "epoch: 1 step: 93, loss is 1.9550793170928955\n", + "epoch: 1 step: 94, loss is 2.0373826026916504\n", + "epoch: 1 step: 95, loss is 1.982328176498413\n", + "epoch: 1 step: 96, loss is 2.0081329345703125\n", + "epoch: 1 step: 97, loss is 2.052156925201416\n", + "epoch: 1 step: 98, loss is 1.9447203874588013\n", + "epoch: 1 step: 99, loss is 1.9829121828079224\n", + "epoch: 1 step: 100, loss is 2.0165650844573975\n", + "epoch: 1 step: 101, loss is 1.9304027557373047\n", + "epoch: 1 step: 102, loss is 1.9683899879455566\n", + "epoch: 1 step: 103, loss is 1.9072933197021484\n", + "epoch: 1 step: 104, loss is 2.0095560550689697\n", + "epoch: 1 step: 105, loss is 1.916046380996704\n", + "epoch: 1 step: 106, loss is 1.9833627939224243\n", + "epoch: 1 step: 107, loss is 1.996013879776001\n", + "epoch: 1 step: 108, loss is 2.0358314514160156\n", + "epoch: 1 step: 109, loss is 1.9624022245407104\n", + "epoch: 1 step: 110, loss is 1.9491597414016724\n", + "epoch: 1 step: 111, loss is 1.8805303573608398\n", + "epoch: 1 step: 112, loss is 1.9458775520324707\n", + "epoch: 1 step: 113, loss is 2.0051183700561523\n", + "epoch: 1 step: 114, loss is 1.9815006256103516\n", + "epoch: 1 step: 115, loss is 1.9561545848846436\n", + "epoch: 1 step: 116, loss is 1.8841098546981812\n", + "epoch: 1 step: 117, loss is 1.9205280542373657\n", + "epoch: 1 step: 118, loss is 1.8687891960144043\n", + "epoch: 1 step: 119, loss is 2.002972364425659\n", + "epoch: 1 step: 120, loss is 1.9056200981140137\n", + "epoch: 1 step: 121, loss is 1.9248032569885254\n", + "epoch: 1 step: 122, loss is 1.9763829708099365\n", + "epoch: 1 step: 123, loss is 1.9713716506958008\n", + "epoch: 1 step: 124, loss is 1.908636450767517\n", + "epoch: 1 step: 125, loss is 1.8764126300811768\n", + "epoch: 1 step: 126, loss is 1.9688462018966675\n", + "epoch: 1 step: 127, loss is 1.9470515251159668\n", + "epoch: 1 step: 128, loss is 1.9135217666625977\n", + "epoch: 1 step: 129, loss is 1.9001421928405762\n", + "epoch: 1 step: 130, loss is 1.8629754781723022\n", + "epoch: 1 step: 131, loss is 1.9099149703979492\n", + "epoch: 1 step: 132, loss is 
1.9651963710784912\n", + "epoch: 1 step: 133, loss is 1.9044862985610962\n", + "epoch: 1 step: 134, loss is 1.9421677589416504\n", + "epoch: 1 step: 135, loss is 1.8889411687850952\n", + "epoch: 1 step: 136, loss is 1.8921895027160645\n", + "epoch: 1 step: 137, loss is 1.9069132804870605\n", + "epoch: 1 step: 138, loss is 1.8675012588500977\n", + "epoch: 1 step: 139, loss is 1.8725543022155762\n", + "epoch: 1 step: 140, loss is 1.9239650964736938\n", + "epoch: 1 step: 141, loss is 1.819309949874878\n", + "epoch: 1 step: 142, loss is 1.9148330688476562\n", + "epoch: 1 step: 143, loss is 1.9105656147003174\n", + "epoch: 1 step: 144, loss is 1.891392707824707\n", + "epoch: 1 step: 145, loss is 1.9262800216674805\n", + "epoch: 1 step: 146, loss is 1.9419193267822266\n", + "epoch: 1 step: 147, loss is 1.9830487966537476\n", + "epoch: 1 step: 148, loss is 1.89872407913208\n", + "epoch: 1 step: 149, loss is 1.87285315990448\n", + "epoch: 1 step: 150, loss is 1.9204710721969604\n", + "epoch: 1 step: 151, loss is 1.9746975898742676\n", + "epoch: 1 step: 152, loss is 1.9520539045333862\n", + "epoch: 1 step: 153, loss is 1.949217677116394\n", + "epoch: 1 step: 154, loss is 1.8618682622909546\n", + "epoch: 1 step: 155, loss is 1.951012372970581\n", + "epoch: 1 step: 156, loss is 1.9485276937484741\n", + "epoch: 1 step: 157, loss is 1.91765296459198\n", + "epoch: 1 step: 158, loss is 1.8950800895690918\n", + "epoch: 1 step: 159, loss is 1.8799867630004883\n", + "epoch: 1 step: 160, loss is 1.8923043012619019\n", + "epoch: 1 step: 161, loss is 1.8607168197631836\n", + "epoch: 1 step: 162, loss is 1.9474350214004517\n", + "epoch: 1 step: 163, loss is 1.930651068687439\n", + "epoch: 1 step: 164, loss is 1.8652654886245728\n", + "epoch: 1 step: 165, loss is 1.8791214227676392\n", + "epoch: 1 step: 166, loss is 1.8464587926864624\n", + "epoch: 1 step: 167, loss is 1.870832920074463\n", + "epoch: 1 step: 168, loss is 1.8554033041000366\n", + "epoch: 1 step: 169, loss is 1.9062395095825195\n", + "epoch: 1 step: 170, loss is 1.832066297531128\n", + "epoch: 1 step: 171, loss is 1.9498040676116943\n", + "epoch: 1 step: 172, loss is 1.876810073852539\n", + "epoch: 1 step: 173, loss is 1.8209364414215088\n", + "epoch: 1 step: 174, loss is 1.928417444229126\n", + "epoch: 1 step: 175, loss is 1.8859729766845703\n", + "epoch: 1 step: 176, loss is 1.8222718238830566\n", + "epoch: 1 step: 177, loss is 1.9297282695770264\n", + "epoch: 1 step: 178, loss is 1.9256749153137207\n", + "epoch: 1 step: 179, loss is 1.9307948350906372\n", + "epoch: 1 step: 180, loss is 1.9046683311462402\n", + "epoch: 1 step: 181, loss is 1.8602244853973389\n", + "epoch: 1 step: 182, loss is 1.8815189599990845\n", + "epoch: 1 step: 183, loss is 1.831653356552124\n", + "epoch: 1 step: 184, loss is 1.9122625589370728\n", + "epoch: 1 step: 185, loss is 1.8868417739868164\n", + "epoch: 1 step: 186, loss is 1.886857271194458\n", + "epoch: 1 step: 187, loss is 1.8857313394546509\n", + "epoch: 1 step: 188, loss is 1.8315393924713135\n", + "epoch: 1 step: 189, loss is 1.795844316482544\n", + "epoch: 1 step: 190, loss is 1.903660535812378\n", + "epoch: 1 step: 191, loss is 1.886720061302185\n", + "epoch: 1 step: 192, loss is 1.8783310651779175\n", + "epoch: 1 step: 193, loss is 1.8260449171066284\n", + "epoch: 1 step: 194, loss is 1.9001442193984985\n", + "epoch: 1 step: 195, loss is 1.7717247009277344\n", + "Train epoch time: 259177.301 ms, per step time: 1329.114 ms\n", + "epoch: 2 step: 1, loss is 1.8764171600341797\n", + "epoch: 2 step: 2, loss is 
1.8654210567474365\n", + "epoch: 2 step: 3, loss is 1.8962819576263428\n", + "epoch: 2 step: 4, loss is 1.9044393301010132\n", + "epoch: 2 step: 5, loss is 1.9159349203109741\n", + "epoch: 2 step: 6, loss is 1.860903263092041\n", + "epoch: 2 step: 7, loss is 1.8397780656814575\n", + "epoch: 2 step: 8, loss is 1.8566522598266602\n", + "epoch: 2 step: 9, loss is 1.8660041093826294\n", + "epoch: 2 step: 10, loss is 1.811643123626709\n", + "epoch: 2 step: 11, loss is 1.9158527851104736\n", + "epoch: 2 step: 12, loss is 1.8845221996307373\n", + "epoch: 2 step: 13, loss is 1.8094360828399658\n", + "epoch: 2 step: 14, loss is 1.8882935047149658\n", + "epoch: 2 step: 15, loss is 1.8395198583602905\n", + "epoch: 2 step: 16, loss is 1.8069320917129517\n", + "epoch: 2 step: 17, loss is 1.8525073528289795\n", + "epoch: 2 step: 18, loss is 1.880152702331543\n", + "epoch: 2 step: 19, loss is 1.8806684017181396\n", + "epoch: 2 step: 20, loss is 1.9303481578826904\n", + "epoch: 2 step: 21, loss is 1.8876415491104126\n", + "epoch: 2 step: 22, loss is 1.8666844367980957\n", + "epoch: 2 step: 23, loss is 1.7484865188598633\n", + "epoch: 2 step: 24, loss is 1.825082778930664\n", + "epoch: 2 step: 25, loss is 1.916107416152954\n", + "epoch: 2 step: 26, loss is 1.8612622022628784\n", + "epoch: 2 step: 27, loss is 1.8842146396636963\n", + "epoch: 2 step: 28, loss is 1.913114070892334\n", + "epoch: 2 step: 29, loss is 1.8397939205169678\n", + "epoch: 2 step: 30, loss is 1.810388445854187\n", + "epoch: 2 step: 31, loss is 1.8721940517425537\n", + "epoch: 2 step: 32, loss is 1.9090489149093628\n", + "epoch: 2 step: 33, loss is 1.9180216789245605\n", + "epoch: 2 step: 34, loss is 1.8483084440231323\n", + "epoch: 2 step: 35, loss is 1.8458813428878784\n", + "epoch: 2 step: 36, loss is 1.7775852680206299\n", + "epoch: 2 step: 37, loss is 1.82937753200531\n", + "epoch: 2 step: 38, loss is 1.9284522533416748\n", + "epoch: 2 step: 39, loss is 1.8203046321868896\n", + "epoch: 2 step: 40, loss is 1.8719173669815063\n", + "epoch: 2 step: 41, loss is 1.91469407081604\n", + "epoch: 2 step: 42, loss is 1.8199340105056763\n", + "epoch: 2 step: 43, loss is 1.9178109169006348\n", + "epoch: 2 step: 44, loss is 1.7785841226577759\n", + "epoch: 2 step: 45, loss is 1.8188291788101196\n", + "epoch: 2 step: 46, loss is 1.8912720680236816\n", + "epoch: 2 step: 47, loss is 1.8314101696014404\n", + "epoch: 2 step: 48, loss is 1.8089042901992798\n", + "epoch: 2 step: 49, loss is 1.761697769165039\n", + "epoch: 2 step: 50, loss is 1.8489223718643188\n", + "epoch: 2 step: 51, loss is 1.8007612228393555\n", + "epoch: 2 step: 52, loss is 1.8365551233291626\n", + "epoch: 2 step: 53, loss is 1.8732047080993652\n", + "epoch: 2 step: 54, loss is 1.858842134475708\n", + "epoch: 2 step: 55, loss is 1.8669698238372803\n", + "epoch: 2 step: 56, loss is 1.9102803468704224\n", + "epoch: 2 step: 57, loss is 1.7693079710006714\n", + "epoch: 2 step: 58, loss is 1.886691927909851\n", + "epoch: 2 step: 59, loss is 1.8711930513381958\n", + "epoch: 2 step: 60, loss is 1.8170037269592285\n", + "epoch: 2 step: 61, loss is 1.8750832080841064\n", + "epoch: 2 step: 62, loss is 1.827492594718933\n", + "epoch: 2 step: 63, loss is 1.8723680973052979\n", + "epoch: 2 step: 64, loss is 1.8801594972610474\n", + "epoch: 2 step: 65, loss is 1.8117002248764038\n", + "epoch: 2 step: 66, loss is 1.8199105262756348\n", + "epoch: 2 step: 67, loss is 1.7689077854156494\n", + "epoch: 2 step: 68, loss is 1.8566724061965942\n", + "epoch: 2 step: 69, loss is 1.7874548435211182\n", + 
"epoch: 2 step: 70, loss is 1.846337080001831\n", + "epoch: 2 step: 71, loss is 1.8704969882965088\n", + "epoch: 2 step: 72, loss is 1.865816593170166\n", + "epoch: 2 step: 73, loss is 1.7931475639343262\n", + "epoch: 2 step: 74, loss is 1.8223557472229004\n", + "epoch: 2 step: 75, loss is 1.7836610078811646\n", + "epoch: 2 step: 76, loss is 1.7720184326171875\n", + "epoch: 2 step: 77, loss is 1.7444547414779663\n", + "epoch: 2 step: 78, loss is 1.7940417528152466\n", + "epoch: 2 step: 79, loss is 1.9691872596740723\n", + "epoch: 2 step: 80, loss is 1.9548568725585938\n", + "epoch: 2 step: 81, loss is 1.836742877960205\n", + "epoch: 2 step: 82, loss is 1.8257728815078735\n", + "epoch: 2 step: 83, loss is 1.7189502716064453\n", + "epoch: 2 step: 84, loss is 1.783265233039856\n", + "epoch: 2 step: 85, loss is 1.8870549201965332\n", + "epoch: 2 step: 86, loss is 1.79862380027771\n", + "epoch: 2 step: 87, loss is 1.75785493850708\n", + "epoch: 2 step: 88, loss is 1.83668851852417\n", + "epoch: 2 step: 89, loss is 1.7678914070129395\n", + "epoch: 2 step: 90, loss is 1.7843494415283203\n", + "epoch: 2 step: 91, loss is 1.8175278902053833\n", + "epoch: 2 step: 92, loss is 1.8062794208526611\n", + "epoch: 2 step: 93, loss is 1.8651372194290161\n", + "epoch: 2 step: 94, loss is 1.7595592737197876\n", + "epoch: 2 step: 95, loss is 1.7526390552520752\n", + "epoch: 2 step: 96, loss is 1.8090466260910034\n", + "epoch: 2 step: 97, loss is 1.8611671924591064\n", + "epoch: 2 step: 98, loss is 1.7677181959152222\n", + "epoch: 2 step: 99, loss is 1.7586630582809448\n", + "epoch: 2 step: 100, loss is 1.7534208297729492\n", + "epoch: 2 step: 101, loss is 1.8238509893417358\n", + "epoch: 2 step: 102, loss is 1.802830696105957\n", + "epoch: 2 step: 103, loss is 1.8146181106567383\n", + "epoch: 2 step: 104, loss is 1.802114486694336\n", + "epoch: 2 step: 105, loss is 1.8473379611968994\n", + "epoch: 2 step: 106, loss is 1.7681238651275635\n", + "epoch: 2 step: 107, loss is 1.7690260410308838\n", + "epoch: 2 step: 108, loss is 1.780522346496582\n", + "epoch: 2 step: 109, loss is 1.8052101135253906\n", + "epoch: 2 step: 110, loss is 1.7892342805862427\n", + "epoch: 2 step: 111, loss is 1.8029518127441406\n", + "epoch: 2 step: 112, loss is 1.7715411186218262\n", + "epoch: 2 step: 113, loss is 1.8475960493087769\n", + "epoch: 2 step: 114, loss is 1.7511953115463257\n", + "epoch: 2 step: 115, loss is 1.8202978372573853\n", + "epoch: 2 step: 116, loss is 1.7831478118896484\n", + "epoch: 2 step: 117, loss is 1.7480263710021973\n", + "epoch: 2 step: 118, loss is 1.7436833381652832\n", + "epoch: 2 step: 119, loss is 1.8040213584899902\n", + "epoch: 2 step: 120, loss is 1.83659029006958\n", + "epoch: 2 step: 121, loss is 1.820040225982666\n", + "epoch: 2 step: 122, loss is 1.73844575881958\n", + "epoch: 2 step: 123, loss is 1.9094018936157227\n", + "epoch: 2 step: 124, loss is 1.7867511510849\n", + "epoch: 2 step: 125, loss is 1.7701431512832642\n", + "epoch: 2 step: 126, loss is 1.8570033311843872\n", + "epoch: 2 step: 127, loss is 1.7837119102478027\n", + "epoch: 2 step: 128, loss is 1.843719720840454\n", + "epoch: 2 step: 129, loss is 1.7842251062393188\n", + "epoch: 2 step: 130, loss is 1.7220251560211182\n", + "epoch: 2 step: 131, loss is 1.7603051662445068\n", + "epoch: 2 step: 132, loss is 1.7527294158935547\n", + "epoch: 2 step: 133, loss is 1.7984405755996704\n", + "epoch: 2 step: 134, loss is 1.6513129472732544\n", + "epoch: 2 step: 135, loss is 1.7134417295455933\n", + "epoch: 2 step: 136, loss is 
1.8161194324493408\n", + "epoch: 2 step: 137, loss is 1.7737832069396973\n", + "epoch: 2 step: 138, loss is 1.798601746559143\n", + "epoch: 2 step: 139, loss is 1.7748043537139893\n", + "epoch: 2 step: 140, loss is 1.7491514682769775\n", + "epoch: 2 step: 141, loss is 1.7135157585144043\n", + "epoch: 2 step: 142, loss is 1.8159574270248413\n", + "epoch: 2 step: 143, loss is 1.7296099662780762\n", + "epoch: 2 step: 144, loss is 1.687991976737976\n", + "epoch: 2 step: 145, loss is 1.7544621229171753\n", + "epoch: 2 step: 146, loss is 1.773935079574585\n", + "epoch: 2 step: 147, loss is 1.7961208820343018\n", + "epoch: 2 step: 148, loss is 1.767361044883728\n", + "epoch: 2 step: 149, loss is 1.726383924484253\n", + "epoch: 2 step: 150, loss is 1.7894582748413086\n", + "epoch: 2 step: 151, loss is 1.749338150024414\n", + "epoch: 2 step: 152, loss is 1.743436336517334\n", + "epoch: 2 step: 153, loss is 1.8571580648422241\n", + "epoch: 2 step: 154, loss is 1.7430217266082764\n", + "epoch: 2 step: 155, loss is 1.8001377582550049\n", + "epoch: 2 step: 156, loss is 1.6999181509017944\n", + "epoch: 2 step: 157, loss is 1.6559371948242188\n", + "epoch: 2 step: 158, loss is 1.6835334300994873\n", + "epoch: 2 step: 159, loss is 1.689095377922058\n", + "epoch: 2 step: 160, loss is 1.7932332754135132\n", + "epoch: 2 step: 161, loss is 1.7728204727172852\n", + "epoch: 2 step: 162, loss is 1.7910892963409424\n", + "epoch: 2 step: 163, loss is 1.7347700595855713\n", + "epoch: 2 step: 164, loss is 1.7747690677642822\n", + "epoch: 2 step: 165, loss is 1.8257920742034912\n", + "epoch: 2 step: 166, loss is 1.796236515045166\n", + "epoch: 2 step: 167, loss is 1.725193977355957\n", + "epoch: 2 step: 168, loss is 1.7659201622009277\n", + "epoch: 2 step: 169, loss is 1.732904314994812\n", + "epoch: 2 step: 170, loss is 1.7640420198440552\n", + "epoch: 2 step: 171, loss is 1.839599609375\n", + "epoch: 2 step: 172, loss is 1.8699873685836792\n", + "epoch: 2 step: 173, loss is 1.781844973564148\n", + "epoch: 2 step: 174, loss is 1.6786046028137207\n", + "epoch: 2 step: 175, loss is 1.7864519357681274\n", + "epoch: 2 step: 176, loss is 1.7681034803390503\n", + "epoch: 2 step: 177, loss is 1.7039620876312256\n", + "epoch: 2 step: 178, loss is 1.6894598007202148\n", + "epoch: 2 step: 179, loss is 1.657956600189209\n", + "epoch: 2 step: 180, loss is 1.7412686347961426\n", + "epoch: 2 step: 181, loss is 1.7372746467590332\n", + "epoch: 2 step: 182, loss is 1.764717698097229\n", + "epoch: 2 step: 183, loss is 1.7185282707214355\n", + "epoch: 2 step: 184, loss is 1.7518666982650757\n", + "epoch: 2 step: 185, loss is 1.713765025138855\n", + "epoch: 2 step: 186, loss is 1.6638474464416504\n", + "epoch: 2 step: 187, loss is 1.6977347135543823\n", + "epoch: 2 step: 188, loss is 1.713078498840332\n", + "epoch: 2 step: 189, loss is 1.7128965854644775\n", + "epoch: 2 step: 190, loss is 1.6974934339523315\n", + "epoch: 2 step: 191, loss is 1.7476190328598022\n", + "epoch: 2 step: 192, loss is 1.7601354122161865\n", + "epoch: 2 step: 193, loss is 1.7244296073913574\n", + "epoch: 2 step: 194, loss is 1.734021544456482\n", + "epoch: 2 step: 195, loss is 1.7158465385437012\n", + "Train epoch time: 104869.525 ms, per step time: 537.792 ms\n", + "epoch: 3 step: 1, loss is 1.8867528438568115\n", + "epoch: 3 step: 2, loss is 1.6933417320251465\n", + "epoch: 3 step: 3, loss is 1.719986081123352\n", + "epoch: 3 step: 4, loss is 1.7560515403747559\n", + "epoch: 3 step: 5, loss is 1.750830054283142\n", + "epoch: 3 step: 6, loss is 
1.7522552013397217\n", + "epoch: 3 step: 7, loss is 1.725388526916504\n", + "epoch: 3 step: 8, loss is 1.7407855987548828\n", + "epoch: 3 step: 9, loss is 1.7579766511917114\n", + "epoch: 3 step: 10, loss is 1.7326068878173828\n", + "epoch: 3 step: 11, loss is 1.6915056705474854\n", + "epoch: 3 step: 12, loss is 1.6440839767456055\n", + "epoch: 3 step: 13, loss is 1.7490901947021484\n", + "epoch: 3 step: 14, loss is 1.7360472679138184\n", + "epoch: 3 step: 15, loss is 1.6632709503173828\n", + "epoch: 3 step: 16, loss is 1.7057838439941406\n", + "epoch: 3 step: 17, loss is 1.6148725748062134\n", + "epoch: 3 step: 18, loss is 1.7857904434204102\n", + "epoch: 3 step: 19, loss is 1.7227210998535156\n", + "epoch: 3 step: 20, loss is 1.7231005430221558\n", + "epoch: 3 step: 21, loss is 1.6339426040649414\n", + "epoch: 3 step: 22, loss is 1.6736745834350586\n", + "epoch: 3 step: 23, loss is 1.8101587295532227\n", + "epoch: 3 step: 24, loss is 1.7207558155059814\n", + "epoch: 3 step: 25, loss is 1.654760479927063\n", + "epoch: 3 step: 26, loss is 1.7834831476211548\n", + "epoch: 3 step: 27, loss is 1.7731984853744507\n", + "epoch: 3 step: 28, loss is 1.7473971843719482\n", + "epoch: 3 step: 29, loss is 1.7750029563903809\n", + "epoch: 3 step: 30, loss is 1.7596079111099243\n", + "epoch: 3 step: 31, loss is 1.725136637687683\n", + "epoch: 3 step: 32, loss is 1.7741496562957764\n", + "epoch: 3 step: 33, loss is 1.7664138078689575\n", + "epoch: 3 step: 34, loss is 1.670475959777832\n", + "epoch: 3 step: 35, loss is 1.828613519668579\n", + "epoch: 3 step: 36, loss is 1.7001616954803467\n", + "epoch: 3 step: 37, loss is 1.7580057382583618\n", + "epoch: 3 step: 38, loss is 1.7495745420455933\n", + "epoch: 3 step: 39, loss is 1.7551522254943848\n", + "epoch: 3 step: 40, loss is 1.7314077615737915\n", + "epoch: 3 step: 41, loss is 1.6849501132965088\n", + "epoch: 3 step: 42, loss is 1.5595388412475586\n", + "epoch: 3 step: 43, loss is 1.678504467010498\n", + "epoch: 3 step: 44, loss is 1.667729139328003\n", + "epoch: 3 step: 45, loss is 1.6634106636047363\n", + "epoch: 3 step: 46, loss is 1.6988580226898193\n", + "epoch: 3 step: 47, loss is 1.7852537631988525\n", + "epoch: 3 step: 48, loss is 1.7001802921295166\n", + "epoch: 3 step: 49, loss is 1.7837165594100952\n", + "epoch: 3 step: 50, loss is 1.720348596572876\n", + "epoch: 3 step: 51, loss is 1.6855592727661133\n", + "epoch: 3 step: 52, loss is 1.7118786573410034\n", + "epoch: 3 step: 53, loss is 1.7916330099105835\n", + "epoch: 3 step: 54, loss is 1.7429475784301758\n", + "epoch: 3 step: 55, loss is 1.7210909128189087\n", + "epoch: 3 step: 56, loss is 1.6557064056396484\n", + "epoch: 3 step: 57, loss is 1.7130653858184814\n", + "epoch: 3 step: 58, loss is 1.7289283275604248\n", + "epoch: 3 step: 59, loss is 1.7473716735839844\n", + "epoch: 3 step: 60, loss is 1.706933617591858\n", + "epoch: 3 step: 61, loss is 1.7702888250350952\n", + "epoch: 3 step: 62, loss is 1.7082152366638184\n", + "epoch: 3 step: 63, loss is 1.7035220861434937\n", + "epoch: 3 step: 64, loss is 1.6729192733764648\n", + "epoch: 3 step: 65, loss is 1.7105777263641357\n", + "epoch: 3 step: 66, loss is 1.6988070011138916\n", + "epoch: 3 step: 67, loss is 1.6678558588027954\n", + "epoch: 3 step: 68, loss is 1.7534213066101074\n", + "epoch: 3 step: 69, loss is 1.6507441997528076\n", + "epoch: 3 step: 70, loss is 1.7488107681274414\n", + "epoch: 3 step: 71, loss is 1.7332558631896973\n", + "epoch: 3 step: 72, loss is 1.5936641693115234\n", + "epoch: 3 step: 73, loss is 
1.7822604179382324\n", + "epoch: 3 step: 74, loss is 1.7028495073318481\n", + "epoch: 3 step: 75, loss is 1.6848320960998535\n", + "epoch: 3 step: 76, loss is 1.674005150794983\n", + "epoch: 3 step: 77, loss is 1.6766102313995361\n", + "epoch: 3 step: 78, loss is 1.664645791053772\n", + "epoch: 3 step: 79, loss is 1.612847089767456\n", + "epoch: 3 step: 80, loss is 1.7215285301208496\n", + "epoch: 3 step: 81, loss is 1.7921013832092285\n", + "epoch: 3 step: 82, loss is 1.6928977966308594\n", + "epoch: 3 step: 83, loss is 1.6910545825958252\n", + "epoch: 3 step: 84, loss is 1.664106845855713\n", + "epoch: 3 step: 85, loss is 1.7183380126953125\n", + "epoch: 3 step: 86, loss is 1.7484159469604492\n", + "epoch: 3 step: 87, loss is 1.6730763912200928\n", + "epoch: 3 step: 88, loss is 1.756532073020935\n", + "epoch: 3 step: 89, loss is 1.6362700462341309\n", + "epoch: 3 step: 90, loss is 1.6835956573486328\n", + "epoch: 3 step: 91, loss is 1.674790382385254\n", + "epoch: 3 step: 92, loss is 1.7913627624511719\n", + "epoch: 3 step: 93, loss is 1.6417760848999023\n", + "epoch: 3 step: 94, loss is 1.7850971221923828\n", + "epoch: 3 step: 95, loss is 1.748813509941101\n", + "epoch: 3 step: 96, loss is 1.6437792778015137\n", + "epoch: 3 step: 97, loss is 1.7059261798858643\n", + "epoch: 3 step: 98, loss is 1.7096869945526123\n", + "epoch: 3 step: 99, loss is 1.754014253616333\n", + "epoch: 3 step: 100, loss is 1.7702745199203491\n", + "epoch: 3 step: 101, loss is 1.6299293041229248\n", + "epoch: 3 step: 102, loss is 1.670060157775879\n", + "epoch: 3 step: 103, loss is 1.6484415531158447\n", + "epoch: 3 step: 104, loss is 1.7012429237365723\n", + "epoch: 3 step: 105, loss is 1.7077670097351074\n", + "epoch: 3 step: 106, loss is 1.6386703252792358\n", + "epoch: 3 step: 107, loss is 1.747912883758545\n", + "epoch: 3 step: 108, loss is 1.6825727224349976\n", + "epoch: 3 step: 109, loss is 1.6795318126678467\n", + "epoch: 3 step: 110, loss is 1.6648521423339844\n", + "epoch: 3 step: 111, loss is 1.5892670154571533\n", + "epoch: 3 step: 112, loss is 1.694698452949524\n", + "epoch: 3 step: 113, loss is 1.7172842025756836\n", + "epoch: 3 step: 114, loss is 1.6431453227996826\n", + "epoch: 3 step: 115, loss is 1.6932786703109741\n", + "epoch: 3 step: 116, loss is 1.6999952793121338\n", + "epoch: 3 step: 117, loss is 1.7273551225662231\n", + "epoch: 3 step: 118, loss is 1.7236411571502686\n", + "epoch: 3 step: 119, loss is 1.6992449760437012\n", + "epoch: 3 step: 120, loss is 1.637320876121521\n", + "epoch: 3 step: 121, loss is 1.626643180847168\n", + "epoch: 3 step: 122, loss is 1.683396816253662\n", + "epoch: 3 step: 123, loss is 1.629233956336975\n", + "epoch: 3 step: 124, loss is 1.7601648569107056\n", + "epoch: 3 step: 125, loss is 1.653026819229126\n", + "epoch: 3 step: 126, loss is 1.6500368118286133\n", + "epoch: 3 step: 127, loss is 1.617058515548706\n", + "epoch: 3 step: 128, loss is 1.7935128211975098\n", + "epoch: 3 step: 129, loss is 1.7299102544784546\n", + "epoch: 3 step: 130, loss is 1.6581716537475586\n", + "epoch: 3 step: 131, loss is 1.642491340637207\n", + "epoch: 3 step: 132, loss is 1.6019057035446167\n", + "epoch: 3 step: 133, loss is 1.6681201457977295\n", + "epoch: 3 step: 134, loss is 1.6873180866241455\n", + "epoch: 3 step: 135, loss is 1.7119628190994263\n", + "epoch: 3 step: 136, loss is 1.608562707901001\n", + "epoch: 3 step: 137, loss is 1.7033265829086304\n", + "epoch: 3 step: 138, loss is 1.7127063274383545\n", + "epoch: 3 step: 139, loss is 1.6402771472930908\n", + "epoch: 3 
step: 140, loss is 1.67829167842865\n", + "epoch: 3 step: 141, loss is 1.711958646774292\n", + "epoch: 3 step: 142, loss is 1.6943080425262451\n", + "epoch: 3 step: 143, loss is 1.737385869026184\n", + "epoch: 3 step: 144, loss is 1.576507806777954\n", + "epoch: 3 step: 145, loss is 1.7363826036453247\n", + "epoch: 3 step: 146, loss is 1.6845638751983643\n", + "epoch: 3 step: 147, loss is 1.7747251987457275\n", + "epoch: 3 step: 148, loss is 1.7011734247207642\n", + "epoch: 3 step: 149, loss is 1.7067878246307373\n", + "epoch: 3 step: 150, loss is 1.6283619403839111\n", + "epoch: 3 step: 151, loss is 1.6366691589355469\n", + "epoch: 3 step: 152, loss is 1.6314365863800049\n", + "epoch: 3 step: 153, loss is 1.5955181121826172\n", + "epoch: 3 step: 154, loss is 1.6463158130645752\n", + "epoch: 3 step: 155, loss is 1.6713025569915771\n", + "epoch: 3 step: 156, loss is 1.6097553968429565\n", + "epoch: 3 step: 157, loss is 1.7114529609680176\n", + "epoch: 3 step: 158, loss is 1.6502881050109863\n", + "epoch: 3 step: 159, loss is 1.6827635765075684\n", + "epoch: 3 step: 160, loss is 1.6780025959014893\n", + "epoch: 3 step: 161, loss is 1.6402701139450073\n", + "epoch: 3 step: 162, loss is 1.7331740856170654\n", + "epoch: 3 step: 163, loss is 1.6986899375915527\n", + "epoch: 3 step: 164, loss is 1.634945273399353\n", + "epoch: 3 step: 165, loss is 1.6613928079605103\n", + "epoch: 3 step: 166, loss is 1.668107271194458\n", + "epoch: 3 step: 167, loss is 1.575466275215149\n", + "epoch: 3 step: 168, loss is 1.6078819036483765\n", + "epoch: 3 step: 169, loss is 1.6605746746063232\n", + "epoch: 3 step: 170, loss is 1.6862506866455078\n", + "epoch: 3 step: 171, loss is 1.6295291185379028\n", + "epoch: 3 step: 172, loss is 1.6742234230041504\n", + "epoch: 3 step: 173, loss is 1.6285483837127686\n", + "epoch: 3 step: 174, loss is 1.567223310470581\n", + "epoch: 3 step: 175, loss is 1.6306066513061523\n", + "epoch: 3 step: 176, loss is 1.6267826557159424\n", + "epoch: 3 step: 177, loss is 1.653724193572998\n", + "epoch: 3 step: 178, loss is 1.6965084075927734\n", + "epoch: 3 step: 179, loss is 1.6604341268539429\n", + "epoch: 3 step: 180, loss is 1.7102789878845215\n", + "epoch: 3 step: 181, loss is 1.7177000045776367\n", + "epoch: 3 step: 182, loss is 1.6334165334701538\n", + "epoch: 3 step: 183, loss is 1.638780117034912\n", + "epoch: 3 step: 184, loss is 1.604297399520874\n", + "epoch: 3 step: 185, loss is 1.6767387390136719\n", + "epoch: 3 step: 186, loss is 1.7102922201156616\n", + "epoch: 3 step: 187, loss is 1.6600589752197266\n", + "epoch: 3 step: 188, loss is 1.6319167613983154\n", + "epoch: 3 step: 189, loss is 1.7054791450500488\n", + "epoch: 3 step: 190, loss is 1.6692930459976196\n", + "epoch: 3 step: 191, loss is 1.7226042747497559\n", + "epoch: 3 step: 192, loss is 1.5994446277618408\n", + "epoch: 3 step: 193, loss is 1.6193468570709229\n", + "epoch: 3 step: 194, loss is 1.58241868019104\n", + "epoch: 3 step: 195, loss is 1.6260730028152466\n", + "Train epoch time: 104752.403 ms, per step time: 537.192 ms\n", + "epoch: 4 step: 1, loss is 1.6170899868011475\n", + "epoch: 4 step: 2, loss is 1.5874394178390503\n", + "epoch: 4 step: 3, loss is 1.5994703769683838\n", + "epoch: 4 step: 4, loss is 1.62481689453125\n", + "epoch: 4 step: 5, loss is 1.6291778087615967\n", + "epoch: 4 step: 6, loss is 1.6213722229003906\n", + "epoch: 4 step: 7, loss is 1.7046566009521484\n", + "epoch: 4 step: 8, loss is 1.6915771961212158\n", + "epoch: 4 step: 9, loss is 1.677065134048462\n", + "epoch: 4 step: 10, 
loss is 1.6184395551681519\n", + "epoch: 4 step: 11, loss is 1.6148278713226318\n", + "epoch: 4 step: 12, loss is 1.6507956981658936\n", + "epoch: 4 step: 13, loss is 1.6364986896514893\n", + "epoch: 4 step: 14, loss is 1.821528434753418\n", + "epoch: 4 step: 15, loss is 1.6267576217651367\n", + "epoch: 4 step: 16, loss is 1.534109115600586\n", + "epoch: 4 step: 17, loss is 1.592832326889038\n", + "epoch: 4 step: 18, loss is 1.6248440742492676\n", + "epoch: 4 step: 19, loss is 1.6529544591903687\n", + "epoch: 4 step: 20, loss is 1.6775751113891602\n", + "epoch: 4 step: 21, loss is 1.6181339025497437\n", + "epoch: 4 step: 22, loss is 1.6417533159255981\n", + "epoch: 4 step: 23, loss is 1.6672406196594238\n", + "epoch: 4 step: 24, loss is 1.6022335290908813\n", + "epoch: 4 step: 25, loss is 1.6584341526031494\n", + "epoch: 4 step: 26, loss is 1.6036672592163086\n", + "epoch: 4 step: 27, loss is 1.5961905717849731\n", + "epoch: 4 step: 28, loss is 1.6078736782073975\n", + "epoch: 4 step: 29, loss is 1.588315486907959\n", + "epoch: 4 step: 30, loss is 1.6788959503173828\n", + "epoch: 4 step: 31, loss is 1.6397335529327393\n", + "epoch: 4 step: 32, loss is 1.654549241065979\n", + "epoch: 4 step: 33, loss is 1.693988561630249\n", + "epoch: 4 step: 34, loss is 1.6104265451431274\n", + "epoch: 4 step: 35, loss is 1.619600772857666\n", + "epoch: 4 step: 36, loss is 1.6329056024551392\n", + "epoch: 4 step: 37, loss is 1.60945463180542\n", + "epoch: 4 step: 38, loss is 1.574212908744812\n", + "epoch: 4 step: 39, loss is 1.5889909267425537\n", + "epoch: 4 step: 40, loss is 1.6877466440200806\n", + "epoch: 4 step: 41, loss is 1.6301648616790771\n", + "epoch: 4 step: 42, loss is 1.6851108074188232\n", + "epoch: 4 step: 43, loss is 1.7270790338516235\n", + "epoch: 4 step: 44, loss is 1.5986454486846924\n", + "epoch: 4 step: 45, loss is 1.7481826543807983\n", + "epoch: 4 step: 46, loss is 1.6562446355819702\n", + "epoch: 4 step: 47, loss is 1.634871482849121\n", + "epoch: 4 step: 48, loss is 1.5858131647109985\n", + "epoch: 4 step: 49, loss is 1.5915989875793457\n", + "epoch: 4 step: 50, loss is 1.6169230937957764\n", + "epoch: 4 step: 51, loss is 1.6662136316299438\n", + "epoch: 4 step: 52, loss is 1.490707516670227\n", + "epoch: 4 step: 53, loss is 1.6311596632003784\n", + "epoch: 4 step: 54, loss is 1.6416935920715332\n", + "epoch: 4 step: 55, loss is 1.5951164960861206\n", + "epoch: 4 step: 56, loss is 1.6339879035949707\n", + "epoch: 4 step: 57, loss is 1.5342048406600952\n", + "epoch: 4 step: 58, loss is 1.6007111072540283\n", + "epoch: 4 step: 59, loss is 1.6212396621704102\n", + "epoch: 4 step: 60, loss is 1.6135421991348267\n", + "epoch: 4 step: 61, loss is 1.6039801836013794\n", + "epoch: 4 step: 62, loss is 1.6352843046188354\n", + "epoch: 4 step: 63, loss is 1.595940113067627\n", + "epoch: 4 step: 64, loss is 1.583545207977295\n", + "epoch: 4 step: 65, loss is 1.5905790328979492\n", + "epoch: 4 step: 66, loss is 1.6474370956420898\n", + "epoch: 4 step: 67, loss is 1.6864407062530518\n", + "epoch: 4 step: 68, loss is 1.6477031707763672\n", + "epoch: 4 step: 69, loss is 1.6076931953430176\n", + "epoch: 4 step: 70, loss is 1.562612771987915\n", + "epoch: 4 step: 71, loss is 1.6473801136016846\n", + "epoch: 4 step: 72, loss is 1.5088900327682495\n", + "epoch: 4 step: 73, loss is 1.593577265739441\n", + "epoch: 4 step: 74, loss is 1.611776351928711\n", + "epoch: 4 step: 75, loss is 1.5477757453918457\n", + "epoch: 4 step: 76, loss is 1.519180178642273\n", + "epoch: 4 step: 77, loss is 
1.6877485513687134\n", + "epoch: 4 step: 78, loss is 1.580800175666809\n", + "epoch: 4 step: 79, loss is 1.6085501909255981\n", + "epoch: 4 step: 80, loss is 1.5472790002822876\n", + "epoch: 4 step: 81, loss is 1.6766204833984375\n", + "epoch: 4 step: 82, loss is 1.6223424673080444\n", + "epoch: 4 step: 83, loss is 1.6401386260986328\n", + "epoch: 4 step: 84, loss is 1.6479227542877197\n", + "epoch: 4 step: 85, loss is 1.6788415908813477\n", + "epoch: 4 step: 86, loss is 1.6782042980194092\n", + "epoch: 4 step: 87, loss is 1.584916591644287\n", + "epoch: 4 step: 88, loss is 1.6628146171569824\n", + "epoch: 4 step: 89, loss is 1.6550742387771606\n", + "epoch: 4 step: 90, loss is 1.5907825231552124\n", + "epoch: 4 step: 91, loss is 1.5791600942611694\n", + "epoch: 4 step: 92, loss is 1.6070717573165894\n", + "epoch: 4 step: 93, loss is 1.6779537200927734\n", + "epoch: 4 step: 94, loss is 1.597235083580017\n", + "epoch: 4 step: 95, loss is 1.612176775932312\n", + "epoch: 4 step: 96, loss is 1.5669639110565186\n", + "epoch: 4 step: 97, loss is 1.6128562688827515\n", + "epoch: 4 step: 98, loss is 1.6882696151733398\n", + "epoch: 4 step: 99, loss is 1.6880414485931396\n", + "epoch: 4 step: 100, loss is 1.636788249015808\n", + "epoch: 4 step: 101, loss is 1.6431469917297363\n", + "epoch: 4 step: 102, loss is 1.6372301578521729\n", + "epoch: 4 step: 103, loss is 1.6725802421569824\n", + "epoch: 4 step: 104, loss is 1.6099274158477783\n", + "epoch: 4 step: 105, loss is 1.59178626537323\n", + "epoch: 4 step: 106, loss is 1.5561518669128418\n", + "epoch: 4 step: 107, loss is 1.5439170598983765\n", + "epoch: 4 step: 108, loss is 1.618263840675354\n", + "epoch: 4 step: 109, loss is 1.6772207021713257\n", + "epoch: 4 step: 110, loss is 1.6131733655929565\n", + "epoch: 4 step: 111, loss is 1.5681874752044678\n", + "epoch: 4 step: 112, loss is 1.600456953048706\n", + "epoch: 4 step: 113, loss is 1.6290345191955566\n", + "epoch: 4 step: 114, loss is 1.6119649410247803\n", + "epoch: 4 step: 115, loss is 1.7450265884399414\n", + "epoch: 4 step: 116, loss is 1.6324599981307983\n", + "epoch: 4 step: 117, loss is 1.5781363248825073\n", + "epoch: 4 step: 118, loss is 1.6506357192993164\n", + "epoch: 4 step: 119, loss is 1.6227306127548218\n", + "epoch: 4 step: 120, loss is 1.7140979766845703\n", + "epoch: 4 step: 121, loss is 1.598092794418335\n", + "epoch: 4 step: 122, loss is 1.5405499935150146\n", + "epoch: 4 step: 123, loss is 1.6116992235183716\n", + "epoch: 4 step: 124, loss is 1.702329158782959\n", + "epoch: 4 step: 125, loss is 1.5701944828033447\n", + "epoch: 4 step: 126, loss is 1.6739180088043213\n", + "epoch: 4 step: 127, loss is 1.6243786811828613\n", + "epoch: 4 step: 128, loss is 1.5911505222320557\n", + "epoch: 4 step: 129, loss is 1.6837303638458252\n", + "epoch: 4 step: 130, loss is 1.7331933975219727\n", + "epoch: 4 step: 131, loss is 1.6338481903076172\n", + "epoch: 4 step: 132, loss is 1.687780499458313\n", + "epoch: 4 step: 133, loss is 1.631411075592041\n", + "epoch: 4 step: 134, loss is 1.5082311630249023\n", + "epoch: 4 step: 135, loss is 1.7093132734298706\n", + "epoch: 4 step: 136, loss is 1.6863054037094116\n", + "epoch: 4 step: 137, loss is 1.6428139209747314\n", + "epoch: 4 step: 138, loss is 1.6071183681488037\n", + "epoch: 4 step: 139, loss is 1.6849231719970703\n", + "epoch: 4 step: 140, loss is 1.5914067029953003\n", + "epoch: 4 step: 141, loss is 1.6723777055740356\n", + "epoch: 4 step: 142, loss is 1.6657674312591553\n", + "epoch: 4 step: 143, loss is 1.6729118824005127\n", + 
"epoch: 4 step: 144, loss is 1.6285974979400635\n", + "epoch: 4 step: 145, loss is 1.5749783515930176\n", + "epoch: 4 step: 146, loss is 1.551990032196045\n", + "epoch: 4 step: 147, loss is 1.5702435970306396\n", + "epoch: 4 step: 148, loss is 1.5880986452102661\n", + "epoch: 4 step: 149, loss is 1.6107535362243652\n", + "epoch: 4 step: 150, loss is 1.6097286939620972\n", + "epoch: 4 step: 151, loss is 1.6384549140930176\n", + "epoch: 4 step: 152, loss is 1.6657705307006836\n", + "epoch: 4 step: 153, loss is 1.6280202865600586\n", + "epoch: 4 step: 154, loss is 1.5140613317489624\n", + "epoch: 4 step: 155, loss is 1.6598504781723022\n", + "epoch: 4 step: 156, loss is 1.6261745691299438\n", + "epoch: 4 step: 157, loss is 1.586988091468811\n", + "epoch: 4 step: 158, loss is 1.596299171447754\n", + "epoch: 4 step: 159, loss is 1.6569433212280273\n", + "epoch: 4 step: 160, loss is 1.5751378536224365\n", + "epoch: 4 step: 161, loss is 1.5774681568145752\n", + "epoch: 4 step: 162, loss is 1.610267996788025\n", + "epoch: 4 step: 163, loss is 1.6441675424575806\n", + "epoch: 4 step: 164, loss is 1.6687980890274048\n", + "epoch: 4 step: 165, loss is 1.5634090900421143\n", + "epoch: 4 step: 166, loss is 1.5610171556472778\n", + "epoch: 4 step: 167, loss is 1.5455100536346436\n", + "epoch: 4 step: 168, loss is 1.6239489316940308\n", + "epoch: 4 step: 169, loss is 1.623913049697876\n", + "epoch: 4 step: 170, loss is 1.6626644134521484\n", + "epoch: 4 step: 171, loss is 1.5875416994094849\n", + "epoch: 4 step: 172, loss is 1.6008483171463013\n", + "epoch: 4 step: 173, loss is 1.5488783121109009\n", + "epoch: 4 step: 174, loss is 1.6322025060653687\n", + "epoch: 4 step: 175, loss is 1.6677271127700806\n", + "epoch: 4 step: 176, loss is 1.6860058307647705\n", + "epoch: 4 step: 177, loss is 1.64773690700531\n", + "epoch: 4 step: 178, loss is 1.6091396808624268\n", + "epoch: 4 step: 179, loss is 1.6081349849700928\n", + "epoch: 4 step: 180, loss is 1.567929744720459\n", + "epoch: 4 step: 181, loss is 1.649071455001831\n", + "epoch: 4 step: 182, loss is 1.618532419204712\n", + "epoch: 4 step: 183, loss is 1.5510741472244263\n", + "epoch: 4 step: 184, loss is 1.5800877809524536\n", + "epoch: 4 step: 185, loss is 1.5564684867858887\n", + "epoch: 4 step: 186, loss is 1.5395474433898926\n", + "epoch: 4 step: 187, loss is 1.580711007118225\n", + "epoch: 4 step: 188, loss is 1.6125603914260864\n", + "epoch: 4 step: 189, loss is 1.6111979484558105\n", + "epoch: 4 step: 190, loss is 1.5172547101974487\n", + "epoch: 4 step: 191, loss is 1.5355224609375\n", + "epoch: 4 step: 192, loss is 1.5614515542984009\n", + "epoch: 4 step: 193, loss is 1.6335713863372803\n", + "epoch: 4 step: 194, loss is 1.6863150596618652\n", + "epoch: 4 step: 195, loss is 1.6139249801635742\n", + "Train epoch time: 110636.391 ms, per step time: 567.366 ms\n", + "epoch: 5 step: 1, loss is 1.5943663120269775\n", + "epoch: 5 step: 2, loss is 1.554307460784912\n", + "epoch: 5 step: 3, loss is 1.5956385135650635\n", + "epoch: 5 step: 4, loss is 1.6179301738739014\n", + "epoch: 5 step: 5, loss is 1.5203056335449219\n", + "epoch: 5 step: 6, loss is 1.6206932067871094\n", + "epoch: 5 step: 7, loss is 1.6204309463500977\n", + "epoch: 5 step: 8, loss is 1.590932846069336\n", + "epoch: 5 step: 9, loss is 1.5942968130111694\n", + "epoch: 5 step: 10, loss is 1.666048288345337\n", + "epoch: 5 step: 11, loss is 1.4801009893417358\n", + "epoch: 5 step: 12, loss is 1.567136526107788\n", + "epoch: 5 step: 13, loss is 1.5542263984680176\n", + "epoch: 5 step: 
14, loss is 1.597791314125061\n", + "epoch: 5 step: 15, loss is 1.6002317667007446\n", + "epoch: 5 step: 16, loss is 1.5313507318496704\n", + "epoch: 5 step: 17, loss is 1.5855352878570557\n", + "epoch: 5 step: 18, loss is 1.57502281665802\n", + "epoch: 5 step: 19, loss is 1.5729063749313354\n", + "epoch: 5 step: 20, loss is 1.5732524394989014\n", + "epoch: 5 step: 21, loss is 1.6349763870239258\n", + "epoch: 5 step: 22, loss is 1.4730257987976074\n", + "epoch: 5 step: 23, loss is 1.5612541437149048\n", + "epoch: 5 step: 24, loss is 1.5818355083465576\n", + "epoch: 5 step: 25, loss is 1.5759236812591553\n", + "epoch: 5 step: 26, loss is 1.6010531187057495\n", + "epoch: 5 step: 27, loss is 1.5873029232025146\n", + "epoch: 5 step: 28, loss is 1.587331771850586\n", + "epoch: 5 step: 29, loss is 1.5597217082977295\n", + "epoch: 5 step: 30, loss is 1.5778923034667969\n", + "epoch: 5 step: 31, loss is 1.5769906044006348\n", + "epoch: 5 step: 32, loss is 1.594029188156128\n", + "epoch: 5 step: 33, loss is 1.6094048023223877\n", + "epoch: 5 step: 34, loss is 1.532876968383789\n", + "epoch: 5 step: 35, loss is 1.5543620586395264\n", + "epoch: 5 step: 36, loss is 1.5067697763442993\n", + "epoch: 5 step: 37, loss is 1.5700966119766235\n", + "epoch: 5 step: 38, loss is 1.615880012512207\n", + "epoch: 5 step: 39, loss is 1.6059263944625854\n", + "epoch: 5 step: 40, loss is 1.5424622297286987\n", + "epoch: 5 step: 41, loss is 1.531445860862732\n", + "epoch: 5 step: 42, loss is 1.5171324014663696\n", + "epoch: 5 step: 43, loss is 1.606215000152588\n", + "epoch: 5 step: 44, loss is 1.6207685470581055\n", + "epoch: 5 step: 45, loss is 1.579961895942688\n", + "epoch: 5 step: 46, loss is 1.5797560214996338\n", + "epoch: 5 step: 47, loss is 1.6405726671218872\n", + "epoch: 5 step: 48, loss is 1.5306518077850342\n", + "epoch: 5 step: 49, loss is 1.6000994443893433\n", + "epoch: 5 step: 50, loss is 1.5719830989837646\n", + "epoch: 5 step: 51, loss is 1.5672587156295776\n", + "epoch: 5 step: 52, loss is 1.5067540407180786\n", + "epoch: 5 step: 53, loss is 1.5423171520233154\n", + "epoch: 5 step: 54, loss is 1.5768980979919434\n", + "epoch: 5 step: 55, loss is 1.5263104438781738\n", + "epoch: 5 step: 56, loss is 1.5447728633880615\n", + "epoch: 5 step: 57, loss is 1.5517319440841675\n", + "epoch: 5 step: 58, loss is 1.580042839050293\n", + "epoch: 5 step: 59, loss is 1.522869348526001\n", + "epoch: 5 step: 60, loss is 1.524379849433899\n", + "epoch: 5 step: 61, loss is 1.5448496341705322\n", + "epoch: 5 step: 62, loss is 1.6373225450515747\n", + "epoch: 5 step: 63, loss is 1.5713413953781128\n", + "epoch: 5 step: 64, loss is 1.5196709632873535\n", + "epoch: 5 step: 65, loss is 1.6079051494598389\n", + "epoch: 5 step: 66, loss is 1.5023094415664673\n", + "epoch: 5 step: 67, loss is 1.5509767532348633\n", + "epoch: 5 step: 68, loss is 1.557348370552063\n", + "epoch: 5 step: 69, loss is 1.5673748254776\n", + "epoch: 5 step: 70, loss is 1.5405642986297607\n", + "epoch: 5 step: 71, loss is 1.7009708881378174\n", + "epoch: 5 step: 72, loss is 1.6010756492614746\n", + "epoch: 5 step: 73, loss is 1.588024616241455\n", + "epoch: 5 step: 74, loss is 1.66653311252594\n", + "epoch: 5 step: 75, loss is 1.547072410583496\n", + "epoch: 5 step: 76, loss is 1.5790114402770996\n", + "epoch: 5 step: 77, loss is 1.6439878940582275\n", + "epoch: 5 step: 78, loss is 1.6928495168685913\n", + "epoch: 5 step: 79, loss is 1.602914810180664\n", + "epoch: 5 step: 80, loss is 1.6542658805847168\n", + "epoch: 5 step: 81, loss is 
1.5870393514633179\n", + "epoch: 5 step: 82, loss is 1.5492987632751465\n", + "epoch: 5 step: 83, loss is 1.6716631650924683\n", + "epoch: 5 step: 84, loss is 1.5759131908416748\n", + "epoch: 5 step: 85, loss is 1.5805974006652832\n", + "epoch: 5 step: 86, loss is 1.5672096014022827\n", + "epoch: 5 step: 87, loss is 1.6223864555358887\n", + "epoch: 5 step: 88, loss is 1.5661996603012085\n", + "epoch: 5 step: 89, loss is 1.597973346710205\n", + "epoch: 5 step: 90, loss is 1.6521817445755005\n", + "epoch: 5 step: 91, loss is 1.6496332883834839\n", + "epoch: 5 step: 92, loss is 1.6924188137054443\n", + "epoch: 5 step: 93, loss is 1.6457374095916748\n", + "epoch: 5 step: 94, loss is 1.6355054378509521\n", + "epoch: 5 step: 95, loss is 1.5781891345977783\n", + "epoch: 5 step: 96, loss is 1.6036258935928345\n", + "epoch: 5 step: 97, loss is 1.53224778175354\n", + "epoch: 5 step: 98, loss is 1.591837763786316\n", + "epoch: 5 step: 99, loss is 1.6007894277572632\n", + "epoch: 5 step: 100, loss is 1.6559832096099854\n", + "epoch: 5 step: 101, loss is 1.6003804206848145\n", + "epoch: 5 step: 102, loss is 1.5162981748580933\n", + "epoch: 5 step: 103, loss is 1.6077687740325928\n", + "epoch: 5 step: 104, loss is 1.7394366264343262\n", + "epoch: 5 step: 105, loss is 1.5982387065887451\n", + "epoch: 5 step: 106, loss is 1.5703903436660767\n", + "epoch: 5 step: 107, loss is 1.5859620571136475\n", + "epoch: 5 step: 108, loss is 1.5634443759918213\n", + "epoch: 5 step: 109, loss is 1.5840113162994385\n", + "epoch: 5 step: 110, loss is 1.5682717561721802\n", + "epoch: 5 step: 111, loss is 1.6015645265579224\n", + "epoch: 5 step: 112, loss is 1.58243989944458\n", + "epoch: 5 step: 113, loss is 1.556445598602295\n", + "epoch: 5 step: 114, loss is 1.5774033069610596\n", + "epoch: 5 step: 115, loss is 1.6934646368026733\n", + "epoch: 5 step: 116, loss is 1.6230828762054443\n", + "epoch: 5 step: 117, loss is 1.5840500593185425\n", + "epoch: 5 step: 118, loss is 1.5250436067581177\n", + "epoch: 5 step: 119, loss is 1.5588792562484741\n", + "epoch: 5 step: 120, loss is 1.5542608499526978\n", + "epoch: 5 step: 121, loss is 1.5289967060089111\n", + "epoch: 5 step: 122, loss is 1.5215528011322021\n", + "epoch: 5 step: 123, loss is 1.6318316459655762\n", + "epoch: 5 step: 124, loss is 1.5558998584747314\n", + "epoch: 5 step: 125, loss is 1.5618566274642944\n", + "epoch: 5 step: 126, loss is 1.5083032846450806\n", + "epoch: 5 step: 127, loss is 1.6160345077514648\n", + "epoch: 5 step: 128, loss is 1.5610473155975342\n", + "epoch: 5 step: 129, loss is 1.616487741470337\n", + "epoch: 5 step: 130, loss is 1.5368707180023193\n", + "epoch: 5 step: 131, loss is 1.6319594383239746\n", + "epoch: 5 step: 132, loss is 1.4831931591033936\n", + "epoch: 5 step: 133, loss is 1.5388773679733276\n", + "epoch: 5 step: 134, loss is 1.579185962677002\n", + "epoch: 5 step: 135, loss is 1.5821640491485596\n", + "epoch: 5 step: 136, loss is 1.6592166423797607\n", + "epoch: 5 step: 137, loss is 1.523725152015686\n", + "epoch: 5 step: 138, loss is 1.501162052154541\n", + "epoch: 5 step: 139, loss is 1.4744919538497925\n", + "epoch: 5 step: 140, loss is 1.6055625677108765\n", + "epoch: 5 step: 141, loss is 1.6004948616027832\n", + "epoch: 5 step: 142, loss is 1.6344472169876099\n", + "epoch: 5 step: 143, loss is 1.5452232360839844\n", + "epoch: 5 step: 144, loss is 1.5726525783538818\n", + "epoch: 5 step: 145, loss is 1.6451191902160645\n", + "epoch: 5 step: 146, loss is 1.5385394096374512\n", + "epoch: 5 step: 147, loss is 
1.667829155921936\n", + "epoch: 5 step: 148, loss is 1.5719233751296997\n", + "epoch: 5 step: 149, loss is 1.6380459070205688\n", + "epoch: 5 step: 150, loss is 1.4807301759719849\n", + "epoch: 5 step: 151, loss is 1.5496678352355957\n", + "epoch: 5 step: 152, loss is 1.5287295579910278\n", + "epoch: 5 step: 153, loss is 1.6034035682678223\n", + "epoch: 5 step: 154, loss is 1.5908045768737793\n", + "epoch: 5 step: 155, loss is 1.4985716342926025\n", + "epoch: 5 step: 156, loss is 1.6048939228057861\n", + "epoch: 5 step: 157, loss is 1.5636354684829712\n", + "epoch: 5 step: 158, loss is 1.638756275177002\n", + "epoch: 5 step: 159, loss is 1.5528602600097656\n", + "epoch: 5 step: 160, loss is 1.5117521286010742\n", + "epoch: 5 step: 161, loss is 1.52243173122406\n", + "epoch: 5 step: 162, loss is 1.4143059253692627\n", + "epoch: 5 step: 163, loss is 1.5850802659988403\n", + "epoch: 5 step: 164, loss is 1.5521283149719238\n", + "epoch: 5 step: 165, loss is 1.5938066244125366\n", + "epoch: 5 step: 166, loss is 1.5635497570037842\n", + "epoch: 5 step: 167, loss is 1.6322648525238037\n", + "epoch: 5 step: 168, loss is 1.5397059917449951\n", + "epoch: 5 step: 169, loss is 1.5669395923614502\n", + "epoch: 5 step: 170, loss is 1.5303573608398438\n", + "epoch: 5 step: 171, loss is 1.58109712600708\n", + "epoch: 5 step: 172, loss is 1.4746376276016235\n", + "epoch: 5 step: 173, loss is 1.5410019159317017\n", + "epoch: 5 step: 174, loss is 1.5294852256774902\n", + "epoch: 5 step: 175, loss is 1.4756050109863281\n", + "epoch: 5 step: 176, loss is 1.5638854503631592\n", + "epoch: 5 step: 177, loss is 1.5061533451080322\n", + "epoch: 5 step: 178, loss is 1.5360673666000366\n", + "epoch: 5 step: 179, loss is 1.596306324005127\n", + "epoch: 5 step: 180, loss is 1.5214159488677979\n", + "epoch: 5 step: 181, loss is 1.5845446586608887\n", + "epoch: 5 step: 182, loss is 1.476664423942566\n", + "epoch: 5 step: 183, loss is 1.564254879951477\n", + "epoch: 5 step: 184, loss is 1.495198369026184\n", + "epoch: 5 step: 185, loss is 1.5550212860107422\n", + "epoch: 5 step: 186, loss is 1.4823147058486938\n", + "epoch: 5 step: 187, loss is 1.5259217023849487\n", + "epoch: 5 step: 188, loss is 1.565096378326416\n", + "epoch: 5 step: 189, loss is 1.4968708753585815\n", + "epoch: 5 step: 190, loss is 1.5009922981262207\n", + "epoch: 5 step: 191, loss is 1.4912614822387695\n", + "epoch: 5 step: 192, loss is 1.5315996408462524\n", + "epoch: 5 step: 193, loss is 1.5215895175933838\n", + "epoch: 5 step: 194, loss is 1.5874063968658447\n", + "epoch: 5 step: 195, loss is 1.5553781986236572\n", + "Train epoch time: 109940.212 ms, per step time: 563.796 ms\n", + "epoch: 6 step: 1, loss is 1.4820706844329834\n", + "epoch: 6 step: 2, loss is 1.5628302097320557\n", + "epoch: 6 step: 3, loss is 1.5254963636398315\n", + "epoch: 6 step: 4, loss is 1.54030179977417\n", + "epoch: 6 step: 5, loss is 1.6452958583831787\n", + "epoch: 6 step: 6, loss is 1.5791105031967163\n", + "epoch: 6 step: 7, loss is 1.5203231573104858\n", + "epoch: 6 step: 8, loss is 1.587199091911316\n", + "epoch: 6 step: 9, loss is 1.5797126293182373\n", + "epoch: 6 step: 10, loss is 1.5302902460098267\n", + "epoch: 6 step: 11, loss is 1.5219321250915527\n", + "epoch: 6 step: 12, loss is 1.4697996377944946\n", + "epoch: 6 step: 13, loss is 1.495786190032959\n", + "epoch: 6 step: 14, loss is 1.562410593032837\n", + "epoch: 6 step: 15, loss is 1.4582977294921875\n", + "epoch: 6 step: 16, loss is 1.5436890125274658\n", + "epoch: 6 step: 17, loss is 
1.5115001201629639\n", + "epoch: 6 step: 18, loss is 1.587449073791504\n", + "epoch: 6 step: 19, loss is 1.5912611484527588\n", + "epoch: 6 step: 20, loss is 1.5498285293579102\n", + "epoch: 6 step: 21, loss is 1.5494215488433838\n", + "epoch: 6 step: 22, loss is 1.5920791625976562\n", + "epoch: 6 step: 23, loss is 1.600709080696106\n", + "epoch: 6 step: 24, loss is 1.5743154287338257\n", + "epoch: 6 step: 25, loss is 1.4922211170196533\n", + "epoch: 6 step: 26, loss is 1.462087631225586\n", + "epoch: 6 step: 27, loss is 1.4587817192077637\n", + "epoch: 6 step: 28, loss is 1.5293550491333008\n", + "epoch: 6 step: 29, loss is 1.7556171417236328\n", + "epoch: 6 step: 30, loss is 1.5378761291503906\n", + "epoch: 6 step: 31, loss is 1.5729938745498657\n", + "epoch: 6 step: 32, loss is 1.5792850255966187\n", + "epoch: 6 step: 33, loss is 1.5292260646820068\n", + "epoch: 6 step: 34, loss is 1.5687099695205688\n", + "epoch: 6 step: 35, loss is 1.5966664552688599\n", + "epoch: 6 step: 36, loss is 1.550017237663269\n", + "epoch: 6 step: 37, loss is 1.602752923965454\n", + "epoch: 6 step: 38, loss is 1.5238289833068848\n", + "epoch: 6 step: 39, loss is 1.4592957496643066\n", + "epoch: 6 step: 40, loss is 1.591481328010559\n", + "epoch: 6 step: 41, loss is 1.5168592929840088\n", + "epoch: 6 step: 42, loss is 1.6536837816238403\n", + "epoch: 6 step: 43, loss is 1.5050888061523438\n", + "epoch: 6 step: 44, loss is 1.545893907546997\n", + "epoch: 6 step: 45, loss is 1.5664050579071045\n", + "epoch: 6 step: 46, loss is 1.5696938037872314\n", + "epoch: 6 step: 47, loss is 1.5202670097351074\n", + "epoch: 6 step: 48, loss is 1.53323495388031\n", + "epoch: 6 step: 49, loss is 1.6041948795318604\n", + "epoch: 6 step: 50, loss is 1.5902481079101562\n", + "epoch: 6 step: 51, loss is 1.5249226093292236\n", + "epoch: 6 step: 52, loss is 1.565647840499878\n", + "epoch: 6 step: 53, loss is 1.512770175933838\n", + "epoch: 6 step: 54, loss is 1.4508510828018188\n", + "epoch: 6 step: 55, loss is 1.5551884174346924\n", + "epoch: 6 step: 56, loss is 1.6061748266220093\n", + "epoch: 6 step: 57, loss is 1.546997308731079\n", + "epoch: 6 step: 58, loss is 1.4901765584945679\n", + "epoch: 6 step: 59, loss is 1.4814434051513672\n", + "epoch: 6 step: 60, loss is 1.4967684745788574\n", + "epoch: 6 step: 61, loss is 1.5344221591949463\n", + "epoch: 6 step: 62, loss is 1.4963053464889526\n", + "epoch: 6 step: 63, loss is 1.5127581357955933\n", + "epoch: 6 step: 64, loss is 1.5542186498641968\n", + "epoch: 6 step: 65, loss is 1.5392463207244873\n", + "epoch: 6 step: 66, loss is 1.5112230777740479\n", + "epoch: 6 step: 67, loss is 1.4811499118804932\n", + "epoch: 6 step: 68, loss is 1.4790319204330444\n", + "epoch: 6 step: 69, loss is 1.5338348150253296\n", + "epoch: 6 step: 70, loss is 1.479151964187622\n", + "epoch: 6 step: 71, loss is 1.5310709476470947\n", + "epoch: 6 step: 72, loss is 1.443009853363037\n", + "epoch: 6 step: 73, loss is 1.5717990398406982\n", + "epoch: 6 step: 74, loss is 1.5053502321243286\n", + "epoch: 6 step: 75, loss is 1.5676695108413696\n", + "epoch: 6 step: 76, loss is 1.6194803714752197\n", + "epoch: 6 step: 77, loss is 1.5513170957565308\n", + "epoch: 6 step: 78, loss is 1.56096613407135\n", + "epoch: 6 step: 79, loss is 1.5508379936218262\n", + "epoch: 6 step: 80, loss is 1.540370225906372\n", + "epoch: 6 step: 81, loss is 1.484365463256836\n", + "epoch: 6 step: 82, loss is 1.670857310295105\n", + "epoch: 6 step: 83, loss is 1.4762821197509766\n", + "epoch: 6 step: 84, loss is 1.5637893676757812\n", 
+ "epoch: 6 step: 85, loss is 1.5487871170043945\n", + "epoch: 6 step: 86, loss is 1.5412838459014893\n", + "epoch: 6 step: 87, loss is 1.5195348262786865\n", + "epoch: 6 step: 88, loss is 1.5217773914337158\n", + "epoch: 6 step: 89, loss is 1.5372576713562012\n", + "epoch: 6 step: 90, loss is 1.477549433708191\n", + "epoch: 6 step: 91, loss is 1.590430736541748\n", + "epoch: 6 step: 92, loss is 1.5854997634887695\n", + "epoch: 6 step: 93, loss is 1.4605967998504639\n", + "epoch: 6 step: 94, loss is 1.4280827045440674\n", + "epoch: 6 step: 95, loss is 1.4254584312438965\n", + "epoch: 6 step: 96, loss is 1.537740707397461\n", + "epoch: 6 step: 97, loss is 1.6506030559539795\n", + "epoch: 6 step: 98, loss is 1.477738380432129\n", + "epoch: 6 step: 99, loss is 1.589169979095459\n", + "epoch: 6 step: 100, loss is 1.4808933734893799\n", + "epoch: 6 step: 101, loss is 1.5074498653411865\n", + "epoch: 6 step: 102, loss is 1.5635223388671875\n", + "epoch: 6 step: 103, loss is 1.5437006950378418\n", + "epoch: 6 step: 104, loss is 1.4969143867492676\n", + "epoch: 6 step: 105, loss is 1.5058225393295288\n", + "epoch: 6 step: 106, loss is 1.5589568614959717\n", + "epoch: 6 step: 107, loss is 1.6535900831222534\n", + "epoch: 6 step: 108, loss is 1.4975215196609497\n", + "epoch: 6 step: 109, loss is 1.534811019897461\n", + "epoch: 6 step: 110, loss is 1.531850814819336\n", + "epoch: 6 step: 111, loss is 1.5115636587142944\n", + "epoch: 6 step: 112, loss is 1.5775026082992554\n", + "epoch: 6 step: 113, loss is 1.5099866390228271\n", + "epoch: 6 step: 114, loss is 1.5446399450302124\n", + "epoch: 6 step: 115, loss is 1.4686355590820312\n", + "epoch: 6 step: 116, loss is 1.4502360820770264\n", + "epoch: 6 step: 117, loss is 1.512837290763855\n", + "epoch: 6 step: 118, loss is 1.5582005977630615\n", + "epoch: 6 step: 119, loss is 1.5097419023513794\n", + "epoch: 6 step: 120, loss is 1.546525478363037\n", + "epoch: 6 step: 121, loss is 1.4619269371032715\n", + "epoch: 6 step: 122, loss is 1.6216639280319214\n", + "epoch: 6 step: 123, loss is 1.5246281623840332\n", + "epoch: 6 step: 124, loss is 1.5888254642486572\n", + "epoch: 6 step: 125, loss is 1.5490646362304688\n", + "epoch: 6 step: 126, loss is 1.4977644681930542\n", + "epoch: 6 step: 127, loss is 1.5160419940948486\n", + "epoch: 6 step: 128, loss is 1.6227569580078125\n", + "epoch: 6 step: 129, loss is 1.5051426887512207\n", + "epoch: 6 step: 130, loss is 1.5211124420166016\n", + "epoch: 6 step: 131, loss is 1.5353591442108154\n", + "epoch: 6 step: 132, loss is 1.550866723060608\n", + "epoch: 6 step: 133, loss is 1.3935943841934204\n", + "epoch: 6 step: 134, loss is 1.467165231704712\n", + "epoch: 6 step: 135, loss is 1.4651075601577759\n", + "epoch: 6 step: 136, loss is 1.464550495147705\n", + "epoch: 6 step: 137, loss is 1.5777981281280518\n", + "epoch: 6 step: 138, loss is 1.5134718418121338\n", + "epoch: 6 step: 139, loss is 1.4513416290283203\n", + "epoch: 6 step: 140, loss is 1.4803986549377441\n", + "epoch: 6 step: 141, loss is 1.4986960887908936\n", + "epoch: 6 step: 142, loss is 1.5765844583511353\n", + "epoch: 6 step: 143, loss is 1.5319902896881104\n", + "epoch: 6 step: 144, loss is 1.503429651260376\n", + "epoch: 6 step: 145, loss is 1.4282722473144531\n", + "epoch: 6 step: 146, loss is 1.540123701095581\n", + "epoch: 6 step: 147, loss is 1.4694690704345703\n", + "epoch: 6 step: 148, loss is 1.527375340461731\n", + "epoch: 6 step: 149, loss is 1.5286122560501099\n", + "epoch: 6 step: 150, loss is 1.5294378995895386\n", + "epoch: 6 step: 
151, loss is 1.4084991216659546\n", + "epoch: 6 step: 152, loss is 1.5220143795013428\n", + "epoch: 6 step: 153, loss is 1.462801456451416\n", + "epoch: 6 step: 154, loss is 1.4989583492279053\n", + "epoch: 6 step: 155, loss is 1.4936156272888184\n", + "epoch: 6 step: 156, loss is 1.4188055992126465\n", + "epoch: 6 step: 157, loss is 1.5452033281326294\n", + "epoch: 6 step: 158, loss is 1.517438292503357\n", + "epoch: 6 step: 159, loss is 1.6103657484054565\n", + "epoch: 6 step: 160, loss is 1.5563052892684937\n", + "epoch: 6 step: 161, loss is 1.5444371700286865\n", + "epoch: 6 step: 162, loss is 1.4468681812286377\n", + "epoch: 6 step: 163, loss is 1.5466785430908203\n", + "epoch: 6 step: 164, loss is 1.5068588256835938\n", + "epoch: 6 step: 165, loss is 1.5993201732635498\n", + "epoch: 6 step: 166, loss is 1.4437881708145142\n", + "epoch: 6 step: 167, loss is 1.4915460348129272\n", + "epoch: 6 step: 168, loss is 1.5283218622207642\n", + "epoch: 6 step: 169, loss is 1.527174949645996\n", + "epoch: 6 step: 170, loss is 1.4889256954193115\n", + "epoch: 6 step: 171, loss is 1.5235013961791992\n", + "epoch: 6 step: 172, loss is 1.4517359733581543\n", + "epoch: 6 step: 173, loss is 1.5080068111419678\n", + "epoch: 6 step: 174, loss is 1.5042757987976074\n", + "epoch: 6 step: 175, loss is 1.4629448652267456\n", + "epoch: 6 step: 176, loss is 1.47317636013031\n", + "epoch: 6 step: 177, loss is 1.4372434616088867\n", + "epoch: 6 step: 178, loss is 1.495514988899231\n", + "epoch: 6 step: 179, loss is 1.4621520042419434\n", + "epoch: 6 step: 180, loss is 1.4509470462799072\n", + "epoch: 6 step: 181, loss is 1.4900439977645874\n", + "epoch: 6 step: 182, loss is 1.4709784984588623\n", + "epoch: 6 step: 183, loss is 1.475778341293335\n", + "epoch: 6 step: 184, loss is 1.4405007362365723\n", + "epoch: 6 step: 185, loss is 1.4910614490509033\n", + "epoch: 6 step: 186, loss is 1.489043951034546\n", + "epoch: 6 step: 187, loss is 1.4047229290008545\n", + "epoch: 6 step: 188, loss is 1.5125796794891357\n", + "epoch: 6 step: 189, loss is 1.4376065731048584\n", + "epoch: 6 step: 190, loss is 1.557037115097046\n", + "epoch: 6 step: 191, loss is 1.4511313438415527\n", + "epoch: 6 step: 192, loss is 1.3752686977386475\n", + "epoch: 6 step: 193, loss is 1.4388941526412964\n", + "epoch: 6 step: 194, loss is 1.42202889919281\n", + "epoch: 6 step: 195, loss is 1.5037436485290527\n", + "Train epoch time: 100902.387 ms, per step time: 517.448 ms\n", + "epoch: 7 step: 1, loss is 1.4944714307785034\n", + "epoch: 7 step: 2, loss is 1.5231469869613647\n", + "epoch: 7 step: 3, loss is 1.5051963329315186\n", + "epoch: 7 step: 4, loss is 1.4750311374664307\n", + "epoch: 7 step: 5, loss is 1.4238444566726685\n", + "epoch: 7 step: 6, loss is 1.3889541625976562\n", + "epoch: 7 step: 7, loss is 1.51995849609375\n", + "epoch: 7 step: 8, loss is 1.3617196083068848\n", + "epoch: 7 step: 9, loss is 1.4291565418243408\n", + "epoch: 7 step: 10, loss is 1.5141608715057373\n", + "epoch: 7 step: 11, loss is 1.40212082862854\n", + "epoch: 7 step: 12, loss is 1.5683667659759521\n", + "epoch: 7 step: 13, loss is 1.4186745882034302\n", + "epoch: 7 step: 14, loss is 1.4872891902923584\n", + "epoch: 7 step: 15, loss is 1.442793607711792\n", + "epoch: 7 step: 16, loss is 1.5195460319519043\n", + "epoch: 7 step: 17, loss is 1.4752014875411987\n", + "epoch: 7 step: 18, loss is 1.5036280155181885\n", + "epoch: 7 step: 19, loss is 1.4479248523712158\n", + "epoch: 7 step: 20, loss is 1.4271469116210938\n", + "epoch: 7 step: 21, loss is 
1.526932716369629\n", + "epoch: 7 step: 22, loss is 1.4624922275543213\n", + "epoch: 7 step: 23, loss is 1.4283968210220337\n", + "epoch: 7 step: 24, loss is 1.4121249914169312\n", + "epoch: 7 step: 25, loss is 1.4773215055465698\n", + "epoch: 7 step: 26, loss is 1.5261627435684204\n", + "epoch: 7 step: 27, loss is 1.4904704093933105\n", + "epoch: 7 step: 28, loss is 1.5609296560287476\n", + "epoch: 7 step: 29, loss is 1.5647296905517578\n", + "epoch: 7 step: 30, loss is 1.47566819190979\n", + "epoch: 7 step: 31, loss is 1.474450707435608\n", + "epoch: 7 step: 32, loss is 1.487202763557434\n", + "epoch: 7 step: 33, loss is 1.4235256910324097\n", + "epoch: 7 step: 34, loss is 1.389725923538208\n", + "epoch: 7 step: 35, loss is 1.4937418699264526\n", + "epoch: 7 step: 36, loss is 1.503326654434204\n", + "epoch: 7 step: 37, loss is 1.381772756576538\n", + "epoch: 7 step: 38, loss is 1.4201788902282715\n", + "epoch: 7 step: 39, loss is 1.482316017150879\n", + "epoch: 7 step: 40, loss is 1.4755967855453491\n", + "epoch: 7 step: 41, loss is 1.467300295829773\n", + "epoch: 7 step: 42, loss is 1.3595844507217407\n", + "epoch: 7 step: 43, loss is 1.445594310760498\n", + "epoch: 7 step: 44, loss is 1.5281751155853271\n", + "epoch: 7 step: 45, loss is 1.42103111743927\n", + "epoch: 7 step: 46, loss is 1.3971266746520996\n", + "epoch: 7 step: 47, loss is 1.4020984172821045\n", + "epoch: 7 step: 48, loss is 1.4719732999801636\n", + "epoch: 7 step: 49, loss is 1.522125244140625\n", + "epoch: 7 step: 50, loss is 1.5082917213439941\n", + "epoch: 7 step: 51, loss is 1.4327963590621948\n", + "epoch: 7 step: 52, loss is 1.47660493850708\n", + "epoch: 7 step: 53, loss is 1.524113655090332\n", + "epoch: 7 step: 54, loss is 1.5392918586730957\n", + "epoch: 7 step: 55, loss is 1.4205468893051147\n", + "epoch: 7 step: 56, loss is 1.4417359828948975\n", + "epoch: 7 step: 57, loss is 1.5153014659881592\n", + "epoch: 7 step: 58, loss is 1.424911379814148\n", + "epoch: 7 step: 59, loss is 1.402571678161621\n", + "epoch: 7 step: 60, loss is 1.4859745502471924\n", + "epoch: 7 step: 61, loss is 1.446738600730896\n", + "epoch: 7 step: 62, loss is 1.5359376668930054\n", + "epoch: 7 step: 63, loss is 1.4564790725708008\n", + "epoch: 7 step: 64, loss is 1.5191245079040527\n", + "epoch: 7 step: 65, loss is 1.420469045639038\n", + "epoch: 7 step: 66, loss is 1.4890409708023071\n", + "epoch: 7 step: 67, loss is 1.4921247959136963\n", + "epoch: 7 step: 68, loss is 1.4159307479858398\n", + "epoch: 7 step: 69, loss is 1.558229684829712\n", + "epoch: 7 step: 70, loss is 1.3937363624572754\n", + "epoch: 7 step: 71, loss is 1.5051112174987793\n", + "epoch: 7 step: 72, loss is 1.4460937976837158\n", + "epoch: 7 step: 73, loss is 1.4406070709228516\n", + "epoch: 7 step: 74, loss is 1.4612257480621338\n", + "epoch: 7 step: 75, loss is 1.4589953422546387\n", + "epoch: 7 step: 76, loss is 1.47183096408844\n", + "epoch: 7 step: 77, loss is 1.5335661172866821\n", + "epoch: 7 step: 78, loss is 1.4855353832244873\n", + "epoch: 7 step: 79, loss is 1.622459053993225\n", + "epoch: 7 step: 80, loss is 1.3972759246826172\n", + "epoch: 7 step: 81, loss is 1.464990496635437\n", + "epoch: 7 step: 82, loss is 1.4602009057998657\n", + "epoch: 7 step: 83, loss is 1.434541940689087\n", + "epoch: 7 step: 84, loss is 1.528083324432373\n", + "epoch: 7 step: 85, loss is 1.5632069110870361\n", + "epoch: 7 step: 86, loss is 1.4714771509170532\n", + "epoch: 7 step: 87, loss is 1.3942105770111084\n", + "epoch: 7 step: 88, loss is 1.5436633825302124\n", + 
"epoch: 7 step: 89, loss is 1.463935136795044\n", + "epoch: 7 step: 90, loss is 1.492416501045227\n", + "epoch: 7 step: 91, loss is 1.5689785480499268\n", + "epoch: 7 step: 92, loss is 1.5560786724090576\n", + "epoch: 7 step: 93, loss is 1.4833225011825562\n", + "epoch: 7 step: 94, loss is 1.402191400527954\n", + "epoch: 7 step: 95, loss is 1.4837050437927246\n", + "epoch: 7 step: 96, loss is 1.4615161418914795\n", + "epoch: 7 step: 97, loss is 1.5056214332580566\n", + "epoch: 7 step: 98, loss is 1.494551658630371\n", + "epoch: 7 step: 99, loss is 1.5455174446105957\n", + "epoch: 7 step: 100, loss is 1.4874598979949951\n", + "epoch: 7 step: 101, loss is 1.5478882789611816\n", + "epoch: 7 step: 102, loss is 1.498094916343689\n", + "epoch: 7 step: 103, loss is 1.5262911319732666\n", + "epoch: 7 step: 104, loss is 1.5094325542449951\n", + "epoch: 7 step: 105, loss is 1.4646320343017578\n", + "epoch: 7 step: 106, loss is 1.4455924034118652\n", + "epoch: 7 step: 107, loss is 1.4808841943740845\n", + "epoch: 7 step: 108, loss is 1.4008368253707886\n", + "epoch: 7 step: 109, loss is 1.4588732719421387\n", + "epoch: 7 step: 110, loss is 1.4891598224639893\n", + "epoch: 7 step: 111, loss is 1.5023365020751953\n", + "epoch: 7 step: 112, loss is 1.5045151710510254\n", + "epoch: 7 step: 113, loss is 1.6366803646087646\n", + "epoch: 7 step: 114, loss is 1.5848846435546875\n", + "epoch: 7 step: 115, loss is 1.4340283870697021\n", + "epoch: 7 step: 116, loss is 1.59546959400177\n", + "epoch: 7 step: 117, loss is 1.3935943841934204\n", + "epoch: 7 step: 118, loss is 1.4636374711990356\n", + "epoch: 7 step: 119, loss is 1.5157415866851807\n", + "epoch: 7 step: 120, loss is 1.4912312030792236\n", + "epoch: 7 step: 121, loss is 1.4712835550308228\n", + "epoch: 7 step: 122, loss is 1.4802789688110352\n", + "epoch: 7 step: 123, loss is 1.423905372619629\n", + "epoch: 7 step: 124, loss is 1.3578457832336426\n", + "epoch: 7 step: 125, loss is 1.4569061994552612\n", + "epoch: 7 step: 126, loss is 1.4467921257019043\n", + "epoch: 7 step: 127, loss is 1.4651986360549927\n", + "epoch: 7 step: 128, loss is 1.5564364194869995\n", + "epoch: 7 step: 129, loss is 1.4806163311004639\n", + "epoch: 7 step: 130, loss is 1.5269190073013306\n", + "epoch: 7 step: 131, loss is 1.48906409740448\n", + "epoch: 7 step: 132, loss is 1.5267400741577148\n", + "epoch: 7 step: 133, loss is 1.5908863544464111\n", + "epoch: 7 step: 134, loss is 1.4729294776916504\n", + "epoch: 7 step: 135, loss is 1.4604854583740234\n", + "epoch: 7 step: 136, loss is 1.486339807510376\n", + "epoch: 7 step: 137, loss is 1.3781015872955322\n", + "epoch: 7 step: 138, loss is 1.4461236000061035\n", + "epoch: 7 step: 139, loss is 1.5392694473266602\n", + "epoch: 7 step: 140, loss is 1.5232210159301758\n", + "epoch: 7 step: 141, loss is 1.509253740310669\n", + "epoch: 7 step: 142, loss is 1.4578773975372314\n", + "epoch: 7 step: 143, loss is 1.4639045000076294\n", + "epoch: 7 step: 144, loss is 1.4021847248077393\n", + "epoch: 7 step: 145, loss is 1.4260993003845215\n", + "epoch: 7 step: 146, loss is 1.5574249029159546\n", + "epoch: 7 step: 147, loss is 1.4592702388763428\n", + "epoch: 7 step: 148, loss is 1.4143478870391846\n", + "epoch: 7 step: 149, loss is 1.4869390726089478\n", + "epoch: 7 step: 150, loss is 1.4725490808486938\n", + "epoch: 7 step: 151, loss is 1.410143256187439\n", + "epoch: 7 step: 152, loss is 1.5853480100631714\n", + "epoch: 7 step: 153, loss is 1.4704235792160034\n", + "epoch: 7 step: 154, loss is 1.4944982528686523\n", + "epoch: 7 
step: 155, loss is 1.4102634191513062\n", + "epoch: 7 step: 156, loss is 1.5553852319717407\n", + "epoch: 7 step: 157, loss is 1.4854648113250732\n", + "epoch: 7 step: 158, loss is 1.5937894582748413\n", + "epoch: 7 step: 159, loss is 1.4951167106628418\n", + "epoch: 7 step: 160, loss is 1.4862568378448486\n", + "epoch: 7 step: 161, loss is 1.5175212621688843\n", + "epoch: 7 step: 162, loss is 1.4748740196228027\n", + "epoch: 7 step: 163, loss is 1.4965319633483887\n", + "epoch: 7 step: 164, loss is 1.462143898010254\n", + "epoch: 7 step: 165, loss is 1.4679056406021118\n", + "epoch: 7 step: 166, loss is 1.501708984375\n", + "epoch: 7 step: 167, loss is 1.4492489099502563\n", + "epoch: 7 step: 168, loss is 1.523318886756897\n", + "epoch: 7 step: 169, loss is 1.429626703262329\n", + "epoch: 7 step: 170, loss is 1.5353296995162964\n", + "epoch: 7 step: 171, loss is 1.4752514362335205\n", + "epoch: 7 step: 172, loss is 1.5380438566207886\n", + "epoch: 7 step: 173, loss is 1.4968098402023315\n", + "epoch: 7 step: 174, loss is 1.3898118734359741\n", + "epoch: 7 step: 175, loss is 1.3686456680297852\n", + "epoch: 7 step: 176, loss is 1.4135990142822266\n", + "epoch: 7 step: 177, loss is 1.4567596912384033\n", + "epoch: 7 step: 178, loss is 1.4875967502593994\n", + "epoch: 7 step: 179, loss is 1.4699066877365112\n", + "epoch: 7 step: 180, loss is 1.5830459594726562\n", + "epoch: 7 step: 181, loss is 1.450166940689087\n", + "epoch: 7 step: 182, loss is 1.433647632598877\n", + "epoch: 7 step: 183, loss is 1.511103630065918\n", + "epoch: 7 step: 184, loss is 1.5083644390106201\n", + "epoch: 7 step: 185, loss is 1.5314066410064697\n", + "epoch: 7 step: 186, loss is 1.5834465026855469\n", + "epoch: 7 step: 187, loss is 1.5017123222351074\n", + "epoch: 7 step: 188, loss is 1.5392719507217407\n", + "epoch: 7 step: 189, loss is 1.6001806259155273\n", + "epoch: 7 step: 190, loss is 1.4650452136993408\n", + "epoch: 7 step: 191, loss is 1.4433813095092773\n", + "epoch: 7 step: 192, loss is 1.4713046550750732\n", + "epoch: 7 step: 193, loss is 1.3501620292663574\n", + "epoch: 7 step: 194, loss is 1.508476972579956\n", + "epoch: 7 step: 195, loss is 1.4853405952453613\n", + "Train epoch time: 101557.019 ms, per step time: 520.805 ms\n", + "epoch: 8 step: 1, loss is 1.429003119468689\n", + "epoch: 8 step: 2, loss is 1.4107272624969482\n", + "epoch: 8 step: 3, loss is 1.4755034446716309\n", + "epoch: 8 step: 4, loss is 1.466585636138916\n", + "epoch: 8 step: 5, loss is 1.464191198348999\n", + "epoch: 8 step: 6, loss is 1.5379765033721924\n", + "epoch: 8 step: 7, loss is 1.4338098764419556\n", + "epoch: 8 step: 8, loss is 1.3605345487594604\n", + "epoch: 8 step: 9, loss is 1.5084402561187744\n", + "epoch: 8 step: 10, loss is 1.5329866409301758\n", + "epoch: 8 step: 11, loss is 1.418543815612793\n", + "epoch: 8 step: 12, loss is 1.362414002418518\n", + "epoch: 8 step: 13, loss is 1.5388834476470947\n", + "epoch: 8 step: 14, loss is 1.4648784399032593\n", + "epoch: 8 step: 15, loss is 1.4367684125900269\n", + "epoch: 8 step: 16, loss is 1.359085202217102\n", + "epoch: 8 step: 17, loss is 1.4556517601013184\n", + "epoch: 8 step: 18, loss is 1.3779821395874023\n", + "epoch: 8 step: 19, loss is 1.5997884273529053\n", + "epoch: 8 step: 20, loss is 1.4702845811843872\n", + "epoch: 8 step: 21, loss is 1.3810019493103027\n", + "epoch: 8 step: 22, loss is 1.5574049949645996\n", + "epoch: 8 step: 23, loss is 1.4199693202972412\n", + "epoch: 8 step: 24, loss is 1.3633902072906494\n", + "epoch: 8 step: 25, loss is 
1.4752495288848877\n", + "epoch: 8 step: 26, loss is 1.4578269720077515\n", + "epoch: 8 step: 27, loss is 1.4942775964736938\n", + "epoch: 8 step: 28, loss is 1.496826410293579\n", + "epoch: 8 step: 29, loss is 1.3824048042297363\n", + "epoch: 8 step: 30, loss is 1.4689632654190063\n", + "epoch: 8 step: 31, loss is 1.4100630283355713\n", + "epoch: 8 step: 32, loss is 1.418769121170044\n", + "epoch: 8 step: 33, loss is 1.4170594215393066\n", + "epoch: 8 step: 34, loss is 1.5153520107269287\n", + "epoch: 8 step: 35, loss is 1.507736086845398\n", + "epoch: 8 step: 36, loss is 1.4221203327178955\n", + "epoch: 8 step: 37, loss is 1.513373851776123\n", + "epoch: 8 step: 38, loss is 1.4114494323730469\n", + "epoch: 8 step: 39, loss is 1.4581331014633179\n", + "epoch: 8 step: 40, loss is 1.296491026878357\n", + "epoch: 8 step: 41, loss is 1.4114437103271484\n", + "epoch: 8 step: 42, loss is 1.4571266174316406\n", + "epoch: 8 step: 43, loss is 1.4468079805374146\n", + "epoch: 8 step: 44, loss is 1.4959814548492432\n", + "epoch: 8 step: 45, loss is 1.431275486946106\n", + "epoch: 8 step: 46, loss is 1.472455382347107\n", + "epoch: 8 step: 47, loss is 1.473832130432129\n", + "epoch: 8 step: 48, loss is 1.4359221458435059\n", + "epoch: 8 step: 49, loss is 1.4642051458358765\n", + "epoch: 8 step: 50, loss is 1.4303513765335083\n", + "epoch: 8 step: 51, loss is 1.4232962131500244\n", + "epoch: 8 step: 52, loss is 1.3396320343017578\n", + "epoch: 8 step: 53, loss is 1.4309030771255493\n", + "epoch: 8 step: 54, loss is 1.4642832279205322\n", + "epoch: 8 step: 55, loss is 1.4269510507583618\n", + "epoch: 8 step: 56, loss is 1.4269236326217651\n", + "epoch: 8 step: 57, loss is 1.4916191101074219\n", + "epoch: 8 step: 58, loss is 1.3578250408172607\n", + "epoch: 8 step: 59, loss is 1.3441658020019531\n", + "epoch: 8 step: 60, loss is 1.4207022190093994\n", + "epoch: 8 step: 61, loss is 1.3968126773834229\n", + "epoch: 8 step: 62, loss is 1.4943828582763672\n", + "epoch: 8 step: 63, loss is 1.5397770404815674\n", + "epoch: 8 step: 64, loss is 1.3313508033752441\n", + "epoch: 8 step: 65, loss is 1.424198865890503\n", + "epoch: 8 step: 66, loss is 1.399994969367981\n", + "epoch: 8 step: 67, loss is 1.4523358345031738\n", + "epoch: 8 step: 68, loss is 1.469861626625061\n", + "epoch: 8 step: 69, loss is 1.4691545963287354\n", + "epoch: 8 step: 70, loss is 1.4232629537582397\n", + "epoch: 8 step: 71, loss is 1.3901350498199463\n", + "epoch: 8 step: 72, loss is 1.4405544996261597\n", + "epoch: 8 step: 73, loss is 1.471373200416565\n", + "epoch: 8 step: 74, loss is 1.4410492181777954\n", + "epoch: 8 step: 75, loss is 1.4994655847549438\n", + "epoch: 8 step: 76, loss is 1.5018466711044312\n", + "epoch: 8 step: 77, loss is 1.4824144840240479\n", + "epoch: 8 step: 78, loss is 1.3977067470550537\n", + "epoch: 8 step: 79, loss is 1.4713956117630005\n", + "epoch: 8 step: 80, loss is 1.5311514139175415\n", + "epoch: 8 step: 81, loss is 1.504010796546936\n", + "epoch: 8 step: 82, loss is 1.461827278137207\n", + "epoch: 8 step: 83, loss is 1.4809831380844116\n", + "epoch: 8 step: 84, loss is 1.4892903566360474\n", + "epoch: 8 step: 85, loss is 1.4124118089675903\n", + "epoch: 8 step: 86, loss is 1.4833234548568726\n", + "epoch: 8 step: 87, loss is 1.4509873390197754\n", + "epoch: 8 step: 88, loss is 1.4055708646774292\n", + "epoch: 8 step: 89, loss is 1.484975814819336\n", + "epoch: 8 step: 90, loss is 1.4744317531585693\n", + "epoch: 8 step: 91, loss is 1.4226067066192627\n", + "epoch: 8 step: 92, loss is 
1.4499731063842773\n", + "epoch: 8 step: 93, loss is 1.505363941192627\n", + "epoch: 8 step: 94, loss is 1.4462205171585083\n", + "epoch: 8 step: 95, loss is 1.4438321590423584\n", + "epoch: 8 step: 96, loss is 1.4360851049423218\n", + "epoch: 8 step: 97, loss is 1.4075359106063843\n", + "epoch: 8 step: 98, loss is 1.5101979970932007\n", + "epoch: 8 step: 99, loss is 1.3896653652191162\n", + "epoch: 8 step: 100, loss is 1.3638538122177124\n", + "epoch: 8 step: 101, loss is 1.4947052001953125\n", + "epoch: 8 step: 102, loss is 1.4254498481750488\n", + "epoch: 8 step: 103, loss is 1.5178403854370117\n", + "epoch: 8 step: 104, loss is 1.4559102058410645\n", + "epoch: 8 step: 105, loss is 1.4494906663894653\n", + "epoch: 8 step: 106, loss is 1.3803677558898926\n", + "epoch: 8 step: 107, loss is 1.3965094089508057\n", + "epoch: 8 step: 108, loss is 1.440703272819519\n", + "epoch: 8 step: 109, loss is 1.5034723281860352\n", + "epoch: 8 step: 110, loss is 1.3417942523956299\n", + "epoch: 8 step: 111, loss is 1.3907225131988525\n", + "epoch: 8 step: 112, loss is 1.5542733669281006\n", + "epoch: 8 step: 113, loss is 1.5081100463867188\n", + "epoch: 8 step: 114, loss is 1.286160945892334\n", + "epoch: 8 step: 115, loss is 1.434015154838562\n", + "epoch: 8 step: 116, loss is 1.3799386024475098\n", + "epoch: 8 step: 117, loss is 1.4344258308410645\n", + "epoch: 8 step: 118, loss is 1.4317759275436401\n", + "epoch: 8 step: 119, loss is 1.4413771629333496\n", + "epoch: 8 step: 120, loss is 1.4894990921020508\n", + "epoch: 8 step: 121, loss is 1.4797909259796143\n", + "epoch: 8 step: 122, loss is 1.444528341293335\n", + "epoch: 8 step: 123, loss is 1.528463363647461\n", + "epoch: 8 step: 124, loss is 1.3826470375061035\n", + "epoch: 8 step: 125, loss is 1.5277185440063477\n", + "epoch: 8 step: 126, loss is 1.4367634057998657\n", + "epoch: 8 step: 127, loss is 1.404809594154358\n", + "epoch: 8 step: 128, loss is 1.399905800819397\n", + "epoch: 8 step: 129, loss is 1.477609395980835\n", + "epoch: 8 step: 130, loss is 1.397735834121704\n", + "epoch: 8 step: 131, loss is 1.371984601020813\n", + "epoch: 8 step: 132, loss is 1.3613680601119995\n", + "epoch: 8 step: 133, loss is 1.4195046424865723\n", + "epoch: 8 step: 134, loss is 1.4840214252471924\n", + "epoch: 8 step: 135, loss is 1.5081497430801392\n", + "epoch: 8 step: 136, loss is 1.4006762504577637\n", + "epoch: 8 step: 137, loss is 1.4661130905151367\n", + "epoch: 8 step: 138, loss is 1.3958911895751953\n", + "epoch: 8 step: 139, loss is 1.429317593574524\n", + "epoch: 8 step: 140, loss is 1.4355273246765137\n", + "epoch: 8 step: 141, loss is 1.5072500705718994\n", + "epoch: 8 step: 142, loss is 1.4722821712493896\n", + "epoch: 8 step: 143, loss is 1.424737811088562\n", + "epoch: 8 step: 144, loss is 1.497867465019226\n", + "epoch: 8 step: 145, loss is 1.3229339122772217\n", + "epoch: 8 step: 146, loss is 1.4480931758880615\n", + "epoch: 8 step: 147, loss is 1.4199450016021729\n", + "epoch: 8 step: 148, loss is 1.4179375171661377\n", + "epoch: 8 step: 149, loss is 1.5060895681381226\n", + "epoch: 8 step: 150, loss is 1.3936312198638916\n", + "epoch: 8 step: 151, loss is 1.4231541156768799\n", + "epoch: 8 step: 152, loss is 1.402383804321289\n", + "epoch: 8 step: 153, loss is 1.3710225820541382\n", + "epoch: 8 step: 154, loss is 1.4251216650009155\n", + "epoch: 8 step: 155, loss is 1.4750220775604248\n", + "epoch: 8 step: 156, loss is 1.428410291671753\n", + "epoch: 8 step: 157, loss is 1.4272665977478027\n", + "epoch: 8 step: 158, loss is 
1.354636311531067\n", + "epoch: 8 step: 159, loss is 1.4741674661636353\n", + "epoch: 8 step: 160, loss is 1.5270094871520996\n", + "epoch: 8 step: 161, loss is 1.3780601024627686\n", + "epoch: 8 step: 162, loss is 1.3886785507202148\n", + "epoch: 8 step: 163, loss is 1.3936128616333008\n", + "epoch: 8 step: 164, loss is 1.4821451902389526\n", + "epoch: 8 step: 165, loss is 1.437671422958374\n", + "epoch: 8 step: 166, loss is 1.4124289751052856\n", + "epoch: 8 step: 167, loss is 1.4979243278503418\n", + "epoch: 8 step: 168, loss is 1.40871262550354\n", + "epoch: 8 step: 169, loss is 1.4486162662506104\n", + "epoch: 8 step: 170, loss is 1.3667347431182861\n", + "epoch: 8 step: 171, loss is 1.4552711248397827\n", + "epoch: 8 step: 172, loss is 1.4582101106643677\n", + "epoch: 8 step: 173, loss is 1.3745197057724\n", + "epoch: 8 step: 174, loss is 1.4341392517089844\n", + "epoch: 8 step: 175, loss is 1.34609055519104\n", + "epoch: 8 step: 176, loss is 1.4226138591766357\n", + "epoch: 8 step: 177, loss is 1.3741638660430908\n", + "epoch: 8 step: 178, loss is 1.3936405181884766\n", + "epoch: 8 step: 179, loss is 1.389925479888916\n", + "epoch: 8 step: 180, loss is 1.4102472066879272\n", + "epoch: 8 step: 181, loss is 1.3728044033050537\n", + "epoch: 8 step: 182, loss is 1.434373378753662\n", + "epoch: 8 step: 183, loss is 1.45587158203125\n", + "epoch: 8 step: 184, loss is 1.4060404300689697\n", + "epoch: 8 step: 185, loss is 1.480263590812683\n", + "epoch: 8 step: 186, loss is 1.3975260257720947\n", + "epoch: 8 step: 187, loss is 1.3906896114349365\n", + "epoch: 8 step: 188, loss is 1.4579757452011108\n", + "epoch: 8 step: 189, loss is 1.4524158239364624\n", + "epoch: 8 step: 190, loss is 1.443595051765442\n", + "epoch: 8 step: 191, loss is 1.461517572402954\n", + "epoch: 8 step: 192, loss is 1.431726336479187\n", + "epoch: 8 step: 193, loss is 1.3512992858886719\n", + "epoch: 8 step: 194, loss is 1.377821922302246\n", + "epoch: 8 step: 195, loss is 1.3678979873657227\n", + "Train epoch time: 97920.525 ms, per step time: 502.157 ms\n", + "epoch: 9 step: 1, loss is 1.3686871528625488\n", + "epoch: 9 step: 2, loss is 1.3451966047286987\n", + "epoch: 9 step: 3, loss is 1.4053659439086914\n", + "epoch: 9 step: 4, loss is 1.454399585723877\n", + "epoch: 9 step: 5, loss is 1.4835536479949951\n", + "epoch: 9 step: 6, loss is 1.3674170970916748\n", + "epoch: 9 step: 7, loss is 1.4360063076019287\n", + "epoch: 9 step: 8, loss is 1.4041427373886108\n", + "epoch: 9 step: 9, loss is 1.4062691926956177\n", + "epoch: 9 step: 10, loss is 1.3885431289672852\n", + "epoch: 9 step: 11, loss is 1.413304090499878\n", + "epoch: 9 step: 12, loss is 1.3882882595062256\n", + "epoch: 9 step: 13, loss is 1.443940281867981\n", + "epoch: 9 step: 14, loss is 1.4185893535614014\n", + "epoch: 9 step: 15, loss is 1.4061042070388794\n", + "epoch: 9 step: 16, loss is 1.3990658521652222\n", + "epoch: 9 step: 17, loss is 1.38161301612854\n", + "epoch: 9 step: 18, loss is 1.3650332689285278\n", + "epoch: 9 step: 19, loss is 1.457192301750183\n", + "epoch: 9 step: 20, loss is 1.4112701416015625\n", + "epoch: 9 step: 21, loss is 1.4283533096313477\n", + "epoch: 9 step: 22, loss is 1.4139353036880493\n", + "epoch: 9 step: 23, loss is 1.4048259258270264\n", + "epoch: 9 step: 24, loss is 1.4009816646575928\n", + "epoch: 9 step: 25, loss is 1.5118703842163086\n", + "epoch: 9 step: 26, loss is 1.4526433944702148\n", + "epoch: 9 step: 27, loss is 1.3354270458221436\n", + "epoch: 9 step: 28, loss is 1.3892382383346558\n", + "epoch: 9 step: 
29, loss is 1.4553167819976807\n", + "epoch: 9 step: 30, loss is 1.4349106550216675\n", + "epoch: 9 step: 31, loss is 1.4712719917297363\n", + "epoch: 9 step: 32, loss is 1.347480297088623\n", + "epoch: 9 step: 33, loss is 1.5218734741210938\n", + "epoch: 9 step: 34, loss is 1.3241169452667236\n", + "epoch: 9 step: 35, loss is 1.4006105661392212\n", + "epoch: 9 step: 36, loss is 1.375659465789795\n", + "epoch: 9 step: 37, loss is 1.353757619857788\n", + "epoch: 9 step: 38, loss is 1.4498956203460693\n", + "epoch: 9 step: 39, loss is 1.3663690090179443\n", + "epoch: 9 step: 40, loss is 1.4606060981750488\n", + "epoch: 9 step: 41, loss is 1.4095120429992676\n", + "epoch: 9 step: 42, loss is 1.4442204236984253\n", + "epoch: 9 step: 43, loss is 1.439152717590332\n", + "epoch: 9 step: 44, loss is 1.566872000694275\n", + "epoch: 9 step: 45, loss is 1.3448981046676636\n", + "epoch: 9 step: 46, loss is 1.3993322849273682\n", + "epoch: 9 step: 47, loss is 1.5240325927734375\n", + "epoch: 9 step: 48, loss is 1.351813554763794\n", + "epoch: 9 step: 49, loss is 1.3476250171661377\n", + "epoch: 9 step: 50, loss is 1.3886604309082031\n", + "epoch: 9 step: 51, loss is 1.4283287525177002\n", + "epoch: 9 step: 52, loss is 1.4593119621276855\n", + "epoch: 9 step: 53, loss is 1.4681148529052734\n", + "epoch: 9 step: 54, loss is 1.3508656024932861\n", + "epoch: 9 step: 55, loss is 1.4791004657745361\n", + "epoch: 9 step: 56, loss is 1.3645414113998413\n", + "epoch: 9 step: 57, loss is 1.4643195867538452\n", + "epoch: 9 step: 58, loss is 1.483736515045166\n", + "epoch: 9 step: 59, loss is 1.3867449760437012\n", + "epoch: 9 step: 60, loss is 1.443368911743164\n", + "epoch: 9 step: 61, loss is 1.3780328035354614\n", + "epoch: 9 step: 62, loss is 1.4346749782562256\n", + "epoch: 9 step: 63, loss is 1.439383864402771\n", + "epoch: 9 step: 64, loss is 1.415440320968628\n", + "epoch: 9 step: 65, loss is 1.5304211378097534\n", + "epoch: 9 step: 66, loss is 1.4358162879943848\n", + "epoch: 9 step: 67, loss is 1.4200738668441772\n", + "epoch: 9 step: 68, loss is 1.3502346277236938\n", + "epoch: 9 step: 69, loss is 1.4126152992248535\n", + "epoch: 9 step: 70, loss is 1.4287028312683105\n", + "epoch: 9 step: 71, loss is 1.4949285984039307\n", + "epoch: 9 step: 72, loss is 1.3996925354003906\n", + "epoch: 9 step: 73, loss is 1.4609135389328003\n", + "epoch: 9 step: 74, loss is 1.4889978170394897\n", + "epoch: 9 step: 75, loss is 1.4861128330230713\n", + "epoch: 9 step: 76, loss is 1.3677623271942139\n", + "epoch: 9 step: 77, loss is 1.4899822473526\n", + "epoch: 9 step: 78, loss is 1.4568486213684082\n", + "epoch: 9 step: 79, loss is 1.4456920623779297\n", + "epoch: 9 step: 80, loss is 1.4080407619476318\n", + "epoch: 9 step: 81, loss is 1.3916215896606445\n", + "epoch: 9 step: 82, loss is 1.4552912712097168\n", + "epoch: 9 step: 83, loss is 1.4424139261245728\n", + "epoch: 9 step: 84, loss is 1.4066710472106934\n", + "epoch: 9 step: 85, loss is 1.382904291152954\n", + "epoch: 9 step: 86, loss is 1.4724788665771484\n", + "epoch: 9 step: 87, loss is 1.351231575012207\n", + "epoch: 9 step: 88, loss is 1.4019391536712646\n", + "epoch: 9 step: 89, loss is 1.4055564403533936\n", + "epoch: 9 step: 90, loss is 1.396646499633789\n", + "epoch: 9 step: 91, loss is 1.3298311233520508\n", + "epoch: 9 step: 92, loss is 1.3646538257598877\n", + "epoch: 9 step: 93, loss is 1.4459621906280518\n", + "epoch: 9 step: 94, loss is 1.4223806858062744\n", + "epoch: 9 step: 95, loss is 1.3589954376220703\n", + "epoch: 9 step: 96, loss is 
1.4673244953155518\n", + "epoch: 9 step: 97, loss is 1.3436877727508545\n", + "epoch: 9 step: 98, loss is 1.373197078704834\n", + "epoch: 9 step: 99, loss is 1.325591802597046\n", + "epoch: 9 step: 100, loss is 1.4480148553848267\n", + "epoch: 9 step: 101, loss is 1.3950047492980957\n", + "epoch: 9 step: 102, loss is 1.3989112377166748\n", + "epoch: 9 step: 103, loss is 1.3851940631866455\n", + "epoch: 9 step: 104, loss is 1.3710730075836182\n", + "epoch: 9 step: 105, loss is 1.335435390472412\n", + "epoch: 9 step: 106, loss is 1.4590922594070435\n", + "epoch: 9 step: 107, loss is 1.3792110681533813\n", + "epoch: 9 step: 108, loss is 1.3592002391815186\n", + "epoch: 9 step: 109, loss is 1.4157559871673584\n", + "epoch: 9 step: 110, loss is 1.4189109802246094\n", + "epoch: 9 step: 111, loss is 1.3659608364105225\n", + "epoch: 9 step: 112, loss is 1.393045425415039\n", + "epoch: 9 step: 113, loss is 1.4213069677352905\n", + "epoch: 9 step: 114, loss is 1.4512839317321777\n", + "epoch: 9 step: 115, loss is 1.3438502550125122\n", + "epoch: 9 step: 116, loss is 1.4337797164916992\n", + "epoch: 9 step: 117, loss is 1.417262315750122\n", + "epoch: 9 step: 118, loss is 1.3823806047439575\n", + "epoch: 9 step: 119, loss is 1.3666141033172607\n", + "epoch: 9 step: 120, loss is 1.285462737083435\n", + "epoch: 9 step: 121, loss is 1.3507628440856934\n", + "epoch: 9 step: 122, loss is 1.3796911239624023\n", + "epoch: 9 step: 123, loss is 1.3914493322372437\n", + "epoch: 9 step: 124, loss is 1.3504307270050049\n", + "epoch: 9 step: 125, loss is 1.4435615539550781\n", + "epoch: 9 step: 126, loss is 1.3231216669082642\n", + "epoch: 9 step: 127, loss is 1.4540362358093262\n", + "epoch: 9 step: 128, loss is 1.4278836250305176\n", + "epoch: 9 step: 129, loss is 1.423729658126831\n", + "epoch: 9 step: 130, loss is 1.3733189105987549\n", + "epoch: 9 step: 131, loss is 1.4015660285949707\n", + "epoch: 9 step: 132, loss is 1.38005793094635\n", + "epoch: 9 step: 133, loss is 1.3644016981124878\n", + "epoch: 9 step: 134, loss is 1.3994139432907104\n", + "epoch: 9 step: 135, loss is 1.3939355611801147\n", + "epoch: 9 step: 136, loss is 1.4121742248535156\n", + "epoch: 9 step: 137, loss is 1.3381192684173584\n", + "epoch: 9 step: 138, loss is 1.4272220134735107\n", + "epoch: 9 step: 139, loss is 1.4910632371902466\n", + "epoch: 9 step: 140, loss is 1.430234432220459\n", + "epoch: 9 step: 141, loss is 1.4548242092132568\n", + "epoch: 9 step: 142, loss is 1.4000707864761353\n", + "epoch: 9 step: 143, loss is 1.3340342044830322\n", + "epoch: 9 step: 144, loss is 1.4599030017852783\n", + "epoch: 9 step: 145, loss is 1.3995065689086914\n", + "epoch: 9 step: 146, loss is 1.4190359115600586\n", + "epoch: 9 step: 147, loss is 1.4137353897094727\n", + "epoch: 9 step: 148, loss is 1.2872203588485718\n", + "epoch: 9 step: 149, loss is 1.5593078136444092\n", + "epoch: 9 step: 150, loss is 1.331124186515808\n", + "epoch: 9 step: 151, loss is 1.3246105909347534\n", + "epoch: 9 step: 152, loss is 1.3242878913879395\n", + "epoch: 9 step: 153, loss is 1.4031710624694824\n", + "epoch: 9 step: 154, loss is 1.3380694389343262\n", + "epoch: 9 step: 155, loss is 1.2859454154968262\n", + "epoch: 9 step: 156, loss is 1.3648788928985596\n", + "epoch: 9 step: 157, loss is 1.3359448909759521\n", + "epoch: 9 step: 158, loss is 1.4753774404525757\n", + "epoch: 9 step: 159, loss is 1.3960984945297241\n", + "epoch: 9 step: 160, loss is 1.4248446226119995\n", + "epoch: 9 step: 161, loss is 1.4070972204208374\n", + "epoch: 9 step: 162, loss is 
1.4206180572509766\n", + "epoch: 9 step: 163, loss is 1.3528648614883423\n", + "epoch: 9 step: 164, loss is 1.3106021881103516\n", + "epoch: 9 step: 165, loss is 1.3364908695220947\n", + "epoch: 9 step: 166, loss is 1.3478691577911377\n", + "epoch: 9 step: 167, loss is 1.3992176055908203\n", + "epoch: 9 step: 168, loss is 1.4506597518920898\n", + "epoch: 9 step: 169, loss is 1.5256593227386475\n", + "epoch: 9 step: 170, loss is 1.3843673467636108\n", + "epoch: 9 step: 171, loss is 1.2918672561645508\n", + "epoch: 9 step: 172, loss is 1.4553159475326538\n", + "epoch: 9 step: 173, loss is 1.395153284072876\n", + "epoch: 9 step: 174, loss is 1.390548825263977\n", + "epoch: 9 step: 175, loss is 1.365472435951233\n", + "epoch: 9 step: 176, loss is 1.4091366529464722\n", + "epoch: 9 step: 177, loss is 1.2780108451843262\n", + "epoch: 9 step: 178, loss is 1.3780494928359985\n", + "epoch: 9 step: 179, loss is 1.4274547100067139\n", + "epoch: 9 step: 180, loss is 1.3743255138397217\n", + "epoch: 9 step: 181, loss is 1.3822728395462036\n", + "epoch: 9 step: 182, loss is 1.4511778354644775\n", + "epoch: 9 step: 183, loss is 1.3597838878631592\n", + "epoch: 9 step: 184, loss is 1.4413615465164185\n", + "epoch: 9 step: 185, loss is 1.3855236768722534\n", + "epoch: 9 step: 186, loss is 1.3783398866653442\n", + "epoch: 9 step: 187, loss is 1.4307358264923096\n", + "epoch: 9 step: 188, loss is 1.326853632926941\n", + "epoch: 9 step: 189, loss is 1.3830974102020264\n", + "epoch: 9 step: 190, loss is 1.395775318145752\n", + "epoch: 9 step: 191, loss is 1.3078207969665527\n", + "epoch: 9 step: 192, loss is 1.2989792823791504\n", + "epoch: 9 step: 193, loss is 1.4185712337493896\n", + "epoch: 9 step: 194, loss is 1.339128017425537\n", + "epoch: 9 step: 195, loss is 1.3476219177246094\n", + "Train epoch time: 106593.444 ms, per step time: 546.633 ms\n", + "epoch: 10 step: 1, loss is 1.4013646841049194\n", + "epoch: 10 step: 2, loss is 1.3300243616104126\n", + "epoch: 10 step: 3, loss is 1.3094394207000732\n", + "epoch: 10 step: 4, loss is 1.3548839092254639\n", + "epoch: 10 step: 5, loss is 1.4156841039657593\n", + "epoch: 10 step: 6, loss is 1.3820728063583374\n", + "epoch: 10 step: 7, loss is 1.4879790544509888\n", + "epoch: 10 step: 8, loss is 1.4722046852111816\n", + "epoch: 10 step: 9, loss is 1.3858039379119873\n", + "epoch: 10 step: 10, loss is 1.429699420928955\n", + "epoch: 10 step: 11, loss is 1.419289469718933\n", + "epoch: 10 step: 12, loss is 1.4895614385604858\n", + "epoch: 10 step: 13, loss is 1.4256426095962524\n", + "epoch: 10 step: 14, loss is 1.3901841640472412\n", + "epoch: 10 step: 15, loss is 1.3972671031951904\n", + "epoch: 10 step: 16, loss is 1.399070143699646\n", + "epoch: 10 step: 17, loss is 1.3201708793640137\n", + "epoch: 10 step: 18, loss is 1.3512731790542603\n", + "epoch: 10 step: 19, loss is 1.3862638473510742\n", + "epoch: 10 step: 20, loss is 1.401485562324524\n", + "epoch: 10 step: 21, loss is 1.436686396598816\n", + "epoch: 10 step: 22, loss is 1.3069672584533691\n", + "epoch: 10 step: 23, loss is 1.3247520923614502\n", + "epoch: 10 step: 24, loss is 1.3055996894836426\n", + "epoch: 10 step: 25, loss is 1.439944863319397\n", + "epoch: 10 step: 26, loss is 1.3489394187927246\n", + "epoch: 10 step: 27, loss is 1.380202293395996\n", + "epoch: 10 step: 28, loss is 1.334869146347046\n", + "epoch: 10 step: 29, loss is 1.290797472000122\n", + "epoch: 10 step: 30, loss is 1.325467824935913\n", + "epoch: 10 step: 31, loss is 1.4006500244140625\n", + "epoch: 10 step: 32, loss is 
1.3276009559631348\n", + "epoch: 10 step: 33, loss is 1.3087867498397827\n", + "epoch: 10 step: 34, loss is 1.3664500713348389\n", + "epoch: 10 step: 35, loss is 1.4247545003890991\n", + "epoch: 10 step: 36, loss is 1.2811508178710938\n", + "epoch: 10 step: 37, loss is 1.3659154176712036\n", + "epoch: 10 step: 38, loss is 1.4141485691070557\n", + "epoch: 10 step: 39, loss is 1.3306363821029663\n", + "epoch: 10 step: 40, loss is 1.3256539106369019\n", + "epoch: 10 step: 41, loss is 1.3956319093704224\n", + "epoch: 10 step: 42, loss is 1.3946948051452637\n", + "epoch: 10 step: 43, loss is 1.3561468124389648\n", + "epoch: 10 step: 44, loss is 1.3659508228302002\n", + "epoch: 10 step: 45, loss is 1.330806016921997\n", + "epoch: 10 step: 46, loss is 1.316137671470642\n", + "epoch: 10 step: 47, loss is 1.261836290359497\n", + "epoch: 10 step: 48, loss is 1.3875095844268799\n", + "epoch: 10 step: 49, loss is 1.376421570777893\n", + "epoch: 10 step: 50, loss is 1.3242244720458984\n", + "epoch: 10 step: 51, loss is 1.2726612091064453\n", + "epoch: 10 step: 52, loss is 1.322344422340393\n", + "epoch: 10 step: 53, loss is 1.33793044090271\n", + "epoch: 10 step: 54, loss is 1.3586516380310059\n", + "epoch: 10 step: 55, loss is 1.3359687328338623\n", + "epoch: 10 step: 56, loss is 1.3563358783721924\n", + "epoch: 10 step: 57, loss is 1.2862935066223145\n", + "epoch: 10 step: 58, loss is 1.319016933441162\n", + "epoch: 10 step: 59, loss is 1.3530882596969604\n", + "epoch: 10 step: 60, loss is 1.3836643695831299\n", + "epoch: 10 step: 61, loss is 1.3903762102127075\n", + "epoch: 10 step: 62, loss is 1.32072913646698\n", + "epoch: 10 step: 63, loss is 1.413024663925171\n", + "epoch: 10 step: 64, loss is 1.3698439598083496\n", + "epoch: 10 step: 65, loss is 1.2824244499206543\n", + "epoch: 10 step: 66, loss is 1.3327823877334595\n", + "epoch: 10 step: 67, loss is 1.3529623746871948\n", + "epoch: 10 step: 68, loss is 1.3206793069839478\n", + "epoch: 10 step: 69, loss is 1.3999276161193848\n", + "epoch: 10 step: 70, loss is 1.409599781036377\n", + "epoch: 10 step: 71, loss is 1.367915153503418\n", + "epoch: 10 step: 72, loss is 1.39909029006958\n", + "epoch: 10 step: 73, loss is 1.372831106185913\n", + "epoch: 10 step: 74, loss is 1.3448381423950195\n", + "epoch: 10 step: 75, loss is 1.4166792631149292\n", + "epoch: 10 step: 76, loss is 1.400881290435791\n", + "epoch: 10 step: 77, loss is 1.3689132928848267\n", + "epoch: 10 step: 78, loss is 1.366376280784607\n", + "epoch: 10 step: 79, loss is 1.3263789415359497\n", + "epoch: 10 step: 80, loss is 1.4032373428344727\n", + "epoch: 10 step: 81, loss is 1.3168869018554688\n", + "epoch: 10 step: 82, loss is 1.3266562223434448\n", + "epoch: 10 step: 83, loss is 1.3931050300598145\n", + "epoch: 10 step: 84, loss is 1.3634028434753418\n", + "epoch: 10 step: 85, loss is 1.416879415512085\n", + "epoch: 10 step: 86, loss is 1.4401990175247192\n", + "epoch: 10 step: 87, loss is 1.4115872383117676\n", + "epoch: 10 step: 88, loss is 1.3289239406585693\n", + "epoch: 10 step: 89, loss is 1.4174799919128418\n", + "epoch: 10 step: 90, loss is 1.4395992755889893\n", + "epoch: 10 step: 91, loss is 1.4242947101593018\n", + "epoch: 10 step: 92, loss is 1.3185385465621948\n", + "epoch: 10 step: 93, loss is 1.398950457572937\n", + "epoch: 10 step: 94, loss is 1.447624921798706\n", + "epoch: 10 step: 95, loss is 1.4001009464263916\n", + "epoch: 10 step: 96, loss is 1.391319990158081\n", + "epoch: 10 step: 97, loss is 1.327488899230957\n", + "epoch: 10 step: 98, loss is 
1.4119454622268677\n",
+ "epoch: 10 step: 99, loss is 1.382166862487793\n",
+ "...\n",
+ "epoch: 10 step: 195, loss is 1.313734531402588\n",
+ "Train epoch time: 101941.147 ms, per step time: 522.775 ms\n",
+ "epoch: 11 step: 1, loss is 1.3239222764968872\n",
+ "...\n",
+ "epoch: 11 step: 195, loss is 1.3102353811264038\n",
+ "Train epoch time: 102838.922 ms, per step time: 527.379 ms\n",
+ "epoch: 12 step: 1, loss is 1.327026128768921\n",
+ "...\n",
+ "epoch: 12 step: 195, loss is 1.320265293121338\n",
+ "Train epoch time: 98058.209 ms, per step time: 502.863 ms\n",
+ "epoch: 13 step: 1, loss is 1.3731111288070679\n",
+ "...\n",
+ "epoch: 13 step: 195, loss is 1.2389551401138306\n",
+ "Train epoch time: 101021.364 ms, per step time: 518.058 ms\n",
+ "epoch: 14 step: 1, loss is 1.2513421773910522\n",
+ "...\n",
+ "epoch: 14 step: 195, loss is 1.3046984672546387\n",
+ "Train epoch time: 103209.554 ms, per step time: 529.280 ms\n",
+ "epoch: 15 step: 1, loss is 1.1791326999664307\n",
+ "...\n",
+ "epoch: 15 step: 195, loss is 1.2203199863433838\n",
+ "Train epoch time: 104817.599 ms, per step time: 537.526 ms\n",
+ "epoch: 16 step: 1, loss is 1.1947009563446045\n",
+ "...\n",
+ "epoch: 16 step: 163, loss is 1.228580117225647\n",
+ "epoch: 
16 step: 164, loss is 1.1726922988891602\n", + "epoch: 16 step: 165, loss is 1.2135756015777588\n", + "epoch: 16 step: 166, loss is 1.276719570159912\n", + "epoch: 16 step: 167, loss is 1.2969268560409546\n", + "epoch: 16 step: 168, loss is 1.2456297874450684\n", + "epoch: 16 step: 169, loss is 1.1960266828536987\n", + "epoch: 16 step: 170, loss is 1.1156516075134277\n", + "epoch: 16 step: 171, loss is 1.2143917083740234\n", + "epoch: 16 step: 172, loss is 1.16255784034729\n", + "epoch: 16 step: 173, loss is 1.3832745552062988\n", + "epoch: 16 step: 174, loss is 1.2033945322036743\n", + "epoch: 16 step: 175, loss is 1.3091492652893066\n", + "epoch: 16 step: 176, loss is 1.2962968349456787\n", + "epoch: 16 step: 177, loss is 1.1745792627334595\n", + "epoch: 16 step: 178, loss is 1.2540977001190186\n", + "epoch: 16 step: 179, loss is 1.2704955339431763\n", + "epoch: 16 step: 180, loss is 1.249572515487671\n", + "epoch: 16 step: 181, loss is 1.2056024074554443\n", + "epoch: 16 step: 182, loss is 1.2439602613449097\n", + "epoch: 16 step: 183, loss is 1.1769452095031738\n", + "epoch: 16 step: 184, loss is 1.2338814735412598\n", + "epoch: 16 step: 185, loss is 1.1687448024749756\n", + "epoch: 16 step: 186, loss is 1.2545605897903442\n", + "epoch: 16 step: 187, loss is 1.2732011079788208\n", + "epoch: 16 step: 188, loss is 1.2035003900527954\n", + "epoch: 16 step: 189, loss is 1.240733027458191\n", + "epoch: 16 step: 190, loss is 1.2843533754348755\n", + "epoch: 16 step: 191, loss is 1.2281990051269531\n", + "epoch: 16 step: 192, loss is 1.2771230936050415\n", + "epoch: 16 step: 193, loss is 1.272323489189148\n", + "epoch: 16 step: 194, loss is 1.2166976928710938\n", + "epoch: 16 step: 195, loss is 1.2135474681854248\n", + "Train epoch time: 111985.317 ms, per step time: 574.284 ms\n", + "epoch: 17 step: 1, loss is 1.1566863059997559\n", + "epoch: 17 step: 2, loss is 1.2569364309310913\n", + "epoch: 17 step: 3, loss is 1.2202942371368408\n", + "epoch: 17 step: 4, loss is 1.219804048538208\n", + "epoch: 17 step: 5, loss is 1.1521563529968262\n", + "epoch: 17 step: 6, loss is 1.1664772033691406\n", + "epoch: 17 step: 7, loss is 1.1728764772415161\n", + "epoch: 17 step: 8, loss is 1.2309823036193848\n", + "epoch: 17 step: 9, loss is 1.3161332607269287\n", + "epoch: 17 step: 10, loss is 1.196271538734436\n", + "epoch: 17 step: 11, loss is 1.2973086833953857\n", + "epoch: 17 step: 12, loss is 1.2545697689056396\n", + "epoch: 17 step: 13, loss is 1.1811177730560303\n", + "epoch: 17 step: 14, loss is 1.1914072036743164\n", + "epoch: 17 step: 15, loss is 1.134885311126709\n", + "epoch: 17 step: 16, loss is 1.2302080392837524\n", + "epoch: 17 step: 17, loss is 1.1666297912597656\n", + "epoch: 17 step: 18, loss is 1.1739472150802612\n", + "epoch: 17 step: 19, loss is 1.137118935585022\n", + "epoch: 17 step: 20, loss is 1.213318109512329\n", + "epoch: 17 step: 21, loss is 1.1999073028564453\n", + "epoch: 17 step: 22, loss is 1.2078241109848022\n", + "epoch: 17 step: 23, loss is 1.2042276859283447\n", + "epoch: 17 step: 24, loss is 1.2061725854873657\n", + "epoch: 17 step: 25, loss is 1.2110854387283325\n", + "epoch: 17 step: 26, loss is 1.2945696115493774\n", + "epoch: 17 step: 27, loss is 1.2700958251953125\n", + "epoch: 17 step: 28, loss is 1.253418207168579\n", + "epoch: 17 step: 29, loss is 1.2184525728225708\n", + "epoch: 17 step: 30, loss is 1.2951228618621826\n", + "epoch: 17 step: 31, loss is 1.1991249322891235\n", + "epoch: 17 step: 32, loss is 1.1453678607940674\n", + "epoch: 17 step: 33, loss is 
1.3568341732025146\n", + "epoch: 17 step: 34, loss is 1.3165676593780518\n", + "epoch: 17 step: 35, loss is 1.2473329305648804\n", + "epoch: 17 step: 36, loss is 1.122117042541504\n", + "epoch: 17 step: 37, loss is 1.2460416555404663\n", + "epoch: 17 step: 38, loss is 1.191550612449646\n", + "epoch: 17 step: 39, loss is 1.2172082662582397\n", + "epoch: 17 step: 40, loss is 1.2528938055038452\n", + "epoch: 17 step: 41, loss is 1.3422269821166992\n", + "epoch: 17 step: 42, loss is 1.1929373741149902\n", + "epoch: 17 step: 43, loss is 1.1999951601028442\n", + "epoch: 17 step: 44, loss is 1.2334985733032227\n", + "epoch: 17 step: 45, loss is 1.2017409801483154\n", + "epoch: 17 step: 46, loss is 1.1764206886291504\n", + "epoch: 17 step: 47, loss is 1.2031724452972412\n", + "epoch: 17 step: 48, loss is 1.3089604377746582\n", + "epoch: 17 step: 49, loss is 1.2126314640045166\n", + "epoch: 17 step: 50, loss is 1.260947346687317\n", + "epoch: 17 step: 51, loss is 1.2196898460388184\n", + "epoch: 17 step: 52, loss is 1.2679917812347412\n", + "epoch: 17 step: 53, loss is 1.2638006210327148\n", + "epoch: 17 step: 54, loss is 1.1767176389694214\n", + "epoch: 17 step: 55, loss is 1.219530463218689\n", + "epoch: 17 step: 56, loss is 1.297010064125061\n", + "epoch: 17 step: 57, loss is 1.2144399881362915\n", + "epoch: 17 step: 58, loss is 1.1902120113372803\n", + "epoch: 17 step: 59, loss is 1.237060546875\n", + "epoch: 17 step: 60, loss is 1.2803491353988647\n", + "epoch: 17 step: 61, loss is 1.176782250404358\n", + "epoch: 17 step: 62, loss is 1.202889084815979\n", + "epoch: 17 step: 63, loss is 1.1460330486297607\n", + "epoch: 17 step: 64, loss is 1.3070706129074097\n", + "epoch: 17 step: 65, loss is 1.2503321170806885\n", + "epoch: 17 step: 66, loss is 1.2274585962295532\n", + "epoch: 17 step: 67, loss is 1.1865298748016357\n", + "epoch: 17 step: 68, loss is 1.2059781551361084\n", + "epoch: 17 step: 69, loss is 1.246989369392395\n", + "epoch: 17 step: 70, loss is 1.2291651964187622\n", + "epoch: 17 step: 71, loss is 1.2375967502593994\n", + "epoch: 17 step: 72, loss is 1.2222172021865845\n", + "epoch: 17 step: 73, loss is 1.2196295261383057\n", + "epoch: 17 step: 74, loss is 1.2502106428146362\n", + "epoch: 17 step: 75, loss is 1.2079637050628662\n", + "epoch: 17 step: 76, loss is 1.3466041088104248\n", + "epoch: 17 step: 77, loss is 1.2630064487457275\n", + "epoch: 17 step: 78, loss is 1.1890925168991089\n", + "epoch: 17 step: 79, loss is 1.2341885566711426\n", + "epoch: 17 step: 80, loss is 1.1592082977294922\n", + "epoch: 17 step: 81, loss is 1.2353037595748901\n", + "epoch: 17 step: 82, loss is 1.2051960229873657\n", + "epoch: 17 step: 83, loss is 1.176168441772461\n", + "epoch: 17 step: 84, loss is 1.276667833328247\n", + "epoch: 17 step: 85, loss is 1.1226613521575928\n", + "epoch: 17 step: 86, loss is 1.1966869831085205\n", + "epoch: 17 step: 87, loss is 1.2591105699539185\n", + "epoch: 17 step: 88, loss is 1.224778413772583\n", + "epoch: 17 step: 89, loss is 1.2179979085922241\n", + "epoch: 17 step: 90, loss is 1.2121496200561523\n", + "epoch: 17 step: 91, loss is 1.0753662586212158\n", + "epoch: 17 step: 92, loss is 1.2631268501281738\n", + "epoch: 17 step: 93, loss is 1.2428685426712036\n", + "epoch: 17 step: 94, loss is 1.213384747505188\n", + "epoch: 17 step: 95, loss is 1.2140002250671387\n", + "epoch: 17 step: 96, loss is 1.249873161315918\n", + "epoch: 17 step: 97, loss is 1.2787315845489502\n", + "epoch: 17 step: 98, loss is 1.269054889678955\n", + "epoch: 17 step: 99, loss is 
1.2405542135238647\n", + "epoch: 17 step: 100, loss is 1.20047128200531\n", + "epoch: 17 step: 101, loss is 1.1487576961517334\n", + "epoch: 17 step: 102, loss is 1.2780028581619263\n", + "epoch: 17 step: 103, loss is 1.2083079814910889\n", + "epoch: 17 step: 104, loss is 1.2201555967330933\n", + "epoch: 17 step: 105, loss is 1.2904889583587646\n", + "epoch: 17 step: 106, loss is 1.2272766828536987\n", + "epoch: 17 step: 107, loss is 1.2146106958389282\n", + "epoch: 17 step: 108, loss is 1.2419209480285645\n", + "epoch: 17 step: 109, loss is 1.185360074043274\n", + "epoch: 17 step: 110, loss is 1.2435626983642578\n", + "epoch: 17 step: 111, loss is 1.2297799587249756\n", + "epoch: 17 step: 112, loss is 1.221032738685608\n", + "epoch: 17 step: 113, loss is 1.30337393283844\n", + "epoch: 17 step: 114, loss is 1.208892822265625\n", + "epoch: 17 step: 115, loss is 1.1460647583007812\n", + "epoch: 17 step: 116, loss is 1.2144430875778198\n", + "epoch: 17 step: 117, loss is 1.289759635925293\n", + "epoch: 17 step: 118, loss is 1.211726427078247\n", + "epoch: 17 step: 119, loss is 1.3336515426635742\n", + "epoch: 17 step: 120, loss is 1.1341745853424072\n", + "epoch: 17 step: 121, loss is 1.1671730279922485\n", + "epoch: 17 step: 122, loss is 1.2247884273529053\n", + "epoch: 17 step: 123, loss is 1.1449570655822754\n", + "epoch: 17 step: 124, loss is 1.1770524978637695\n", + "epoch: 17 step: 125, loss is 1.2060871124267578\n", + "epoch: 17 step: 126, loss is 1.2386994361877441\n", + "epoch: 17 step: 127, loss is 1.195181965827942\n", + "epoch: 17 step: 128, loss is 1.1288741827011108\n", + "epoch: 17 step: 129, loss is 1.2127325534820557\n", + "epoch: 17 step: 130, loss is 1.2764102220535278\n", + "epoch: 17 step: 131, loss is 1.2448809146881104\n", + "epoch: 17 step: 132, loss is 1.3185031414031982\n", + "epoch: 17 step: 133, loss is 1.2381937503814697\n", + "epoch: 17 step: 134, loss is 1.284919261932373\n", + "epoch: 17 step: 135, loss is 1.2284390926361084\n", + "epoch: 17 step: 136, loss is 1.2389166355133057\n", + "epoch: 17 step: 137, loss is 1.210745930671692\n", + "epoch: 17 step: 138, loss is 1.1554230451583862\n", + "epoch: 17 step: 139, loss is 1.2791310548782349\n", + "epoch: 17 step: 140, loss is 1.3189854621887207\n", + "epoch: 17 step: 141, loss is 1.2957948446273804\n", + "epoch: 17 step: 142, loss is 1.2013063430786133\n", + "epoch: 17 step: 143, loss is 1.1816816329956055\n", + "epoch: 17 step: 144, loss is 1.1968919038772583\n", + "epoch: 17 step: 145, loss is 1.1366263628005981\n", + "epoch: 17 step: 146, loss is 1.1942400932312012\n", + "epoch: 17 step: 147, loss is 1.2144365310668945\n", + "epoch: 17 step: 148, loss is 1.1905946731567383\n", + "epoch: 17 step: 149, loss is 1.2631694078445435\n", + "epoch: 17 step: 150, loss is 1.25564706325531\n", + "epoch: 17 step: 151, loss is 1.288573980331421\n", + "epoch: 17 step: 152, loss is 1.219386100769043\n", + "epoch: 17 step: 153, loss is 1.2060942649841309\n", + "epoch: 17 step: 154, loss is 1.2181204557418823\n", + "epoch: 17 step: 155, loss is 1.2388548851013184\n", + "epoch: 17 step: 156, loss is 1.2107465267181396\n", + "epoch: 17 step: 157, loss is 1.1720499992370605\n", + "epoch: 17 step: 158, loss is 1.2214686870574951\n", + "epoch: 17 step: 159, loss is 1.1913577318191528\n", + "epoch: 17 step: 160, loss is 1.2819938659667969\n", + "epoch: 17 step: 161, loss is 1.1673972606658936\n", + "epoch: 17 step: 162, loss is 1.2020583152770996\n", + "epoch: 17 step: 163, loss is 1.2256495952606201\n", + "epoch: 17 step: 164, 
loss is 1.2562158107757568\n", + "epoch: 17 step: 165, loss is 1.2415380477905273\n", + "epoch: 17 step: 166, loss is 1.2678935527801514\n", + "epoch: 17 step: 167, loss is 1.1828043460845947\n", + "epoch: 17 step: 168, loss is 1.2300388813018799\n", + "epoch: 17 step: 169, loss is 1.2291924953460693\n", + "epoch: 17 step: 170, loss is 1.15118408203125\n", + "epoch: 17 step: 171, loss is 1.1679412126541138\n", + "epoch: 17 step: 172, loss is 1.1754260063171387\n", + "epoch: 17 step: 173, loss is 1.2785770893096924\n", + "epoch: 17 step: 174, loss is 1.1833245754241943\n", + "epoch: 17 step: 175, loss is 1.2491295337677002\n", + "epoch: 17 step: 176, loss is 1.249530553817749\n", + "epoch: 17 step: 177, loss is 1.185660481452942\n", + "epoch: 17 step: 178, loss is 1.198456048965454\n", + "epoch: 17 step: 179, loss is 1.2034506797790527\n", + "epoch: 17 step: 180, loss is 1.1450915336608887\n", + "epoch: 17 step: 181, loss is 1.1735645532608032\n", + "epoch: 17 step: 182, loss is 1.2913461923599243\n", + "epoch: 17 step: 183, loss is 1.1961063146591187\n", + "epoch: 17 step: 184, loss is 1.1578097343444824\n", + "epoch: 17 step: 185, loss is 1.2728545665740967\n", + "epoch: 17 step: 186, loss is 1.2408148050308228\n", + "epoch: 17 step: 187, loss is 1.1661924123764038\n", + "epoch: 17 step: 188, loss is 1.28676176071167\n", + "epoch: 17 step: 189, loss is 1.1886515617370605\n", + "epoch: 17 step: 190, loss is 1.1743133068084717\n", + "epoch: 17 step: 191, loss is 1.2176458835601807\n", + "epoch: 17 step: 192, loss is 1.197649598121643\n", + "epoch: 17 step: 193, loss is 1.2084490060806274\n", + "epoch: 17 step: 194, loss is 1.2318187952041626\n", + "epoch: 17 step: 195, loss is 1.2242952585220337\n", + "Train epoch time: 104576.986 ms, per step time: 536.292 ms\n", + "epoch: 18 step: 1, loss is 1.163093090057373\n", + "epoch: 18 step: 2, loss is 1.1643081903457642\n", + "epoch: 18 step: 3, loss is 1.1587316989898682\n", + "epoch: 18 step: 4, loss is 1.21018385887146\n", + "epoch: 18 step: 5, loss is 1.2351661920547485\n", + "epoch: 18 step: 6, loss is 1.2684526443481445\n", + "epoch: 18 step: 7, loss is 1.1714850664138794\n", + "epoch: 18 step: 8, loss is 1.2787055969238281\n", + "epoch: 18 step: 9, loss is 1.234318494796753\n", + "epoch: 18 step: 10, loss is 1.2003421783447266\n", + "epoch: 18 step: 11, loss is 1.1013171672821045\n", + "epoch: 18 step: 12, loss is 1.2364200353622437\n", + "epoch: 18 step: 13, loss is 1.251915454864502\n", + "epoch: 18 step: 14, loss is 1.2352931499481201\n", + "epoch: 18 step: 15, loss is 1.2348082065582275\n", + "epoch: 18 step: 16, loss is 1.1728001832962036\n", + "epoch: 18 step: 17, loss is 1.2627544403076172\n", + "epoch: 18 step: 18, loss is 1.18878173828125\n", + "epoch: 18 step: 19, loss is 1.089637041091919\n", + "epoch: 18 step: 20, loss is 1.1151251792907715\n", + "epoch: 18 step: 21, loss is 1.3205705881118774\n", + "epoch: 18 step: 22, loss is 1.1920576095581055\n", + "epoch: 18 step: 23, loss is 1.2054365873336792\n", + "epoch: 18 step: 24, loss is 1.183617353439331\n", + "epoch: 18 step: 25, loss is 1.2065211534500122\n", + "epoch: 18 step: 26, loss is 1.1877928972244263\n", + "epoch: 18 step: 27, loss is 1.1878981590270996\n", + "epoch: 18 step: 28, loss is 1.2044098377227783\n", + "epoch: 18 step: 29, loss is 1.2073894739151\n", + "epoch: 18 step: 30, loss is 1.2755573987960815\n", + "epoch: 18 step: 31, loss is 1.1904524564743042\n", + "epoch: 18 step: 32, loss is 1.300213098526001\n", + "epoch: 18 step: 33, loss is 1.1769633293151855\n", 
+ "epoch: 18 step: 34, loss is 1.1749913692474365\n", + "epoch: 18 step: 35, loss is 1.2387583255767822\n", + "epoch: 18 step: 36, loss is 1.2537668943405151\n", + "epoch: 18 step: 37, loss is 1.2281330823898315\n", + "epoch: 18 step: 38, loss is 1.2183899879455566\n", + "epoch: 18 step: 39, loss is 1.1393439769744873\n", + "epoch: 18 step: 40, loss is 1.1871339082717896\n", + "epoch: 18 step: 41, loss is 1.2349929809570312\n", + "epoch: 18 step: 42, loss is 1.2370070219039917\n", + "epoch: 18 step: 43, loss is 1.2657219171524048\n", + "epoch: 18 step: 44, loss is 1.1243919134140015\n", + "epoch: 18 step: 45, loss is 1.112611174583435\n", + "epoch: 18 step: 46, loss is 1.2645459175109863\n", + "epoch: 18 step: 47, loss is 1.183924674987793\n", + "epoch: 18 step: 48, loss is 1.1749018430709839\n", + "epoch: 18 step: 49, loss is 1.1337485313415527\n", + "epoch: 18 step: 50, loss is 1.203305959701538\n", + "epoch: 18 step: 51, loss is 1.1232362985610962\n", + "epoch: 18 step: 52, loss is 1.230475902557373\n", + "epoch: 18 step: 53, loss is 1.2323929071426392\n", + "epoch: 18 step: 54, loss is 1.2612800598144531\n", + "epoch: 18 step: 55, loss is 1.2179036140441895\n", + "epoch: 18 step: 56, loss is 1.3103959560394287\n", + "epoch: 18 step: 57, loss is 1.124367594718933\n", + "epoch: 18 step: 58, loss is 1.188981294631958\n", + "epoch: 18 step: 59, loss is 1.1093450784683228\n", + "epoch: 18 step: 60, loss is 1.1761821508407593\n", + "epoch: 18 step: 61, loss is 1.2362127304077148\n", + "epoch: 18 step: 62, loss is 1.1960983276367188\n", + "epoch: 18 step: 63, loss is 1.25058913230896\n", + "epoch: 18 step: 64, loss is 1.1914722919464111\n", + "epoch: 18 step: 65, loss is 1.1974217891693115\n", + "epoch: 18 step: 66, loss is 1.1456913948059082\n", + "epoch: 18 step: 67, loss is 1.1405397653579712\n", + "epoch: 18 step: 68, loss is 1.2314611673355103\n", + "epoch: 18 step: 69, loss is 1.2180103063583374\n", + "epoch: 18 step: 70, loss is 1.1892707347869873\n", + "epoch: 18 step: 71, loss is 1.1572495698928833\n", + "epoch: 18 step: 72, loss is 1.189610481262207\n", + "epoch: 18 step: 73, loss is 1.1331360340118408\n", + "epoch: 18 step: 74, loss is 1.2528396844863892\n", + "epoch: 18 step: 75, loss is 1.233729362487793\n", + "epoch: 18 step: 76, loss is 1.1350704431533813\n", + "epoch: 18 step: 77, loss is 1.2029328346252441\n", + "epoch: 18 step: 78, loss is 1.2669801712036133\n", + "epoch: 18 step: 79, loss is 1.1749601364135742\n", + "epoch: 18 step: 80, loss is 1.2658461332321167\n", + "epoch: 18 step: 81, loss is 1.2532382011413574\n", + "epoch: 18 step: 82, loss is 1.2482271194458008\n", + "epoch: 18 step: 83, loss is 1.2788283824920654\n", + "epoch: 18 step: 84, loss is 1.1999070644378662\n", + "epoch: 18 step: 85, loss is 1.2336230278015137\n", + "epoch: 18 step: 86, loss is 1.2273966073989868\n", + "epoch: 18 step: 87, loss is 1.239780306816101\n", + "epoch: 18 step: 88, loss is 1.2861137390136719\n", + "epoch: 18 step: 89, loss is 1.2196464538574219\n", + "epoch: 18 step: 90, loss is 1.2045379877090454\n", + "epoch: 18 step: 91, loss is 1.2559536695480347\n", + "epoch: 18 step: 92, loss is 1.2547101974487305\n", + "epoch: 18 step: 93, loss is 1.1551785469055176\n", + "epoch: 18 step: 94, loss is 1.2645609378814697\n", + "epoch: 18 step: 95, loss is 1.1907762289047241\n", + "epoch: 18 step: 96, loss is 1.1534792184829712\n", + "epoch: 18 step: 97, loss is 1.2244325876235962\n", + "epoch: 18 step: 98, loss is 1.2930206060409546\n", + "epoch: 18 step: 99, loss is 1.2432889938354492\n", + 
"epoch: 18 step: 100, loss is 1.2136526107788086\n", + "epoch: 18 step: 101, loss is 1.2331199645996094\n", + "epoch: 18 step: 102, loss is 1.150872826576233\n", + "epoch: 18 step: 103, loss is 1.16279935836792\n", + "epoch: 18 step: 104, loss is 1.1222825050354004\n", + "epoch: 18 step: 105, loss is 1.1680495738983154\n", + "epoch: 18 step: 106, loss is 1.2303258180618286\n", + "epoch: 18 step: 107, loss is 1.1810110807418823\n", + "epoch: 18 step: 108, loss is 1.2568986415863037\n", + "epoch: 18 step: 109, loss is 1.2848972082138062\n", + "epoch: 18 step: 110, loss is 1.1899828910827637\n", + "epoch: 18 step: 111, loss is 1.2457060813903809\n", + "epoch: 18 step: 112, loss is 1.157608985900879\n", + "epoch: 18 step: 113, loss is 1.218801736831665\n", + "epoch: 18 step: 114, loss is 1.2719181776046753\n", + "epoch: 18 step: 115, loss is 1.2844077348709106\n", + "epoch: 18 step: 116, loss is 1.1837624311447144\n", + "epoch: 18 step: 117, loss is 1.2297815084457397\n", + "epoch: 18 step: 118, loss is 1.2345877885818481\n", + "epoch: 18 step: 119, loss is 1.1778076887130737\n", + "epoch: 18 step: 120, loss is 1.233988642692566\n", + "epoch: 18 step: 121, loss is 1.160495400428772\n", + "epoch: 18 step: 122, loss is 1.154070258140564\n", + "epoch: 18 step: 123, loss is 1.1838046312332153\n", + "epoch: 18 step: 124, loss is 1.1283586025238037\n", + "epoch: 18 step: 125, loss is 1.1446781158447266\n", + "epoch: 18 step: 126, loss is 1.2172372341156006\n", + "epoch: 18 step: 127, loss is 1.102413535118103\n", + "epoch: 18 step: 128, loss is 1.1221530437469482\n", + "epoch: 18 step: 129, loss is 1.161347508430481\n", + "epoch: 18 step: 130, loss is 1.2036614418029785\n", + "epoch: 18 step: 131, loss is 1.229783058166504\n", + "epoch: 18 step: 132, loss is 1.289193034172058\n", + "epoch: 18 step: 133, loss is 1.2354049682617188\n", + "epoch: 18 step: 134, loss is 1.2707123756408691\n", + "epoch: 18 step: 135, loss is 1.2075436115264893\n", + "epoch: 18 step: 136, loss is 1.2127444744110107\n", + "epoch: 18 step: 137, loss is 1.1712807416915894\n", + "epoch: 18 step: 138, loss is 1.192334532737732\n", + "epoch: 18 step: 139, loss is 1.1715645790100098\n", + "epoch: 18 step: 140, loss is 1.1782574653625488\n", + "epoch: 18 step: 141, loss is 1.2325711250305176\n", + "epoch: 18 step: 142, loss is 1.2152174711227417\n", + "epoch: 18 step: 143, loss is 1.167007327079773\n", + "epoch: 18 step: 144, loss is 1.2537723779678345\n", + "epoch: 18 step: 145, loss is 1.1079801321029663\n", + "epoch: 18 step: 146, loss is 1.1673047542572021\n", + "epoch: 18 step: 147, loss is 1.2298269271850586\n", + "epoch: 18 step: 148, loss is 1.13134765625\n", + "epoch: 18 step: 149, loss is 1.1548757553100586\n", + "epoch: 18 step: 150, loss is 1.191606879234314\n", + "epoch: 18 step: 151, loss is 1.268114686012268\n", + "epoch: 18 step: 152, loss is 1.1557695865631104\n", + "epoch: 18 step: 153, loss is 1.1915086507797241\n", + "epoch: 18 step: 154, loss is 1.1919896602630615\n", + "epoch: 18 step: 155, loss is 1.1202706098556519\n", + "epoch: 18 step: 156, loss is 1.1974549293518066\n", + "epoch: 18 step: 157, loss is 1.1415072679519653\n", + "epoch: 18 step: 158, loss is 1.1623033285140991\n", + "epoch: 18 step: 159, loss is 1.2083604335784912\n", + "epoch: 18 step: 160, loss is 1.2254550457000732\n", + "epoch: 18 step: 161, loss is 1.2757511138916016\n", + "epoch: 18 step: 162, loss is 1.0976552963256836\n", + "epoch: 18 step: 163, loss is 1.2416324615478516\n", + "epoch: 18 step: 164, loss is 1.170705795288086\n", + 
"epoch: 18 step: 165, loss is 1.321297526359558\n", + "epoch: 18 step: 166, loss is 1.1000334024429321\n", + "epoch: 18 step: 167, loss is 1.2043811082839966\n", + "epoch: 18 step: 168, loss is 1.1270864009857178\n", + "epoch: 18 step: 169, loss is 1.212064504623413\n", + "epoch: 18 step: 170, loss is 1.158576488494873\n", + "epoch: 18 step: 171, loss is 1.2327550649642944\n", + "epoch: 18 step: 172, loss is 1.1547142267227173\n", + "epoch: 18 step: 173, loss is 1.1802492141723633\n", + "epoch: 18 step: 174, loss is 1.0938972234725952\n", + "epoch: 18 step: 175, loss is 1.2214994430541992\n", + "epoch: 18 step: 176, loss is 1.1707316637039185\n", + "epoch: 18 step: 177, loss is 1.1639564037322998\n", + "epoch: 18 step: 178, loss is 1.1819384098052979\n", + "epoch: 18 step: 179, loss is 1.1715550422668457\n", + "epoch: 18 step: 180, loss is 1.1930725574493408\n", + "epoch: 18 step: 181, loss is 1.2016639709472656\n", + "epoch: 18 step: 182, loss is 1.2069354057312012\n", + "epoch: 18 step: 183, loss is 1.2415690422058105\n", + "epoch: 18 step: 184, loss is 1.1326361894607544\n", + "epoch: 18 step: 185, loss is 1.1912896633148193\n", + "epoch: 18 step: 186, loss is 1.2241482734680176\n", + "epoch: 18 step: 187, loss is 1.1880868673324585\n", + "epoch: 18 step: 188, loss is 1.1509827375411987\n", + "epoch: 18 step: 189, loss is 1.1008312702178955\n", + "epoch: 18 step: 190, loss is 1.1766343116760254\n", + "epoch: 18 step: 191, loss is 1.2119250297546387\n", + "epoch: 18 step: 192, loss is 1.21315336227417\n", + "epoch: 18 step: 193, loss is 1.2216898202896118\n", + "epoch: 18 step: 194, loss is 1.1220433712005615\n", + "epoch: 18 step: 195, loss is 1.1890733242034912\n", + "Train epoch time: 101352.981 ms, per step time: 519.759 ms\n", + "epoch: 19 step: 1, loss is 1.1919630765914917\n", + "epoch: 19 step: 2, loss is 1.1452809572219849\n", + "epoch: 19 step: 3, loss is 1.1854350566864014\n", + "epoch: 19 step: 4, loss is 1.242354154586792\n", + "epoch: 19 step: 5, loss is 1.219765067100525\n", + "epoch: 19 step: 6, loss is 1.1540756225585938\n", + "epoch: 19 step: 7, loss is 1.1764079332351685\n", + "epoch: 19 step: 8, loss is 1.1338316202163696\n", + "epoch: 19 step: 9, loss is 1.2136054039001465\n", + "epoch: 19 step: 10, loss is 1.2507076263427734\n", + "epoch: 19 step: 11, loss is 1.1587414741516113\n", + "epoch: 19 step: 12, loss is 1.1728321313858032\n", + "epoch: 19 step: 13, loss is 1.1209793090820312\n", + "epoch: 19 step: 14, loss is 1.1606454849243164\n", + "epoch: 19 step: 15, loss is 1.2101693153381348\n", + "epoch: 19 step: 16, loss is 1.1525294780731201\n", + "epoch: 19 step: 17, loss is 1.2325918674468994\n", + "epoch: 19 step: 18, loss is 1.152976155281067\n", + "epoch: 19 step: 19, loss is 1.2223122119903564\n", + "epoch: 19 step: 20, loss is 1.145082712173462\n", + "epoch: 19 step: 21, loss is 1.120816707611084\n", + "epoch: 19 step: 22, loss is 1.1953506469726562\n", + "epoch: 19 step: 23, loss is 1.152255654335022\n", + "epoch: 19 step: 24, loss is 1.2057310342788696\n", + "epoch: 19 step: 25, loss is 1.2995456457138062\n", + "epoch: 19 step: 26, loss is 1.2323886156082153\n", + "epoch: 19 step: 27, loss is 1.1984307765960693\n", + "epoch: 19 step: 28, loss is 1.162811040878296\n", + "epoch: 19 step: 29, loss is 1.2152609825134277\n", + "epoch: 19 step: 30, loss is 1.1348754167556763\n", + "epoch: 19 step: 31, loss is 1.1925075054168701\n", + "epoch: 19 step: 32, loss is 1.1990911960601807\n", + "epoch: 19 step: 33, loss is 1.2733755111694336\n", + "epoch: 19 step: 34, 
loss is 1.1507935523986816\n", + "epoch: 19 step: 35, loss is 1.196659803390503\n", + "epoch: 19 step: 36, loss is 1.1736938953399658\n", + "epoch: 19 step: 37, loss is 1.235499620437622\n", + "epoch: 19 step: 38, loss is 1.2186579704284668\n", + "epoch: 19 step: 39, loss is 1.1198337078094482\n", + "epoch: 19 step: 40, loss is 1.256303071975708\n", + "epoch: 19 step: 41, loss is 1.1797213554382324\n", + "epoch: 19 step: 42, loss is 1.259636402130127\n", + "epoch: 19 step: 43, loss is 1.140516996383667\n", + "epoch: 19 step: 44, loss is 1.2015693187713623\n", + "epoch: 19 step: 45, loss is 1.2132294178009033\n", + "epoch: 19 step: 46, loss is 1.221127986907959\n", + "epoch: 19 step: 47, loss is 1.1456588506698608\n", + "epoch: 19 step: 48, loss is 1.1030199527740479\n", + "epoch: 19 step: 49, loss is 1.1661518812179565\n", + "epoch: 19 step: 50, loss is 1.1913514137268066\n", + "epoch: 19 step: 51, loss is 1.3098371028900146\n", + "epoch: 19 step: 52, loss is 1.148003101348877\n", + "epoch: 19 step: 53, loss is 1.2410410642623901\n", + "epoch: 19 step: 54, loss is 1.1160907745361328\n", + "epoch: 19 step: 55, loss is 1.2480964660644531\n", + "epoch: 19 step: 56, loss is 1.211594581604004\n", + "epoch: 19 step: 57, loss is 1.173123836517334\n", + "epoch: 19 step: 58, loss is 1.1381464004516602\n", + "epoch: 19 step: 59, loss is 1.1156631708145142\n", + "epoch: 19 step: 60, loss is 1.1293622255325317\n", + "epoch: 19 step: 61, loss is 1.2640362977981567\n", + "epoch: 19 step: 62, loss is 1.1820635795593262\n", + "epoch: 19 step: 63, loss is 1.1795369386672974\n", + "epoch: 19 step: 64, loss is 1.1875749826431274\n", + "epoch: 19 step: 65, loss is 1.2626574039459229\n", + "epoch: 19 step: 66, loss is 1.1899378299713135\n", + "epoch: 19 step: 67, loss is 1.2518035173416138\n", + "epoch: 19 step: 68, loss is 1.2231760025024414\n", + "epoch: 19 step: 69, loss is 1.1257281303405762\n", + "epoch: 19 step: 70, loss is 1.1706583499908447\n", + "epoch: 19 step: 71, loss is 1.239606261253357\n", + "epoch: 19 step: 72, loss is 1.1154091358184814\n", + "epoch: 19 step: 73, loss is 1.1009653806686401\n", + "epoch: 19 step: 74, loss is 1.204609990119934\n", + "epoch: 19 step: 75, loss is 1.188640832901001\n", + "epoch: 19 step: 76, loss is 1.1468273401260376\n", + "epoch: 19 step: 77, loss is 1.1947543621063232\n", + "epoch: 19 step: 78, loss is 1.1897432804107666\n", + "epoch: 19 step: 79, loss is 1.1052907705307007\n", + "epoch: 19 step: 80, loss is 1.117377519607544\n", + "epoch: 19 step: 81, loss is 1.201124906539917\n", + "epoch: 19 step: 82, loss is 1.2599222660064697\n", + "epoch: 19 step: 83, loss is 1.1742149591445923\n", + "epoch: 19 step: 84, loss is 1.238377571105957\n", + "epoch: 19 step: 85, loss is 1.1299456357955933\n", + "epoch: 19 step: 86, loss is 1.167280673980713\n", + "epoch: 19 step: 87, loss is 1.1920278072357178\n", + "epoch: 19 step: 88, loss is 1.1810556650161743\n", + "epoch: 19 step: 89, loss is 1.0923088788986206\n", + "epoch: 19 step: 90, loss is 1.1809931993484497\n", + "epoch: 19 step: 91, loss is 1.1511962413787842\n", + "epoch: 19 step: 92, loss is 1.1819801330566406\n", + "epoch: 19 step: 93, loss is 1.229771614074707\n", + "epoch: 19 step: 94, loss is 1.2212837934494019\n", + "epoch: 19 step: 95, loss is 1.145430326461792\n", + "epoch: 19 step: 96, loss is 1.058290958404541\n", + "epoch: 19 step: 97, loss is 1.2015659809112549\n", + "epoch: 19 step: 98, loss is 1.1454386711120605\n", + "epoch: 19 step: 99, loss is 1.1712946891784668\n", + "epoch: 19 step: 100, loss is 
1.2032614946365356\n", + "epoch: 19 step: 101, loss is 1.171003818511963\n", + "epoch: 19 step: 102, loss is 1.2177945375442505\n", + "epoch: 19 step: 103, loss is 1.2064201831817627\n", + "epoch: 19 step: 104, loss is 1.113767147064209\n", + "epoch: 19 step: 105, loss is 1.2046644687652588\n", + "epoch: 19 step: 106, loss is 1.0973188877105713\n", + "epoch: 19 step: 107, loss is 1.1275098323822021\n", + "epoch: 19 step: 108, loss is 1.2290962934494019\n", + "epoch: 19 step: 109, loss is 1.230661392211914\n", + "epoch: 19 step: 110, loss is 1.185966968536377\n", + "epoch: 19 step: 111, loss is 1.2543350458145142\n", + "epoch: 19 step: 112, loss is 1.2419428825378418\n", + "epoch: 19 step: 113, loss is 1.1682695150375366\n", + "epoch: 19 step: 114, loss is 1.1540536880493164\n", + "epoch: 19 step: 115, loss is 1.1985615491867065\n", + "epoch: 19 step: 116, loss is 1.2403814792633057\n", + "epoch: 19 step: 117, loss is 1.1536333560943604\n", + "epoch: 19 step: 118, loss is 1.1810245513916016\n", + "epoch: 19 step: 119, loss is 1.0652104616165161\n", + "epoch: 19 step: 120, loss is 1.2170697450637817\n", + "epoch: 19 step: 121, loss is 1.1755530834197998\n", + "epoch: 19 step: 122, loss is 1.2050964832305908\n", + "epoch: 19 step: 123, loss is 1.2334606647491455\n", + "epoch: 19 step: 124, loss is 1.160338282585144\n", + "epoch: 19 step: 125, loss is 1.1376289129257202\n", + "epoch: 19 step: 126, loss is 1.1381865739822388\n", + "epoch: 19 step: 127, loss is 1.181434154510498\n", + "epoch: 19 step: 128, loss is 1.1460726261138916\n", + "epoch: 19 step: 129, loss is 1.2577080726623535\n", + "epoch: 19 step: 130, loss is 1.2217777967453003\n", + "epoch: 19 step: 131, loss is 1.2583128213882446\n", + "epoch: 19 step: 132, loss is 1.2502541542053223\n", + "epoch: 19 step: 133, loss is 1.176171064376831\n", + "epoch: 19 step: 134, loss is 1.1830650568008423\n", + "epoch: 19 step: 135, loss is 1.1607081890106201\n", + "epoch: 19 step: 136, loss is 1.2066712379455566\n", + "epoch: 19 step: 137, loss is 1.2293413877487183\n", + "epoch: 19 step: 138, loss is 1.1863219738006592\n", + "epoch: 19 step: 139, loss is 1.221279263496399\n", + "epoch: 19 step: 140, loss is 1.2304773330688477\n", + "epoch: 19 step: 141, loss is 1.1204911470413208\n", + "epoch: 19 step: 142, loss is 1.1246674060821533\n", + "epoch: 19 step: 143, loss is 1.1996190547943115\n", + "epoch: 19 step: 144, loss is 1.182210922241211\n", + "epoch: 19 step: 145, loss is 1.186364769935608\n", + "epoch: 19 step: 146, loss is 1.173928141593933\n", + "epoch: 19 step: 147, loss is 1.202514886856079\n", + "epoch: 19 step: 148, loss is 1.2326751947402954\n", + "epoch: 19 step: 149, loss is 1.230668544769287\n", + "epoch: 19 step: 150, loss is 1.1976358890533447\n", + "epoch: 19 step: 151, loss is 1.1679675579071045\n", + "epoch: 19 step: 152, loss is 1.1975855827331543\n", + "epoch: 19 step: 153, loss is 1.1967251300811768\n", + "epoch: 19 step: 154, loss is 1.171470284461975\n", + "epoch: 19 step: 155, loss is 1.2162375450134277\n", + "epoch: 19 step: 156, loss is 1.1722983121871948\n", + "epoch: 19 step: 157, loss is 1.1767600774765015\n", + "epoch: 19 step: 158, loss is 1.2443314790725708\n", + "epoch: 19 step: 159, loss is 1.2596409320831299\n", + "epoch: 19 step: 160, loss is 1.1492427587509155\n", + "epoch: 19 step: 161, loss is 1.127406358718872\n", + "epoch: 19 step: 162, loss is 1.199941635131836\n", + "epoch: 19 step: 163, loss is 1.1503417491912842\n", + "epoch: 19 step: 164, loss is 1.2358161211013794\n", + "epoch: 19 step: 165, 
loss is 1.166011929512024\n", + "epoch: 19 step: 166, loss is 1.1833696365356445\n", + "epoch: 19 step: 167, loss is 1.1844494342803955\n", + "epoch: 19 step: 168, loss is 1.2348508834838867\n", + "epoch: 19 step: 169, loss is 1.1334558725357056\n", + "epoch: 19 step: 170, loss is 1.2088367938995361\n", + "epoch: 19 step: 171, loss is 1.242146372795105\n", + "epoch: 19 step: 172, loss is 1.2126331329345703\n", + "epoch: 19 step: 173, loss is 1.1997523307800293\n", + "epoch: 19 step: 174, loss is 1.2128143310546875\n", + "epoch: 19 step: 175, loss is 1.2789485454559326\n", + "epoch: 19 step: 176, loss is 1.08676278591156\n", + "epoch: 19 step: 177, loss is 1.2217650413513184\n", + "epoch: 19 step: 178, loss is 1.2049286365509033\n", + "epoch: 19 step: 179, loss is 1.121901273727417\n", + "epoch: 19 step: 180, loss is 1.2339587211608887\n", + "epoch: 19 step: 181, loss is 1.1899642944335938\n", + "epoch: 19 step: 182, loss is 1.290479302406311\n", + "epoch: 19 step: 183, loss is 1.1912024021148682\n", + "epoch: 19 step: 184, loss is 1.1393516063690186\n", + "epoch: 19 step: 185, loss is 1.1532948017120361\n", + "epoch: 19 step: 186, loss is 1.1583597660064697\n", + "epoch: 19 step: 187, loss is 1.257065773010254\n", + "epoch: 19 step: 188, loss is 1.24973726272583\n", + "epoch: 19 step: 189, loss is 1.1877572536468506\n", + "epoch: 19 step: 190, loss is 1.3120734691619873\n", + "epoch: 19 step: 191, loss is 1.1645631790161133\n", + "epoch: 19 step: 192, loss is 1.212127923965454\n", + "epoch: 19 step: 193, loss is 1.0855450630187988\n", + "epoch: 19 step: 194, loss is 1.2783218622207642\n", + "epoch: 19 step: 195, loss is 1.2610738277435303\n", + "Train epoch time: 98985.140 ms, per step time: 507.616 ms\n", + "epoch: 20 step: 1, loss is 1.171416997909546\n", + "epoch: 20 step: 2, loss is 1.1891226768493652\n", + "epoch: 20 step: 3, loss is 1.2037484645843506\n", + "epoch: 20 step: 4, loss is 1.1683003902435303\n", + "epoch: 20 step: 5, loss is 1.202706217765808\n", + "epoch: 20 step: 6, loss is 1.19107985496521\n", + "epoch: 20 step: 7, loss is 1.1876400709152222\n", + "epoch: 20 step: 8, loss is 1.1576197147369385\n", + "epoch: 20 step: 9, loss is 1.1448185443878174\n", + "epoch: 20 step: 10, loss is 1.1584618091583252\n", + "epoch: 20 step: 11, loss is 1.260995626449585\n", + "epoch: 20 step: 12, loss is 1.148430585861206\n", + "epoch: 20 step: 13, loss is 1.114514708518982\n", + "epoch: 20 step: 14, loss is 1.1793315410614014\n", + "epoch: 20 step: 15, loss is 1.1173157691955566\n", + "epoch: 20 step: 16, loss is 1.144849181175232\n", + "epoch: 20 step: 17, loss is 1.1708478927612305\n", + "epoch: 20 step: 18, loss is 1.135984182357788\n", + "epoch: 20 step: 19, loss is 1.168503761291504\n", + "epoch: 20 step: 20, loss is 1.1360942125320435\n", + "epoch: 20 step: 21, loss is 1.122178554534912\n", + "epoch: 20 step: 22, loss is 1.1900696754455566\n", + "epoch: 20 step: 23, loss is 1.1573057174682617\n", + "epoch: 20 step: 24, loss is 1.1680129766464233\n", + "epoch: 20 step: 25, loss is 1.1646864414215088\n", + "epoch: 20 step: 26, loss is 1.1183346509933472\n", + "epoch: 20 step: 27, loss is 1.1286094188690186\n", + "epoch: 20 step: 28, loss is 1.1177390813827515\n", + "epoch: 20 step: 29, loss is 1.2192113399505615\n", + "epoch: 20 step: 30, loss is 1.1064953804016113\n", + "epoch: 20 step: 31, loss is 1.1534074544906616\n", + "epoch: 20 step: 32, loss is 1.1372215747833252\n", + "epoch: 20 step: 33, loss is 1.135436773300171\n", + "epoch: 20 step: 34, loss is 1.145035982131958\n", + 
"epoch: 20 step: 35, loss is 1.1357909440994263\n", + "epoch: 20 step: 36, loss is 1.18682861328125\n", + "epoch: 20 step: 37, loss is 1.2157211303710938\n", + "epoch: 20 step: 38, loss is 1.0553467273712158\n", + "epoch: 20 step: 39, loss is 1.1967600584030151\n", + "epoch: 20 step: 40, loss is 1.1371276378631592\n", + "epoch: 20 step: 41, loss is 1.1208088397979736\n", + "epoch: 20 step: 42, loss is 1.1698044538497925\n", + "epoch: 20 step: 43, loss is 1.2036762237548828\n", + "epoch: 20 step: 44, loss is 1.1338982582092285\n", + "epoch: 20 step: 45, loss is 1.132802128791809\n", + "epoch: 20 step: 46, loss is 1.1992285251617432\n", + "epoch: 20 step: 47, loss is 1.128832459449768\n", + "epoch: 20 step: 48, loss is 1.1307011842727661\n", + "epoch: 20 step: 49, loss is 1.1735988855361938\n", + "epoch: 20 step: 50, loss is 1.2631378173828125\n", + "epoch: 20 step: 51, loss is 1.1495540142059326\n", + "epoch: 20 step: 52, loss is 1.0928657054901123\n", + "epoch: 20 step: 53, loss is 1.1211551427841187\n", + "epoch: 20 step: 54, loss is 1.0811808109283447\n", + "epoch: 20 step: 55, loss is 1.158988356590271\n", + "epoch: 20 step: 56, loss is 1.1648566722869873\n", + "epoch: 20 step: 57, loss is 1.059006690979004\n", + "epoch: 20 step: 58, loss is 1.1529488563537598\n", + "epoch: 20 step: 59, loss is 1.106798529624939\n", + "epoch: 20 step: 60, loss is 1.1177051067352295\n", + "epoch: 20 step: 61, loss is 1.2094417810440063\n", + "epoch: 20 step: 62, loss is 1.198118805885315\n", + "epoch: 20 step: 63, loss is 1.2114638090133667\n", + "epoch: 20 step: 64, loss is 1.2480026483535767\n", + "epoch: 20 step: 65, loss is 1.1223540306091309\n", + "epoch: 20 step: 66, loss is 1.0834541320800781\n", + "epoch: 20 step: 67, loss is 1.1901201009750366\n", + "epoch: 20 step: 68, loss is 1.2501845359802246\n", + "epoch: 20 step: 69, loss is 1.1692668199539185\n", + "epoch: 20 step: 70, loss is 1.288611888885498\n", + "epoch: 20 step: 71, loss is 1.0942113399505615\n", + "epoch: 20 step: 72, loss is 1.1066564321517944\n", + "epoch: 20 step: 73, loss is 1.1757688522338867\n", + "epoch: 20 step: 74, loss is 1.1543374061584473\n", + "epoch: 20 step: 75, loss is 1.1465678215026855\n", + "epoch: 20 step: 76, loss is 1.1502877473831177\n", + "epoch: 20 step: 77, loss is 1.1814221143722534\n", + "epoch: 20 step: 78, loss is 1.154991865158081\n", + "epoch: 20 step: 79, loss is 1.1736503839492798\n", + "epoch: 20 step: 80, loss is 1.2003438472747803\n", + "epoch: 20 step: 81, loss is 1.185755729675293\n", + "epoch: 20 step: 82, loss is 1.14713716506958\n", + "epoch: 20 step: 83, loss is 1.2447561025619507\n", + "epoch: 20 step: 84, loss is 1.1890451908111572\n", + "epoch: 20 step: 85, loss is 1.1572215557098389\n", + "epoch: 20 step: 86, loss is 1.0833027362823486\n", + "epoch: 20 step: 87, loss is 1.2199535369873047\n", + "epoch: 20 step: 88, loss is 1.1976938247680664\n", + "epoch: 20 step: 89, loss is 1.0819637775421143\n", + "epoch: 20 step: 90, loss is 1.1475963592529297\n", + "epoch: 20 step: 91, loss is 1.0737191438674927\n", + "epoch: 20 step: 92, loss is 1.1654491424560547\n", + "epoch: 20 step: 93, loss is 1.1825215816497803\n", + "epoch: 20 step: 94, loss is 1.1032356023788452\n", + "epoch: 20 step: 95, loss is 1.1863725185394287\n", + "epoch: 20 step: 96, loss is 1.1883025169372559\n", + "epoch: 20 step: 97, loss is 1.1805461645126343\n", + "epoch: 20 step: 98, loss is 1.1032130718231201\n", + "epoch: 20 step: 99, loss is 1.1556464433670044\n", + "epoch: 20 step: 100, loss is 1.1605743169784546\n", + 
"epoch: 20 step: 101, loss is 1.1690945625305176\n", + "epoch: 20 step: 102, loss is 1.130037546157837\n", + "epoch: 20 step: 103, loss is 1.1504579782485962\n", + "epoch: 20 step: 104, loss is 1.178275465965271\n", + "epoch: 20 step: 105, loss is 1.1509562730789185\n", + "epoch: 20 step: 106, loss is 1.144791603088379\n", + "epoch: 20 step: 107, loss is 1.1362645626068115\n", + "epoch: 20 step: 108, loss is 1.2036851644515991\n", + "epoch: 20 step: 109, loss is 1.2267506122589111\n", + "epoch: 20 step: 110, loss is 1.0866347551345825\n", + "epoch: 20 step: 111, loss is 1.2031121253967285\n", + "epoch: 20 step: 112, loss is 1.2599865198135376\n", + "epoch: 20 step: 113, loss is 1.2101389169692993\n", + "epoch: 20 step: 114, loss is 1.1447999477386475\n", + "epoch: 20 step: 115, loss is 1.1208603382110596\n", + "epoch: 20 step: 116, loss is 1.1714301109313965\n", + "epoch: 20 step: 117, loss is 1.1814026832580566\n", + "epoch: 20 step: 118, loss is 1.1908423900604248\n", + "epoch: 20 step: 119, loss is 1.1768466234207153\n", + "epoch: 20 step: 120, loss is 1.2135396003723145\n", + "epoch: 20 step: 121, loss is 1.1163644790649414\n", + "epoch: 20 step: 122, loss is 1.1494414806365967\n", + "epoch: 20 step: 123, loss is 1.1984634399414062\n", + "epoch: 20 step: 124, loss is 1.1460108757019043\n", + "epoch: 20 step: 125, loss is 1.1925883293151855\n", + "epoch: 20 step: 126, loss is 1.180509090423584\n", + "epoch: 20 step: 127, loss is 1.1332647800445557\n", + "epoch: 20 step: 128, loss is 1.2072861194610596\n", + "epoch: 20 step: 129, loss is 1.1722263097763062\n", + "epoch: 20 step: 130, loss is 1.1853983402252197\n", + "epoch: 20 step: 131, loss is 1.157015323638916\n", + "epoch: 20 step: 132, loss is 1.1700773239135742\n", + "epoch: 20 step: 133, loss is 1.1471483707427979\n", + "epoch: 20 step: 134, loss is 1.1582492589950562\n", + "epoch: 20 step: 135, loss is 1.167000651359558\n", + "epoch: 20 step: 136, loss is 1.152208924293518\n", + "epoch: 20 step: 137, loss is 1.2254990339279175\n", + "epoch: 20 step: 138, loss is 1.1749860048294067\n", + "epoch: 20 step: 139, loss is 1.1845543384552002\n", + "epoch: 20 step: 140, loss is 1.2049533128738403\n", + "epoch: 20 step: 141, loss is 1.1038239002227783\n", + "epoch: 20 step: 142, loss is 1.0985110998153687\n", + "epoch: 20 step: 143, loss is 1.1985714435577393\n", + "epoch: 20 step: 144, loss is 1.1989521980285645\n", + "epoch: 20 step: 145, loss is 1.1879767179489136\n", + "epoch: 20 step: 146, loss is 1.2484734058380127\n", + "epoch: 20 step: 147, loss is 1.2236131429672241\n", + "epoch: 20 step: 148, loss is 1.1662968397140503\n", + "epoch: 20 step: 149, loss is 1.167721152305603\n", + "epoch: 20 step: 150, loss is 1.092069149017334\n", + "epoch: 20 step: 151, loss is 1.1716843843460083\n", + "epoch: 20 step: 152, loss is 1.1662172079086304\n", + "epoch: 20 step: 153, loss is 1.1618289947509766\n", + "epoch: 20 step: 154, loss is 1.1092547178268433\n", + "epoch: 20 step: 155, loss is 1.1374517679214478\n", + "epoch: 20 step: 156, loss is 1.1378986835479736\n", + "epoch: 20 step: 157, loss is 1.1868596076965332\n", + "epoch: 20 step: 158, loss is 1.1807548999786377\n", + "epoch: 20 step: 159, loss is 1.183173418045044\n", + "epoch: 20 step: 160, loss is 1.1944574117660522\n", + "epoch: 20 step: 161, loss is 1.1941155195236206\n", + "epoch: 20 step: 162, loss is 1.1461974382400513\n", + "epoch: 20 step: 163, loss is 1.1514352560043335\n", + "epoch: 20 step: 164, loss is 1.1706821918487549\n", + "epoch: 20 step: 165, loss is 
1.1658430099487305\n", + "epoch: 20 step: 166, loss is 1.1380360126495361\n", + "epoch: 20 step: 167, loss is 1.1350111961364746\n", + "epoch: 20 step: 168, loss is 1.156770944595337\n", + "epoch: 20 step: 169, loss is 1.1557471752166748\n", + "epoch: 20 step: 170, loss is 1.1548410654067993\n", + "epoch: 20 step: 171, loss is 1.1343274116516113\n", + "epoch: 20 step: 172, loss is 1.1592196226119995\n", + "epoch: 20 step: 173, loss is 1.209885597229004\n", + "epoch: 20 step: 174, loss is 1.2574174404144287\n", + "epoch: 20 step: 175, loss is 1.1715922355651855\n", + "epoch: 20 step: 176, loss is 1.1506952047348022\n", + "epoch: 20 step: 177, loss is 1.1678893566131592\n", + "epoch: 20 step: 178, loss is 1.1502816677093506\n", + "epoch: 20 step: 179, loss is 1.2388675212860107\n", + "epoch: 20 step: 180, loss is 1.1580145359039307\n", + "epoch: 20 step: 181, loss is 1.1210862398147583\n", + "epoch: 20 step: 182, loss is 1.2468597888946533\n", + "epoch: 20 step: 183, loss is 1.1764988899230957\n", + "epoch: 20 step: 184, loss is 1.0629863739013672\n", + "epoch: 20 step: 185, loss is 1.237863540649414\n", + "epoch: 20 step: 186, loss is 1.237848162651062\n", + "epoch: 20 step: 187, loss is 1.1853551864624023\n", + "epoch: 20 step: 188, loss is 1.1534221172332764\n", + "epoch: 20 step: 189, loss is 1.255497932434082\n", + "epoch: 20 step: 190, loss is 1.2386860847473145\n", + "epoch: 20 step: 191, loss is 1.2321326732635498\n", + "epoch: 20 step: 192, loss is 1.1634422540664673\n", + "epoch: 20 step: 193, loss is 1.0897608995437622\n", + "epoch: 20 step: 194, loss is 1.1868847608566284\n", + "epoch: 20 step: 195, loss is 1.2006402015686035\n", + "Train epoch time: 100180.662 ms, per step time: 513.747 ms\n", + "epoch: 21 step: 1, loss is 1.1136233806610107\n", + "epoch: 21 step: 2, loss is 1.1924371719360352\n", + "epoch: 21 step: 3, loss is 1.1554560661315918\n", + "epoch: 21 step: 4, loss is 1.0925145149230957\n", + "epoch: 21 step: 5, loss is 1.076945185661316\n", + "epoch: 21 step: 6, loss is 1.202726125717163\n", + "epoch: 21 step: 7, loss is 1.166943073272705\n", + "epoch: 21 step: 8, loss is 1.1830153465270996\n", + "epoch: 21 step: 9, loss is 1.1505470275878906\n", + "epoch: 21 step: 10, loss is 1.163312554359436\n", + "epoch: 21 step: 11, loss is 1.1061975955963135\n", + "epoch: 21 step: 12, loss is 1.197044849395752\n", + "epoch: 21 step: 13, loss is 1.188642144203186\n", + "epoch: 21 step: 14, loss is 1.1751655340194702\n", + "epoch: 21 step: 15, loss is 1.1289103031158447\n", + "epoch: 21 step: 16, loss is 1.0814595222473145\n", + "epoch: 21 step: 17, loss is 1.1015903949737549\n", + "epoch: 21 step: 18, loss is 1.0989611148834229\n", + "epoch: 21 step: 19, loss is 1.2133710384368896\n", + "epoch: 21 step: 20, loss is 1.178265929222107\n", + "epoch: 21 step: 21, loss is 1.1221787929534912\n", + "epoch: 21 step: 22, loss is 1.1879911422729492\n", + "epoch: 21 step: 23, loss is 1.261170506477356\n", + "epoch: 21 step: 24, loss is 1.0898795127868652\n", + "epoch: 21 step: 25, loss is 1.127431035041809\n", + "epoch: 21 step: 26, loss is 1.162710189819336\n", + "epoch: 21 step: 27, loss is 1.1290348768234253\n", + "epoch: 21 step: 28, loss is 1.0981855392456055\n", + "epoch: 21 step: 29, loss is 1.215757131576538\n", + "epoch: 21 step: 30, loss is 1.127636194229126\n", + "epoch: 21 step: 31, loss is 1.0912600755691528\n", + "epoch: 21 step: 32, loss is 1.1477477550506592\n", + "epoch: 21 step: 33, loss is 1.1790227890014648\n", + "epoch: 21 step: 34, loss is 1.1896525621414185\n", + 
"epoch: 21 step: 35, loss is 1.2226746082305908\n", + "epoch: 21 step: 36, loss is 1.1000785827636719\n", + "epoch: 21 step: 37, loss is 1.1924872398376465\n", + "epoch: 21 step: 38, loss is 1.2016026973724365\n", + "epoch: 21 step: 39, loss is 1.1450155973434448\n", + "epoch: 21 step: 40, loss is 1.1145334243774414\n", + "epoch: 21 step: 41, loss is 1.1462466716766357\n", + "epoch: 21 step: 42, loss is 1.1533167362213135\n", + "epoch: 21 step: 43, loss is 1.1012704372406006\n", + "epoch: 21 step: 44, loss is 1.1797215938568115\n", + "epoch: 21 step: 45, loss is 1.2103328704833984\n", + "epoch: 21 step: 46, loss is 1.1467087268829346\n", + "epoch: 21 step: 47, loss is 1.1233391761779785\n", + "epoch: 21 step: 48, loss is 1.1665340662002563\n", + "epoch: 21 step: 49, loss is 1.206769585609436\n", + "epoch: 21 step: 50, loss is 1.1737746000289917\n", + "epoch: 21 step: 51, loss is 1.1529309749603271\n", + "epoch: 21 step: 52, loss is 1.1829960346221924\n", + "epoch: 21 step: 53, loss is 1.1621856689453125\n", + "epoch: 21 step: 54, loss is 1.125807523727417\n", + "epoch: 21 step: 55, loss is 1.1452544927597046\n", + "epoch: 21 step: 56, loss is 1.1784058809280396\n", + "epoch: 21 step: 57, loss is 1.1605210304260254\n", + "epoch: 21 step: 58, loss is 1.203359603881836\n", + "epoch: 21 step: 59, loss is 1.2041962146759033\n", + "epoch: 21 step: 60, loss is 1.1478453874588013\n", + "epoch: 21 step: 61, loss is 1.1452339887619019\n", + "epoch: 21 step: 62, loss is 1.157623052597046\n", + "epoch: 21 step: 63, loss is 1.1311287879943848\n", + "epoch: 21 step: 64, loss is 1.0759551525115967\n", + "epoch: 21 step: 65, loss is 1.1086596250534058\n", + "epoch: 21 step: 66, loss is 1.2060517072677612\n", + "epoch: 21 step: 67, loss is 1.2691706418991089\n", + "epoch: 21 step: 68, loss is 1.1908296346664429\n", + "epoch: 21 step: 69, loss is 1.1695635318756104\n", + "epoch: 21 step: 70, loss is 1.066986083984375\n", + "epoch: 21 step: 71, loss is 1.1171053647994995\n", + "epoch: 21 step: 72, loss is 1.2222929000854492\n", + "epoch: 21 step: 73, loss is 1.120353102684021\n", + "epoch: 21 step: 74, loss is 1.1316025257110596\n", + "epoch: 21 step: 75, loss is 1.1524975299835205\n", + "epoch: 21 step: 76, loss is 1.2082349061965942\n", + "epoch: 21 step: 77, loss is 1.1045547723770142\n", + "epoch: 21 step: 78, loss is 1.1486722230911255\n", + "epoch: 21 step: 79, loss is 1.189565896987915\n", + "epoch: 21 step: 80, loss is 1.0804485082626343\n", + "epoch: 21 step: 81, loss is 1.1257343292236328\n", + "epoch: 21 step: 82, loss is 1.2272095680236816\n", + "epoch: 21 step: 83, loss is 1.1544325351715088\n", + "epoch: 21 step: 84, loss is 1.1596571207046509\n", + "epoch: 21 step: 85, loss is 1.1800297498703003\n", + "epoch: 21 step: 86, loss is 1.1395714282989502\n", + "epoch: 21 step: 87, loss is 1.123936414718628\n", + "epoch: 21 step: 88, loss is 1.1012885570526123\n", + "epoch: 21 step: 89, loss is 1.1599925756454468\n", + "epoch: 21 step: 90, loss is 1.12013578414917\n", + "epoch: 21 step: 91, loss is 1.1764113903045654\n", + "epoch: 21 step: 92, loss is 1.102885127067566\n", + "epoch: 21 step: 93, loss is 1.1448493003845215\n", + "epoch: 21 step: 94, loss is 1.1611634492874146\n", + "epoch: 21 step: 95, loss is 1.1960501670837402\n", + "epoch: 21 step: 96, loss is 1.0660121440887451\n", + "epoch: 21 step: 97, loss is 1.1688830852508545\n", + "epoch: 21 step: 98, loss is 1.1193877458572388\n", + "epoch: 21 step: 99, loss is 1.1021188497543335\n", + "epoch: 21 step: 100, loss is 1.227249026298523\n", + 
"epoch: 21 step: 101, loss is 1.1259087324142456\n", + "epoch: 21 step: 102, loss is 1.120883584022522\n", + "epoch: 21 step: 103, loss is 1.133556604385376\n", + "epoch: 21 step: 104, loss is 1.1527225971221924\n", + "epoch: 21 step: 105, loss is 1.0652472972869873\n", + "epoch: 21 step: 106, loss is 1.1547417640686035\n", + "epoch: 21 step: 107, loss is 1.1278295516967773\n", + "epoch: 21 step: 108, loss is 1.1259499788284302\n", + "epoch: 21 step: 109, loss is 1.1263880729675293\n", + "epoch: 21 step: 110, loss is 1.0741418600082397\n", + "epoch: 21 step: 111, loss is 1.1559582948684692\n", + "epoch: 21 step: 112, loss is 1.0334546566009521\n", + "epoch: 21 step: 113, loss is 1.186385989189148\n", + "epoch: 21 step: 114, loss is 1.1376380920410156\n", + "epoch: 21 step: 115, loss is 1.069919228553772\n", + "epoch: 21 step: 116, loss is 1.1116620302200317\n", + "epoch: 21 step: 117, loss is 1.1393089294433594\n", + "epoch: 21 step: 118, loss is 1.1985349655151367\n", + "epoch: 21 step: 119, loss is 1.1146472692489624\n", + "epoch: 21 step: 120, loss is 1.1724528074264526\n", + "epoch: 21 step: 121, loss is 1.1801071166992188\n", + "epoch: 21 step: 122, loss is 1.2109780311584473\n", + "epoch: 21 step: 123, loss is 1.213984727859497\n", + "epoch: 21 step: 124, loss is 1.1322370767593384\n", + "epoch: 21 step: 125, loss is 1.1845097541809082\n", + "epoch: 21 step: 126, loss is 1.1123130321502686\n", + "epoch: 21 step: 127, loss is 1.1387230157852173\n", + "epoch: 21 step: 128, loss is 1.1547086238861084\n", + "epoch: 21 step: 129, loss is 1.114665150642395\n", + "epoch: 21 step: 130, loss is 1.1472835540771484\n", + "epoch: 21 step: 131, loss is 1.0767631530761719\n", + "epoch: 21 step: 132, loss is 1.1490250825881958\n", + "epoch: 21 step: 133, loss is 1.0783729553222656\n", + "epoch: 21 step: 134, loss is 1.1131491661071777\n", + "epoch: 21 step: 135, loss is 1.1070185899734497\n", + "epoch: 21 step: 136, loss is 1.1252334117889404\n", + "epoch: 21 step: 137, loss is 1.1467362642288208\n", + "epoch: 21 step: 138, loss is 1.0787755250930786\n", + "epoch: 21 step: 139, loss is 1.0633907318115234\n", + "epoch: 21 step: 140, loss is 1.156173825263977\n", + "epoch: 21 step: 141, loss is 1.1732720136642456\n", + "epoch: 21 step: 142, loss is 1.0812647342681885\n", + "epoch: 21 step: 143, loss is 1.1575112342834473\n", + "epoch: 21 step: 144, loss is 1.0708969831466675\n", + "epoch: 21 step: 145, loss is 1.129906177520752\n", + "epoch: 21 step: 146, loss is 1.0662868022918701\n", + "epoch: 21 step: 147, loss is 1.1299110651016235\n", + "epoch: 21 step: 148, loss is 1.0945532321929932\n", + "epoch: 21 step: 149, loss is 1.1256828308105469\n", + "epoch: 21 step: 150, loss is 1.166341781616211\n", + "epoch: 21 step: 151, loss is 1.1523692607879639\n", + "epoch: 21 step: 152, loss is 1.1554231643676758\n", + "epoch: 21 step: 153, loss is 1.142305850982666\n", + "epoch: 21 step: 154, loss is 1.1474905014038086\n", + "epoch: 21 step: 155, loss is 1.164309024810791\n", + "epoch: 21 step: 156, loss is 1.08024001121521\n", + "epoch: 21 step: 157, loss is 1.0947679281234741\n", + "epoch: 21 step: 158, loss is 1.1829907894134521\n", + "epoch: 21 step: 159, loss is 1.103774070739746\n", + "epoch: 21 step: 160, loss is 1.0320134162902832\n", + "epoch: 21 step: 161, loss is 1.1995084285736084\n", + "epoch: 21 step: 162, loss is 1.2578468322753906\n", + "epoch: 21 step: 163, loss is 1.2138800621032715\n", + "epoch: 21 step: 164, loss is 1.1422381401062012\n", + "epoch: 21 step: 165, loss is 
1.1439235210418701\n", + "epoch: 21 step: 166, loss is 1.2324931621551514\n", + "epoch: 21 step: 167, loss is 1.0578545331954956\n", + "epoch: 21 step: 168, loss is 1.1423414945602417\n", + "epoch: 21 step: 169, loss is 1.1647958755493164\n", + "epoch: 21 step: 170, loss is 1.1603341102600098\n", + "epoch: 21 step: 171, loss is 1.1409279108047485\n", + "epoch: 21 step: 172, loss is 1.1343870162963867\n", + "epoch: 21 step: 173, loss is 1.216925859451294\n", + "epoch: 21 step: 174, loss is 1.1190595626831055\n", + "epoch: 21 step: 175, loss is 1.1064687967300415\n", + "epoch: 21 step: 176, loss is 1.174953818321228\n", + "epoch: 21 step: 177, loss is 1.104549765586853\n", + "epoch: 21 step: 178, loss is 1.1936630010604858\n", + "epoch: 21 step: 179, loss is 1.155011773109436\n", + "epoch: 21 step: 180, loss is 1.1706663370132446\n", + "epoch: 21 step: 181, loss is 1.0950441360473633\n", + "epoch: 21 step: 182, loss is 1.2145719528198242\n", + "epoch: 21 step: 183, loss is 1.0936195850372314\n", + "epoch: 21 step: 184, loss is 1.0203099250793457\n", + "epoch: 21 step: 185, loss is 1.1337761878967285\n", + "epoch: 21 step: 186, loss is 1.1367603540420532\n", + "epoch: 21 step: 187, loss is 1.1859527826309204\n", + "epoch: 21 step: 188, loss is 1.0892118215560913\n", + "epoch: 21 step: 189, loss is 1.1981728076934814\n", + "epoch: 21 step: 190, loss is 1.1291687488555908\n", + "epoch: 21 step: 191, loss is 1.0830864906311035\n", + "epoch: 21 step: 192, loss is 1.091564416885376\n", + "epoch: 21 step: 193, loss is 1.2007150650024414\n", + "epoch: 21 step: 194, loss is 1.1622735261917114\n", + "epoch: 21 step: 195, loss is 1.1157915592193604\n", + "Train epoch time: 102196.990 ms, per step time: 524.087 ms\n", + "epoch: 22 step: 1, loss is 1.053156852722168\n", + "epoch: 22 step: 2, loss is 1.1535413265228271\n", + "epoch: 22 step: 3, loss is 1.0816398859024048\n", + "epoch: 22 step: 4, loss is 1.1328957080841064\n", + "epoch: 22 step: 5, loss is 1.0946224927902222\n", + "epoch: 22 step: 6, loss is 1.099941372871399\n", + "epoch: 22 step: 7, loss is 1.1025413274765015\n", + "epoch: 22 step: 8, loss is 1.1687490940093994\n", + "epoch: 22 step: 9, loss is 1.1164209842681885\n", + "epoch: 22 step: 10, loss is 1.1398595571517944\n", + "epoch: 22 step: 11, loss is 1.0430141687393188\n", + "epoch: 22 step: 12, loss is 1.171239972114563\n", + "epoch: 22 step: 13, loss is 1.1105473041534424\n", + "epoch: 22 step: 14, loss is 1.150712251663208\n", + "epoch: 22 step: 15, loss is 1.0943889617919922\n", + "epoch: 22 step: 16, loss is 1.1118571758270264\n", + "epoch: 22 step: 17, loss is 1.0643647909164429\n", + "epoch: 22 step: 18, loss is 0.9957579374313354\n", + "epoch: 22 step: 19, loss is 1.1990325450897217\n", + "epoch: 22 step: 20, loss is 1.1433093547821045\n", + "epoch: 22 step: 21, loss is 1.135042667388916\n", + "epoch: 22 step: 22, loss is 1.080661654472351\n", + "epoch: 22 step: 23, loss is 1.1187849044799805\n", + "epoch: 22 step: 24, loss is 1.0400731563568115\n", + "epoch: 22 step: 25, loss is 1.0477231740951538\n", + "epoch: 22 step: 26, loss is 1.0271868705749512\n", + "epoch: 22 step: 27, loss is 1.181168556213379\n", + "epoch: 22 step: 28, loss is 1.192152500152588\n", + "epoch: 22 step: 29, loss is 1.0384271144866943\n", + "epoch: 22 step: 30, loss is 1.111762285232544\n", + "epoch: 22 step: 31, loss is 1.1032397747039795\n", + "epoch: 22 step: 32, loss is 1.1420546770095825\n", + "epoch: 22 step: 33, loss is 1.1147706508636475\n", + "epoch: 22 step: 34, loss is 1.0690999031066895\n", + 
"epoch: 22 step: 35, loss is 1.082465410232544\n", + "epoch: 22 step: 36, loss is 1.2221803665161133\n", + "epoch: 22 step: 37, loss is 1.0350701808929443\n", + "epoch: 22 step: 38, loss is 1.1678917407989502\n", + "epoch: 22 step: 39, loss is 1.1202714443206787\n", + "epoch: 22 step: 40, loss is 1.139586329460144\n", + "epoch: 22 step: 41, loss is 1.1417877674102783\n", + "epoch: 22 step: 42, loss is 1.1896815299987793\n", + "epoch: 22 step: 43, loss is 1.1297979354858398\n", + "epoch: 22 step: 44, loss is 1.0879833698272705\n", + "epoch: 22 step: 45, loss is 1.133296251296997\n", + "epoch: 22 step: 46, loss is 1.1499215364456177\n", + "epoch: 22 step: 47, loss is 1.1757020950317383\n", + "epoch: 22 step: 48, loss is 1.0820949077606201\n", + "epoch: 22 step: 49, loss is 1.0915420055389404\n", + "epoch: 22 step: 50, loss is 1.1721776723861694\n", + "epoch: 22 step: 51, loss is 1.1921318769454956\n", + "epoch: 22 step: 52, loss is 1.1627157926559448\n", + "epoch: 22 step: 53, loss is 1.0917532444000244\n", + "epoch: 22 step: 54, loss is 1.1842501163482666\n", + "epoch: 22 step: 55, loss is 1.2218613624572754\n", + "epoch: 22 step: 56, loss is 1.0742710828781128\n", + "epoch: 22 step: 57, loss is 1.1106314659118652\n", + "epoch: 22 step: 58, loss is 1.0843173265457153\n", + "epoch: 22 step: 59, loss is 1.1337382793426514\n", + "epoch: 22 step: 60, loss is 1.1362837553024292\n", + "epoch: 22 step: 61, loss is 1.152259111404419\n", + "epoch: 22 step: 62, loss is 1.0915368795394897\n", + "epoch: 22 step: 63, loss is 1.1396265029907227\n", + "epoch: 22 step: 64, loss is 1.1222875118255615\n", + "epoch: 22 step: 65, loss is 1.1460210084915161\n", + "epoch: 22 step: 66, loss is 1.132211685180664\n", + "epoch: 22 step: 67, loss is 1.1640839576721191\n", + "epoch: 22 step: 68, loss is 1.208666205406189\n", + "epoch: 22 step: 69, loss is 1.1289105415344238\n", + "epoch: 22 step: 70, loss is 1.043383002281189\n", + "epoch: 22 step: 71, loss is 1.1550343036651611\n", + "epoch: 22 step: 72, loss is 1.1157106161117554\n", + "epoch: 22 step: 73, loss is 1.1634641885757446\n", + "epoch: 22 step: 74, loss is 1.079584002494812\n", + "epoch: 22 step: 75, loss is 1.1676127910614014\n", + "epoch: 22 step: 76, loss is 1.1242144107818604\n", + "epoch: 22 step: 77, loss is 1.1164196729660034\n", + "epoch: 22 step: 78, loss is 1.1505260467529297\n", + "epoch: 22 step: 79, loss is 1.0869994163513184\n", + "epoch: 22 step: 80, loss is 1.1616209745407104\n", + "epoch: 22 step: 81, loss is 1.063849925994873\n", + "epoch: 22 step: 82, loss is 1.1704427003860474\n", + "epoch: 22 step: 83, loss is 1.1020615100860596\n", + "epoch: 22 step: 84, loss is 1.0423674583435059\n", + "epoch: 22 step: 85, loss is 1.086961030960083\n", + "epoch: 22 step: 86, loss is 1.0874052047729492\n", + "epoch: 22 step: 87, loss is 1.0662345886230469\n", + "epoch: 22 step: 88, loss is 1.1146552562713623\n", + "epoch: 22 step: 89, loss is 1.1078271865844727\n", + "epoch: 22 step: 90, loss is 1.1391448974609375\n", + "epoch: 22 step: 91, loss is 1.2246800661087036\n", + "epoch: 22 step: 92, loss is 1.0871126651763916\n", + "epoch: 22 step: 93, loss is 1.2083103656768799\n", + "epoch: 22 step: 94, loss is 1.116729497909546\n", + "epoch: 22 step: 95, loss is 1.0926835536956787\n", + "epoch: 22 step: 96, loss is 1.1633312702178955\n", + "epoch: 22 step: 97, loss is 1.2342963218688965\n", + "epoch: 22 step: 98, loss is 1.1413816213607788\n", + "epoch: 22 step: 99, loss is 1.1097474098205566\n", + "epoch: 22 step: 100, loss is 1.091911792755127\n", + 
"epoch: 22 step: 101, loss is 1.0712780952453613\n", + "epoch: 22 step: 102, loss is 1.1108686923980713\n", + "epoch: 22 step: 103, loss is 1.1630536317825317\n", + "epoch: 22 step: 104, loss is 1.1125123500823975\n", + "epoch: 22 step: 105, loss is 1.1197980642318726\n", + "epoch: 22 step: 106, loss is 1.0898280143737793\n", + "epoch: 22 step: 107, loss is 1.1364562511444092\n", + "epoch: 22 step: 108, loss is 1.0692002773284912\n", + "epoch: 22 step: 109, loss is 1.0743474960327148\n", + "epoch: 22 step: 110, loss is 1.145050287246704\n", + "epoch: 22 step: 111, loss is 1.1503729820251465\n", + "epoch: 22 step: 112, loss is 1.0571787357330322\n", + "epoch: 22 step: 113, loss is 1.1153061389923096\n", + "epoch: 22 step: 114, loss is 1.119551420211792\n", + "epoch: 22 step: 115, loss is 1.0604743957519531\n", + "epoch: 22 step: 116, loss is 1.0439152717590332\n", + "epoch: 22 step: 117, loss is 1.1970635652542114\n", + "epoch: 22 step: 118, loss is 1.1921751499176025\n", + "epoch: 22 step: 119, loss is 1.1625492572784424\n", + "epoch: 22 step: 120, loss is 1.0381760597229004\n", + "epoch: 22 step: 121, loss is 1.0540997982025146\n", + "epoch: 22 step: 122, loss is 1.1956419944763184\n", + "epoch: 22 step: 123, loss is 1.1831533908843994\n", + "epoch: 22 step: 124, loss is 1.099522590637207\n", + "epoch: 22 step: 125, loss is 1.1484341621398926\n", + "epoch: 22 step: 126, loss is 1.1566627025604248\n", + "epoch: 22 step: 127, loss is 1.1988028287887573\n", + "epoch: 22 step: 128, loss is 1.199836015701294\n", + "epoch: 22 step: 129, loss is 1.202191948890686\n", + "epoch: 22 step: 130, loss is 1.204795479774475\n", + "epoch: 22 step: 131, loss is 1.0909414291381836\n", + "epoch: 22 step: 132, loss is 1.142120361328125\n", + "epoch: 22 step: 133, loss is 1.1876928806304932\n", + "epoch: 22 step: 134, loss is 1.1259181499481201\n", + "epoch: 22 step: 135, loss is 1.1735620498657227\n", + "epoch: 22 step: 136, loss is 1.154081106185913\n", + "epoch: 22 step: 137, loss is 1.2274457216262817\n", + "epoch: 22 step: 138, loss is 1.0974849462509155\n", + "epoch: 22 step: 139, loss is 1.1664543151855469\n", + "epoch: 22 step: 140, loss is 1.0953965187072754\n", + "epoch: 22 step: 141, loss is 1.2180781364440918\n", + "epoch: 22 step: 142, loss is 1.171477198600769\n", + "epoch: 22 step: 143, loss is 1.2661272287368774\n", + "epoch: 22 step: 144, loss is 1.0082556009292603\n", + "epoch: 22 step: 145, loss is 1.2302515506744385\n", + "epoch: 22 step: 146, loss is 1.1275852918624878\n", + "epoch: 22 step: 147, loss is 1.1927855014801025\n", + "epoch: 22 step: 148, loss is 1.1288310289382935\n", + "epoch: 22 step: 149, loss is 1.2113319635391235\n", + "epoch: 22 step: 150, loss is 1.1766245365142822\n", + "epoch: 22 step: 151, loss is 1.0534050464630127\n", + "epoch: 22 step: 152, loss is 1.159206748008728\n", + "epoch: 22 step: 153, loss is 1.0224480628967285\n", + "epoch: 22 step: 154, loss is 1.1384189128875732\n", + "epoch: 22 step: 155, loss is 1.1645461320877075\n", + "epoch: 22 step: 156, loss is 1.1113648414611816\n", + "epoch: 22 step: 157, loss is 1.1159203052520752\n", + "epoch: 22 step: 158, loss is 1.1814916133880615\n", + "epoch: 22 step: 159, loss is 1.1772476434707642\n", + "epoch: 22 step: 160, loss is 1.072504997253418\n", + "epoch: 22 step: 161, loss is 1.110647201538086\n", + "epoch: 22 step: 162, loss is 1.161494255065918\n", + "epoch: 22 step: 163, loss is 1.094288945198059\n", + "epoch: 22 step: 164, loss is 1.1697369813919067\n", + "epoch: 22 step: 165, loss is 
1.1365221738815308\n", + "epoch: 22 step: 166, loss is 1.1303839683532715\n", + "epoch: 22 step: 167, loss is 1.109291672706604\n", + "epoch: 22 step: 168, loss is 1.185670256614685\n", + "epoch: 22 step: 169, loss is 1.130418300628662\n", + "epoch: 22 step: 170, loss is 1.1546063423156738\n", + "epoch: 22 step: 171, loss is 1.1468586921691895\n", + "epoch: 22 step: 172, loss is 1.1738550662994385\n", + "epoch: 22 step: 173, loss is 1.1578304767608643\n", + "epoch: 22 step: 174, loss is 1.1375482082366943\n", + "epoch: 22 step: 175, loss is 1.1644868850708008\n", + "epoch: 22 step: 176, loss is 1.1947007179260254\n", + "epoch: 22 step: 177, loss is 1.1481926441192627\n", + "epoch: 22 step: 178, loss is 1.12645423412323\n", + "epoch: 22 step: 179, loss is 1.1093240976333618\n", + "epoch: 22 step: 180, loss is 1.076364517211914\n", + "epoch: 22 step: 181, loss is 1.1648823022842407\n", + "epoch: 22 step: 182, loss is 1.1309123039245605\n", + "epoch: 22 step: 183, loss is 1.1633305549621582\n", + "epoch: 22 step: 184, loss is 1.3893473148345947\n", + "epoch: 22 step: 185, loss is 1.1795145273208618\n", + "epoch: 22 step: 186, loss is 1.1634504795074463\n", + "epoch: 22 step: 187, loss is 1.128756046295166\n", + "epoch: 22 step: 188, loss is 1.1844087839126587\n", + "epoch: 22 step: 189, loss is 1.1458275318145752\n", + "epoch: 22 step: 190, loss is 1.203652024269104\n", + "epoch: 22 step: 191, loss is 1.1693229675292969\n", + "epoch: 22 step: 192, loss is 1.3400585651397705\n", + "epoch: 22 step: 193, loss is 1.2078784704208374\n", + "epoch: 22 step: 194, loss is 1.1676981449127197\n", + "epoch: 22 step: 195, loss is 1.0499308109283447\n", + "Train epoch time: 103053.446 ms, per step time: 528.479 ms\n", + "epoch: 23 step: 1, loss is 1.23060142993927\n", + "epoch: 23 step: 2, loss is 1.0255039930343628\n", + "epoch: 23 step: 3, loss is 1.1687312126159668\n", + "epoch: 23 step: 4, loss is 1.1005406379699707\n", + "epoch: 23 step: 5, loss is 1.1148852109909058\n", + "epoch: 23 step: 6, loss is 1.132994294166565\n", + "epoch: 23 step: 7, loss is 1.1580065488815308\n", + "epoch: 23 step: 8, loss is 1.1325548887252808\n", + "epoch: 23 step: 9, loss is 1.0975141525268555\n", + "epoch: 23 step: 10, loss is 1.0765408277511597\n", + "epoch: 23 step: 11, loss is 1.051107406616211\n", + "epoch: 23 step: 12, loss is 1.1210918426513672\n", + "epoch: 23 step: 13, loss is 1.0923571586608887\n", + "epoch: 23 step: 14, loss is 1.0521118640899658\n", + "epoch: 23 step: 15, loss is 1.1086299419403076\n", + "epoch: 23 step: 16, loss is 1.1891865730285645\n", + "epoch: 23 step: 17, loss is 1.1739667654037476\n", + "epoch: 23 step: 18, loss is 1.1619439125061035\n", + "epoch: 23 step: 19, loss is 1.155267596244812\n", + "epoch: 23 step: 20, loss is 1.1631109714508057\n", + "epoch: 23 step: 21, loss is 1.0774905681610107\n", + "epoch: 23 step: 22, loss is 1.1254184246063232\n", + "epoch: 23 step: 23, loss is 1.0895957946777344\n", + "epoch: 23 step: 24, loss is 1.0899434089660645\n", + "epoch: 23 step: 25, loss is 1.1948548555374146\n", + "epoch: 23 step: 26, loss is 1.1558705568313599\n", + "epoch: 23 step: 27, loss is 1.086866855621338\n", + "epoch: 23 step: 28, loss is 1.1321508884429932\n", + "epoch: 23 step: 29, loss is 1.1464433670043945\n", + "epoch: 23 step: 30, loss is 1.1823558807373047\n", + "epoch: 23 step: 31, loss is 1.0476692914962769\n", + "epoch: 23 step: 32, loss is 1.1315399408340454\n", + "epoch: 23 step: 33, loss is 1.1987141370773315\n", + "epoch: 23 step: 34, loss is 1.0620555877685547\n", + 
"epoch: 23 step: 35, loss is 1.1231003999710083\n", + "epoch: 23 step: 36, loss is 1.1471318006515503\n", + "epoch: 23 step: 37, loss is 1.1865224838256836\n", + "epoch: 23 step: 38, loss is 1.1108522415161133\n", + "epoch: 23 step: 39, loss is 1.0680031776428223\n", + "epoch: 23 step: 40, loss is 1.1681307554244995\n", + "epoch: 23 step: 41, loss is 1.148149013519287\n", + "epoch: 23 step: 42, loss is 1.1501030921936035\n", + "epoch: 23 step: 43, loss is 1.0906188488006592\n", + "epoch: 23 step: 44, loss is 1.004403829574585\n", + "epoch: 23 step: 45, loss is 1.117950677871704\n", + "epoch: 23 step: 46, loss is 1.1454203128814697\n", + "epoch: 23 step: 47, loss is 1.2016693353652954\n", + "epoch: 23 step: 48, loss is 1.094958782196045\n", + "epoch: 23 step: 49, loss is 1.2368526458740234\n", + "epoch: 23 step: 50, loss is 1.0840750932693481\n", + "epoch: 23 step: 51, loss is 1.1317412853240967\n", + "epoch: 23 step: 52, loss is 1.0132205486297607\n", + "epoch: 23 step: 53, loss is 1.2127323150634766\n", + "epoch: 23 step: 54, loss is 1.0677368640899658\n", + "epoch: 23 step: 55, loss is 1.0687330961227417\n", + "epoch: 23 step: 56, loss is 1.1159323453903198\n", + "epoch: 23 step: 57, loss is 1.0296789407730103\n", + "epoch: 23 step: 58, loss is 1.0986673831939697\n", + "epoch: 23 step: 59, loss is 1.1459102630615234\n", + "epoch: 23 step: 60, loss is 1.0554723739624023\n", + "epoch: 23 step: 61, loss is 1.0922234058380127\n", + "epoch: 23 step: 62, loss is 1.1049891710281372\n", + "epoch: 23 step: 63, loss is 1.0720704793930054\n", + "epoch: 23 step: 64, loss is 1.047195553779602\n", + "epoch: 23 step: 65, loss is 1.011221170425415\n", + "epoch: 23 step: 66, loss is 1.1429213285446167\n", + "epoch: 23 step: 67, loss is 1.1427664756774902\n", + "epoch: 23 step: 68, loss is 1.1061527729034424\n", + "epoch: 23 step: 69, loss is 1.0624393224716187\n", + "epoch: 23 step: 70, loss is 1.1704926490783691\n", + "epoch: 23 step: 71, loss is 1.000014066696167\n", + "epoch: 23 step: 72, loss is 1.0424562692642212\n", + "epoch: 23 step: 73, loss is 1.1205432415008545\n", + "epoch: 23 step: 74, loss is 1.072997808456421\n", + "epoch: 23 step: 75, loss is 1.147596836090088\n", + "epoch: 23 step: 76, loss is 1.1593763828277588\n", + "epoch: 23 step: 77, loss is 1.11289644241333\n", + "epoch: 23 step: 78, loss is 1.094421148300171\n", + "epoch: 23 step: 79, loss is 1.1736756563186646\n", + "epoch: 23 step: 80, loss is 1.1590044498443604\n", + "epoch: 23 step: 81, loss is 1.1220461130142212\n", + "epoch: 23 step: 82, loss is 1.11630380153656\n", + "epoch: 23 step: 83, loss is 1.1953988075256348\n", + "epoch: 23 step: 84, loss is 1.0856605768203735\n", + "epoch: 23 step: 85, loss is 1.111409306526184\n", + "epoch: 23 step: 86, loss is 1.1028155088424683\n", + "epoch: 23 step: 87, loss is 1.0907913446426392\n", + "epoch: 23 step: 88, loss is 1.0787906646728516\n", + "epoch: 23 step: 89, loss is 1.0538429021835327\n", + "epoch: 23 step: 90, loss is 1.13177490234375\n", + "epoch: 23 step: 91, loss is 1.1705591678619385\n", + "epoch: 23 step: 92, loss is 1.0735583305358887\n", + "epoch: 23 step: 93, loss is 1.1454147100448608\n", + "epoch: 23 step: 94, loss is 1.1158503293991089\n", + "epoch: 23 step: 95, loss is 1.0735394954681396\n", + "epoch: 23 step: 96, loss is 1.1525248289108276\n", + "epoch: 23 step: 97, loss is 1.1024384498596191\n", + "epoch: 23 step: 98, loss is 1.1699261665344238\n", + "epoch: 23 step: 99, loss is 1.1905255317687988\n", + "epoch: 23 step: 100, loss is 1.1377266645431519\n", + 
"epoch: 23 step: 101, loss is 1.2313189506530762\n", + "epoch: 23 step: 102, loss is 1.07224440574646\n", + "epoch: 23 step: 103, loss is 1.103691816329956\n", + "epoch: 23 step: 104, loss is 1.15224027633667\n", + "epoch: 23 step: 105, loss is 1.0705592632293701\n", + "epoch: 23 step: 106, loss is 1.1051955223083496\n", + "epoch: 23 step: 107, loss is 1.2066456079483032\n", + "epoch: 23 step: 108, loss is 1.0786014795303345\n", + "epoch: 23 step: 109, loss is 1.185365915298462\n", + "epoch: 23 step: 110, loss is 1.1303402185440063\n", + "epoch: 23 step: 111, loss is 0.9840598106384277\n", + "epoch: 23 step: 112, loss is 1.1175740957260132\n", + "epoch: 23 step: 113, loss is 1.1302778720855713\n", + "epoch: 23 step: 114, loss is 1.0298997163772583\n", + "epoch: 23 step: 115, loss is 1.2638742923736572\n", + "epoch: 23 step: 116, loss is 1.1628462076187134\n", + "epoch: 23 step: 117, loss is 1.0850586891174316\n", + "epoch: 23 step: 118, loss is 1.167520523071289\n", + "epoch: 23 step: 119, loss is 1.0793277025222778\n", + "epoch: 23 step: 120, loss is 1.1597110033035278\n", + "epoch: 23 step: 121, loss is 1.1698490381240845\n", + "epoch: 23 step: 122, loss is 1.1650772094726562\n", + "epoch: 23 step: 123, loss is 1.1600513458251953\n", + "epoch: 23 step: 124, loss is 1.1099028587341309\n", + "epoch: 23 step: 125, loss is 1.0584297180175781\n", + "epoch: 23 step: 126, loss is 1.1048550605773926\n", + "epoch: 23 step: 127, loss is 1.0792443752288818\n", + "epoch: 23 step: 128, loss is 1.2057498693466187\n", + "epoch: 23 step: 129, loss is 1.1588170528411865\n", + "epoch: 23 step: 130, loss is 1.102455496788025\n", + "epoch: 23 step: 131, loss is 1.1319975852966309\n", + "epoch: 23 step: 132, loss is 1.1234331130981445\n", + "epoch: 23 step: 133, loss is 1.128485918045044\n", + "epoch: 23 step: 134, loss is 1.1213276386260986\n", + "epoch: 23 step: 135, loss is 1.0617302656173706\n", + "epoch: 23 step: 136, loss is 1.1427631378173828\n", + "epoch: 23 step: 137, loss is 1.125303864479065\n", + "epoch: 23 step: 138, loss is 1.1129512786865234\n", + "epoch: 23 step: 139, loss is 1.1597378253936768\n", + "epoch: 23 step: 140, loss is 1.1364946365356445\n", + "epoch: 23 step: 141, loss is 1.1551380157470703\n", + "epoch: 23 step: 142, loss is 1.0561033487319946\n", + "epoch: 23 step: 143, loss is 1.063478946685791\n", + "epoch: 23 step: 144, loss is 1.1405067443847656\n", + "epoch: 23 step: 145, loss is 1.0635958909988403\n", + "epoch: 23 step: 146, loss is 1.1711987257003784\n", + "epoch: 23 step: 147, loss is 1.1311051845550537\n", + "epoch: 23 step: 148, loss is 1.1289314031600952\n", + "epoch: 23 step: 149, loss is 1.0933849811553955\n", + "epoch: 23 step: 150, loss is 1.1984546184539795\n", + "epoch: 23 step: 151, loss is 1.126842737197876\n", + "epoch: 23 step: 152, loss is 1.0809719562530518\n", + "epoch: 23 step: 153, loss is 1.1505146026611328\n", + "epoch: 23 step: 154, loss is 1.0994770526885986\n", + "epoch: 23 step: 155, loss is 1.160103440284729\n", + "epoch: 23 step: 156, loss is 1.1276865005493164\n", + "epoch: 23 step: 157, loss is 1.0915659666061401\n", + "epoch: 23 step: 158, loss is 1.0549752712249756\n", + "epoch: 23 step: 159, loss is 1.1737967729568481\n", + "epoch: 23 step: 160, loss is 1.0977596044540405\n", + "epoch: 23 step: 161, loss is 1.1503750085830688\n", + "epoch: 23 step: 162, loss is 1.0595636367797852\n", + "epoch: 23 step: 163, loss is 1.0559470653533936\n", + "epoch: 23 step: 164, loss is 1.0374300479888916\n", + "epoch: 23 step: 165, loss is 
1.0670839548110962\n", + "epoch: 23 step: 166, loss is 1.1164746284484863\n", + "epoch: 23 step: 167, loss is 1.18875253200531\n", + "epoch: 23 step: 168, loss is 1.1237956285476685\n", + "epoch: 23 step: 169, loss is 1.0908786058425903\n", + "epoch: 23 step: 170, loss is 1.111847162246704\n", + "epoch: 23 step: 171, loss is 1.088847041130066\n", + "epoch: 23 step: 172, loss is 1.1184419393539429\n", + "epoch: 23 step: 173, loss is 1.0997718572616577\n", + "epoch: 23 step: 174, loss is 1.2070255279541016\n", + "epoch: 23 step: 175, loss is 1.1819725036621094\n", + "epoch: 23 step: 176, loss is 1.0090901851654053\n", + "epoch: 23 step: 177, loss is 1.0174307823181152\n", + "epoch: 23 step: 178, loss is 1.112828016281128\n", + "epoch: 23 step: 179, loss is 1.1326416730880737\n", + "epoch: 23 step: 180, loss is 1.1470729112625122\n", + "epoch: 23 step: 181, loss is 1.0882997512817383\n", + "epoch: 23 step: 182, loss is 1.072413682937622\n", + "epoch: 23 step: 183, loss is 1.0885734558105469\n", + "epoch: 23 step: 184, loss is 1.1408852338790894\n", + "epoch: 23 step: 185, loss is 1.0942869186401367\n", + "epoch: 23 step: 186, loss is 1.0758434534072876\n", + "epoch: 23 step: 187, loss is 1.124873399734497\n", + "epoch: 23 step: 188, loss is 1.1136083602905273\n", + "epoch: 23 step: 189, loss is 1.0702288150787354\n", + "epoch: 23 step: 190, loss is 1.1252052783966064\n", + "epoch: 23 step: 191, loss is 1.0550436973571777\n", + "epoch: 23 step: 192, loss is 1.0438551902770996\n", + "epoch: 23 step: 193, loss is 1.1110122203826904\n", + "epoch: 23 step: 194, loss is 1.1157619953155518\n", + "epoch: 23 step: 195, loss is 1.0738880634307861\n", + "Train epoch time: 95633.299 ms, per step time: 490.427 ms\n", + "epoch: 24 step: 1, loss is 1.096152424812317\n", + "epoch: 24 step: 2, loss is 1.0520071983337402\n", + "epoch: 24 step: 3, loss is 1.1662302017211914\n", + "epoch: 24 step: 4, loss is 1.1078131198883057\n", + "epoch: 24 step: 5, loss is 1.1180524826049805\n", + "epoch: 24 step: 6, loss is 1.0745381116867065\n", + "epoch: 24 step: 7, loss is 1.058577060699463\n", + "epoch: 24 step: 8, loss is 1.1413288116455078\n", + "epoch: 24 step: 9, loss is 0.9957098364830017\n", + "epoch: 24 step: 10, loss is 1.0945830345153809\n", + "epoch: 24 step: 11, loss is 1.156493902206421\n", + "epoch: 24 step: 12, loss is 1.0764557123184204\n", + "epoch: 24 step: 13, loss is 1.037079930305481\n", + "epoch: 24 step: 14, loss is 1.0762743949890137\n", + "epoch: 24 step: 15, loss is 1.0631129741668701\n", + "epoch: 24 step: 16, loss is 1.082192063331604\n", + "epoch: 24 step: 17, loss is 1.0326694250106812\n", + "epoch: 24 step: 18, loss is 1.0645891427993774\n", + "epoch: 24 step: 19, loss is 1.0412588119506836\n", + "epoch: 24 step: 20, loss is 1.0649237632751465\n", + "epoch: 24 step: 21, loss is 1.0811808109283447\n", + "epoch: 24 step: 22, loss is 1.1197242736816406\n", + "epoch: 24 step: 23, loss is 0.978630542755127\n", + "epoch: 24 step: 24, loss is 1.0717936754226685\n", + "epoch: 24 step: 25, loss is 1.025496006011963\n", + "epoch: 24 step: 26, loss is 1.0743672847747803\n", + "epoch: 24 step: 27, loss is 1.010847568511963\n", + "epoch: 24 step: 28, loss is 1.0650514364242554\n", + "epoch: 24 step: 29, loss is 1.0444257259368896\n", + "epoch: 24 step: 30, loss is 1.1398463249206543\n", + "epoch: 24 step: 31, loss is 1.0559895038604736\n", + "epoch: 24 step: 32, loss is 1.1017115116119385\n", + "epoch: 24 step: 33, loss is 1.1054749488830566\n", + "epoch: 24 step: 34, loss is 1.1087939739227295\n", + 
"epoch: 24 step: 35, loss is 1.0712831020355225\n", + "epoch: 24 step: 36, loss is 1.0619534254074097\n", + "epoch: 24 step: 37, loss is 1.079497218132019\n", + "epoch: 24 step: 38, loss is 1.0836414098739624\n", + "epoch: 24 step: 39, loss is 1.1021862030029297\n", + "epoch: 24 step: 40, loss is 1.142088770866394\n", + "epoch: 24 step: 41, loss is 1.1445538997650146\n", + "epoch: 24 step: 42, loss is 1.0974385738372803\n", + "epoch: 24 step: 43, loss is 1.0653133392333984\n", + "epoch: 24 step: 44, loss is 1.0574004650115967\n", + "epoch: 24 step: 45, loss is 1.0424071550369263\n", + "epoch: 24 step: 46, loss is 1.0724709033966064\n", + "epoch: 24 step: 47, loss is 1.1203973293304443\n", + "epoch: 24 step: 48, loss is 1.1303638219833374\n", + "epoch: 24 step: 49, loss is 1.030886173248291\n", + "epoch: 24 step: 50, loss is 1.1133726835250854\n", + "epoch: 24 step: 51, loss is 1.0405986309051514\n", + "epoch: 24 step: 52, loss is 1.1081304550170898\n", + "epoch: 24 step: 53, loss is 1.0910568237304688\n", + "epoch: 24 step: 54, loss is 1.1189912557601929\n", + "epoch: 24 step: 55, loss is 1.0701524019241333\n", + "epoch: 24 step: 56, loss is 1.0900342464447021\n", + "epoch: 24 step: 57, loss is 1.1272692680358887\n", + "epoch: 24 step: 58, loss is 1.0400912761688232\n", + "epoch: 24 step: 59, loss is 1.0722076892852783\n", + "epoch: 24 step: 60, loss is 1.0862547159194946\n", + "epoch: 24 step: 61, loss is 1.1114259958267212\n", + "epoch: 24 step: 62, loss is 1.1696957349777222\n", + "epoch: 24 step: 63, loss is 1.019826889038086\n", + "epoch: 24 step: 64, loss is 1.0864108800888062\n", + "epoch: 24 step: 65, loss is 1.0550880432128906\n", + "epoch: 24 step: 66, loss is 1.1201727390289307\n", + "epoch: 24 step: 67, loss is 1.0929617881774902\n", + "epoch: 24 step: 68, loss is 1.1151789426803589\n", + "epoch: 24 step: 69, loss is 1.1193866729736328\n", + "epoch: 24 step: 70, loss is 1.0559523105621338\n", + "epoch: 24 step: 71, loss is 1.1011052131652832\n", + "epoch: 24 step: 72, loss is 1.0652623176574707\n", + "epoch: 24 step: 73, loss is 1.1209611892700195\n", + "epoch: 24 step: 74, loss is 1.132474660873413\n", + "epoch: 24 step: 75, loss is 1.0862020254135132\n", + "epoch: 24 step: 76, loss is 0.995017945766449\n", + "epoch: 24 step: 77, loss is 1.1501684188842773\n", + "epoch: 24 step: 78, loss is 1.0823405981063843\n", + "epoch: 24 step: 79, loss is 1.022630214691162\n", + "epoch: 24 step: 80, loss is 1.0457707643508911\n", + "epoch: 24 step: 81, loss is 1.0823180675506592\n", + "epoch: 24 step: 82, loss is 1.05121648311615\n", + "epoch: 24 step: 83, loss is 1.085707426071167\n", + "epoch: 24 step: 84, loss is 1.0759004354476929\n", + "epoch: 24 step: 85, loss is 1.082223892211914\n", + "epoch: 24 step: 86, loss is 1.128966212272644\n", + "epoch: 24 step: 87, loss is 1.1260977983474731\n", + "epoch: 24 step: 88, loss is 1.1166375875473022\n", + "epoch: 24 step: 89, loss is 1.121874213218689\n", + "epoch: 24 step: 90, loss is 1.1183830499649048\n", + "epoch: 24 step: 91, loss is 1.0714095830917358\n", + "epoch: 24 step: 92, loss is 1.0668385028839111\n", + "epoch: 24 step: 93, loss is 1.047579050064087\n", + "epoch: 24 step: 94, loss is 1.039780855178833\n", + "epoch: 24 step: 95, loss is 1.0675655603408813\n", + "epoch: 24 step: 96, loss is 1.0588375329971313\n", + "epoch: 24 step: 97, loss is 1.1765391826629639\n", + "epoch: 24 step: 98, loss is 1.1153229475021362\n", + "epoch: 24 step: 99, loss is 1.0653719902038574\n", + "epoch: 24 step: 100, loss is 0.9948022365570068\n", + 
"epoch: 24 step: 101, loss is 1.0454946756362915\n", + "epoch: 24 step: 102, loss is 1.0172431468963623\n", + "epoch: 24 step: 103, loss is 1.111290693283081\n", + "epoch: 24 step: 104, loss is 1.1113276481628418\n", + "epoch: 24 step: 105, loss is 1.0548597574234009\n", + "epoch: 24 step: 106, loss is 1.037804126739502\n", + "epoch: 24 step: 107, loss is 1.069464087486267\n", + "epoch: 24 step: 108, loss is 1.1003122329711914\n", + "epoch: 24 step: 109, loss is 1.0783336162567139\n", + "epoch: 24 step: 110, loss is 0.986735463142395\n", + "epoch: 24 step: 111, loss is 1.0348858833312988\n", + "epoch: 24 step: 112, loss is 1.1161210536956787\n", + "epoch: 24 step: 113, loss is 1.031844973564148\n", + "epoch: 24 step: 114, loss is 1.014697551727295\n", + "epoch: 24 step: 115, loss is 1.118982195854187\n", + "epoch: 24 step: 116, loss is 1.0907526016235352\n", + "epoch: 24 step: 117, loss is 1.0741140842437744\n", + "epoch: 24 step: 118, loss is 1.0825951099395752\n", + "epoch: 24 step: 119, loss is 1.101610541343689\n", + "epoch: 24 step: 120, loss is 1.1411774158477783\n", + "epoch: 24 step: 121, loss is 1.1668100357055664\n", + "epoch: 24 step: 122, loss is 1.1070239543914795\n", + "epoch: 24 step: 123, loss is 1.1728230714797974\n", + "epoch: 24 step: 124, loss is 0.997096836566925\n", + "epoch: 24 step: 125, loss is 1.1085114479064941\n", + "epoch: 24 step: 126, loss is 1.0279585123062134\n", + "epoch: 24 step: 127, loss is 1.043959617614746\n", + "epoch: 24 step: 128, loss is 1.1281020641326904\n", + "epoch: 24 step: 129, loss is 1.0456418991088867\n", + "epoch: 24 step: 130, loss is 1.082914113998413\n", + "epoch: 24 step: 131, loss is 1.046229362487793\n", + "epoch: 24 step: 132, loss is 1.093191146850586\n", + "epoch: 24 step: 133, loss is 1.0866057872772217\n", + "epoch: 24 step: 134, loss is 1.0642213821411133\n", + "epoch: 24 step: 135, loss is 1.1136398315429688\n", + "epoch: 24 step: 136, loss is 1.0770180225372314\n", + "epoch: 24 step: 137, loss is 1.084915041923523\n", + "epoch: 24 step: 138, loss is 1.06498384475708\n", + "epoch: 24 step: 139, loss is 1.0873427391052246\n", + "epoch: 24 step: 140, loss is 1.1768497228622437\n", + "epoch: 24 step: 141, loss is 1.0861817598342896\n", + "epoch: 24 step: 142, loss is 1.1886545419692993\n", + "epoch: 24 step: 143, loss is 1.0437426567077637\n", + "epoch: 24 step: 144, loss is 1.1548455953598022\n", + "epoch: 24 step: 145, loss is 1.1071984767913818\n", + "epoch: 24 step: 146, loss is 1.0699193477630615\n", + "epoch: 24 step: 147, loss is 1.1265416145324707\n", + "epoch: 24 step: 148, loss is 1.0821709632873535\n", + "epoch: 24 step: 149, loss is 1.1060984134674072\n", + "epoch: 24 step: 150, loss is 1.1378763914108276\n", + "epoch: 24 step: 151, loss is 1.081998586654663\n", + "epoch: 24 step: 152, loss is 1.1639556884765625\n", + "epoch: 24 step: 153, loss is 1.0505272150039673\n", + "epoch: 24 step: 154, loss is 1.0912973880767822\n", + "epoch: 24 step: 155, loss is 1.0907968282699585\n", + "epoch: 24 step: 156, loss is 1.059391975402832\n", + "epoch: 24 step: 157, loss is 1.1339949369430542\n", + "epoch: 24 step: 158, loss is 1.1344521045684814\n", + "epoch: 24 step: 159, loss is 1.0964425802230835\n", + "epoch: 24 step: 160, loss is 1.0616052150726318\n", + "epoch: 24 step: 161, loss is 1.0505805015563965\n", + "epoch: 24 step: 162, loss is 1.03092622756958\n", + "epoch: 24 step: 163, loss is 1.1385046243667603\n", + "epoch: 24 step: 164, loss is 1.1685724258422852\n", + "epoch: 24 step: 165, loss is 1.0794379711151123\n", + 
"epoch: 24 step: 166, loss is 1.1040763854980469\n", + "epoch: 24 step: 167, loss is 1.0609817504882812\n", + "epoch: 24 step: 168, loss is 1.0839005708694458\n", + "epoch: 24 step: 169, loss is 1.1828938722610474\n", + "epoch: 24 step: 170, loss is 1.1196638345718384\n", + "epoch: 24 step: 171, loss is 1.2198418378829956\n", + "epoch: 24 step: 172, loss is 1.0947452783584595\n", + "epoch: 24 step: 173, loss is 1.067333698272705\n", + "epoch: 24 step: 174, loss is 1.171656608581543\n", + "epoch: 24 step: 175, loss is 1.114274501800537\n", + "epoch: 24 step: 176, loss is 1.0562642812728882\n", + "epoch: 24 step: 177, loss is 1.055665135383606\n", + "epoch: 24 step: 178, loss is 1.1491202116012573\n", + "epoch: 24 step: 179, loss is 1.07564115524292\n", + "epoch: 24 step: 180, loss is 1.0792652368545532\n", + "epoch: 24 step: 181, loss is 1.1454006433486938\n", + "epoch: 24 step: 182, loss is 1.1562148332595825\n", + "epoch: 24 step: 183, loss is 1.0712690353393555\n", + "epoch: 24 step: 184, loss is 1.0680439472198486\n", + "epoch: 24 step: 185, loss is 1.1142473220825195\n", + "epoch: 24 step: 186, loss is 1.0798276662826538\n", + "epoch: 24 step: 187, loss is 1.1558393239974976\n", + "epoch: 24 step: 188, loss is 1.0682551860809326\n", + "epoch: 24 step: 189, loss is 1.0201600790023804\n", + "epoch: 24 step: 190, loss is 1.141103744506836\n", + "epoch: 24 step: 191, loss is 1.1102389097213745\n", + "epoch: 24 step: 192, loss is 1.06436026096344\n", + "epoch: 24 step: 193, loss is 0.9822629690170288\n", + "epoch: 24 step: 194, loss is 1.0634980201721191\n", + "epoch: 24 step: 195, loss is 1.1474499702453613\n", + "Train epoch time: 99402.173 ms, per step time: 509.755 ms\n", + "epoch: 25 step: 1, loss is 1.0749220848083496\n", + "epoch: 25 step: 2, loss is 1.0379492044448853\n", + "epoch: 25 step: 3, loss is 1.0403025150299072\n", + "epoch: 25 step: 4, loss is 1.0051038265228271\n", + "epoch: 25 step: 5, loss is 1.0393528938293457\n", + "epoch: 25 step: 6, loss is 1.017599105834961\n", + "epoch: 25 step: 7, loss is 1.084409236907959\n", + "epoch: 25 step: 8, loss is 1.0471415519714355\n", + "epoch: 25 step: 9, loss is 1.0347020626068115\n", + "epoch: 25 step: 10, loss is 1.0314974784851074\n", + "epoch: 25 step: 11, loss is 1.144582748413086\n", + "epoch: 25 step: 12, loss is 1.0551949739456177\n", + "epoch: 25 step: 13, loss is 1.064736247062683\n", + "epoch: 25 step: 14, loss is 1.023233413696289\n", + "epoch: 25 step: 15, loss is 0.9984829425811768\n", + "epoch: 25 step: 16, loss is 1.083414912223816\n", + "epoch: 25 step: 17, loss is 1.1002464294433594\n", + "epoch: 25 step: 18, loss is 1.0444315671920776\n", + "epoch: 25 step: 19, loss is 0.995835542678833\n", + "epoch: 25 step: 20, loss is 1.0372276306152344\n", + "epoch: 25 step: 21, loss is 1.0701453685760498\n", + "epoch: 25 step: 22, loss is 1.0857269763946533\n", + "epoch: 25 step: 23, loss is 1.0861470699310303\n", + "epoch: 25 step: 24, loss is 1.078922986984253\n", + "epoch: 25 step: 25, loss is 1.169203281402588\n", + "epoch: 25 step: 26, loss is 1.0408902168273926\n", + "epoch: 25 step: 27, loss is 1.0514144897460938\n", + "epoch: 25 step: 28, loss is 1.0632753372192383\n", + "epoch: 25 step: 29, loss is 1.104520320892334\n", + "epoch: 25 step: 30, loss is 1.074289321899414\n", + "epoch: 25 step: 31, loss is 1.0821237564086914\n", + "epoch: 25 step: 32, loss is 1.1281318664550781\n", + "epoch: 25 step: 33, loss is 1.1050763130187988\n", + "epoch: 25 step: 34, loss is 1.0671796798706055\n", + "epoch: 25 step: 35, loss is 
1.0637259483337402\n", + "epoch: 25 step: 36, loss is 1.066709041595459\n", + "epoch: 25 step: 37, loss is 1.0637730360031128\n", + "epoch: 25 step: 38, loss is 1.0174492597579956\n", + "epoch: 25 step: 39, loss is 1.168111801147461\n", + "epoch: 25 step: 40, loss is 1.0607125759124756\n", + "epoch: 25 step: 41, loss is 1.089240550994873\n", + "epoch: 25 step: 42, loss is 1.0490936040878296\n", + "epoch: 25 step: 43, loss is 1.1080021858215332\n", + "epoch: 25 step: 44, loss is 0.9858703017234802\n", + "epoch: 25 step: 45, loss is 1.0853673219680786\n", + "epoch: 25 step: 46, loss is 1.0885578393936157\n", + "epoch: 25 step: 47, loss is 1.2018464803695679\n", + "epoch: 25 step: 48, loss is 1.0625593662261963\n", + "epoch: 25 step: 49, loss is 1.1245335340499878\n", + "epoch: 25 step: 50, loss is 1.135481357574463\n", + "epoch: 25 step: 51, loss is 1.008669137954712\n", + "epoch: 25 step: 52, loss is 1.1920673847198486\n", + "epoch: 25 step: 53, loss is 1.0021615028381348\n", + "epoch: 25 step: 54, loss is 1.0977214574813843\n", + "epoch: 25 step: 55, loss is 1.0840771198272705\n", + "epoch: 25 step: 56, loss is 1.103593349456787\n", + "epoch: 25 step: 57, loss is 1.150404453277588\n", + "epoch: 25 step: 58, loss is 1.122603416442871\n", + "epoch: 25 step: 59, loss is 1.1690304279327393\n", + "epoch: 25 step: 60, loss is 1.0875837802886963\n", + "epoch: 25 step: 61, loss is 1.153435230255127\n", + "epoch: 25 step: 62, loss is 1.0937747955322266\n", + "epoch: 25 step: 63, loss is 1.0937683582305908\n", + "epoch: 25 step: 64, loss is 1.0497373342514038\n", + "epoch: 25 step: 65, loss is 1.0424377918243408\n", + "epoch: 25 step: 66, loss is 1.1378694772720337\n", + "epoch: 25 step: 67, loss is 1.0675889253616333\n", + "epoch: 25 step: 68, loss is 1.0757155418395996\n", + "epoch: 25 step: 69, loss is 1.1098382472991943\n", + "epoch: 25 step: 70, loss is 1.0310020446777344\n", + "epoch: 25 step: 71, loss is 1.0837379693984985\n", + "epoch: 25 step: 72, loss is 1.0741844177246094\n", + "epoch: 25 step: 73, loss is 1.1160144805908203\n", + "epoch: 25 step: 74, loss is 1.0398166179656982\n", + "epoch: 25 step: 75, loss is 1.082122802734375\n", + "epoch: 25 step: 76, loss is 1.0905686616897583\n", + "epoch: 25 step: 77, loss is 1.1977332830429077\n", + "epoch: 25 step: 78, loss is 1.0998982191085815\n", + "epoch: 25 step: 79, loss is 1.0738770961761475\n", + "epoch: 25 step: 80, loss is 1.0800620317459106\n", + "epoch: 25 step: 81, loss is 1.1624616384506226\n", + "epoch: 25 step: 82, loss is 1.1241395473480225\n", + "epoch: 25 step: 83, loss is 1.1237306594848633\n", + "epoch: 25 step: 84, loss is 1.1876444816589355\n", + "epoch: 25 step: 85, loss is 1.0593899488449097\n", + "epoch: 25 step: 86, loss is 1.1413724422454834\n", + "epoch: 25 step: 87, loss is 1.1098703145980835\n", + "epoch: 25 step: 88, loss is 1.1218563318252563\n", + "epoch: 25 step: 89, loss is 1.0533998012542725\n", + "epoch: 25 step: 90, loss is 1.1111992597579956\n", + "epoch: 25 step: 91, loss is 1.0958877801895142\n", + "epoch: 25 step: 92, loss is 1.1060446500778198\n", + "epoch: 25 step: 93, loss is 1.050142765045166\n", + "epoch: 25 step: 94, loss is 1.1848796606063843\n", + "epoch: 25 step: 95, loss is 1.1111098527908325\n", + "epoch: 25 step: 96, loss is 1.0556793212890625\n", + "epoch: 25 step: 97, loss is 1.110507607460022\n", + "epoch: 25 step: 98, loss is 1.0809193849563599\n", + "epoch: 25 step: 99, loss is 1.101583480834961\n", + "epoch: 25 step: 100, loss is 1.0841407775878906\n", + "epoch: 25 step: 101, loss is 
1.1078135967254639\n", + "epoch: 25 step: 102, loss is 1.135474681854248\n", + "epoch: 25 step: 103, loss is 1.0238667726516724\n", + "epoch: 25 step: 104, loss is 1.0637602806091309\n", + "epoch: 25 step: 105, loss is 1.101015567779541\n", + "epoch: 25 step: 106, loss is 1.099929928779602\n", + "epoch: 25 step: 107, loss is 1.0296080112457275\n", + "epoch: 25 step: 108, loss is 1.1028733253479004\n", + "epoch: 25 step: 109, loss is 1.1569968461990356\n", + "epoch: 25 step: 110, loss is 1.085854411125183\n", + "epoch: 25 step: 111, loss is 1.1489572525024414\n", + "epoch: 25 step: 112, loss is 1.1082024574279785\n", + "epoch: 25 step: 113, loss is 0.9982233047485352\n", + "epoch: 25 step: 114, loss is 1.091442346572876\n", + "epoch: 25 step: 115, loss is 1.0865168571472168\n", + "epoch: 25 step: 116, loss is 1.0767576694488525\n", + "epoch: 25 step: 117, loss is 1.097948431968689\n", + "epoch: 25 step: 118, loss is 1.0709913969039917\n", + "epoch: 25 step: 119, loss is 1.1485974788665771\n", + "epoch: 25 step: 120, loss is 1.03230619430542\n", + "epoch: 25 step: 121, loss is 1.1467266082763672\n", + "epoch: 25 step: 122, loss is 1.1516987085342407\n", + "epoch: 25 step: 123, loss is 1.021085262298584\n", + "epoch: 25 step: 124, loss is 1.095871925354004\n", + "epoch: 25 step: 125, loss is 1.1351364850997925\n", + "epoch: 25 step: 126, loss is 1.0784903764724731\n", + "epoch: 25 step: 127, loss is 1.131304144859314\n", + "epoch: 25 step: 128, loss is 1.0256192684173584\n", + "epoch: 25 step: 129, loss is 1.1145023107528687\n", + "epoch: 25 step: 130, loss is 1.0707743167877197\n", + "epoch: 25 step: 131, loss is 1.1265639066696167\n", + "epoch: 25 step: 132, loss is 1.052263855934143\n", + "epoch: 25 step: 133, loss is 1.0156241655349731\n", + "epoch: 25 step: 134, loss is 1.042445182800293\n", + "epoch: 25 step: 135, loss is 1.0751914978027344\n", + "epoch: 25 step: 136, loss is 1.1900955438613892\n", + "epoch: 25 step: 137, loss is 1.153630018234253\n", + "epoch: 25 step: 138, loss is 1.0784456729888916\n", + "epoch: 25 step: 139, loss is 1.0961649417877197\n", + "epoch: 25 step: 140, loss is 1.1608918905258179\n", + "epoch: 25 step: 141, loss is 1.0814754962921143\n", + "epoch: 25 step: 142, loss is 1.239119052886963\n", + "epoch: 25 step: 143, loss is 1.1088074445724487\n", + "epoch: 25 step: 144, loss is 1.1636638641357422\n", + "epoch: 25 step: 145, loss is 1.035202980041504\n", + "epoch: 25 step: 146, loss is 1.035993218421936\n", + "epoch: 25 step: 147, loss is 1.0820560455322266\n", + "epoch: 25 step: 148, loss is 1.058875560760498\n", + "epoch: 25 step: 149, loss is 1.1934581995010376\n", + "epoch: 25 step: 150, loss is 0.9936809539794922\n", + "epoch: 25 step: 151, loss is 1.0508790016174316\n", + "epoch: 25 step: 152, loss is 1.0967371463775635\n", + "epoch: 25 step: 153, loss is 1.1068633794784546\n", + "epoch: 25 step: 154, loss is 1.1238404512405396\n", + "epoch: 25 step: 155, loss is 1.1152658462524414\n", + "epoch: 25 step: 156, loss is 1.0772194862365723\n", + "epoch: 25 step: 157, loss is 1.1279296875\n", + "epoch: 25 step: 158, loss is 1.0917967557907104\n", + "epoch: 25 step: 159, loss is 1.024680256843567\n", + "epoch: 25 step: 160, loss is 1.0305293798446655\n", + "epoch: 25 step: 161, loss is 1.029788851737976\n", + "epoch: 25 step: 162, loss is 1.0818700790405273\n", + "epoch: 25 step: 163, loss is 1.1243774890899658\n", + "epoch: 25 step: 164, loss is 1.0760592222213745\n", + "epoch: 25 step: 165, loss is 1.1050708293914795\n", + "epoch: 25 step: 166, loss is 
1.1303510665893555\n", + "epoch: 25 step: 167, loss is 1.0312774181365967\n", + "epoch: 25 step: 168, loss is 1.1573801040649414\n", + "epoch: 25 step: 169, loss is 1.0986881256103516\n", + "epoch: 25 step: 170, loss is 1.1014612913131714\n", + "epoch: 25 step: 171, loss is 1.1209540367126465\n", + "epoch: 25 step: 172, loss is 1.1032898426055908\n", + "epoch: 25 step: 173, loss is 1.1240501403808594\n", + "epoch: 25 step: 174, loss is 1.0729403495788574\n", + "epoch: 25 step: 175, loss is 1.156635046005249\n", + "epoch: 25 step: 176, loss is 1.1322540044784546\n", + "epoch: 25 step: 177, loss is 1.1024738550186157\n", + "epoch: 25 step: 178, loss is 1.0661497116088867\n", + "epoch: 25 step: 179, loss is 1.069516897201538\n", + "epoch: 25 step: 180, loss is 0.9995025992393494\n", + "epoch: 25 step: 181, loss is 1.120347261428833\n", + "epoch: 25 step: 182, loss is 1.1292648315429688\n", + "epoch: 25 step: 183, loss is 1.0347588062286377\n", + "epoch: 25 step: 184, loss is 1.0398995876312256\n", + "epoch: 25 step: 185, loss is 1.0884382724761963\n", + "epoch: 25 step: 186, loss is 1.1189897060394287\n", + "epoch: 25 step: 187, loss is 1.012641191482544\n", + "epoch: 25 step: 188, loss is 1.022613763809204\n", + "epoch: 25 step: 189, loss is 1.0172086954116821\n", + "epoch: 25 step: 190, loss is 1.0877549648284912\n", + "epoch: 25 step: 191, loss is 1.1894092559814453\n", + "epoch: 25 step: 192, loss is 1.0635960102081299\n", + "epoch: 25 step: 193, loss is 1.018746018409729\n", + "epoch: 25 step: 194, loss is 1.0949205160140991\n", + "epoch: 25 step: 195, loss is 1.0922601222991943\n", + "Train epoch time: 99045.700 ms, per step time: 507.927 ms\n", + "epoch: 26 step: 1, loss is 1.0588682889938354\n", + "epoch: 26 step: 2, loss is 0.9864459037780762\n", + "epoch: 26 step: 3, loss is 1.1088792085647583\n", + "epoch: 26 step: 4, loss is 1.044288158416748\n", + "epoch: 26 step: 5, loss is 1.0198500156402588\n", + "epoch: 26 step: 6, loss is 1.1288080215454102\n", + "epoch: 26 step: 7, loss is 1.0551180839538574\n", + "epoch: 26 step: 8, loss is 1.0818400382995605\n", + "epoch: 26 step: 9, loss is 0.9872353076934814\n", + "epoch: 26 step: 10, loss is 1.124257206916809\n", + "epoch: 26 step: 11, loss is 1.0578993558883667\n", + "epoch: 26 step: 12, loss is 1.0334570407867432\n", + "epoch: 26 step: 13, loss is 1.0642715692520142\n", + "epoch: 26 step: 14, loss is 1.0508956909179688\n", + "epoch: 26 step: 15, loss is 1.023590087890625\n", + "epoch: 26 step: 16, loss is 1.037264347076416\n", + "epoch: 26 step: 17, loss is 1.0748530626296997\n", + "epoch: 26 step: 18, loss is 1.0846394300460815\n", + "epoch: 26 step: 19, loss is 0.9672864675521851\n", + "epoch: 26 step: 20, loss is 1.0041342973709106\n", + "epoch: 26 step: 21, loss is 1.0067589282989502\n", + "epoch: 26 step: 22, loss is 1.0192878246307373\n", + "epoch: 26 step: 23, loss is 1.0926614999771118\n", + "epoch: 26 step: 24, loss is 1.0823075771331787\n", + "epoch: 26 step: 25, loss is 0.9974242448806763\n", + "epoch: 26 step: 26, loss is 1.1325483322143555\n", + "epoch: 26 step: 27, loss is 1.16825532913208\n", + "epoch: 26 step: 28, loss is 1.0898933410644531\n", + "epoch: 26 step: 29, loss is 1.0758275985717773\n", + "epoch: 26 step: 30, loss is 1.1113241910934448\n", + "epoch: 26 step: 31, loss is 0.9773358702659607\n", + "epoch: 26 step: 32, loss is 1.0833854675292969\n", + "epoch: 26 step: 33, loss is 1.0709326267242432\n", + "epoch: 26 step: 34, loss is 1.0887677669525146\n", + "epoch: 26 step: 35, loss is 1.110779881477356\n", + 
"epoch: 26 step: 36, loss is 1.1127338409423828\n", + "epoch: 26 step: 37, loss is 1.0500644445419312\n", + "epoch: 26 step: 38, loss is 1.0615084171295166\n", + "epoch: 26 step: 39, loss is 1.1052722930908203\n", + "epoch: 26 step: 40, loss is 1.0591717958450317\n", + "epoch: 26 step: 41, loss is 1.1540465354919434\n", + "epoch: 26 step: 42, loss is 1.0811409950256348\n", + "epoch: 26 step: 43, loss is 1.0632864236831665\n", + "epoch: 26 step: 44, loss is 1.0455198287963867\n", + "epoch: 26 step: 45, loss is 1.0768296718597412\n", + "epoch: 26 step: 46, loss is 1.0419708490371704\n", + "epoch: 26 step: 47, loss is 1.0711581707000732\n", + "epoch: 26 step: 48, loss is 1.0763905048370361\n", + "epoch: 26 step: 49, loss is 1.11626398563385\n", + "epoch: 26 step: 50, loss is 1.1138763427734375\n", + "epoch: 26 step: 51, loss is 1.0366363525390625\n", + "epoch: 26 step: 52, loss is 1.1421151161193848\n", + "epoch: 26 step: 53, loss is 1.1082251071929932\n", + "epoch: 26 step: 54, loss is 1.1021032333374023\n", + "epoch: 26 step: 55, loss is 1.1303074359893799\n", + "epoch: 26 step: 56, loss is 0.9623292684555054\n", + "epoch: 26 step: 57, loss is 1.0499966144561768\n", + "epoch: 26 step: 58, loss is 1.0344343185424805\n", + "epoch: 26 step: 59, loss is 1.0933879613876343\n", + "epoch: 26 step: 60, loss is 1.091841697692871\n", + "epoch: 26 step: 61, loss is 1.078423023223877\n", + "epoch: 26 step: 62, loss is 1.034406065940857\n", + "epoch: 26 step: 63, loss is 1.0127979516983032\n", + "epoch: 26 step: 64, loss is 1.0910660028457642\n", + "epoch: 26 step: 65, loss is 1.034348726272583\n", + "epoch: 26 step: 66, loss is 1.0127599239349365\n", + "epoch: 26 step: 67, loss is 1.0390090942382812\n", + "epoch: 26 step: 68, loss is 1.101246953010559\n", + "epoch: 26 step: 69, loss is 1.1350762844085693\n", + "epoch: 26 step: 70, loss is 1.013222336769104\n", + "epoch: 26 step: 71, loss is 1.1363506317138672\n", + "epoch: 26 step: 72, loss is 1.0457558631896973\n", + "epoch: 26 step: 73, loss is 1.1319408416748047\n", + "epoch: 26 step: 74, loss is 1.0664180517196655\n", + "epoch: 26 step: 75, loss is 1.016939401626587\n", + "epoch: 26 step: 76, loss is 1.0416345596313477\n", + "epoch: 26 step: 77, loss is 1.0265374183654785\n", + "epoch: 26 step: 78, loss is 1.0100188255310059\n", + "epoch: 26 step: 79, loss is 1.0723769664764404\n", + "epoch: 26 step: 80, loss is 1.0507785081863403\n", + "epoch: 26 step: 81, loss is 1.088484525680542\n", + "epoch: 26 step: 82, loss is 1.0253151655197144\n", + "epoch: 26 step: 83, loss is 0.9803435802459717\n", + "epoch: 26 step: 84, loss is 0.9944482445716858\n", + "epoch: 26 step: 85, loss is 1.0290894508361816\n", + "epoch: 26 step: 86, loss is 0.9729404449462891\n", + "epoch: 26 step: 87, loss is 1.0764617919921875\n", + "epoch: 26 step: 88, loss is 0.9833414554595947\n", + "epoch: 26 step: 89, loss is 1.0466423034667969\n", + "epoch: 26 step: 90, loss is 1.0545907020568848\n", + "epoch: 26 step: 91, loss is 1.0737918615341187\n", + "epoch: 26 step: 92, loss is 1.080391526222229\n", + "epoch: 26 step: 93, loss is 1.1075928211212158\n", + "epoch: 26 step: 94, loss is 1.1011675596237183\n", + "epoch: 26 step: 95, loss is 1.0183749198913574\n", + "epoch: 26 step: 96, loss is 1.0465056896209717\n", + "epoch: 26 step: 97, loss is 1.0415849685668945\n", + "epoch: 26 step: 98, loss is 1.0527229309082031\n", + "epoch: 26 step: 99, loss is 1.0698708295822144\n", + "epoch: 26 step: 100, loss is 1.0282378196716309\n", + "epoch: 26 step: 101, loss is 1.0372211933135986\n", + 
"epoch: 26 step: 102, loss is 1.0519763231277466\n", + "epoch: 26 step: 103, loss is 0.9330244660377502\n", + "epoch: 26 step: 104, loss is 1.1315479278564453\n", + "epoch: 26 step: 105, loss is 1.0983659029006958\n", + "epoch: 26 step: 106, loss is 1.1040188074111938\n", + "epoch: 26 step: 107, loss is 0.9946911334991455\n", + "epoch: 26 step: 108, loss is 0.9729794263839722\n", + "epoch: 26 step: 109, loss is 0.9741456508636475\n", + "epoch: 26 step: 110, loss is 1.0764539241790771\n", + "epoch: 26 step: 111, loss is 1.05156409740448\n", + "epoch: 26 step: 112, loss is 1.030848503112793\n", + "epoch: 26 step: 113, loss is 1.0814640522003174\n", + "epoch: 26 step: 114, loss is 0.985041618347168\n", + "epoch: 26 step: 115, loss is 1.043461799621582\n", + "epoch: 26 step: 116, loss is 1.0336103439331055\n", + "epoch: 26 step: 117, loss is 1.0685243606567383\n", + "epoch: 26 step: 118, loss is 1.0249128341674805\n", + "epoch: 26 step: 119, loss is 1.1075993776321411\n", + "epoch: 26 step: 120, loss is 1.1082558631896973\n", + "epoch: 26 step: 121, loss is 1.0534366369247437\n", + "epoch: 26 step: 122, loss is 1.0825867652893066\n", + "epoch: 26 step: 123, loss is 1.0999637842178345\n", + "epoch: 26 step: 124, loss is 1.0848159790039062\n", + "epoch: 26 step: 125, loss is 1.0737316608428955\n", + "epoch: 26 step: 126, loss is 1.0879358053207397\n", + "epoch: 26 step: 127, loss is 1.0680149793624878\n", + "epoch: 26 step: 128, loss is 1.033268928527832\n", + "epoch: 26 step: 129, loss is 1.0289251804351807\n", + "epoch: 26 step: 130, loss is 1.1608487367630005\n", + "epoch: 26 step: 131, loss is 1.1289267539978027\n", + "epoch: 26 step: 132, loss is 1.0863478183746338\n", + "epoch: 26 step: 133, loss is 1.0898380279541016\n", + "epoch: 26 step: 134, loss is 1.0463165044784546\n", + "epoch: 26 step: 135, loss is 1.1662288904190063\n", + "epoch: 26 step: 136, loss is 1.1281936168670654\n", + "epoch: 26 step: 137, loss is 1.1531133651733398\n", + "epoch: 26 step: 138, loss is 1.0728158950805664\n", + "epoch: 26 step: 139, loss is 1.025587558746338\n", + "epoch: 26 step: 140, loss is 1.1282386779785156\n", + "epoch: 26 step: 141, loss is 1.0488083362579346\n", + "epoch: 26 step: 142, loss is 1.0115898847579956\n", + "epoch: 26 step: 143, loss is 1.0893551111221313\n", + "epoch: 26 step: 144, loss is 1.1418253183364868\n", + "epoch: 26 step: 145, loss is 1.1108213663101196\n", + "epoch: 26 step: 146, loss is 1.0718536376953125\n", + "epoch: 26 step: 147, loss is 1.1308940649032593\n", + "epoch: 26 step: 148, loss is 1.0619099140167236\n", + "epoch: 26 step: 149, loss is 1.0612963438034058\n", + "epoch: 26 step: 150, loss is 1.0523836612701416\n", + "epoch: 26 step: 151, loss is 1.0216165781021118\n", + "epoch: 26 step: 152, loss is 1.0072866678237915\n", + "epoch: 26 step: 153, loss is 1.072240948677063\n", + "epoch: 26 step: 154, loss is 1.1181700229644775\n", + "epoch: 26 step: 155, loss is 1.1036887168884277\n", + "epoch: 26 step: 156, loss is 1.0955078601837158\n", + "epoch: 26 step: 157, loss is 1.023759365081787\n", + "epoch: 26 step: 158, loss is 1.111055850982666\n", + "epoch: 26 step: 159, loss is 1.1055041551589966\n", + "epoch: 26 step: 160, loss is 1.0991675853729248\n", + "epoch: 26 step: 161, loss is 1.0909236669540405\n", + "epoch: 26 step: 162, loss is 1.0082244873046875\n", + "epoch: 26 step: 163, loss is 1.1319206953048706\n", + "epoch: 26 step: 164, loss is 1.077684998512268\n", + "epoch: 26 step: 165, loss is 1.115843653678894\n", + "epoch: 26 step: 166, loss is 
1.032466173171997\n", + "epoch: 26 step: 167, loss is 1.1265573501586914\n", + "epoch: 26 step: 168, loss is 1.0997035503387451\n", + "epoch: 26 step: 169, loss is 1.0728034973144531\n", + "epoch: 26 step: 170, loss is 1.141798973083496\n", + "epoch: 26 step: 171, loss is 0.9944081902503967\n", + "epoch: 26 step: 172, loss is 1.1190491914749146\n", + "epoch: 26 step: 173, loss is 1.1955615282058716\n", + "epoch: 26 step: 174, loss is 1.0336834192276\n", + "epoch: 26 step: 175, loss is 1.0670442581176758\n", + "epoch: 26 step: 176, loss is 1.053788661956787\n", + "epoch: 26 step: 177, loss is 1.0639196634292603\n", + "epoch: 26 step: 178, loss is 1.0148476362228394\n", + "epoch: 26 step: 179, loss is 1.1092960834503174\n", + "epoch: 26 step: 180, loss is 1.0516902208328247\n", + "epoch: 26 step: 181, loss is 1.0661725997924805\n", + "epoch: 26 step: 182, loss is 1.0308539867401123\n", + "epoch: 26 step: 183, loss is 1.0419073104858398\n", + "epoch: 26 step: 184, loss is 1.0245132446289062\n", + "epoch: 26 step: 185, loss is 1.060319185256958\n", + "epoch: 26 step: 186, loss is 1.0343084335327148\n", + "epoch: 26 step: 187, loss is 1.129612684249878\n", + "epoch: 26 step: 188, loss is 1.106515645980835\n", + "epoch: 26 step: 189, loss is 1.0862451791763306\n", + "epoch: 26 step: 190, loss is 1.0947397947311401\n", + "epoch: 26 step: 191, loss is 1.1126788854599\n", + "epoch: 26 step: 192, loss is 1.1024670600891113\n", + "epoch: 26 step: 193, loss is 1.0762914419174194\n", + "epoch: 26 step: 194, loss is 1.023280143737793\n", + "epoch: 26 step: 195, loss is 1.0492475032806396\n", + "Train epoch time: 99456.534 ms, per step time: 510.034 ms\n", + "epoch: 27 step: 1, loss is 1.0526854991912842\n", + "epoch: 27 step: 2, loss is 0.9937281608581543\n", + "epoch: 27 step: 3, loss is 1.0630714893341064\n", + "epoch: 27 step: 4, loss is 1.0876054763793945\n", + "epoch: 27 step: 5, loss is 1.0303118228912354\n", + "epoch: 27 step: 6, loss is 1.0511847734451294\n", + "epoch: 27 step: 7, loss is 0.9790855050086975\n", + "epoch: 27 step: 8, loss is 0.9959620237350464\n", + "epoch: 27 step: 9, loss is 1.0755510330200195\n", + "epoch: 27 step: 10, loss is 1.192626714706421\n", + "epoch: 27 step: 11, loss is 0.9872756004333496\n", + "epoch: 27 step: 12, loss is 1.0444339513778687\n", + "epoch: 27 step: 13, loss is 1.1215991973876953\n", + "epoch: 27 step: 14, loss is 0.9605674743652344\n", + "epoch: 27 step: 15, loss is 1.0592670440673828\n", + "epoch: 27 step: 16, loss is 1.0175102949142456\n", + "epoch: 27 step: 17, loss is 1.0561697483062744\n", + "epoch: 27 step: 18, loss is 1.0676268339157104\n", + "epoch: 27 step: 19, loss is 0.9874426126480103\n", + "epoch: 27 step: 20, loss is 1.0693225860595703\n", + "epoch: 27 step: 21, loss is 1.0654617547988892\n", + "epoch: 27 step: 22, loss is 1.0466432571411133\n", + "epoch: 27 step: 23, loss is 1.0767848491668701\n", + "epoch: 27 step: 24, loss is 0.9652069211006165\n", + "epoch: 27 step: 25, loss is 0.9964195489883423\n", + "epoch: 27 step: 26, loss is 1.0223865509033203\n", + "epoch: 27 step: 27, loss is 1.064711093902588\n", + "epoch: 27 step: 28, loss is 0.9916448593139648\n", + "epoch: 27 step: 29, loss is 0.976990282535553\n", + "epoch: 27 step: 30, loss is 1.0567874908447266\n", + "epoch: 27 step: 31, loss is 1.0956326723098755\n", + "epoch: 27 step: 32, loss is 0.9955192804336548\n", + "epoch: 27 step: 33, loss is 1.0239386558532715\n", + "epoch: 27 step: 34, loss is 1.0463179349899292\n", + "epoch: 27 step: 35, loss is 1.0413639545440674\n", + 
"epoch: 27 step: 36, loss is 1.1038379669189453\n", + "epoch: 27 step: 37, loss is 0.9907110929489136\n", + "epoch: 27 step: 38, loss is 1.0985716581344604\n", + "epoch: 27 step: 39, loss is 1.002617359161377\n", + "epoch: 27 step: 40, loss is 0.9854321479797363\n", + "epoch: 27 step: 41, loss is 1.0757564306259155\n", + "epoch: 27 step: 42, loss is 1.0707387924194336\n", + "epoch: 27 step: 43, loss is 1.0410258769989014\n", + "epoch: 27 step: 44, loss is 1.062170147895813\n", + "epoch: 27 step: 45, loss is 1.0545423030853271\n", + "epoch: 27 step: 46, loss is 1.012305498123169\n", + "epoch: 27 step: 47, loss is 1.0080597400665283\n", + "epoch: 27 step: 48, loss is 0.9942372441291809\n", + "epoch: 27 step: 49, loss is 0.9852883815765381\n", + "epoch: 27 step: 50, loss is 1.0489373207092285\n", + "epoch: 27 step: 51, loss is 1.0099055767059326\n", + "epoch: 27 step: 52, loss is 1.057629108428955\n", + "epoch: 27 step: 53, loss is 0.998903751373291\n", + "epoch: 27 step: 54, loss is 1.116986632347107\n", + "epoch: 27 step: 55, loss is 1.0408835411071777\n", + "epoch: 27 step: 56, loss is 1.0491348505020142\n", + "epoch: 27 step: 57, loss is 1.045902132987976\n", + "epoch: 27 step: 58, loss is 1.085667371749878\n", + "epoch: 27 step: 59, loss is 0.982683539390564\n", + "epoch: 27 step: 60, loss is 1.0369943380355835\n", + "epoch: 27 step: 61, loss is 1.0263030529022217\n", + "epoch: 27 step: 62, loss is 1.0683033466339111\n", + "epoch: 27 step: 63, loss is 1.0359469652175903\n", + "epoch: 27 step: 64, loss is 1.063988447189331\n", + "epoch: 27 step: 65, loss is 1.0717943906784058\n", + "epoch: 27 step: 66, loss is 1.0246657133102417\n", + "epoch: 27 step: 67, loss is 1.082115650177002\n", + "epoch: 27 step: 68, loss is 0.9994350671768188\n", + "epoch: 27 step: 69, loss is 1.036766767501831\n", + "epoch: 27 step: 70, loss is 0.9659408330917358\n", + "epoch: 27 step: 71, loss is 1.0439201593399048\n", + "epoch: 27 step: 72, loss is 0.9875251054763794\n", + "epoch: 27 step: 73, loss is 1.0474096536636353\n", + "epoch: 27 step: 74, loss is 1.0217721462249756\n", + "epoch: 27 step: 75, loss is 1.0964553356170654\n", + "epoch: 27 step: 76, loss is 1.118463397026062\n", + "epoch: 27 step: 77, loss is 0.954917848110199\n", + "epoch: 27 step: 78, loss is 1.0984251499176025\n", + "epoch: 27 step: 79, loss is 0.9941762685775757\n", + "epoch: 27 step: 80, loss is 1.0397579669952393\n", + "epoch: 27 step: 81, loss is 1.1020092964172363\n", + "epoch: 27 step: 82, loss is 1.0649852752685547\n", + "epoch: 27 step: 83, loss is 1.134073257446289\n", + "epoch: 27 step: 84, loss is 1.0587884187698364\n", + "epoch: 27 step: 85, loss is 1.0757195949554443\n", + "epoch: 27 step: 86, loss is 1.0126988887786865\n", + "epoch: 27 step: 87, loss is 1.0671371221542358\n", + "epoch: 27 step: 88, loss is 1.0568971633911133\n", + "epoch: 27 step: 89, loss is 1.0100717544555664\n", + "epoch: 27 step: 90, loss is 1.0385042428970337\n", + "epoch: 27 step: 91, loss is 0.9726125001907349\n", + "epoch: 27 step: 92, loss is 1.024322271347046\n", + "epoch: 27 step: 93, loss is 1.0335557460784912\n", + "epoch: 27 step: 94, loss is 1.172904372215271\n", + "epoch: 27 step: 95, loss is 1.1171581745147705\n", + "epoch: 27 step: 96, loss is 1.028235912322998\n", + "epoch: 27 step: 97, loss is 1.0854473114013672\n", + "epoch: 27 step: 98, loss is 1.024569034576416\n", + "epoch: 27 step: 99, loss is 1.029360294342041\n", + "epoch: 27 step: 100, loss is 1.0270912647247314\n", + "epoch: 27 step: 101, loss is 1.0691239833831787\n", + "epoch: 
27 step: 102, loss is 1.0786973237991333\n",
...
+ "epoch: 27 step: 195, loss is 1.0297240018844604\n",
+ "Train epoch time: 100054.073 ms, per step time: 513.098 ms\n",
+ "epoch: 28 step: 1, loss is 0.9768978357315063\n",
...
+ "epoch: 28 step: 195, loss is 0.9943758845329285\n",
+ "Train epoch time: 100871.994 ms, per step time: 517.292 ms\n",
+ "epoch: 29 step: 1, loss is 1.0407788753509521\n",
...
+ "epoch: 29 step: 195, loss is 1.0847971439361572\n",
+ "Train epoch time: 101129.313 ms, per step time: 518.612 ms\n",
+ "epoch: 30 step: 1, loss is 0.9899706840515137\n",
...
+ "epoch: 30 step: 195, loss is 1.0252418518066406\n",
+ "Train epoch time: 97772.208 ms, per step time: 501.396 ms\n",
+ "epoch: 31 step: 1, loss is 0.987952709197998\n",
...
+ "epoch: 31 step: 195, loss is 1.0296778678894043\n",
+ "Train epoch time: 93879.831 ms, per step time: 481.435 ms\n",
+ "epoch: 32 step: 1, loss is 0.9727519750595093\n",
...
+ "epoch: 32 step: 195, loss is 1.126488208770752\n",
+ "Train epoch time: 98333.532 ms, per step time: 504.275 ms\n",
+ "epoch: 33 step: 1, loss is 0.9907193183898926\n",
...
+ "epoch: 33 step: 166, loss is 0.925471305847168\n",
+ "epoch: 33 step: 
167, loss is 0.9819043874740601\n", + "epoch: 33 step: 168, loss is 1.0123785734176636\n", + "epoch: 33 step: 169, loss is 0.890501856803894\n", + "epoch: 33 step: 170, loss is 0.9105412364006042\n", + "epoch: 33 step: 171, loss is 0.9916898012161255\n", + "epoch: 33 step: 172, loss is 0.9607970714569092\n", + "epoch: 33 step: 173, loss is 0.9336222410202026\n", + "epoch: 33 step: 174, loss is 0.9394988417625427\n", + "epoch: 33 step: 175, loss is 1.0071054697036743\n", + "epoch: 33 step: 176, loss is 0.9656716585159302\n", + "epoch: 33 step: 177, loss is 0.952645480632782\n", + "epoch: 33 step: 178, loss is 0.9200826287269592\n", + "epoch: 33 step: 179, loss is 0.9284038543701172\n", + "epoch: 33 step: 180, loss is 1.0094330310821533\n", + "epoch: 33 step: 181, loss is 0.9907735586166382\n", + "epoch: 33 step: 182, loss is 0.9862605333328247\n", + "epoch: 33 step: 183, loss is 0.9692484140396118\n", + "epoch: 33 step: 184, loss is 0.99744713306427\n", + "epoch: 33 step: 185, loss is 0.9969068169593811\n", + "epoch: 33 step: 186, loss is 1.0110490322113037\n", + "epoch: 33 step: 187, loss is 1.0517942905426025\n", + "epoch: 33 step: 188, loss is 0.931124210357666\n", + "epoch: 33 step: 189, loss is 1.0303089618682861\n", + "epoch: 33 step: 190, loss is 0.898235559463501\n", + "epoch: 33 step: 191, loss is 0.9787768125534058\n", + "epoch: 33 step: 192, loss is 1.0047354698181152\n", + "epoch: 33 step: 193, loss is 1.0399696826934814\n", + "epoch: 33 step: 194, loss is 1.0336229801177979\n", + "epoch: 33 step: 195, loss is 0.9769865870475769\n", + "Train epoch time: 97095.972 ms, per step time: 497.928 ms\n", + "epoch: 34 step: 1, loss is 1.005261778831482\n", + "epoch: 34 step: 2, loss is 0.957710862159729\n", + "epoch: 34 step: 3, loss is 0.9294565916061401\n", + "epoch: 34 step: 4, loss is 0.9403433799743652\n", + "epoch: 34 step: 5, loss is 0.8939191102981567\n", + "epoch: 34 step: 6, loss is 0.9792404174804688\n", + "epoch: 34 step: 7, loss is 0.9423699975013733\n", + "epoch: 34 step: 8, loss is 0.9029444456100464\n", + "epoch: 34 step: 9, loss is 0.9264638423919678\n", + "epoch: 34 step: 10, loss is 0.9395664930343628\n", + "epoch: 34 step: 11, loss is 0.9439483880996704\n", + "epoch: 34 step: 12, loss is 0.9186091423034668\n", + "epoch: 34 step: 13, loss is 0.9641207456588745\n", + "epoch: 34 step: 14, loss is 0.8781425952911377\n", + "epoch: 34 step: 15, loss is 0.9926522970199585\n", + "epoch: 34 step: 16, loss is 0.8601833581924438\n", + "epoch: 34 step: 17, loss is 0.9352059364318848\n", + "epoch: 34 step: 18, loss is 0.9446736574172974\n", + "epoch: 34 step: 19, loss is 0.9285966157913208\n", + "epoch: 34 step: 20, loss is 0.930400013923645\n", + "epoch: 34 step: 21, loss is 0.9620046615600586\n", + "epoch: 34 step: 22, loss is 0.8920618295669556\n", + "epoch: 34 step: 23, loss is 0.985421895980835\n", + "epoch: 34 step: 24, loss is 0.9413615465164185\n", + "epoch: 34 step: 25, loss is 0.9814339876174927\n", + "epoch: 34 step: 26, loss is 0.9593209028244019\n", + "epoch: 34 step: 27, loss is 0.9319434762001038\n", + "epoch: 34 step: 28, loss is 0.9246756434440613\n", + "epoch: 34 step: 29, loss is 0.9071415662765503\n", + "epoch: 34 step: 30, loss is 0.9425755739212036\n", + "epoch: 34 step: 31, loss is 0.966593325138092\n", + "epoch: 34 step: 32, loss is 0.9768633246421814\n", + "epoch: 34 step: 33, loss is 0.9746881723403931\n", + "epoch: 34 step: 34, loss is 1.0401592254638672\n", + "epoch: 34 step: 35, loss is 0.8878148198127747\n", + "epoch: 34 step: 36, loss is 
0.9496057629585266\n", + "epoch: 34 step: 37, loss is 0.927625298500061\n", + "epoch: 34 step: 38, loss is 0.9325002431869507\n", + "epoch: 34 step: 39, loss is 0.9589825868606567\n", + "epoch: 34 step: 40, loss is 0.867581307888031\n", + "epoch: 34 step: 41, loss is 0.9937237501144409\n", + "epoch: 34 step: 42, loss is 0.9697293639183044\n", + "epoch: 34 step: 43, loss is 1.0196071863174438\n", + "epoch: 34 step: 44, loss is 0.962571382522583\n", + "epoch: 34 step: 45, loss is 0.9319362640380859\n", + "epoch: 34 step: 46, loss is 0.9704923629760742\n", + "epoch: 34 step: 47, loss is 0.9312334060668945\n", + "epoch: 34 step: 48, loss is 0.9104936122894287\n", + "epoch: 34 step: 49, loss is 0.9343763589859009\n", + "epoch: 34 step: 50, loss is 0.8687146902084351\n", + "epoch: 34 step: 51, loss is 0.9356415867805481\n", + "epoch: 34 step: 52, loss is 1.007986307144165\n", + "epoch: 34 step: 53, loss is 0.9184497594833374\n", + "epoch: 34 step: 54, loss is 0.905766487121582\n", + "epoch: 34 step: 55, loss is 0.9783363938331604\n", + "epoch: 34 step: 56, loss is 0.9401350021362305\n", + "epoch: 34 step: 57, loss is 1.0037312507629395\n", + "epoch: 34 step: 58, loss is 0.9800042510032654\n", + "epoch: 34 step: 59, loss is 0.8990908265113831\n", + "epoch: 34 step: 60, loss is 0.8683254718780518\n", + "epoch: 34 step: 61, loss is 0.9745082259178162\n", + "epoch: 34 step: 62, loss is 0.956352710723877\n", + "epoch: 34 step: 63, loss is 0.9063120484352112\n", + "epoch: 34 step: 64, loss is 0.9468469619750977\n", + "epoch: 34 step: 65, loss is 0.92842036485672\n", + "epoch: 34 step: 66, loss is 0.9302608966827393\n", + "epoch: 34 step: 67, loss is 0.8988034725189209\n", + "epoch: 34 step: 68, loss is 0.9866562485694885\n", + "epoch: 34 step: 69, loss is 0.9896656274795532\n", + "epoch: 34 step: 70, loss is 0.9995354413986206\n", + "epoch: 34 step: 71, loss is 0.9994784593582153\n", + "epoch: 34 step: 72, loss is 0.9570066928863525\n", + "epoch: 34 step: 73, loss is 0.9869663715362549\n", + "epoch: 34 step: 74, loss is 0.998235821723938\n", + "epoch: 34 step: 75, loss is 0.9566341638565063\n", + "epoch: 34 step: 76, loss is 1.0312845706939697\n", + "epoch: 34 step: 77, loss is 0.9696617126464844\n", + "epoch: 34 step: 78, loss is 0.9193277359008789\n", + "epoch: 34 step: 79, loss is 0.9804571866989136\n", + "epoch: 34 step: 80, loss is 0.9295244216918945\n", + "epoch: 34 step: 81, loss is 0.9394053816795349\n", + "epoch: 34 step: 82, loss is 0.9657169580459595\n", + "epoch: 34 step: 83, loss is 1.0130637884140015\n", + "epoch: 34 step: 84, loss is 0.9400322437286377\n", + "epoch: 34 step: 85, loss is 0.9811661243438721\n", + "epoch: 34 step: 86, loss is 0.937900960445404\n", + "epoch: 34 step: 87, loss is 1.0114768743515015\n", + "epoch: 34 step: 88, loss is 0.9658342599868774\n", + "epoch: 34 step: 89, loss is 0.9771685600280762\n", + "epoch: 34 step: 90, loss is 0.9321880340576172\n", + "epoch: 34 step: 91, loss is 0.9932937622070312\n", + "epoch: 34 step: 92, loss is 0.9639115929603577\n", + "epoch: 34 step: 93, loss is 1.0306808948516846\n", + "epoch: 34 step: 94, loss is 0.9925023317337036\n", + "epoch: 34 step: 95, loss is 0.9442201852798462\n", + "epoch: 34 step: 96, loss is 1.0213894844055176\n", + "epoch: 34 step: 97, loss is 0.9306493997573853\n", + "epoch: 34 step: 98, loss is 0.9195504188537598\n", + "epoch: 34 step: 99, loss is 1.0473495721817017\n", + "epoch: 34 step: 100, loss is 0.9888571500778198\n", + "epoch: 34 step: 101, loss is 0.9059841632843018\n", + "epoch: 34 step: 102, loss 
is 1.0019805431365967\n", + "epoch: 34 step: 103, loss is 0.9085073471069336\n", + "epoch: 34 step: 104, loss is 0.9500687122344971\n", + "epoch: 34 step: 105, loss is 0.9718337059020996\n", + "epoch: 34 step: 106, loss is 0.879327654838562\n", + "epoch: 34 step: 107, loss is 0.913387656211853\n", + "epoch: 34 step: 108, loss is 0.9454108476638794\n", + "epoch: 34 step: 109, loss is 0.9569629430770874\n", + "epoch: 34 step: 110, loss is 0.9385117888450623\n", + "epoch: 34 step: 111, loss is 0.9306309819221497\n", + "epoch: 34 step: 112, loss is 0.9488819241523743\n", + "epoch: 34 step: 113, loss is 0.9893543720245361\n", + "epoch: 34 step: 114, loss is 0.9564896821975708\n", + "epoch: 34 step: 115, loss is 0.9062883853912354\n", + "epoch: 34 step: 116, loss is 0.920725405216217\n", + "epoch: 34 step: 117, loss is 1.0240261554718018\n", + "epoch: 34 step: 118, loss is 0.9472038149833679\n", + "epoch: 34 step: 119, loss is 0.9410871267318726\n", + "epoch: 34 step: 120, loss is 0.9483230710029602\n", + "epoch: 34 step: 121, loss is 0.9602822065353394\n", + "epoch: 34 step: 122, loss is 0.9836021065711975\n", + "epoch: 34 step: 123, loss is 0.9741107225418091\n", + "epoch: 34 step: 124, loss is 0.9448111653327942\n", + "epoch: 34 step: 125, loss is 0.9148879647254944\n", + "epoch: 34 step: 126, loss is 0.9231794476509094\n", + "epoch: 34 step: 127, loss is 0.8734809160232544\n", + "epoch: 34 step: 128, loss is 0.9096540212631226\n", + "epoch: 34 step: 129, loss is 1.007266879081726\n", + "epoch: 34 step: 130, loss is 0.9456959366798401\n", + "epoch: 34 step: 131, loss is 0.969253420829773\n", + "epoch: 34 step: 132, loss is 0.9721641540527344\n", + "epoch: 34 step: 133, loss is 0.8733981847763062\n", + "epoch: 34 step: 134, loss is 0.9193085432052612\n", + "epoch: 34 step: 135, loss is 0.9546386003494263\n", + "epoch: 34 step: 136, loss is 0.9295440912246704\n", + "epoch: 34 step: 137, loss is 0.9357640743255615\n", + "epoch: 34 step: 138, loss is 1.0047130584716797\n", + "epoch: 34 step: 139, loss is 0.9254540205001831\n", + "epoch: 34 step: 140, loss is 0.9504275321960449\n", + "epoch: 34 step: 141, loss is 0.9518612623214722\n", + "epoch: 34 step: 142, loss is 0.9449405670166016\n", + "epoch: 34 step: 143, loss is 0.9954859018325806\n", + "epoch: 34 step: 144, loss is 0.8884353637695312\n", + "epoch: 34 step: 145, loss is 0.9188235998153687\n", + "epoch: 34 step: 146, loss is 0.9585151672363281\n", + "epoch: 34 step: 147, loss is 0.9125869870185852\n", + "epoch: 34 step: 148, loss is 0.9569552540779114\n", + "epoch: 34 step: 149, loss is 0.8991785645484924\n", + "epoch: 34 step: 150, loss is 1.0240637063980103\n", + "epoch: 34 step: 151, loss is 0.9619371891021729\n", + "epoch: 34 step: 152, loss is 0.9595454931259155\n", + "epoch: 34 step: 153, loss is 0.9362510442733765\n", + "epoch: 34 step: 154, loss is 0.9112727642059326\n", + "epoch: 34 step: 155, loss is 0.9977883100509644\n", + "epoch: 34 step: 156, loss is 0.9611576795578003\n", + "epoch: 34 step: 157, loss is 0.9297802448272705\n", + "epoch: 34 step: 158, loss is 0.9746309518814087\n", + "epoch: 34 step: 159, loss is 0.913641631603241\n", + "epoch: 34 step: 160, loss is 0.9397619962692261\n", + "epoch: 34 step: 161, loss is 0.8862749338150024\n", + "epoch: 34 step: 162, loss is 0.9451261758804321\n", + "epoch: 34 step: 163, loss is 1.0204293727874756\n", + "epoch: 34 step: 164, loss is 0.9693633317947388\n", + "epoch: 34 step: 165, loss is 0.9350547790527344\n", + "epoch: 34 step: 166, loss is 0.9165432453155518\n", + "epoch: 34 
step: 167, loss is 0.9691852331161499\n", + "epoch: 34 step: 168, loss is 0.872565507888794\n", + "epoch: 34 step: 169, loss is 0.9600955247879028\n", + "epoch: 34 step: 170, loss is 0.9326913356781006\n", + "epoch: 34 step: 171, loss is 0.9531115293502808\n", + "epoch: 34 step: 172, loss is 0.9260505437850952\n", + "epoch: 34 step: 173, loss is 1.0105878114700317\n", + "epoch: 34 step: 174, loss is 0.9469517469406128\n", + "epoch: 34 step: 175, loss is 0.8879437446594238\n", + "epoch: 34 step: 176, loss is 0.9398886561393738\n", + "epoch: 34 step: 177, loss is 0.957075834274292\n", + "epoch: 34 step: 178, loss is 0.9778575301170349\n", + "epoch: 34 step: 179, loss is 0.9232562780380249\n", + "epoch: 34 step: 180, loss is 0.9423199892044067\n", + "epoch: 34 step: 181, loss is 0.9656686186790466\n", + "epoch: 34 step: 182, loss is 0.9406300187110901\n", + "epoch: 34 step: 183, loss is 0.9173452854156494\n", + "epoch: 34 step: 184, loss is 0.9431878924369812\n", + "epoch: 34 step: 185, loss is 1.0200456380844116\n", + "epoch: 34 step: 186, loss is 0.9338588714599609\n", + "epoch: 34 step: 187, loss is 1.027175784111023\n", + "epoch: 34 step: 188, loss is 0.9322776794433594\n", + "epoch: 34 step: 189, loss is 0.9653850793838501\n", + "epoch: 34 step: 190, loss is 0.9863011240959167\n", + "epoch: 34 step: 191, loss is 1.0492424964904785\n", + "epoch: 34 step: 192, loss is 0.9124327301979065\n", + "epoch: 34 step: 193, loss is 1.0073680877685547\n", + "epoch: 34 step: 194, loss is 0.9105908870697021\n", + "epoch: 34 step: 195, loss is 1.0035138130187988\n", + "Train epoch time: 97241.619 ms, per step time: 498.675 ms\n", + "epoch: 35 step: 1, loss is 0.9239457845687866\n", + "epoch: 35 step: 2, loss is 0.9154649376869202\n", + "epoch: 35 step: 3, loss is 0.9394506216049194\n", + "epoch: 35 step: 4, loss is 0.9176620244979858\n", + "epoch: 35 step: 5, loss is 0.917726993560791\n", + "epoch: 35 step: 6, loss is 0.8716942667961121\n", + "epoch: 35 step: 7, loss is 0.9438626766204834\n", + "epoch: 35 step: 8, loss is 0.9144598245620728\n", + "epoch: 35 step: 9, loss is 0.8907779455184937\n", + "epoch: 35 step: 10, loss is 0.9666879177093506\n", + "epoch: 35 step: 11, loss is 0.8792580366134644\n", + "epoch: 35 step: 12, loss is 0.9292316436767578\n", + "epoch: 35 step: 13, loss is 0.9414930939674377\n", + "epoch: 35 step: 14, loss is 0.8791036009788513\n", + "epoch: 35 step: 15, loss is 0.8847464323043823\n", + "epoch: 35 step: 16, loss is 0.900596022605896\n", + "epoch: 35 step: 17, loss is 0.9096609354019165\n", + "epoch: 35 step: 18, loss is 0.9343972206115723\n", + "epoch: 35 step: 19, loss is 0.9598948359489441\n", + "epoch: 35 step: 20, loss is 0.9247642755508423\n", + "epoch: 35 step: 21, loss is 0.8956315517425537\n", + "epoch: 35 step: 22, loss is 0.9562872648239136\n", + "epoch: 35 step: 23, loss is 0.9352893233299255\n", + "epoch: 35 step: 24, loss is 0.9116159677505493\n", + "epoch: 35 step: 25, loss is 0.9409162998199463\n", + "epoch: 35 step: 26, loss is 0.8655871152877808\n", + "epoch: 35 step: 27, loss is 0.9282832741737366\n", + "epoch: 35 step: 28, loss is 0.9191992282867432\n", + "epoch: 35 step: 29, loss is 0.953628659248352\n", + "epoch: 35 step: 30, loss is 0.9874909520149231\n", + "epoch: 35 step: 31, loss is 0.9345219135284424\n", + "epoch: 35 step: 32, loss is 0.9920909404754639\n", + "epoch: 35 step: 33, loss is 0.9428569674491882\n", + "epoch: 35 step: 34, loss is 0.9358525276184082\n", + "epoch: 35 step: 35, loss is 0.9152392148971558\n", + "epoch: 35 step: 36, loss is 
0.9281102418899536\n", + "epoch: 35 step: 37, loss is 0.9550911784172058\n", + "epoch: 35 step: 38, loss is 0.9223726391792297\n", + "epoch: 35 step: 39, loss is 0.8740618228912354\n", + "epoch: 35 step: 40, loss is 0.9388183951377869\n", + "epoch: 35 step: 41, loss is 0.9819018840789795\n", + "epoch: 35 step: 42, loss is 0.9731055498123169\n", + "epoch: 35 step: 43, loss is 0.9536066055297852\n", + "epoch: 35 step: 44, loss is 0.9099682569503784\n", + "epoch: 35 step: 45, loss is 0.921961784362793\n", + "epoch: 35 step: 46, loss is 0.9153497815132141\n", + "epoch: 35 step: 47, loss is 0.9768897891044617\n", + "epoch: 35 step: 48, loss is 0.9471563100814819\n", + "epoch: 35 step: 49, loss is 0.9435096979141235\n", + "epoch: 35 step: 50, loss is 0.8791388273239136\n", + "epoch: 35 step: 51, loss is 0.8911059498786926\n", + "epoch: 35 step: 52, loss is 0.8886808156967163\n", + "epoch: 35 step: 53, loss is 0.9194068908691406\n", + "epoch: 35 step: 54, loss is 0.9565324783325195\n", + "epoch: 35 step: 55, loss is 0.9505287408828735\n", + "epoch: 35 step: 56, loss is 0.8869645595550537\n", + "epoch: 35 step: 57, loss is 0.9144974946975708\n", + "epoch: 35 step: 58, loss is 0.9627023935317993\n", + "epoch: 35 step: 59, loss is 0.9133433103561401\n", + "epoch: 35 step: 60, loss is 0.9466632604598999\n", + "epoch: 35 step: 61, loss is 0.8485585451126099\n", + "epoch: 35 step: 62, loss is 0.9134420156478882\n", + "epoch: 35 step: 63, loss is 0.9303164482116699\n", + "epoch: 35 step: 64, loss is 0.8865699768066406\n", + "epoch: 35 step: 65, loss is 0.9602024555206299\n", + "epoch: 35 step: 66, loss is 0.9538173675537109\n", + "epoch: 35 step: 67, loss is 0.8910175561904907\n", + "epoch: 35 step: 68, loss is 0.9460276365280151\n", + "epoch: 35 step: 69, loss is 0.970134437084198\n", + "epoch: 35 step: 70, loss is 0.9221324920654297\n", + "epoch: 35 step: 71, loss is 0.9531739354133606\n", + "epoch: 35 step: 72, loss is 0.9200959205627441\n", + "epoch: 35 step: 73, loss is 0.9519055485725403\n", + "epoch: 35 step: 74, loss is 0.8795723915100098\n", + "epoch: 35 step: 75, loss is 0.9272162914276123\n", + "epoch: 35 step: 76, loss is 0.854468584060669\n", + "epoch: 35 step: 77, loss is 0.9663423299789429\n", + "epoch: 35 step: 78, loss is 0.9476649761199951\n", + "epoch: 35 step: 79, loss is 0.8947582244873047\n", + "epoch: 35 step: 80, loss is 0.9837154150009155\n", + "epoch: 35 step: 81, loss is 0.9721163511276245\n", + "epoch: 35 step: 82, loss is 0.9455710649490356\n", + "epoch: 35 step: 83, loss is 0.9312883615493774\n", + "epoch: 35 step: 84, loss is 0.8822461366653442\n", + "epoch: 35 step: 85, loss is 0.951657772064209\n", + "epoch: 35 step: 86, loss is 0.9295011758804321\n", + "epoch: 35 step: 87, loss is 0.8906410932540894\n", + "epoch: 35 step: 88, loss is 0.9861223101615906\n", + "epoch: 35 step: 89, loss is 0.9159753918647766\n", + "epoch: 35 step: 90, loss is 0.9588967561721802\n", + "epoch: 35 step: 91, loss is 1.0021733045578003\n", + "epoch: 35 step: 92, loss is 0.9075340628623962\n", + "epoch: 35 step: 93, loss is 0.8803862929344177\n", + "epoch: 35 step: 94, loss is 0.9666426181793213\n", + "epoch: 35 step: 95, loss is 0.8851079940795898\n", + "epoch: 35 step: 96, loss is 0.9815340638160706\n", + "epoch: 35 step: 97, loss is 0.9325132369995117\n", + "epoch: 35 step: 98, loss is 0.9516006708145142\n", + "epoch: 35 step: 99, loss is 0.8935682773590088\n", + "epoch: 35 step: 100, loss is 0.8776310682296753\n", + "epoch: 35 step: 101, loss is 0.946033775806427\n", + "epoch: 35 step: 102, 
loss is 0.9261639714241028\n", + "epoch: 35 step: 103, loss is 0.940876305103302\n", + "epoch: 35 step: 104, loss is 0.9193699359893799\n", + "epoch: 35 step: 105, loss is 0.9076818227767944\n", + "epoch: 35 step: 106, loss is 0.9697480797767639\n", + "epoch: 35 step: 107, loss is 0.9855345487594604\n", + "epoch: 35 step: 108, loss is 1.0292534828186035\n", + "epoch: 35 step: 109, loss is 0.9319785833358765\n", + "epoch: 35 step: 110, loss is 0.8795157670974731\n", + "epoch: 35 step: 111, loss is 0.900630533695221\n", + "epoch: 35 step: 112, loss is 0.9620702266693115\n", + "epoch: 35 step: 113, loss is 0.9295938611030579\n", + "epoch: 35 step: 114, loss is 0.9045097231864929\n", + "epoch: 35 step: 115, loss is 0.9304736852645874\n", + "epoch: 35 step: 116, loss is 1.003071665763855\n", + "epoch: 35 step: 117, loss is 0.9637635946273804\n", + "epoch: 35 step: 118, loss is 0.9709316492080688\n", + "epoch: 35 step: 119, loss is 0.9378883242607117\n", + "epoch: 35 step: 120, loss is 0.9546055793762207\n", + "epoch: 35 step: 121, loss is 0.9051007628440857\n", + "epoch: 35 step: 122, loss is 1.0221173763275146\n", + "epoch: 35 step: 123, loss is 0.9077088832855225\n", + "epoch: 35 step: 124, loss is 0.8609874248504639\n", + "epoch: 35 step: 125, loss is 0.9304866194725037\n", + "epoch: 35 step: 126, loss is 0.9096431136131287\n", + "epoch: 35 step: 127, loss is 0.9600012302398682\n", + "epoch: 35 step: 128, loss is 0.856858491897583\n", + "epoch: 35 step: 129, loss is 0.9497096538543701\n", + "epoch: 35 step: 130, loss is 0.992223858833313\n", + "epoch: 35 step: 131, loss is 0.9533534049987793\n", + "epoch: 35 step: 132, loss is 0.9194403290748596\n", + "epoch: 35 step: 133, loss is 0.9987988471984863\n", + "epoch: 35 step: 134, loss is 0.9300796389579773\n", + "epoch: 35 step: 135, loss is 0.9437918066978455\n", + "epoch: 35 step: 136, loss is 0.9327165484428406\n", + "epoch: 35 step: 137, loss is 0.9536823034286499\n", + "epoch: 35 step: 138, loss is 0.9257371425628662\n", + "epoch: 35 step: 139, loss is 0.9413491487503052\n", + "epoch: 35 step: 140, loss is 0.9226205945014954\n", + "epoch: 35 step: 141, loss is 0.8835639953613281\n", + "epoch: 35 step: 142, loss is 0.9819411039352417\n", + "epoch: 35 step: 143, loss is 0.9917725920677185\n", + "epoch: 35 step: 144, loss is 0.9766755104064941\n", + "epoch: 35 step: 145, loss is 0.9786219596862793\n", + "epoch: 35 step: 146, loss is 1.009907841682434\n", + "epoch: 35 step: 147, loss is 1.01816725730896\n", + "epoch: 35 step: 148, loss is 0.9578225016593933\n", + "epoch: 35 step: 149, loss is 0.9096161127090454\n", + "epoch: 35 step: 150, loss is 0.9621152877807617\n", + "epoch: 35 step: 151, loss is 1.0228428840637207\n", + "epoch: 35 step: 152, loss is 0.8552113771438599\n", + "epoch: 35 step: 153, loss is 0.9629679918289185\n", + "epoch: 35 step: 154, loss is 0.955923318862915\n", + "epoch: 35 step: 155, loss is 0.9457865953445435\n", + "epoch: 35 step: 156, loss is 0.9444022178649902\n", + "epoch: 35 step: 157, loss is 0.9545270204544067\n", + "epoch: 35 step: 158, loss is 0.9158713817596436\n", + "epoch: 35 step: 159, loss is 1.041218638420105\n", + "epoch: 35 step: 160, loss is 0.927413284778595\n", + "epoch: 35 step: 161, loss is 0.8857213258743286\n", + "epoch: 35 step: 162, loss is 1.008489966392517\n", + "epoch: 35 step: 163, loss is 0.989378809928894\n", + "epoch: 35 step: 164, loss is 0.936061680316925\n", + "epoch: 35 step: 165, loss is 0.9867419004440308\n", + "epoch: 35 step: 166, loss is 0.9768643379211426\n", + "epoch: 35 
step: 167, loss is 0.9511885046958923\n", + "epoch: 35 step: 168, loss is 0.9771353602409363\n", + "epoch: 35 step: 169, loss is 0.9283730983734131\n", + "epoch: 35 step: 170, loss is 0.9303520917892456\n", + "epoch: 35 step: 171, loss is 0.9132835865020752\n", + "epoch: 35 step: 172, loss is 0.9784818887710571\n", + "epoch: 35 step: 173, loss is 0.9458488821983337\n", + "epoch: 35 step: 174, loss is 0.9341779947280884\n", + "epoch: 35 step: 175, loss is 1.0217242240905762\n", + "epoch: 35 step: 176, loss is 0.9185183048248291\n", + "epoch: 35 step: 177, loss is 1.0360103845596313\n", + "epoch: 35 step: 178, loss is 0.9605778455734253\n", + "epoch: 35 step: 179, loss is 0.908057689666748\n", + "epoch: 35 step: 180, loss is 0.906679630279541\n", + "epoch: 35 step: 181, loss is 1.0081735849380493\n", + "epoch: 35 step: 182, loss is 0.9574135541915894\n", + "epoch: 35 step: 183, loss is 0.9170638918876648\n", + "epoch: 35 step: 184, loss is 0.866042971611023\n", + "epoch: 35 step: 185, loss is 1.0067685842514038\n", + "epoch: 35 step: 186, loss is 0.9572407007217407\n", + "epoch: 35 step: 187, loss is 0.8996002674102783\n", + "epoch: 35 step: 188, loss is 0.9260600805282593\n", + "epoch: 35 step: 189, loss is 0.9420467615127563\n", + "epoch: 35 step: 190, loss is 0.9352391958236694\n", + "epoch: 35 step: 191, loss is 0.9582778811454773\n", + "epoch: 35 step: 192, loss is 0.9315602779388428\n", + "epoch: 35 step: 193, loss is 0.9593679308891296\n", + "epoch: 35 step: 194, loss is 0.8873510360717773\n", + "epoch: 35 step: 195, loss is 0.9888389110565186\n", + "Train epoch time: 97999.047 ms, per step time: 502.559 ms\n", + "epoch: 36 step: 1, loss is 0.9225066900253296\n", + "epoch: 36 step: 2, loss is 0.8736363053321838\n", + "epoch: 36 step: 3, loss is 0.9110411405563354\n", + "epoch: 36 step: 4, loss is 0.920359194278717\n", + "epoch: 36 step: 5, loss is 0.9792604446411133\n", + "epoch: 36 step: 6, loss is 0.9907101392745972\n", + "epoch: 36 step: 7, loss is 0.9227002859115601\n", + "epoch: 36 step: 8, loss is 0.9193114042282104\n", + "epoch: 36 step: 9, loss is 0.905387282371521\n", + "epoch: 36 step: 10, loss is 0.8868367075920105\n", + "epoch: 36 step: 11, loss is 0.9069879651069641\n", + "epoch: 36 step: 12, loss is 0.8058015704154968\n", + "epoch: 36 step: 13, loss is 0.9520671963691711\n", + "epoch: 36 step: 14, loss is 0.9119333028793335\n", + "epoch: 36 step: 15, loss is 0.9420934319496155\n", + "epoch: 36 step: 16, loss is 0.9307951331138611\n", + "epoch: 36 step: 17, loss is 0.8633630275726318\n", + "epoch: 36 step: 18, loss is 0.9541953802108765\n", + "epoch: 36 step: 19, loss is 0.9359545707702637\n", + "epoch: 36 step: 20, loss is 0.9141594171524048\n", + "epoch: 36 step: 21, loss is 0.8877518177032471\n", + "epoch: 36 step: 22, loss is 0.9248353838920593\n", + "epoch: 36 step: 23, loss is 0.9802047610282898\n", + "epoch: 36 step: 24, loss is 0.9127082824707031\n", + "epoch: 36 step: 25, loss is 0.9098509550094604\n", + "epoch: 36 step: 26, loss is 0.9421807527542114\n", + "epoch: 36 step: 27, loss is 0.9318506121635437\n", + "epoch: 36 step: 28, loss is 0.9416984915733337\n", + "epoch: 36 step: 29, loss is 0.8967021107673645\n", + "epoch: 36 step: 30, loss is 0.9466499090194702\n", + "epoch: 36 step: 31, loss is 0.904637336730957\n", + "epoch: 36 step: 32, loss is 0.9399408102035522\n", + "epoch: 36 step: 33, loss is 0.9361683130264282\n", + "epoch: 36 step: 34, loss is 0.9094458222389221\n", + "epoch: 36 step: 35, loss is 0.8948507308959961\n", + "epoch: 36 step: 36, loss is 
0.8948855400085449\n", + "epoch: 36 step: 37, loss is 0.9155081510543823\n", + "epoch: 36 step: 38, loss is 0.9611328840255737\n", + "epoch: 36 step: 39, loss is 0.9338866472244263\n", + "epoch: 36 step: 40, loss is 0.9089374542236328\n", + "epoch: 36 step: 41, loss is 0.9684903025627136\n", + "epoch: 36 step: 42, loss is 0.952835202217102\n", + "epoch: 36 step: 43, loss is 0.9413487911224365\n", + "epoch: 36 step: 44, loss is 0.8764970898628235\n", + "epoch: 36 step: 45, loss is 0.92884361743927\n", + "epoch: 36 step: 46, loss is 0.9587048292160034\n", + "epoch: 36 step: 47, loss is 0.9648154973983765\n", + "epoch: 36 step: 48, loss is 0.9602484703063965\n", + "epoch: 36 step: 49, loss is 0.9403612613677979\n", + "epoch: 36 step: 50, loss is 0.9239209890365601\n", + "epoch: 36 step: 51, loss is 0.9988181591033936\n", + "epoch: 36 step: 52, loss is 0.9351505637168884\n", + "epoch: 36 step: 53, loss is 0.901810884475708\n", + "epoch: 36 step: 54, loss is 0.908833384513855\n", + "epoch: 36 step: 55, loss is 0.929661750793457\n", + "epoch: 36 step: 56, loss is 0.9026315212249756\n", + "epoch: 36 step: 57, loss is 0.9446922540664673\n", + "epoch: 36 step: 58, loss is 0.9327353239059448\n", + "epoch: 36 step: 59, loss is 0.8552476167678833\n", + "epoch: 36 step: 60, loss is 0.8947486877441406\n", + "epoch: 36 step: 61, loss is 0.9264059662818909\n", + "epoch: 36 step: 62, loss is 0.9436026811599731\n", + "epoch: 36 step: 63, loss is 0.9183288216590881\n", + "epoch: 36 step: 64, loss is 0.8857218027114868\n", + "epoch: 36 step: 65, loss is 0.8474714756011963\n", + "epoch: 36 step: 66, loss is 0.8990901708602905\n", + "epoch: 36 step: 67, loss is 0.8829777240753174\n", + "epoch: 36 step: 68, loss is 0.9301885366439819\n", + "epoch: 36 step: 69, loss is 0.9065649509429932\n", + "epoch: 36 step: 70, loss is 0.8434685468673706\n", + "epoch: 36 step: 71, loss is 0.9669203758239746\n", + "epoch: 36 step: 72, loss is 0.8795214891433716\n", + "epoch: 36 step: 73, loss is 0.9157518148422241\n", + "epoch: 36 step: 74, loss is 0.9285287857055664\n", + "epoch: 36 step: 75, loss is 0.9469645619392395\n", + "epoch: 36 step: 76, loss is 0.9267005920410156\n", + "epoch: 36 step: 77, loss is 0.9519708156585693\n", + "epoch: 36 step: 78, loss is 0.979928731918335\n", + "epoch: 36 step: 79, loss is 0.9716539978981018\n", + "epoch: 36 step: 80, loss is 0.8968724012374878\n", + "epoch: 36 step: 81, loss is 0.9309842586517334\n", + "epoch: 36 step: 82, loss is 1.009722352027893\n", + "epoch: 36 step: 83, loss is 0.8832423686981201\n", + "epoch: 36 step: 84, loss is 0.9802117347717285\n", + "epoch: 36 step: 85, loss is 0.8960124850273132\n", + "epoch: 36 step: 86, loss is 0.9501800537109375\n", + "epoch: 36 step: 87, loss is 0.9681417346000671\n", + "epoch: 36 step: 88, loss is 0.9139610528945923\n", + "epoch: 36 step: 89, loss is 0.9017143249511719\n", + "epoch: 36 step: 90, loss is 0.8911378383636475\n", + "epoch: 36 step: 91, loss is 0.8939880132675171\n", + "epoch: 36 step: 92, loss is 0.9143153429031372\n", + "epoch: 36 step: 93, loss is 0.9225345849990845\n", + "epoch: 36 step: 94, loss is 0.8885892033576965\n", + "epoch: 36 step: 95, loss is 0.8723638653755188\n", + "epoch: 36 step: 96, loss is 0.965596079826355\n", + "epoch: 36 step: 97, loss is 0.9686851501464844\n", + "epoch: 36 step: 98, loss is 0.9529507160186768\n", + "epoch: 36 step: 99, loss is 0.9129127860069275\n", + "epoch: 36 step: 100, loss is 0.9295639991760254\n", + "epoch: 36 step: 101, loss is 0.910538911819458\n", + "epoch: 36 step: 102, loss 
is 0.9687262773513794\n", + "epoch: 36 step: 103, loss is 0.8754831552505493\n", + "epoch: 36 step: 104, loss is 1.0223021507263184\n", + "epoch: 36 step: 105, loss is 0.9368555545806885\n", + "epoch: 36 step: 106, loss is 1.0081216096878052\n", + "epoch: 36 step: 107, loss is 0.9885897636413574\n", + "epoch: 36 step: 108, loss is 0.8737142086029053\n", + "epoch: 36 step: 109, loss is 0.8902431726455688\n", + "epoch: 36 step: 110, loss is 0.8874683380126953\n", + "epoch: 36 step: 111, loss is 0.8865456581115723\n", + "epoch: 36 step: 112, loss is 0.9380643367767334\n", + "epoch: 36 step: 113, loss is 0.8892648220062256\n", + "epoch: 36 step: 114, loss is 0.9875216484069824\n", + "epoch: 36 step: 115, loss is 0.9535948038101196\n", + "epoch: 36 step: 116, loss is 0.9016250371932983\n", + "epoch: 36 step: 117, loss is 0.9605327844619751\n", + "epoch: 36 step: 118, loss is 0.9687509536743164\n", + "epoch: 36 step: 119, loss is 0.9468566179275513\n", + "epoch: 36 step: 120, loss is 0.8941926956176758\n", + "epoch: 36 step: 121, loss is 0.9590359926223755\n", + "epoch: 36 step: 122, loss is 0.9631884694099426\n", + "epoch: 36 step: 123, loss is 0.9487501382827759\n", + "epoch: 36 step: 124, loss is 0.876774787902832\n", + "epoch: 36 step: 125, loss is 0.9332401156425476\n", + "epoch: 36 step: 126, loss is 0.8971723914146423\n", + "epoch: 36 step: 127, loss is 1.0112650394439697\n", + "epoch: 36 step: 128, loss is 0.9460260272026062\n", + "epoch: 36 step: 129, loss is 0.9195433855056763\n", + "epoch: 36 step: 130, loss is 0.954230546951294\n", + "epoch: 36 step: 131, loss is 0.9520456790924072\n", + "epoch: 36 step: 132, loss is 0.9358896613121033\n", + "epoch: 36 step: 133, loss is 0.9028415679931641\n", + "epoch: 36 step: 134, loss is 0.9035743474960327\n", + "epoch: 36 step: 135, loss is 0.9467328786849976\n", + "epoch: 36 step: 136, loss is 0.8412981629371643\n", + "epoch: 36 step: 137, loss is 0.9219302535057068\n", + "epoch: 36 step: 138, loss is 0.9813909530639648\n", + "epoch: 36 step: 139, loss is 0.8719720840454102\n", + "epoch: 36 step: 140, loss is 0.892081618309021\n", + "epoch: 36 step: 141, loss is 0.8887349963188171\n", + "epoch: 36 step: 142, loss is 1.0131382942199707\n", + "epoch: 36 step: 143, loss is 1.0146031379699707\n", + "epoch: 36 step: 144, loss is 0.8930062651634216\n", + "epoch: 36 step: 145, loss is 0.9432693123817444\n", + "epoch: 36 step: 146, loss is 0.9375790357589722\n", + "epoch: 36 step: 147, loss is 0.9088044762611389\n", + "epoch: 36 step: 148, loss is 0.9252309203147888\n", + "epoch: 36 step: 149, loss is 0.8957183361053467\n", + "epoch: 36 step: 150, loss is 0.9737062454223633\n", + "epoch: 36 step: 151, loss is 0.9182149171829224\n", + "epoch: 36 step: 152, loss is 0.9021044969558716\n", + "epoch: 36 step: 153, loss is 0.9487491846084595\n", + "epoch: 36 step: 154, loss is 0.974419355392456\n", + "epoch: 36 step: 155, loss is 0.9402234554290771\n", + "epoch: 36 step: 156, loss is 0.90462327003479\n", + "epoch: 36 step: 157, loss is 0.950606107711792\n", + "epoch: 36 step: 158, loss is 0.9047738313674927\n", + "epoch: 36 step: 159, loss is 0.9144648313522339\n", + "epoch: 36 step: 160, loss is 0.9378606081008911\n", + "epoch: 36 step: 161, loss is 0.9361125826835632\n", + "epoch: 36 step: 162, loss is 0.9304062128067017\n", + "epoch: 36 step: 163, loss is 0.8966818451881409\n", + "epoch: 36 step: 164, loss is 0.9595366716384888\n", + "epoch: 36 step: 165, loss is 0.9477845430374146\n", + "epoch: 36 step: 166, loss is 0.922385573387146\n", + "epoch: 36 
step: 167, loss is 0.918380856513977\n", + "epoch: 36 step: 168, loss is 0.9310324192047119\n", + "epoch: 36 step: 169, loss is 0.9195016622543335\n", + "epoch: 36 step: 170, loss is 0.8632469177246094\n", + "epoch: 36 step: 171, loss is 0.8800954818725586\n", + "epoch: 36 step: 172, loss is 0.9220551252365112\n", + "epoch: 36 step: 173, loss is 0.9108235836029053\n", + "epoch: 36 step: 174, loss is 0.919916033744812\n", + "epoch: 36 step: 175, loss is 0.9799562096595764\n", + "epoch: 36 step: 176, loss is 0.9589033126831055\n", + "epoch: 36 step: 177, loss is 0.9697749614715576\n", + "epoch: 36 step: 178, loss is 0.9588885307312012\n", + "epoch: 36 step: 179, loss is 0.927115261554718\n", + "epoch: 36 step: 180, loss is 0.9250320792198181\n", + "epoch: 36 step: 181, loss is 0.9393066167831421\n", + "epoch: 36 step: 182, loss is 0.9036164283752441\n", + "epoch: 36 step: 183, loss is 0.9027825593948364\n", + "epoch: 36 step: 184, loss is 0.8637629747390747\n", + "epoch: 36 step: 185, loss is 1.0046014785766602\n", + "epoch: 36 step: 186, loss is 0.9129851460456848\n", + "epoch: 36 step: 187, loss is 0.9617010354995728\n", + "epoch: 36 step: 188, loss is 0.9765033721923828\n", + "epoch: 36 step: 189, loss is 0.8780584931373596\n", + "epoch: 36 step: 190, loss is 0.8934556245803833\n", + "epoch: 36 step: 191, loss is 0.89844810962677\n", + "epoch: 36 step: 192, loss is 0.936926007270813\n", + "epoch: 36 step: 193, loss is 0.9420064687728882\n", + "epoch: 36 step: 194, loss is 0.9494912028312683\n", + "epoch: 36 step: 195, loss is 0.943266749382019\n", + "Train epoch time: 94178.055 ms, per step time: 482.964 ms\n", + "epoch: 37 step: 1, loss is 0.894366979598999\n", + "epoch: 37 step: 2, loss is 0.8964793682098389\n", + "epoch: 37 step: 3, loss is 0.877840518951416\n", + "epoch: 37 step: 4, loss is 0.8932602405548096\n", + "epoch: 37 step: 5, loss is 0.9545726776123047\n", + "epoch: 37 step: 6, loss is 0.8845182657241821\n", + "epoch: 37 step: 7, loss is 0.9390895366668701\n", + "epoch: 37 step: 8, loss is 0.8816533088684082\n", + "epoch: 37 step: 9, loss is 0.859074592590332\n", + "epoch: 37 step: 10, loss is 0.9012227654457092\n", + "epoch: 37 step: 11, loss is 0.8810882568359375\n", + "epoch: 37 step: 12, loss is 0.9125542640686035\n", + "epoch: 37 step: 13, loss is 0.8527774810791016\n", + "epoch: 37 step: 14, loss is 0.8910964727401733\n", + "epoch: 37 step: 15, loss is 0.9371893405914307\n", + "epoch: 37 step: 16, loss is 0.9260865449905396\n", + "epoch: 37 step: 17, loss is 0.9471024870872498\n", + "epoch: 37 step: 18, loss is 0.8872158527374268\n", + "epoch: 37 step: 19, loss is 0.9474695324897766\n", + "epoch: 37 step: 20, loss is 0.9353369474411011\n", + "epoch: 37 step: 21, loss is 0.8991224765777588\n", + "epoch: 37 step: 22, loss is 0.9115789532661438\n", + "epoch: 37 step: 23, loss is 0.9187396764755249\n", + "epoch: 37 step: 24, loss is 0.9100955724716187\n", + "epoch: 37 step: 25, loss is 0.9374712705612183\n", + "epoch: 37 step: 26, loss is 0.8817296028137207\n", + "epoch: 37 step: 27, loss is 1.0468547344207764\n", + "epoch: 37 step: 28, loss is 0.9026957750320435\n", + "epoch: 37 step: 29, loss is 0.8777835965156555\n", + "epoch: 37 step: 30, loss is 0.8459382057189941\n", + "epoch: 37 step: 31, loss is 1.002854824066162\n", + "epoch: 37 step: 32, loss is 0.8899663686752319\n", + "epoch: 37 step: 33, loss is 0.9490312933921814\n", + "epoch: 37 step: 34, loss is 0.962378978729248\n", + "epoch: 37 step: 35, loss is 0.8727337121963501\n", + "epoch: 37 step: 36, loss is 
0.8624219298362732\n", + "epoch: 37 step: 37, loss is 0.9518184661865234\n", + "epoch: 37 step: 38, loss is 0.9383136630058289\n", + "epoch: 37 step: 39, loss is 0.9451326131820679\n", + "epoch: 37 step: 40, loss is 0.9388296008110046\n", + "epoch: 37 step: 41, loss is 0.9262934923171997\n", + "epoch: 37 step: 42, loss is 0.8719084858894348\n", + "epoch: 37 step: 43, loss is 0.9447540044784546\n", + "epoch: 37 step: 44, loss is 0.8682041764259338\n", + "epoch: 37 step: 45, loss is 0.953051745891571\n", + "epoch: 37 step: 46, loss is 0.9113283157348633\n", + "epoch: 37 step: 47, loss is 0.9696716666221619\n", + "epoch: 37 step: 48, loss is 0.9186900854110718\n", + "epoch: 37 step: 49, loss is 0.8187614679336548\n", + "epoch: 37 step: 50, loss is 0.8410479426383972\n", + "epoch: 37 step: 51, loss is 0.8991901874542236\n", + "epoch: 37 step: 52, loss is 0.8831561207771301\n", + "epoch: 37 step: 53, loss is 0.9387684464454651\n", + "epoch: 37 step: 54, loss is 0.9105541706085205\n", + "epoch: 37 step: 55, loss is 0.9862140417098999\n", + "epoch: 37 step: 56, loss is 0.8763136267662048\n", + "epoch: 37 step: 57, loss is 0.8858389258384705\n", + "epoch: 37 step: 58, loss is 0.9521466493606567\n", + "epoch: 37 step: 59, loss is 0.9684453010559082\n", + "epoch: 37 step: 60, loss is 0.912433385848999\n", + "epoch: 37 step: 61, loss is 0.8792889714241028\n", + "epoch: 37 step: 62, loss is 0.9202752113342285\n", + "epoch: 37 step: 63, loss is 0.9285165667533875\n", + "epoch: 37 step: 64, loss is 0.9273291230201721\n", + "epoch: 37 step: 65, loss is 0.8190360069274902\n", + "epoch: 37 step: 66, loss is 0.9141345620155334\n", + "epoch: 37 step: 67, loss is 0.9265640377998352\n", + "epoch: 37 step: 68, loss is 0.9400726556777954\n", + "epoch: 37 step: 69, loss is 0.8580575585365295\n", + "epoch: 37 step: 70, loss is 0.8900380730628967\n", + "epoch: 37 step: 71, loss is 0.8867246508598328\n", + "epoch: 37 step: 72, loss is 0.919118344783783\n", + "epoch: 37 step: 73, loss is 0.9610574841499329\n", + "epoch: 37 step: 74, loss is 0.9194049835205078\n", + "epoch: 37 step: 75, loss is 0.9244626760482788\n", + "epoch: 37 step: 76, loss is 0.9280495643615723\n", + "epoch: 37 step: 77, loss is 0.8838629126548767\n", + "epoch: 37 step: 78, loss is 0.9550855159759521\n", + "epoch: 37 step: 79, loss is 0.9008275866508484\n", + "epoch: 37 step: 80, loss is 0.8913450837135315\n", + "epoch: 37 step: 81, loss is 0.9849715232849121\n", + "epoch: 37 step: 82, loss is 0.8992519974708557\n", + "epoch: 37 step: 83, loss is 0.8814723491668701\n", + "epoch: 37 step: 84, loss is 0.9645324945449829\n", + "epoch: 37 step: 85, loss is 0.911176323890686\n", + "epoch: 37 step: 86, loss is 0.9119610786437988\n", + "epoch: 37 step: 87, loss is 0.9354449510574341\n", + "epoch: 37 step: 88, loss is 0.9033266305923462\n", + "epoch: 37 step: 89, loss is 0.9147226810455322\n", + "epoch: 37 step: 90, loss is 0.899871826171875\n", + "epoch: 37 step: 91, loss is 0.9279531240463257\n", + "epoch: 37 step: 92, loss is 0.8689677715301514\n", + "epoch: 37 step: 93, loss is 0.882117748260498\n", + "epoch: 37 step: 94, loss is 0.959213137626648\n", + "epoch: 37 step: 95, loss is 0.9484282732009888\n", + "epoch: 37 step: 96, loss is 0.9256013035774231\n", + "epoch: 37 step: 97, loss is 0.9100347757339478\n", + "epoch: 37 step: 98, loss is 0.9413018226623535\n", + "epoch: 37 step: 99, loss is 0.9490039348602295\n", + "epoch: 37 step: 100, loss is 0.8832134008407593\n", + "epoch: 37 step: 101, loss is 0.9727935791015625\n", + "epoch: 37 step: 102, 
loss is 0.8947179317474365\n", + "epoch: 37 step: 103, loss is 0.9308130741119385\n", + "epoch: 37 step: 104, loss is 0.9014136791229248\n", + "epoch: 37 step: 105, loss is 0.8865413665771484\n", + "epoch: 37 step: 106, loss is 0.9205124378204346\n", + "epoch: 37 step: 107, loss is 0.9127756357192993\n", + "epoch: 37 step: 108, loss is 0.9357254505157471\n", + "epoch: 37 step: 109, loss is 0.9571647644042969\n", + "epoch: 37 step: 110, loss is 0.9149158000946045\n", + "epoch: 37 step: 111, loss is 0.9158039093017578\n", + "epoch: 37 step: 112, loss is 0.909244179725647\n", + "epoch: 37 step: 113, loss is 0.9500954151153564\n", + "epoch: 37 step: 114, loss is 0.8913596868515015\n", + "epoch: 37 step: 115, loss is 0.899511992931366\n", + "epoch: 37 step: 116, loss is 0.9976452589035034\n", + "epoch: 37 step: 117, loss is 0.8651297688484192\n", + "epoch: 37 step: 118, loss is 0.9072359800338745\n", + "epoch: 37 step: 119, loss is 0.9061567187309265\n", + "epoch: 37 step: 120, loss is 0.8353898525238037\n", + "epoch: 37 step: 121, loss is 0.9594079256057739\n", + "epoch: 37 step: 122, loss is 0.8893373012542725\n", + "epoch: 37 step: 123, loss is 0.9020209312438965\n", + "epoch: 37 step: 124, loss is 0.8697826862335205\n", + "epoch: 37 step: 125, loss is 0.9355045557022095\n", + "epoch: 37 step: 126, loss is 0.8889826536178589\n", + "epoch: 37 step: 127, loss is 0.8888669013977051\n", + "epoch: 37 step: 128, loss is 0.9076784253120422\n", + "epoch: 37 step: 129, loss is 0.9046095609664917\n", + "epoch: 37 step: 130, loss is 0.9013829827308655\n", + "epoch: 37 step: 131, loss is 0.8429166078567505\n", + "epoch: 37 step: 132, loss is 0.9306365251541138\n", + "epoch: 37 step: 133, loss is 0.8831478953361511\n", + "epoch: 37 step: 134, loss is 0.861400306224823\n", + "epoch: 37 step: 135, loss is 0.8817192316055298\n", + "epoch: 37 step: 136, loss is 0.8838294744491577\n", + "epoch: 37 step: 137, loss is 0.9292114973068237\n", + "epoch: 37 step: 138, loss is 0.9648375511169434\n", + "epoch: 37 step: 139, loss is 0.9086259603500366\n", + "epoch: 37 step: 140, loss is 0.9359986782073975\n", + "epoch: 37 step: 141, loss is 0.9445984363555908\n", + "epoch: 37 step: 142, loss is 0.9150552749633789\n", + "epoch: 37 step: 143, loss is 0.8997162580490112\n", + "epoch: 37 step: 144, loss is 0.85664302110672\n", + "epoch: 37 step: 145, loss is 0.9295265674591064\n", + "epoch: 37 step: 146, loss is 0.9444328546524048\n", + "epoch: 37 step: 147, loss is 0.9224900007247925\n", + "epoch: 37 step: 148, loss is 0.8474961519241333\n", + "epoch: 37 step: 149, loss is 0.8662912845611572\n", + "epoch: 37 step: 150, loss is 0.8703345060348511\n", + "epoch: 37 step: 151, loss is 0.9110129475593567\n", + "epoch: 37 step: 152, loss is 0.9377716779708862\n", + "epoch: 37 step: 153, loss is 0.9161810874938965\n", + "epoch: 37 step: 154, loss is 0.9030777215957642\n", + "epoch: 37 step: 155, loss is 0.9838343262672424\n", + "epoch: 37 step: 156, loss is 0.8766644597053528\n", + "epoch: 37 step: 157, loss is 0.9157836437225342\n", + "epoch: 37 step: 158, loss is 0.8986318111419678\n", + "epoch: 37 step: 159, loss is 0.9520909786224365\n", + "epoch: 37 step: 160, loss is 0.9331917762756348\n", + "epoch: 37 step: 161, loss is 0.9033013582229614\n", + "epoch: 37 step: 162, loss is 0.9533791542053223\n", + "epoch: 37 step: 163, loss is 0.8819625377655029\n", + "epoch: 37 step: 164, loss is 0.9204336404800415\n", + "epoch: 37 step: 165, loss is 0.9335941076278687\n", + "epoch: 37 step: 166, loss is 0.9113415479660034\n", + 
"epoch: 37 step: 167, loss is 0.9093854427337646\n", + "epoch: 37 step: 168, loss is 0.891006350517273\n", + "epoch: 37 step: 169, loss is 0.8945587873458862\n", + "epoch: 37 step: 170, loss is 0.9199824333190918\n", + "epoch: 37 step: 171, loss is 0.8894087076187134\n", + "epoch: 37 step: 172, loss is 0.8432344198226929\n", + "epoch: 37 step: 173, loss is 0.9223273992538452\n", + "epoch: 37 step: 174, loss is 0.9559364318847656\n", + "epoch: 37 step: 175, loss is 0.9855693578720093\n", + "epoch: 37 step: 176, loss is 0.8663846254348755\n", + "epoch: 37 step: 177, loss is 0.9415105581283569\n", + "epoch: 37 step: 178, loss is 0.9159643650054932\n", + "epoch: 37 step: 179, loss is 0.8524744510650635\n", + "epoch: 37 step: 180, loss is 0.9477188587188721\n", + "epoch: 37 step: 181, loss is 0.8645237684249878\n", + "epoch: 37 step: 182, loss is 0.9336286187171936\n", + "epoch: 37 step: 183, loss is 0.9332791566848755\n", + "epoch: 37 step: 184, loss is 0.9463033676147461\n", + "epoch: 37 step: 185, loss is 0.9446437358856201\n", + "epoch: 37 step: 186, loss is 0.9215521812438965\n", + "epoch: 37 step: 187, loss is 0.9491333365440369\n", + "epoch: 37 step: 188, loss is 0.9688184261322021\n", + "epoch: 37 step: 189, loss is 0.9154113531112671\n", + "epoch: 37 step: 190, loss is 0.9223686456680298\n", + "epoch: 37 step: 191, loss is 0.8899835348129272\n", + "epoch: 37 step: 192, loss is 0.9028563499450684\n", + "epoch: 37 step: 193, loss is 0.9726190567016602\n", + "epoch: 37 step: 194, loss is 0.9442843198776245\n", + "epoch: 37 step: 195, loss is 0.9650087952613831\n", + "Train epoch time: 96633.914 ms, per step time: 495.559 ms\n", + "epoch: 38 step: 1, loss is 0.9644353985786438\n", + "epoch: 38 step: 2, loss is 0.8591467142105103\n", + "epoch: 38 step: 3, loss is 0.9248469471931458\n", + "epoch: 38 step: 4, loss is 0.9636443853378296\n", + "epoch: 38 step: 5, loss is 0.865712583065033\n", + "epoch: 38 step: 6, loss is 0.8926432132720947\n", + "epoch: 38 step: 7, loss is 0.8932524919509888\n", + "epoch: 38 step: 8, loss is 0.9065390825271606\n", + "epoch: 38 step: 9, loss is 0.9481102824211121\n", + "epoch: 38 step: 10, loss is 0.9420967102050781\n", + "epoch: 38 step: 11, loss is 0.9447901248931885\n", + "epoch: 38 step: 12, loss is 0.8949570655822754\n", + "epoch: 38 step: 13, loss is 0.8373324871063232\n", + "epoch: 38 step: 14, loss is 0.8512460589408875\n", + "epoch: 38 step: 15, loss is 0.8996691107749939\n", + "epoch: 38 step: 16, loss is 0.9221575260162354\n", + "epoch: 38 step: 17, loss is 0.9411889314651489\n", + "epoch: 38 step: 18, loss is 0.8879414796829224\n", + "epoch: 38 step: 19, loss is 0.9283058047294617\n", + "epoch: 38 step: 20, loss is 0.8703025579452515\n", + "epoch: 38 step: 21, loss is 0.886993408203125\n", + "epoch: 38 step: 22, loss is 0.8451102375984192\n", + "epoch: 38 step: 23, loss is 0.9431037902832031\n", + "epoch: 38 step: 24, loss is 0.8845306038856506\n", + "epoch: 38 step: 25, loss is 0.9340087175369263\n", + "epoch: 38 step: 26, loss is 0.9032560586929321\n", + "epoch: 38 step: 27, loss is 0.8918323516845703\n", + "epoch: 38 step: 28, loss is 0.8394193649291992\n", + "epoch: 38 step: 29, loss is 1.0143547058105469\n", + "epoch: 38 step: 30, loss is 0.902428388595581\n", + "epoch: 38 step: 31, loss is 0.9359561204910278\n", + "epoch: 38 step: 32, loss is 0.8858641386032104\n", + "epoch: 38 step: 33, loss is 0.8918731212615967\n", + "epoch: 38 step: 34, loss is 0.9011649489402771\n", + "epoch: 38 step: 35, loss is 0.8741051554679871\n", + "epoch: 38 step: 
36, loss is 0.8897653818130493\n", + "epoch: 38 step: 37, loss is 0.9547367691993713\n", + "epoch: 38 step: 38, loss is 0.8434375524520874\n", + "epoch: 38 step: 39, loss is 0.8406927585601807\n", + "epoch: 38 step: 40, loss is 0.9429190158843994\n", + "epoch: 38 step: 41, loss is 0.8791395425796509\n", + "epoch: 38 step: 42, loss is 0.843609631061554\n", + "epoch: 38 step: 43, loss is 0.8423075675964355\n", + "epoch: 38 step: 44, loss is 0.869741678237915\n", + "epoch: 38 step: 45, loss is 0.882320761680603\n", + "epoch: 38 step: 46, loss is 0.8864765167236328\n", + "epoch: 38 step: 47, loss is 0.8802717924118042\n", + "epoch: 38 step: 48, loss is 0.9112096428871155\n", + "epoch: 38 step: 49, loss is 0.8421928286552429\n", + "epoch: 38 step: 50, loss is 0.8910983800888062\n", + "epoch: 38 step: 51, loss is 0.9792097806930542\n", + "epoch: 38 step: 52, loss is 0.897674024105072\n", + "epoch: 38 step: 53, loss is 0.9176896810531616\n", + "epoch: 38 step: 54, loss is 0.9070497155189514\n", + "epoch: 38 step: 55, loss is 0.8279929161071777\n", + "epoch: 38 step: 56, loss is 0.8515253663063049\n", + "epoch: 38 step: 57, loss is 0.8706358075141907\n", + "epoch: 38 step: 58, loss is 0.9322777986526489\n", + "epoch: 38 step: 59, loss is 0.8738762140274048\n", + "epoch: 38 step: 60, loss is 0.8876544237136841\n", + "epoch: 38 step: 61, loss is 0.9523328542709351\n", + "epoch: 38 step: 62, loss is 0.8798788785934448\n", + "epoch: 38 step: 63, loss is 0.8854973316192627\n", + "epoch: 38 step: 64, loss is 0.9428043365478516\n", + "epoch: 38 step: 65, loss is 0.89760822057724\n", + "epoch: 38 step: 66, loss is 0.9756983518600464\n", + "epoch: 38 step: 67, loss is 0.8901194334030151\n", + "epoch: 38 step: 68, loss is 0.9210478067398071\n", + "epoch: 38 step: 69, loss is 0.9190508127212524\n", + "epoch: 38 step: 70, loss is 0.9916295409202576\n", + "epoch: 38 step: 71, loss is 0.9103949666023254\n", + "epoch: 38 step: 72, loss is 0.9580341577529907\n", + "epoch: 38 step: 73, loss is 0.8864830136299133\n", + "epoch: 38 step: 74, loss is 0.9072789549827576\n", + "epoch: 38 step: 75, loss is 0.8819640874862671\n", + "epoch: 38 step: 76, loss is 0.8499956130981445\n", + "epoch: 38 step: 77, loss is 0.9012879133224487\n", + "epoch: 38 step: 78, loss is 0.8768424987792969\n", + "epoch: 38 step: 79, loss is 0.8992652297019958\n", + "epoch: 38 step: 80, loss is 0.9279012084007263\n", + "epoch: 38 step: 81, loss is 0.8850351572036743\n", + "epoch: 38 step: 82, loss is 0.890498161315918\n", + "epoch: 38 step: 83, loss is 0.8792634010314941\n", + "epoch: 38 step: 84, loss is 0.8623813986778259\n", + "epoch: 38 step: 85, loss is 0.8485772609710693\n", + "epoch: 38 step: 86, loss is 0.8866424560546875\n", + "epoch: 38 step: 87, loss is 0.8761226534843445\n", + "epoch: 38 step: 88, loss is 0.8522550463676453\n", + "epoch: 38 step: 89, loss is 0.8976705074310303\n", + "epoch: 38 step: 90, loss is 0.8866127729415894\n", + "epoch: 38 step: 91, loss is 0.9145216941833496\n", + "epoch: 38 step: 92, loss is 0.8546053767204285\n", + "epoch: 38 step: 93, loss is 0.9164795875549316\n", + "epoch: 38 step: 94, loss is 0.9714679718017578\n", + "epoch: 38 step: 95, loss is 0.8887884616851807\n", + "epoch: 38 step: 96, loss is 0.9059304594993591\n", + "epoch: 38 step: 97, loss is 0.880599856376648\n", + "epoch: 38 step: 98, loss is 0.8514142036437988\n", + "epoch: 38 step: 99, loss is 0.8665508031845093\n", + "epoch: 38 step: 100, loss is 0.9600133895874023\n", + "epoch: 38 step: 101, loss is 0.8657805323600769\n", + "epoch: 38 
step: 102, loss is 0.9078474044799805\n", + "epoch: 38 step: 103, loss is 0.8450144529342651\n", + "epoch: 38 step: 104, loss is 0.8731218576431274\n", + "epoch: 38 step: 105, loss is 0.9490156173706055\n", + "epoch: 38 step: 106, loss is 0.914395809173584\n", + "epoch: 38 step: 107, loss is 0.9186147451400757\n", + "epoch: 38 step: 108, loss is 0.9598707556724548\n", + "epoch: 38 step: 109, loss is 0.9346034526824951\n", + "epoch: 38 step: 110, loss is 0.8846397995948792\n", + "epoch: 38 step: 111, loss is 0.9224824905395508\n", + "epoch: 38 step: 112, loss is 0.9029296636581421\n", + "epoch: 38 step: 113, loss is 0.9110804796218872\n", + "epoch: 38 step: 114, loss is 0.8396177291870117\n", + "epoch: 38 step: 115, loss is 0.8675407767295837\n", + "epoch: 38 step: 116, loss is 0.9634933471679688\n", + "epoch: 38 step: 117, loss is 0.9367133378982544\n", + "epoch: 38 step: 118, loss is 0.8941707611083984\n", + "epoch: 38 step: 119, loss is 0.9336663484573364\n", + "epoch: 38 step: 120, loss is 0.8603336811065674\n", + "epoch: 38 step: 121, loss is 0.8572193384170532\n", + "epoch: 38 step: 122, loss is 0.9582822322845459\n", + "epoch: 38 step: 123, loss is 0.8935695886611938\n", + "epoch: 38 step: 124, loss is 0.9079141616821289\n", + "epoch: 38 step: 125, loss is 0.8736512064933777\n", + "epoch: 38 step: 126, loss is 0.8710961937904358\n", + "epoch: 38 step: 127, loss is 0.9240210056304932\n", + "epoch: 38 step: 128, loss is 0.8646270036697388\n", + "epoch: 38 step: 129, loss is 0.8671103715896606\n", + "epoch: 38 step: 130, loss is 0.942484438419342\n", + "epoch: 38 step: 131, loss is 0.8505088090896606\n", + "epoch: 38 step: 132, loss is 0.9132107496261597\n", + "epoch: 38 step: 133, loss is 0.8845546245574951\n", + "epoch: 38 step: 134, loss is 0.9309512376785278\n", + "epoch: 38 step: 135, loss is 0.8603273630142212\n", + "epoch: 38 step: 136, loss is 0.8995720148086548\n", + "epoch: 38 step: 137, loss is 0.9100026488304138\n", + "epoch: 38 step: 138, loss is 0.8660421967506409\n", + "epoch: 38 step: 139, loss is 0.883720874786377\n", + "epoch: 38 step: 140, loss is 0.9180278778076172\n", + "epoch: 38 step: 141, loss is 0.9450329542160034\n", + "epoch: 38 step: 142, loss is 0.9451298713684082\n", + "epoch: 38 step: 143, loss is 0.9168519973754883\n", + "epoch: 38 step: 144, loss is 0.985957145690918\n", + "epoch: 38 step: 145, loss is 0.8786382675170898\n", + "epoch: 38 step: 146, loss is 0.887488603591919\n", + "epoch: 38 step: 147, loss is 0.9288023710250854\n", + "epoch: 38 step: 148, loss is 0.9432166218757629\n", + "epoch: 38 step: 149, loss is 0.917360246181488\n", + "epoch: 38 step: 150, loss is 0.9051632881164551\n", + "epoch: 38 step: 151, loss is 0.9713149070739746\n", + "epoch: 38 step: 152, loss is 0.9611563086509705\n", + "epoch: 38 step: 153, loss is 0.8664149045944214\n", + "epoch: 38 step: 154, loss is 0.8701969385147095\n", + "epoch: 38 step: 155, loss is 0.9413514733314514\n", + "epoch: 38 step: 156, loss is 0.8948757648468018\n", + "epoch: 38 step: 157, loss is 0.884712815284729\n", + "epoch: 38 step: 158, loss is 0.8614862561225891\n", + "epoch: 38 step: 159, loss is 0.9159590601921082\n", + "epoch: 38 step: 160, loss is 0.8948489427566528\n", + "epoch: 38 step: 161, loss is 0.9464200735092163\n", + "epoch: 38 step: 162, loss is 0.8714253306388855\n", + "epoch: 38 step: 163, loss is 0.8899481296539307\n", + "epoch: 38 step: 164, loss is 0.9094343185424805\n", + "epoch: 38 step: 165, loss is 0.943204402923584\n", + "epoch: 38 step: 166, loss is 0.8506242036819458\n", 
+ "epoch: 38 step: 167, loss is 0.9206901788711548\n", + "epoch: 38 step: 168, loss is 0.8430876135826111\n", + "epoch: 38 step: 169, loss is 0.9152131080627441\n", + "epoch: 38 step: 170, loss is 0.8731434345245361\n", + "epoch: 38 step: 171, loss is 0.9075946807861328\n", + "epoch: 38 step: 172, loss is 0.9113630056381226\n", + "epoch: 38 step: 173, loss is 0.8664947748184204\n", + "epoch: 38 step: 174, loss is 0.8932738900184631\n", + "epoch: 38 step: 175, loss is 0.9070514440536499\n", + "epoch: 38 step: 176, loss is 0.9515738487243652\n", + "epoch: 38 step: 177, loss is 0.9543778896331787\n", + "epoch: 38 step: 178, loss is 0.889481782913208\n", + "epoch: 38 step: 179, loss is 0.9639380574226379\n", + "epoch: 38 step: 180, loss is 0.9081448316574097\n", + "epoch: 38 step: 181, loss is 0.9227905869483948\n", + "epoch: 38 step: 182, loss is 0.8702630996704102\n", + "epoch: 38 step: 183, loss is 0.9639230966567993\n", + "epoch: 38 step: 184, loss is 0.8926862478256226\n", + "epoch: 38 step: 185, loss is 0.8436956405639648\n", + "epoch: 38 step: 186, loss is 0.8995475769042969\n", + "epoch: 38 step: 187, loss is 0.9211913347244263\n", + "epoch: 38 step: 188, loss is 0.9529311656951904\n", + "epoch: 38 step: 189, loss is 0.8916225433349609\n", + "epoch: 38 step: 190, loss is 0.8464163541793823\n", + "epoch: 38 step: 191, loss is 0.8770601749420166\n", + "epoch: 38 step: 192, loss is 0.9226069450378418\n", + "epoch: 38 step: 193, loss is 0.9974471926689148\n", + "epoch: 38 step: 194, loss is 0.9110836982727051\n", + "epoch: 38 step: 195, loss is 0.9417803287506104\n", + "Train epoch time: 96529.984 ms, per step time: 495.026 ms\n", + "epoch: 39 step: 1, loss is 0.8027544021606445\n", + "epoch: 39 step: 2, loss is 0.8580753803253174\n", + "epoch: 39 step: 3, loss is 0.9083578586578369\n", + "epoch: 39 step: 4, loss is 0.9060826301574707\n", + "epoch: 39 step: 5, loss is 0.8435655832290649\n", + "epoch: 39 step: 6, loss is 0.8793356418609619\n", + "epoch: 39 step: 7, loss is 0.9389184713363647\n", + "epoch: 39 step: 8, loss is 0.8587248921394348\n", + "epoch: 39 step: 9, loss is 0.9014296531677246\n", + "epoch: 39 step: 10, loss is 0.8740645051002502\n", + "epoch: 39 step: 11, loss is 0.9171005487442017\n", + "epoch: 39 step: 12, loss is 0.8847118616104126\n", + "epoch: 39 step: 13, loss is 0.8901522159576416\n", + "epoch: 39 step: 14, loss is 0.8555381298065186\n", + "epoch: 39 step: 15, loss is 0.8724911212921143\n", + "epoch: 39 step: 16, loss is 0.866057276725769\n", + "epoch: 39 step: 17, loss is 0.9040854573249817\n", + "epoch: 39 step: 18, loss is 0.8525365591049194\n", + "epoch: 39 step: 19, loss is 0.8856762051582336\n", + "epoch: 39 step: 20, loss is 0.8858839273452759\n", + "epoch: 39 step: 21, loss is 0.9518245458602905\n", + "epoch: 39 step: 22, loss is 0.9627732038497925\n", + "epoch: 39 step: 23, loss is 0.8523842692375183\n", + "epoch: 39 step: 24, loss is 0.8563827276229858\n", + "epoch: 39 step: 25, loss is 0.9409192204475403\n", + "epoch: 39 step: 26, loss is 0.7987247705459595\n", + "epoch: 39 step: 27, loss is 0.8559681177139282\n", + "epoch: 39 step: 28, loss is 0.8663652539253235\n", + "epoch: 39 step: 29, loss is 0.8291332125663757\n", + "epoch: 39 step: 30, loss is 0.9295628070831299\n", + "epoch: 39 step: 31, loss is 0.8988004922866821\n", + "epoch: 39 step: 32, loss is 0.8617967367172241\n", + "epoch: 39 step: 33, loss is 0.8988361954689026\n", + "epoch: 39 step: 34, loss is 0.8490512371063232\n", + "epoch: 39 step: 35, loss is 0.8358687162399292\n", + "epoch: 39 
step: 36, loss is 0.9626249670982361\n", + "epoch: 39 step: 37, loss is 0.9226419925689697\n", + "epoch: 39 step: 38, loss is 0.8731620907783508\n", + "epoch: 39 step: 39, loss is 0.8752349615097046\n", + "epoch: 39 step: 40, loss is 0.898119330406189\n", + "epoch: 39 step: 41, loss is 0.8782503008842468\n", + "epoch: 39 step: 42, loss is 0.929202139377594\n", + "epoch: 39 step: 43, loss is 0.8496741056442261\n", + "epoch: 39 step: 44, loss is 0.9040846824645996\n", + "epoch: 39 step: 45, loss is 0.8624253273010254\n", + "epoch: 39 step: 46, loss is 0.8660885095596313\n", + "epoch: 39 step: 47, loss is 0.9186990261077881\n", + "epoch: 39 step: 48, loss is 0.933849573135376\n", + "epoch: 39 step: 49, loss is 0.976352870464325\n", + "epoch: 39 step: 50, loss is 0.8549307584762573\n", + "epoch: 39 step: 51, loss is 0.8720060586929321\n", + "epoch: 39 step: 52, loss is 0.9079470634460449\n", + "epoch: 39 step: 53, loss is 0.8489070534706116\n", + "epoch: 39 step: 54, loss is 0.8391618728637695\n", + "epoch: 39 step: 55, loss is 0.8667554259300232\n", + "epoch: 39 step: 56, loss is 0.884162187576294\n", + "epoch: 39 step: 57, loss is 0.9139655232429504\n", + "epoch: 39 step: 58, loss is 0.9017330408096313\n", + "epoch: 39 step: 59, loss is 0.8524740934371948\n", + "epoch: 39 step: 60, loss is 0.8741457462310791\n", + "epoch: 39 step: 61, loss is 0.8547707796096802\n", + "epoch: 39 step: 62, loss is 0.9069471955299377\n", + "epoch: 39 step: 63, loss is 0.8952859044075012\n", + "epoch: 39 step: 64, loss is 0.9131509065628052\n", + "epoch: 39 step: 65, loss is 0.9266204833984375\n", + "epoch: 39 step: 66, loss is 0.9079028367996216\n", + "epoch: 39 step: 67, loss is 0.9437500238418579\n", + "epoch: 39 step: 68, loss is 0.8634668588638306\n", + "epoch: 39 step: 69, loss is 0.8965356945991516\n", + "epoch: 39 step: 70, loss is 0.858785092830658\n", + "epoch: 39 step: 71, loss is 0.8371154069900513\n", + "epoch: 39 step: 72, loss is 0.8917064070701599\n", + "epoch: 39 step: 73, loss is 0.8897498250007629\n", + "epoch: 39 step: 74, loss is 0.8871984481811523\n", + "epoch: 39 step: 75, loss is 0.8450354337692261\n", + "epoch: 39 step: 76, loss is 0.9240155816078186\n", + "epoch: 39 step: 77, loss is 0.9342747926712036\n", + "epoch: 39 step: 78, loss is 0.9101995229721069\n", + "epoch: 39 step: 79, loss is 0.9041628837585449\n", + "epoch: 39 step: 80, loss is 0.833791196346283\n", + "epoch: 39 step: 81, loss is 0.902460515499115\n", + "epoch: 39 step: 82, loss is 0.906185507774353\n", + "epoch: 39 step: 83, loss is 0.878761351108551\n", + "epoch: 39 step: 84, loss is 0.9225038290023804\n", + "epoch: 39 step: 85, loss is 0.9569016695022583\n", + "epoch: 39 step: 86, loss is 0.9063615798950195\n", + "epoch: 39 step: 87, loss is 0.912956714630127\n", + "epoch: 39 step: 88, loss is 0.9538160562515259\n", + "epoch: 39 step: 89, loss is 0.9667307138442993\n", + "epoch: 39 step: 90, loss is 0.8833717107772827\n", + "epoch: 39 step: 91, loss is 0.8116464614868164\n", + "epoch: 39 step: 92, loss is 0.8190633058547974\n", + "epoch: 39 step: 93, loss is 0.8689693808555603\n", + "epoch: 39 step: 94, loss is 0.9694122672080994\n", + "epoch: 39 step: 95, loss is 0.9011635780334473\n", + "epoch: 39 step: 96, loss is 1.0066466331481934\n", + "epoch: 39 step: 97, loss is 0.9367119073867798\n", + "epoch: 39 step: 98, loss is 0.9303284287452698\n", + "epoch: 39 step: 99, loss is 0.8782893419265747\n", + "epoch: 39 step: 100, loss is 0.8343509435653687\n", + "epoch: 39 step: 101, loss is 0.906140923500061\n", + "epoch: 39 
step: 102, loss is 0.905020534992218\n", + "epoch: 39 step: 103, loss is 0.9685481786727905\n", + "epoch: 39 step: 104, loss is 0.9819811582565308\n", + "epoch: 39 step: 105, loss is 0.9395359754562378\n", + "epoch: 39 step: 106, loss is 0.9055901765823364\n", + "epoch: 39 step: 107, loss is 0.8914735913276672\n", + "epoch: 39 step: 108, loss is 0.8964855670928955\n", + "epoch: 39 step: 109, loss is 0.8616411685943604\n", + "epoch: 39 step: 110, loss is 0.8889032006263733\n", + "epoch: 39 step: 111, loss is 0.906387209892273\n", + "epoch: 39 step: 112, loss is 0.9775859117507935\n", + "epoch: 39 step: 113, loss is 0.9375530481338501\n", + "epoch: 39 step: 114, loss is 0.8613241910934448\n", + "epoch: 39 step: 115, loss is 0.8923771381378174\n", + "epoch: 39 step: 116, loss is 0.8717877864837646\n", + "epoch: 39 step: 117, loss is 0.8757824897766113\n", + "epoch: 39 step: 118, loss is 0.903152346611023\n", + "epoch: 39 step: 119, loss is 0.8887309432029724\n", + "epoch: 39 step: 120, loss is 0.857620358467102\n", + "epoch: 39 step: 121, loss is 0.8692069053649902\n", + "epoch: 39 step: 122, loss is 0.9137523770332336\n", + "epoch: 39 step: 123, loss is 0.8851610422134399\n", + "epoch: 39 step: 124, loss is 0.8988949060440063\n", + "epoch: 39 step: 125, loss is 0.9286608695983887\n", + "epoch: 39 step: 126, loss is 0.928363025188446\n", + "epoch: 39 step: 127, loss is 0.9241108894348145\n", + "epoch: 39 step: 128, loss is 0.882233738899231\n", + "epoch: 39 step: 129, loss is 0.8481851816177368\n", + "epoch: 39 step: 130, loss is 0.8787713050842285\n", + "epoch: 39 step: 131, loss is 0.933021068572998\n", + "epoch: 39 step: 132, loss is 0.8940964937210083\n", + "epoch: 39 step: 133, loss is 0.9061247110366821\n", + "epoch: 39 step: 134, loss is 0.9096812009811401\n", + "epoch: 39 step: 135, loss is 0.8737320303916931\n", + "epoch: 39 step: 136, loss is 0.8787661790847778\n", + "epoch: 39 step: 137, loss is 0.9323121309280396\n", + "epoch: 39 step: 138, loss is 0.8327353000640869\n", + "epoch: 39 step: 139, loss is 0.9155620336532593\n", + "epoch: 39 step: 140, loss is 0.870255708694458\n", + "epoch: 39 step: 141, loss is 0.9140324592590332\n", + "epoch: 39 step: 142, loss is 0.9012588262557983\n", + "epoch: 39 step: 143, loss is 0.9256284832954407\n", + "epoch: 39 step: 144, loss is 0.9249073266983032\n", + "epoch: 39 step: 145, loss is 0.8844343423843384\n", + "epoch: 39 step: 146, loss is 0.8996870517730713\n", + "epoch: 39 step: 147, loss is 0.866692304611206\n", + "epoch: 39 step: 148, loss is 0.9015395641326904\n", + "epoch: 39 step: 149, loss is 0.8322407603263855\n", + "epoch: 39 step: 150, loss is 0.8717762231826782\n", + "epoch: 39 step: 151, loss is 0.860923171043396\n", + "epoch: 39 step: 152, loss is 0.8815950751304626\n", + "epoch: 39 step: 153, loss is 0.8252642154693604\n", + "epoch: 39 step: 154, loss is 0.908086895942688\n", + "epoch: 39 step: 155, loss is 0.9199147820472717\n", + "epoch: 39 step: 156, loss is 0.8596809506416321\n", + "epoch: 39 step: 157, loss is 0.9148196578025818\n", + "epoch: 39 step: 158, loss is 0.8298414349555969\n", + "epoch: 39 step: 159, loss is 0.8095508813858032\n", + "epoch: 39 step: 160, loss is 0.9726994633674622\n", + "epoch: 39 step: 161, loss is 0.8787619471549988\n", + "epoch: 39 step: 162, loss is 0.8837853670120239\n", + "epoch: 39 step: 163, loss is 0.8923978805541992\n", + "epoch: 39 step: 164, loss is 0.8788607716560364\n", + "epoch: 39 step: 165, loss is 0.8662934303283691\n", + "epoch: 39 step: 166, loss is 0.9107570648193359\n", + 
"epoch: 39 step: 167, loss is 0.8711446523666382\n", + "epoch: 39 step: 168, loss is 0.9672527313232422\n", + "epoch: 39 step: 169, loss is 0.9179725646972656\n", + "epoch: 39 step: 170, loss is 0.911964476108551\n", + "epoch: 39 step: 171, loss is 0.891067385673523\n", + "epoch: 39 step: 172, loss is 0.8727320432662964\n", + "epoch: 39 step: 173, loss is 0.8962955474853516\n", + "epoch: 39 step: 174, loss is 0.8755134344100952\n", + "epoch: 39 step: 175, loss is 0.8618196249008179\n", + "epoch: 39 step: 176, loss is 0.8956024646759033\n", + "epoch: 39 step: 177, loss is 0.9220932722091675\n", + "epoch: 39 step: 178, loss is 0.9435073137283325\n", + "epoch: 39 step: 179, loss is 0.9042304754257202\n", + "epoch: 39 step: 180, loss is 0.8795793056488037\n", + "epoch: 39 step: 181, loss is 0.8724938035011292\n", + "epoch: 39 step: 182, loss is 0.8581997752189636\n", + "epoch: 39 step: 183, loss is 0.916936993598938\n", + "epoch: 39 step: 184, loss is 0.9059590101242065\n", + "epoch: 39 step: 185, loss is 0.8147585391998291\n", + "epoch: 39 step: 186, loss is 0.961179256439209\n", + "epoch: 39 step: 187, loss is 0.8523001670837402\n", + "epoch: 39 step: 188, loss is 0.9018723368644714\n", + "epoch: 39 step: 189, loss is 0.9216148853302002\n", + "epoch: 39 step: 190, loss is 0.9251117706298828\n", + "epoch: 39 step: 191, loss is 0.9180147647857666\n", + "epoch: 39 step: 192, loss is 0.873049795627594\n", + "epoch: 39 step: 193, loss is 0.8247263431549072\n", + "epoch: 39 step: 194, loss is 0.9581166505813599\n", + "epoch: 39 step: 195, loss is 0.9077434539794922\n", + "Train epoch time: 95368.601 ms, per step time: 489.070 ms\n", + "epoch: 40 step: 1, loss is 0.8762657642364502\n", + "epoch: 40 step: 2, loss is 0.8528900742530823\n", + "epoch: 40 step: 3, loss is 0.9076035022735596\n", + "epoch: 40 step: 4, loss is 0.8281925916671753\n", + "epoch: 40 step: 5, loss is 0.8681368827819824\n", + "epoch: 40 step: 6, loss is 0.8725665211677551\n", + "epoch: 40 step: 7, loss is 0.8653631806373596\n", + "epoch: 40 step: 8, loss is 0.8300122022628784\n", + "epoch: 40 step: 9, loss is 0.815752387046814\n", + "epoch: 40 step: 10, loss is 0.8997087478637695\n", + "epoch: 40 step: 11, loss is 0.8517664670944214\n", + "epoch: 40 step: 12, loss is 0.8694826364517212\n", + "epoch: 40 step: 13, loss is 0.8695793747901917\n", + "epoch: 40 step: 14, loss is 0.8437262177467346\n", + "epoch: 40 step: 15, loss is 0.881360650062561\n", + "epoch: 40 step: 16, loss is 0.9284908771514893\n", + "epoch: 40 step: 17, loss is 0.8351243734359741\n", + "epoch: 40 step: 18, loss is 0.8817468881607056\n", + "epoch: 40 step: 19, loss is 0.8645861148834229\n", + "epoch: 40 step: 20, loss is 0.8115131258964539\n", + "epoch: 40 step: 21, loss is 0.94126296043396\n", + "epoch: 40 step: 22, loss is 0.84563809633255\n", + "epoch: 40 step: 23, loss is 0.9478163719177246\n", + "epoch: 40 step: 24, loss is 0.8843199014663696\n", + "epoch: 40 step: 25, loss is 0.8832527995109558\n", + "epoch: 40 step: 26, loss is 0.8674059510231018\n", + "epoch: 40 step: 27, loss is 0.8125513792037964\n", + "epoch: 40 step: 28, loss is 0.8911529779434204\n", + "epoch: 40 step: 29, loss is 0.8880247473716736\n", + "epoch: 40 step: 30, loss is 0.8762494325637817\n", + "epoch: 40 step: 31, loss is 0.8213388919830322\n", + "epoch: 40 step: 32, loss is 0.8662538528442383\n", + "epoch: 40 step: 33, loss is 0.8650593757629395\n", + "epoch: 40 step: 34, loss is 0.8765037655830383\n", + "epoch: 40 step: 35, loss is 0.8995137214660645\n", + "epoch: 40 step: 36, 
loss is 0.8687782287597656\n", + "epoch: 40 step: 37, loss is 0.9255151748657227\n", + "epoch: 40 step: 38, loss is 0.8546656370162964\n", + "epoch: 40 step: 39, loss is 0.8765796422958374\n", + "epoch: 40 step: 40, loss is 0.8323838114738464\n", + "epoch: 40 step: 41, loss is 0.8901685476303101\n", + "epoch: 40 step: 42, loss is 0.8344199061393738\n", + "epoch: 40 step: 43, loss is 0.8583341836929321\n", + "epoch: 40 step: 44, loss is 0.9051513671875\n", + "epoch: 40 step: 45, loss is 0.8372193574905396\n", + "epoch: 40 step: 46, loss is 0.8926254510879517\n", + "epoch: 40 step: 47, loss is 0.8856022357940674\n", + "epoch: 40 step: 48, loss is 0.9483100175857544\n", + "epoch: 40 step: 49, loss is 0.8533074259757996\n", + "epoch: 40 step: 50, loss is 0.8566700220108032\n", + "epoch: 40 step: 51, loss is 0.9350196123123169\n", + "epoch: 40 step: 52, loss is 0.8923542499542236\n", + "epoch: 40 step: 53, loss is 0.9363144040107727\n", + "epoch: 40 step: 54, loss is 0.8563419580459595\n", + "epoch: 40 step: 55, loss is 0.8496549725532532\n", + "epoch: 40 step: 56, loss is 0.8938441276550293\n", + "epoch: 40 step: 57, loss is 0.8260456323623657\n", + "epoch: 40 step: 58, loss is 0.9220339059829712\n", + "epoch: 40 step: 59, loss is 0.906122088432312\n", + "epoch: 40 step: 60, loss is 0.8865154385566711\n", + "epoch: 40 step: 61, loss is 0.9908982515335083\n", + "epoch: 40 step: 62, loss is 0.8852843046188354\n", + "epoch: 40 step: 63, loss is 0.934016227722168\n", + "epoch: 40 step: 64, loss is 0.8530588150024414\n", + "epoch: 40 step: 65, loss is 0.8716440796852112\n", + "epoch: 40 step: 66, loss is 0.927998423576355\n", + "epoch: 40 step: 67, loss is 0.8837061524391174\n", + "epoch: 40 step: 68, loss is 0.8415229916572571\n", + "epoch: 40 step: 69, loss is 0.891266942024231\n", + "epoch: 40 step: 70, loss is 0.9919365644454956\n", + "epoch: 40 step: 71, loss is 0.8262856006622314\n", + "epoch: 40 step: 72, loss is 0.8569415807723999\n", + "epoch: 40 step: 73, loss is 0.8836197257041931\n", + "epoch: 40 step: 74, loss is 0.8810811042785645\n", + "epoch: 40 step: 75, loss is 0.8880738019943237\n", + "epoch: 40 step: 76, loss is 0.8933544754981995\n", + "epoch: 40 step: 77, loss is 0.8301650285720825\n", + "epoch: 40 step: 78, loss is 0.9031656980514526\n", + "epoch: 40 step: 79, loss is 0.895923376083374\n", + "epoch: 40 step: 80, loss is 0.863783597946167\n", + "epoch: 40 step: 81, loss is 0.8808621764183044\n", + "epoch: 40 step: 82, loss is 0.8517656326293945\n", + "epoch: 40 step: 83, loss is 0.8789753913879395\n", + "epoch: 40 step: 84, loss is 0.9852768778800964\n", + "epoch: 40 step: 85, loss is 0.9087194800376892\n", + "epoch: 40 step: 86, loss is 0.8546143770217896\n", + "epoch: 40 step: 87, loss is 0.8822703957557678\n", + "epoch: 40 step: 88, loss is 0.8150819540023804\n", + "epoch: 40 step: 89, loss is 0.9080638885498047\n", + "epoch: 40 step: 90, loss is 0.8826425075531006\n", + "epoch: 40 step: 91, loss is 0.9026480913162231\n", + "epoch: 40 step: 92, loss is 0.9007980823516846\n", + "epoch: 40 step: 93, loss is 0.8165903091430664\n", + "epoch: 40 step: 94, loss is 0.8000932931900024\n", + "epoch: 40 step: 95, loss is 0.9155917167663574\n", + "epoch: 40 step: 96, loss is 0.898004412651062\n", + "epoch: 40 step: 97, loss is 0.8681968450546265\n", + "epoch: 40 step: 98, loss is 0.9234767556190491\n", + "epoch: 40 step: 99, loss is 0.9016731977462769\n", + "epoch: 40 step: 100, loss is 0.8975780606269836\n", + "epoch: 40 step: 101, loss is 0.8754329681396484\n", + "epoch: 40 step: 
102, loss is 0.8476095199584961\n", + "epoch: 40 step: 103, loss is 0.8044964671134949\n", + "epoch: 40 step: 104, loss is 0.8619875907897949\n", + "epoch: 40 step: 105, loss is 0.8453196287155151\n", + "epoch: 40 step: 106, loss is 0.886151909828186\n", + "epoch: 40 step: 107, loss is 0.9384371638298035\n", + "epoch: 40 step: 108, loss is 0.874799907207489\n", + "epoch: 40 step: 109, loss is 0.8826981782913208\n", + "epoch: 40 step: 110, loss is 0.9271506071090698\n", + "epoch: 40 step: 111, loss is 0.8468139171600342\n", + "epoch: 40 step: 112, loss is 0.9657449126243591\n", + "epoch: 40 step: 113, loss is 0.8568753004074097\n", + "epoch: 40 step: 114, loss is 0.8244388699531555\n", + "epoch: 40 step: 115, loss is 0.973237156867981\n", + "epoch: 40 step: 116, loss is 0.8728870153427124\n", + "epoch: 40 step: 117, loss is 0.8786293268203735\n", + "epoch: 40 step: 118, loss is 0.9119448661804199\n", + "epoch: 40 step: 119, loss is 0.8571841716766357\n", + "epoch: 40 step: 120, loss is 0.9047677516937256\n", + "epoch: 40 step: 121, loss is 0.8940187692642212\n", + "epoch: 40 step: 122, loss is 0.8436989784240723\n", + "epoch: 40 step: 123, loss is 0.8536779880523682\n", + "epoch: 40 step: 124, loss is 0.8286898136138916\n", + "epoch: 40 step: 125, loss is 0.9042161703109741\n", + "epoch: 40 step: 126, loss is 0.87678462266922\n", + "epoch: 40 step: 127, loss is 0.8638030290603638\n", + "epoch: 40 step: 128, loss is 0.8999695777893066\n", + "epoch: 40 step: 129, loss is 0.9191534519195557\n", + "epoch: 40 step: 130, loss is 0.851913332939148\n", + "epoch: 40 step: 131, loss is 0.8223416805267334\n", + "epoch: 40 step: 132, loss is 0.9297193288803101\n", + "epoch: 40 step: 133, loss is 0.8339831233024597\n", + "epoch: 40 step: 134, loss is 0.7858599424362183\n", + "epoch: 40 step: 135, loss is 0.8889687061309814\n", + "epoch: 40 step: 136, loss is 0.8642129898071289\n", + "epoch: 40 step: 137, loss is 0.8607162833213806\n", + "epoch: 40 step: 138, loss is 0.8764044642448425\n", + "epoch: 40 step: 139, loss is 0.8892558813095093\n", + "epoch: 40 step: 140, loss is 0.8609368801116943\n", + "epoch: 40 step: 141, loss is 0.956720232963562\n", + "epoch: 40 step: 142, loss is 0.866104006767273\n", + "epoch: 40 step: 143, loss is 0.9686695337295532\n", + "epoch: 40 step: 144, loss is 0.8743617534637451\n", + "epoch: 40 step: 145, loss is 0.9050803184509277\n", + "epoch: 40 step: 146, loss is 0.8832632303237915\n", + "epoch: 40 step: 147, loss is 0.8458904027938843\n", + "epoch: 40 step: 148, loss is 0.8619515895843506\n", + "epoch: 40 step: 149, loss is 0.8613533973693848\n", + "epoch: 40 step: 150, loss is 0.8870968818664551\n", + "epoch: 40 step: 151, loss is 0.868848443031311\n", + "epoch: 40 step: 152, loss is 0.8231313824653625\n", + "epoch: 40 step: 153, loss is 0.8456589579582214\n", + "epoch: 40 step: 154, loss is 0.865229070186615\n", + "epoch: 40 step: 155, loss is 0.884575605392456\n", + "epoch: 40 step: 156, loss is 0.8572949767112732\n", + "epoch: 40 step: 157, loss is 0.8385735750198364\n", + "epoch: 40 step: 158, loss is 0.9785712957382202\n", + "epoch: 40 step: 159, loss is 0.8695060610771179\n", + "epoch: 40 step: 160, loss is 0.8963016867637634\n", + "epoch: 40 step: 161, loss is 0.8764256238937378\n", + "epoch: 40 step: 162, loss is 0.9001378417015076\n", + "epoch: 40 step: 163, loss is 0.8308455944061279\n", + "epoch: 40 step: 164, loss is 0.9265251159667969\n", + "epoch: 40 step: 165, loss is 0.8292546272277832\n", + "epoch: 40 step: 166, loss is 0.9521987438201904\n", + "epoch: 
40 step: 167, loss is 0.8878471255302429\n", + "epoch: 40 step: 168, loss is 0.8525089025497437\n", + "epoch: 40 step: 169, loss is 0.867385745048523\n", + "epoch: 40 step: 170, loss is 0.8809480667114258\n", + "epoch: 40 step: 171, loss is 0.8697935938835144\n", + "epoch: 40 step: 172, loss is 0.9231935739517212\n", + "epoch: 40 step: 173, loss is 0.8755526542663574\n", + "epoch: 40 step: 174, loss is 0.9128471612930298\n", + "epoch: 40 step: 175, loss is 0.8693337440490723\n", + "epoch: 40 step: 176, loss is 0.9123734831809998\n", + "epoch: 40 step: 177, loss is 0.842204749584198\n", + "epoch: 40 step: 178, loss is 0.9479992389678955\n", + "epoch: 40 step: 179, loss is 0.8979036808013916\n", + "epoch: 40 step: 180, loss is 0.9802259206771851\n", + "epoch: 40 step: 181, loss is 0.8872257471084595\n", + "epoch: 40 step: 182, loss is 0.9265717267990112\n", + "epoch: 40 step: 183, loss is 0.8786666989326477\n", + "epoch: 40 step: 184, loss is 0.878097414970398\n", + "epoch: 40 step: 185, loss is 0.8596781492233276\n", + "epoch: 40 step: 186, loss is 0.8781620860099792\n", + "epoch: 40 step: 187, loss is 0.889441728591919\n", + "epoch: 40 step: 188, loss is 0.8479162454605103\n", + "epoch: 40 step: 189, loss is 0.8856616020202637\n", + "epoch: 40 step: 190, loss is 1.0099284648895264\n", + "epoch: 40 step: 191, loss is 0.8989887237548828\n", + "epoch: 40 step: 192, loss is 0.8858019113540649\n", + "epoch: 40 step: 193, loss is 0.8687416315078735\n", + "epoch: 40 step: 194, loss is 0.8946763277053833\n", + "epoch: 40 step: 195, loss is 0.8336684107780457\n", + "Train epoch time: 94447.356 ms, per step time: 484.345 ms\n", + "epoch: 41 step: 1, loss is 0.8517359495162964\n", + "epoch: 41 step: 2, loss is 0.8080976009368896\n", + "epoch: 41 step: 3, loss is 0.8793535828590393\n", + "epoch: 41 step: 4, loss is 0.8864065408706665\n", + "epoch: 41 step: 5, loss is 0.877299427986145\n", + "epoch: 41 step: 6, loss is 0.8743071556091309\n", + "epoch: 41 step: 7, loss is 0.9047248363494873\n", + "epoch: 41 step: 8, loss is 0.8313262462615967\n", + "epoch: 41 step: 9, loss is 0.8923026323318481\n", + "epoch: 41 step: 10, loss is 0.8955068588256836\n", + "epoch: 41 step: 11, loss is 0.9239131212234497\n", + "epoch: 41 step: 12, loss is 0.8746803402900696\n", + "epoch: 41 step: 13, loss is 0.8128442764282227\n", + "epoch: 41 step: 14, loss is 0.9087767601013184\n", + "epoch: 41 step: 15, loss is 0.8695989847183228\n", + "epoch: 41 step: 16, loss is 0.8692599534988403\n", + "epoch: 41 step: 17, loss is 0.8715766668319702\n", + "epoch: 41 step: 18, loss is 0.9362809062004089\n", + "epoch: 41 step: 19, loss is 0.9015077352523804\n", + "epoch: 41 step: 20, loss is 0.8027410507202148\n", + "epoch: 41 step: 21, loss is 0.8322054147720337\n", + "epoch: 41 step: 22, loss is 0.85272616147995\n", + "epoch: 41 step: 23, loss is 0.9068624973297119\n", + "epoch: 41 step: 24, loss is 0.8635039329528809\n", + "epoch: 41 step: 25, loss is 0.9360330104827881\n", + "epoch: 41 step: 26, loss is 0.8912704586982727\n", + "epoch: 41 step: 27, loss is 0.8545154333114624\n", + "epoch: 41 step: 28, loss is 0.911385178565979\n", + "epoch: 41 step: 29, loss is 0.834345817565918\n", + "epoch: 41 step: 30, loss is 0.8894293904304504\n", + "epoch: 41 step: 31, loss is 0.8599039316177368\n", + "epoch: 41 step: 32, loss is 0.8889623284339905\n", + "epoch: 41 step: 33, loss is 0.8047513961791992\n", + "epoch: 41 step: 34, loss is 0.8444423675537109\n", + "epoch: 41 step: 35, loss is 0.9237991571426392\n", + "epoch: 41 step: 36, loss is 
0.8201794624328613\n", + "epoch: 41 step: 37, loss is 0.8618304133415222\n", + "epoch: 41 step: 38, loss is 0.8905352354049683\n", + "epoch: 41 step: 39, loss is 0.8658605217933655\n", + "epoch: 41 step: 40, loss is 0.8848867416381836\n", + "epoch: 41 step: 41, loss is 0.9289533495903015\n", + "epoch: 41 step: 42, loss is 0.8657922744750977\n", + "epoch: 41 step: 43, loss is 0.900287926197052\n", + "epoch: 41 step: 44, loss is 0.928754448890686\n", + "epoch: 41 step: 45, loss is 0.8403611779212952\n", + "epoch: 41 step: 46, loss is 0.8824472427368164\n", + "epoch: 41 step: 47, loss is 0.8668776750564575\n", + "epoch: 41 step: 48, loss is 0.8697275519371033\n", + "epoch: 41 step: 49, loss is 0.8535885810852051\n", + "epoch: 41 step: 50, loss is 0.8463236689567566\n", + "epoch: 41 step: 51, loss is 0.8463488221168518\n", + "epoch: 41 step: 52, loss is 0.8882981538772583\n", + "epoch: 41 step: 53, loss is 0.851629376411438\n", + "epoch: 41 step: 54, loss is 0.8265089988708496\n", + "epoch: 41 step: 55, loss is 0.8589456081390381\n", + "epoch: 41 step: 56, loss is 0.8276338577270508\n", + "epoch: 41 step: 57, loss is 0.8031258583068848\n", + "epoch: 41 step: 58, loss is 0.8447311520576477\n", + "epoch: 41 step: 59, loss is 0.8866144418716431\n", + "epoch: 41 step: 60, loss is 0.8436450958251953\n", + "epoch: 41 step: 61, loss is 0.9262243509292603\n", + "epoch: 41 step: 62, loss is 0.8487493991851807\n", + "epoch: 41 step: 63, loss is 0.8932136297225952\n", + "epoch: 41 step: 64, loss is 0.8420684337615967\n", + "epoch: 41 step: 65, loss is 0.8758710622787476\n", + "epoch: 41 step: 66, loss is 0.903826117515564\n", + "epoch: 41 step: 67, loss is 0.8111169338226318\n", + "epoch: 41 step: 68, loss is 0.8767874836921692\n", + "epoch: 41 step: 69, loss is 0.8798336982727051\n", + "epoch: 41 step: 70, loss is 0.8901389837265015\n", + "epoch: 41 step: 71, loss is 0.8268228769302368\n", + "epoch: 41 step: 72, loss is 0.865288496017456\n", + "epoch: 41 step: 73, loss is 0.8880175352096558\n", + "epoch: 41 step: 74, loss is 0.8671189546585083\n", + "epoch: 41 step: 75, loss is 0.9167948365211487\n", + "epoch: 41 step: 76, loss is 0.8449299335479736\n", + "epoch: 41 step: 77, loss is 0.9041973352432251\n", + "epoch: 41 step: 78, loss is 0.8705042004585266\n", + "epoch: 41 step: 79, loss is 0.89136803150177\n", + "epoch: 41 step: 80, loss is 0.8239651918411255\n", + "epoch: 41 step: 81, loss is 0.8462495803833008\n", + "epoch: 41 step: 82, loss is 0.8610972762107849\n", + "epoch: 41 step: 83, loss is 0.8784012794494629\n", + "epoch: 41 step: 84, loss is 0.8569244146347046\n", + "epoch: 41 step: 85, loss is 0.963731586933136\n", + "epoch: 41 step: 86, loss is 0.8318753838539124\n", + "epoch: 41 step: 87, loss is 0.8615751266479492\n", + "epoch: 41 step: 88, loss is 0.8710677623748779\n", + "epoch: 41 step: 89, loss is 0.887948751449585\n", + "epoch: 41 step: 90, loss is 0.861817479133606\n", + "epoch: 41 step: 91, loss is 0.8919062614440918\n", + "epoch: 41 step: 92, loss is 0.8847672939300537\n", + "epoch: 41 step: 93, loss is 0.8858005404472351\n", + "epoch: 41 step: 94, loss is 0.9096128940582275\n", + "epoch: 41 step: 95, loss is 0.8973648548126221\n", + "epoch: 41 step: 96, loss is 0.891867995262146\n", + "epoch: 41 step: 97, loss is 0.9316425919532776\n", + "epoch: 41 step: 98, loss is 0.8648695945739746\n", + "epoch: 41 step: 99, loss is 0.8466728329658508\n", + "epoch: 41 step: 100, loss is 0.8825789093971252\n", + "epoch: 41 step: 101, loss is 0.901749849319458\n", + "epoch: 41 step: 102, loss is 
0.8663471341133118\n", + "epoch: 41 step: 103, loss is 0.8133040070533752\n", + "epoch: 41 step: 104, loss is 0.8640146255493164\n", + "epoch: 41 step: 105, loss is 0.88739013671875\n", + "epoch: 41 step: 106, loss is 0.9420163631439209\n", + "epoch: 41 step: 107, loss is 0.8612029552459717\n", + "epoch: 41 step: 108, loss is 0.8988096117973328\n", + "epoch: 41 step: 109, loss is 0.876319169998169\n", + "epoch: 41 step: 110, loss is 0.8912628293037415\n", + "epoch: 41 step: 111, loss is 0.8928229808807373\n", + "epoch: 41 step: 112, loss is 0.8356031179428101\n", + "epoch: 41 step: 113, loss is 0.8453516960144043\n", + "epoch: 41 step: 114, loss is 0.8754433393478394\n", + "epoch: 41 step: 115, loss is 0.875381350517273\n", + "epoch: 41 step: 116, loss is 0.8881886005401611\n", + "epoch: 41 step: 117, loss is 0.8499939441680908\n", + "epoch: 41 step: 118, loss is 0.8984473943710327\n", + "epoch: 41 step: 119, loss is 0.8221191763877869\n", + "epoch: 41 step: 120, loss is 0.8957082033157349\n", + "epoch: 41 step: 121, loss is 0.9271697998046875\n", + "epoch: 41 step: 122, loss is 0.8258509039878845\n", + "epoch: 41 step: 123, loss is 0.9593144655227661\n", + "epoch: 41 step: 124, loss is 0.83920818567276\n", + "epoch: 41 step: 125, loss is 0.8967593908309937\n", + "epoch: 41 step: 126, loss is 0.8466830253601074\n", + "epoch: 41 step: 127, loss is 0.9369308948516846\n", + "epoch: 41 step: 128, loss is 0.9431707859039307\n", + "epoch: 41 step: 129, loss is 0.8646366000175476\n", + "epoch: 41 step: 130, loss is 0.8328231573104858\n", + "epoch: 41 step: 131, loss is 0.8951641321182251\n", + "epoch: 41 step: 132, loss is 0.945981502532959\n", + "epoch: 41 step: 133, loss is 0.8417348861694336\n", + "epoch: 41 step: 134, loss is 0.865910530090332\n", + "epoch: 41 step: 135, loss is 0.86012864112854\n", + "epoch: 41 step: 136, loss is 0.9279564619064331\n", + "epoch: 41 step: 137, loss is 0.8614240884780884\n", + "epoch: 41 step: 138, loss is 0.891785740852356\n", + "epoch: 41 step: 139, loss is 0.9056267738342285\n", + "epoch: 41 step: 140, loss is 0.9354451894760132\n", + "epoch: 41 step: 141, loss is 0.837008535861969\n", + "epoch: 41 step: 142, loss is 0.9245694279670715\n", + "epoch: 41 step: 143, loss is 0.8966096639633179\n", + "epoch: 41 step: 144, loss is 0.9113236665725708\n", + "epoch: 41 step: 145, loss is 0.8865253925323486\n", + "epoch: 41 step: 146, loss is 0.8090176582336426\n", + "epoch: 41 step: 147, loss is 0.8295114636421204\n", + "epoch: 41 step: 148, loss is 0.8849141597747803\n", + "epoch: 41 step: 149, loss is 0.8674306273460388\n", + "epoch: 41 step: 150, loss is 0.9035176038742065\n", + "epoch: 41 step: 151, loss is 0.8824423551559448\n", + "epoch: 41 step: 152, loss is 0.9301256537437439\n", + "epoch: 41 step: 153, loss is 0.8845117688179016\n", + "epoch: 41 step: 154, loss is 0.90666264295578\n", + "epoch: 41 step: 155, loss is 0.915145993232727\n", + "epoch: 41 step: 156, loss is 0.8938807249069214\n", + "epoch: 41 step: 157, loss is 0.8312286138534546\n", + "epoch: 41 step: 158, loss is 0.8402537107467651\n", + "epoch: 41 step: 159, loss is 0.8631662130355835\n", + "epoch: 41 step: 160, loss is 0.8870527744293213\n", + "epoch: 41 step: 161, loss is 0.839425802230835\n", + "epoch: 41 step: 162, loss is 0.8688831329345703\n", + "epoch: 41 step: 163, loss is 0.837965190410614\n", + "epoch: 41 step: 164, loss is 0.8268542289733887\n", + "epoch: 41 step: 165, loss is 0.8079074621200562\n", + "epoch: 41 step: 166, loss is 0.8216615319252014\n", + "epoch: 41 step: 167, loss 
is 0.8261913061141968\n", + "epoch: 41 step: 168, loss is 0.8139563202857971\n", + "epoch: 41 step: 169, loss is 0.8962264060974121\n", + "epoch: 41 step: 170, loss is 0.9227042198181152\n", + "epoch: 41 step: 171, loss is 0.8258051872253418\n", + "epoch: 41 step: 172, loss is 0.872225821018219\n", + "epoch: 41 step: 173, loss is 0.8454023599624634\n", + "epoch: 41 step: 174, loss is 0.8294726610183716\n", + "epoch: 41 step: 175, loss is 0.9189668893814087\n", + "epoch: 41 step: 176, loss is 0.8586648106575012\n", + "epoch: 41 step: 177, loss is 0.9269784092903137\n", + "epoch: 41 step: 178, loss is 0.9404736757278442\n", + "epoch: 41 step: 179, loss is 0.805738627910614\n", + "epoch: 41 step: 180, loss is 0.8188043832778931\n", + "epoch: 41 step: 181, loss is 0.8407191038131714\n", + "epoch: 41 step: 182, loss is 0.8638138175010681\n", + "epoch: 41 step: 183, loss is 0.8986101746559143\n", + "epoch: 41 step: 184, loss is 0.9065485000610352\n", + "epoch: 41 step: 185, loss is 0.9035854339599609\n", + "epoch: 41 step: 186, loss is 0.8803765773773193\n", + "epoch: 41 step: 187, loss is 0.883901059627533\n", + "epoch: 41 step: 188, loss is 0.8889013528823853\n", + "epoch: 41 step: 189, loss is 0.8502857685089111\n", + "epoch: 41 step: 190, loss is 0.911389946937561\n", + "epoch: 41 step: 191, loss is 0.8688859939575195\n", + "epoch: 41 step: 192, loss is 0.8833852410316467\n", + "epoch: 41 step: 193, loss is 0.8949699997901917\n", + "epoch: 41 step: 194, loss is 0.8810214400291443\n", + "epoch: 41 step: 195, loss is 0.887438178062439\n", + "Train epoch time: 96078.753 ms, per step time: 492.712 ms\n", + "epoch: 42 step: 1, loss is 0.8216084241867065\n", + "epoch: 42 step: 2, loss is 0.8851800560951233\n", + "epoch: 42 step: 3, loss is 0.939353883266449\n", + "epoch: 42 step: 4, loss is 0.8789463043212891\n", + "epoch: 42 step: 5, loss is 0.9164166450500488\n", + "epoch: 42 step: 6, loss is 0.8502922058105469\n", + "epoch: 42 step: 7, loss is 0.8954850435256958\n", + "epoch: 42 step: 8, loss is 0.9129345417022705\n", + "epoch: 42 step: 9, loss is 0.8445621728897095\n", + "epoch: 42 step: 10, loss is 0.9198331236839294\n", + "epoch: 42 step: 11, loss is 0.8730967044830322\n", + "epoch: 42 step: 12, loss is 0.8530271053314209\n", + "epoch: 42 step: 13, loss is 0.8152531385421753\n", + "epoch: 42 step: 14, loss is 0.8442375659942627\n", + "epoch: 42 step: 15, loss is 0.8057538270950317\n", + "epoch: 42 step: 16, loss is 0.7697881460189819\n", + "epoch: 42 step: 17, loss is 0.8635202646255493\n", + "epoch: 42 step: 18, loss is 0.8407617807388306\n", + "epoch: 42 step: 19, loss is 0.8363590836524963\n", + "epoch: 42 step: 20, loss is 0.8269721269607544\n", + "epoch: 42 step: 21, loss is 0.8542466163635254\n", + "epoch: 42 step: 22, loss is 0.9020578861236572\n", + "epoch: 42 step: 23, loss is 0.8828875422477722\n", + "epoch: 42 step: 24, loss is 0.8343596458435059\n", + "epoch: 42 step: 25, loss is 0.829161524772644\n", + "epoch: 42 step: 26, loss is 0.8597906231880188\n", + "epoch: 42 step: 27, loss is 0.8849978446960449\n", + "epoch: 42 step: 28, loss is 0.7954426407814026\n", + "epoch: 42 step: 29, loss is 0.8402162790298462\n", + "epoch: 42 step: 30, loss is 0.8092691898345947\n", + "epoch: 42 step: 31, loss is 0.8710509538650513\n", + "epoch: 42 step: 32, loss is 0.8425696492195129\n", + "epoch: 42 step: 33, loss is 0.8501846194267273\n", + "epoch: 42 step: 34, loss is 0.9175065755844116\n", + "epoch: 42 step: 35, loss is 0.8520911931991577\n", + "epoch: 42 step: 36, loss is 
0.8759634494781494\n", + "epoch: 42 step: 37, loss is 0.8666084408760071\n", + "epoch: 42 step: 38, loss is 0.8272801637649536\n", + "epoch: 42 step: 39, loss is 0.8493403196334839\n", + "epoch: 42 step: 40, loss is 0.8225031495094299\n", + "epoch: 42 step: 41, loss is 0.8162524700164795\n", + "epoch: 42 step: 42, loss is 0.8676652908325195\n", + "epoch: 42 step: 43, loss is 0.8029389381408691\n", + "epoch: 42 step: 44, loss is 0.872467041015625\n", + "epoch: 42 step: 45, loss is 0.8391228318214417\n", + "epoch: 42 step: 46, loss is 0.8044931888580322\n", + "epoch: 42 step: 47, loss is 0.8370736837387085\n", + "epoch: 42 step: 48, loss is 0.825875997543335\n", + "epoch: 42 step: 49, loss is 0.8381882905960083\n", + "epoch: 42 step: 50, loss is 0.8414760828018188\n", + "epoch: 42 step: 51, loss is 0.8481352925300598\n", + "epoch: 42 step: 52, loss is 0.8879495859146118\n", + "epoch: 42 step: 53, loss is 0.8610578179359436\n", + "epoch: 42 step: 54, loss is 0.8660099506378174\n", + "epoch: 42 step: 55, loss is 0.8396362066268921\n", + "epoch: 42 step: 56, loss is 0.8267471194267273\n", + "epoch: 42 step: 57, loss is 0.8610799908638\n", + "epoch: 42 step: 58, loss is 0.8493867516517639\n", + "epoch: 42 step: 59, loss is 0.864638090133667\n", + "epoch: 42 step: 60, loss is 0.7882175445556641\n", + "epoch: 42 step: 61, loss is 0.8376932740211487\n", + "epoch: 42 step: 62, loss is 0.8835535049438477\n", + "epoch: 42 step: 63, loss is 0.8648427128791809\n", + "epoch: 42 step: 64, loss is 0.8461316823959351\n", + "epoch: 42 step: 65, loss is 0.866771399974823\n", + "epoch: 42 step: 66, loss is 0.8197053670883179\n", + "epoch: 42 step: 67, loss is 0.8638685941696167\n", + "epoch: 42 step: 68, loss is 0.8417701721191406\n", + "epoch: 42 step: 69, loss is 0.9263538122177124\n", + "epoch: 42 step: 70, loss is 0.8258200883865356\n", + "epoch: 42 step: 71, loss is 0.883317232131958\n", + "epoch: 42 step: 72, loss is 0.8102097511291504\n", + "epoch: 42 step: 73, loss is 0.8757784366607666\n", + "epoch: 42 step: 74, loss is 0.8560203313827515\n", + "epoch: 42 step: 75, loss is 0.8573493957519531\n", + "epoch: 42 step: 76, loss is 0.878629207611084\n", + "epoch: 42 step: 77, loss is 0.8772190809249878\n", + "epoch: 42 step: 78, loss is 0.8488637208938599\n", + "epoch: 42 step: 79, loss is 0.8513084650039673\n", + "epoch: 42 step: 80, loss is 0.8866593837738037\n", + "epoch: 42 step: 81, loss is 0.8593792915344238\n", + "epoch: 42 step: 82, loss is 0.8569726347923279\n", + "epoch: 42 step: 83, loss is 0.8663491606712341\n", + "epoch: 42 step: 84, loss is 0.8355486392974854\n", + "epoch: 42 step: 85, loss is 0.825196385383606\n", + "epoch: 42 step: 86, loss is 0.8317797780036926\n", + "epoch: 42 step: 87, loss is 0.9115496277809143\n", + "epoch: 42 step: 88, loss is 0.8662617206573486\n", + "epoch: 42 step: 89, loss is 0.8663574457168579\n", + "epoch: 42 step: 90, loss is 0.8343465924263\n", + "epoch: 42 step: 91, loss is 0.8432985544204712\n", + "epoch: 42 step: 92, loss is 0.8476569652557373\n", + "epoch: 42 step: 93, loss is 0.8467200994491577\n", + "epoch: 42 step: 94, loss is 0.8419898748397827\n", + "epoch: 42 step: 95, loss is 0.8535125255584717\n", + "epoch: 42 step: 96, loss is 0.8854618072509766\n", + "epoch: 42 step: 97, loss is 0.8745183348655701\n", + "epoch: 42 step: 98, loss is 0.8518193960189819\n", + "epoch: 42 step: 99, loss is 0.8538920283317566\n", + "epoch: 42 step: 100, loss is 0.8082880973815918\n", + "epoch: 42 step: 101, loss is 0.8610857725143433\n", + "epoch: 42 step: 102, loss is 
0.8797216415405273\n", + "epoch: 42 step: 103, loss is 0.8502271175384521\n", + "epoch: 42 step: 104, loss is 0.8515475392341614\n", + "epoch: 42 step: 105, loss is 0.8487423658370972\n", + "epoch: 42 step: 106, loss is 0.824591875076294\n", + "epoch: 42 step: 107, loss is 0.9288796186447144\n", + "epoch: 42 step: 108, loss is 0.9042727947235107\n", + "epoch: 42 step: 109, loss is 0.8609855771064758\n", + "epoch: 42 step: 110, loss is 0.8046507239341736\n", + "epoch: 42 step: 111, loss is 0.8284628391265869\n", + "epoch: 42 step: 112, loss is 0.8430736064910889\n", + "epoch: 42 step: 113, loss is 0.8333324193954468\n", + "epoch: 42 step: 114, loss is 0.8608826994895935\n", + "epoch: 42 step: 115, loss is 0.8663084506988525\n", + "epoch: 42 step: 116, loss is 0.8180195093154907\n", + "epoch: 42 step: 117, loss is 0.8854679465293884\n", + "epoch: 42 step: 118, loss is 0.8538902997970581\n", + "epoch: 42 step: 119, loss is 0.8129979968070984\n", + "epoch: 42 step: 120, loss is 0.8247725963592529\n", + "epoch: 42 step: 121, loss is 0.8576528429985046\n", + "epoch: 42 step: 122, loss is 0.9010546207427979\n", + "epoch: 42 step: 123, loss is 0.8564931154251099\n", + "epoch: 42 step: 124, loss is 0.8643726706504822\n", + "epoch: 42 step: 125, loss is 0.9982171654701233\n", + "epoch: 42 step: 126, loss is 0.8461268544197083\n", + "epoch: 42 step: 127, loss is 0.9366732835769653\n", + "epoch: 42 step: 128, loss is 0.8913699984550476\n", + "epoch: 42 step: 129, loss is 0.8656001687049866\n", + "epoch: 42 step: 130, loss is 0.8531943559646606\n", + "epoch: 42 step: 131, loss is 0.8707911372184753\n", + "epoch: 42 step: 132, loss is 0.8193789720535278\n", + "epoch: 42 step: 133, loss is 0.8503655195236206\n", + "epoch: 42 step: 134, loss is 0.8331116437911987\n", + "epoch: 42 step: 135, loss is 0.838228702545166\n", + "epoch: 42 step: 136, loss is 0.9074716567993164\n", + "epoch: 42 step: 137, loss is 0.8317863941192627\n", + "epoch: 42 step: 138, loss is 0.8884108066558838\n", + "epoch: 42 step: 139, loss is 0.8816577196121216\n", + "epoch: 42 step: 140, loss is 0.8344957828521729\n", + "epoch: 42 step: 141, loss is 0.8505727052688599\n", + "epoch: 42 step: 142, loss is 0.8733222484588623\n", + "epoch: 42 step: 143, loss is 0.8477810621261597\n", + "epoch: 42 step: 144, loss is 0.8274283409118652\n", + "epoch: 42 step: 145, loss is 0.8539963960647583\n", + "epoch: 42 step: 146, loss is 0.879974365234375\n", + "epoch: 42 step: 147, loss is 0.8467274308204651\n", + "epoch: 42 step: 148, loss is 0.8062554597854614\n", + "epoch: 42 step: 149, loss is 0.8943252563476562\n", + "epoch: 42 step: 150, loss is 0.909824013710022\n", + "epoch: 42 step: 151, loss is 0.8860570788383484\n", + "epoch: 42 step: 152, loss is 0.9440250396728516\n", + "epoch: 42 step: 153, loss is 0.8444277048110962\n", + "epoch: 42 step: 154, loss is 0.8705278038978577\n", + "epoch: 42 step: 155, loss is 0.8044403195381165\n", + "epoch: 42 step: 156, loss is 0.8030242919921875\n", + "epoch: 42 step: 157, loss is 0.8543550968170166\n", + "epoch: 42 step: 158, loss is 0.8043542504310608\n", + "epoch: 42 step: 159, loss is 0.8615754842758179\n", + "epoch: 42 step: 160, loss is 0.951087474822998\n", + "epoch: 42 step: 161, loss is 0.8362158536911011\n", + "epoch: 42 step: 162, loss is 0.9387021064758301\n", + "epoch: 42 step: 163, loss is 0.8275455236434937\n", + "epoch: 42 step: 164, loss is 0.8950150609016418\n", + "epoch: 42 step: 165, loss is 0.8120851516723633\n", + "epoch: 42 step: 166, loss is 0.9091084599494934\n", + "epoch: 42 
step: 167, loss is 0.8625392913818359\n", + "epoch: 42 step: 168, loss is 0.790265679359436\n", + "epoch: 42 step: 169, loss is 0.8625034093856812\n", + "epoch: 42 step: 170, loss is 0.8300389051437378\n", + "epoch: 42 step: 171, loss is 0.8501790761947632\n", + "epoch: 42 step: 172, loss is 0.8944054841995239\n", + "epoch: 42 step: 173, loss is 0.8763883113861084\n", + "epoch: 42 step: 174, loss is 0.8379045724868774\n", + "epoch: 42 step: 175, loss is 0.7944656610488892\n", + "epoch: 42 step: 176, loss is 0.8225041627883911\n", + "epoch: 42 step: 177, loss is 0.8282984495162964\n", + "epoch: 42 step: 178, loss is 0.8995361328125\n", + "epoch: 42 step: 179, loss is 0.92469722032547\n", + "epoch: 42 step: 180, loss is 0.8554854989051819\n", + "epoch: 42 step: 181, loss is 0.8176083564758301\n", + "epoch: 42 step: 182, loss is 0.844767689704895\n", + "epoch: 42 step: 183, loss is 0.854779839515686\n", + "epoch: 42 step: 184, loss is 0.9090795516967773\n", + "epoch: 42 step: 185, loss is 0.8947147130966187\n", + "epoch: 42 step: 186, loss is 0.8911213874816895\n", + "epoch: 42 step: 187, loss is 0.8383690118789673\n", + "epoch: 42 step: 188, loss is 0.8429081439971924\n", + "epoch: 42 step: 189, loss is 0.884172797203064\n", + "epoch: 42 step: 190, loss is 0.8648539781570435\n", + "epoch: 42 step: 191, loss is 0.8368713855743408\n", + "epoch: 42 step: 192, loss is 0.8525528907775879\n", + "epoch: 42 step: 193, loss is 0.8659540414810181\n", + "epoch: 42 step: 194, loss is 0.8755007982254028\n", + "epoch: 42 step: 195, loss is 0.878858208656311\n", + "Train epoch time: 97147.911 ms, per step time: 498.194 ms\n", + "epoch: 43 step: 1, loss is 0.8105705976486206\n", + "epoch: 43 step: 2, loss is 0.8752914667129517\n", + "epoch: 43 step: 3, loss is 0.8160079717636108\n", + "epoch: 43 step: 4, loss is 0.7951188087463379\n", + "epoch: 43 step: 5, loss is 0.8115599155426025\n", + "epoch: 43 step: 6, loss is 0.8426706790924072\n", + "epoch: 43 step: 7, loss is 0.7998368144035339\n", + "epoch: 43 step: 8, loss is 0.8275105953216553\n", + "epoch: 43 step: 9, loss is 0.8804247379302979\n", + "epoch: 43 step: 10, loss is 0.8301496505737305\n", + "epoch: 43 step: 11, loss is 0.7793256044387817\n", + "epoch: 43 step: 12, loss is 0.8705196380615234\n", + "epoch: 43 step: 13, loss is 0.7956662178039551\n", + "epoch: 43 step: 14, loss is 0.8392859697341919\n", + "epoch: 43 step: 15, loss is 0.7850797176361084\n", + "epoch: 43 step: 16, loss is 0.8064440488815308\n", + "epoch: 43 step: 17, loss is 0.8386631608009338\n", + "epoch: 43 step: 18, loss is 0.8404859900474548\n", + "epoch: 43 step: 19, loss is 0.7927724123001099\n", + "epoch: 43 step: 20, loss is 0.8582428693771362\n", + "epoch: 43 step: 21, loss is 0.8209526538848877\n", + "epoch: 43 step: 22, loss is 0.8372832536697388\n", + "epoch: 43 step: 23, loss is 0.8717249631881714\n", + "epoch: 43 step: 24, loss is 0.8424895405769348\n", + "epoch: 43 step: 25, loss is 0.8230202198028564\n", + "epoch: 43 step: 26, loss is 0.8527168035507202\n", + "epoch: 43 step: 27, loss is 0.8230588436126709\n", + "epoch: 43 step: 28, loss is 0.8735199570655823\n", + "epoch: 43 step: 29, loss is 0.8566274642944336\n", + "epoch: 43 step: 30, loss is 0.8375533819198608\n", + "epoch: 43 step: 31, loss is 0.868703305721283\n", + "epoch: 43 step: 32, loss is 0.8222142457962036\n", + "epoch: 43 step: 33, loss is 0.7958176732063293\n", + "epoch: 43 step: 34, loss is 0.854280412197113\n", + "epoch: 43 step: 35, loss is 0.8644938468933105\n", + "epoch: 43 step: 36, loss is 
0.8599567413330078\n", + "epoch: 43 step: 37, loss is 0.8192334175109863\n", + "epoch: 43 step: 38, loss is 0.8576755523681641\n", + "epoch: 43 step: 39, loss is 0.8442862629890442\n", + "epoch: 43 step: 40, loss is 0.8411554098129272\n", + "epoch: 43 step: 41, loss is 0.8349367380142212\n", + "epoch: 43 step: 42, loss is 0.842859148979187\n", + "epoch: 43 step: 43, loss is 0.7847148180007935\n", + "epoch: 43 step: 44, loss is 0.8628517389297485\n", + "epoch: 43 step: 45, loss is 0.8425265550613403\n", + "epoch: 43 step: 46, loss is 0.8412359952926636\n", + "epoch: 43 step: 47, loss is 0.8690286874771118\n", + "epoch: 43 step: 48, loss is 0.7699791193008423\n", + "epoch: 43 step: 49, loss is 0.8404026031494141\n", + "epoch: 43 step: 50, loss is 0.8487992286682129\n", + "epoch: 43 step: 51, loss is 0.8275365829467773\n", + "epoch: 43 step: 52, loss is 0.8241515159606934\n", + "epoch: 43 step: 53, loss is 0.8252999782562256\n", + "epoch: 43 step: 54, loss is 0.8086087107658386\n", + "epoch: 43 step: 55, loss is 0.8576048016548157\n", + "epoch: 43 step: 56, loss is 0.8575851917266846\n", + "epoch: 43 step: 57, loss is 0.8424643278121948\n", + "epoch: 43 step: 58, loss is 0.8858017921447754\n", + "epoch: 43 step: 59, loss is 0.8196586966514587\n", + "epoch: 43 step: 60, loss is 0.8263652324676514\n", + "epoch: 43 step: 61, loss is 0.8696781992912292\n", + "epoch: 43 step: 62, loss is 0.8830323815345764\n", + "epoch: 43 step: 63, loss is 0.8203916549682617\n", + "epoch: 43 step: 64, loss is 0.8542393445968628\n", + "epoch: 43 step: 65, loss is 0.863400936126709\n", + "epoch: 43 step: 66, loss is 0.8617480993270874\n", + "epoch: 43 step: 67, loss is 0.8650208711624146\n", + "epoch: 43 step: 68, loss is 0.7973204851150513\n", + "epoch: 43 step: 69, loss is 0.8061485290527344\n", + "epoch: 43 step: 70, loss is 0.9292039275169373\n", + "epoch: 43 step: 71, loss is 0.8306252956390381\n", + "epoch: 43 step: 72, loss is 0.863418459892273\n", + "epoch: 43 step: 73, loss is 0.8469650745391846\n", + "epoch: 43 step: 74, loss is 0.8443623781204224\n", + "epoch: 43 step: 75, loss is 0.8565720915794373\n", + "epoch: 43 step: 76, loss is 0.8196828365325928\n", + "epoch: 43 step: 77, loss is 0.8471055030822754\n", + "epoch: 43 step: 78, loss is 0.831911563873291\n", + "epoch: 43 step: 79, loss is 0.9005793333053589\n", + "epoch: 43 step: 80, loss is 0.8754910230636597\n", + "epoch: 43 step: 81, loss is 0.8774127960205078\n", + "epoch: 43 step: 82, loss is 0.8255667686462402\n", + "epoch: 43 step: 83, loss is 0.8528530597686768\n", + "epoch: 43 step: 84, loss is 0.8358618021011353\n", + "epoch: 43 step: 85, loss is 0.8541377782821655\n", + "epoch: 43 step: 86, loss is 0.8062739372253418\n", + "epoch: 43 step: 87, loss is 0.8911310434341431\n", + "epoch: 43 step: 88, loss is 0.8778983950614929\n", + "epoch: 43 step: 89, loss is 0.876808762550354\n", + "epoch: 43 step: 90, loss is 0.8380937576293945\n", + "epoch: 43 step: 91, loss is 0.8362681865692139\n", + "epoch: 43 step: 92, loss is 0.8735241889953613\n", + "epoch: 43 step: 93, loss is 0.8453265428543091\n", + "epoch: 43 step: 94, loss is 0.8451396822929382\n", + "epoch: 43 step: 95, loss is 0.8638437986373901\n", + "epoch: 43 step: 96, loss is 0.807098925113678\n", + "epoch: 43 step: 97, loss is 0.8719606399536133\n", + "epoch: 43 step: 98, loss is 0.8023010492324829\n", + "epoch: 43 step: 99, loss is 0.8655776977539062\n", + "epoch: 43 step: 100, loss is 0.8327716588973999\n", + "epoch: 43 step: 101, loss is 0.8596468567848206\n", + "epoch: 43 step: 102, 
loss is 0.8470516800880432\n", + "epoch: 43 step: 103, loss is 0.9108154773712158\n", + "epoch: 43 step: 104, loss is 0.8553939461708069\n", + "epoch: 43 step: 105, loss is 0.8158063888549805\n", + "epoch: 43 step: 106, loss is 0.8622720837593079\n", + "epoch: 43 step: 107, loss is 0.8434121608734131\n", + "epoch: 43 step: 108, loss is 0.8472058773040771\n", + "epoch: 43 step: 109, loss is 0.8221975564956665\n", + "epoch: 43 step: 110, loss is 0.8194869756698608\n", + "epoch: 43 step: 111, loss is 0.8291536569595337\n", + "epoch: 43 step: 112, loss is 0.8751624822616577\n", + "epoch: 43 step: 113, loss is 0.8600643277168274\n", + "epoch: 43 step: 114, loss is 0.8762838840484619\n", + "epoch: 43 step: 115, loss is 0.8671099543571472\n", + "epoch: 43 step: 116, loss is 0.763759434223175\n", + "epoch: 43 step: 117, loss is 0.8349450826644897\n", + "epoch: 43 step: 118, loss is 0.872563362121582\n", + "epoch: 43 step: 119, loss is 0.8698233366012573\n", + "epoch: 43 step: 120, loss is 0.9029512405395508\n", + "epoch: 43 step: 121, loss is 0.8211455345153809\n", + "epoch: 43 step: 122, loss is 0.821853518486023\n", + "epoch: 43 step: 123, loss is 0.825249195098877\n", + "epoch: 43 step: 124, loss is 0.8170123100280762\n", + "epoch: 43 step: 125, loss is 0.8711820840835571\n", + "epoch: 43 step: 126, loss is 0.8319066762924194\n", + "epoch: 43 step: 127, loss is 0.8610893487930298\n", + "epoch: 43 step: 128, loss is 0.8296843767166138\n", + "epoch: 43 step: 129, loss is 0.838518500328064\n", + "epoch: 43 step: 130, loss is 0.8265317678451538\n", + "epoch: 43 step: 131, loss is 0.8091706037521362\n", + "epoch: 43 step: 132, loss is 0.8590900301933289\n", + "epoch: 43 step: 133, loss is 0.8412557244300842\n", + "epoch: 43 step: 134, loss is 0.8400259017944336\n", + "epoch: 43 step: 135, loss is 0.8623496294021606\n", + "epoch: 43 step: 136, loss is 0.856477677822113\n", + "epoch: 43 step: 137, loss is 0.8386397361755371\n", + "epoch: 43 step: 138, loss is 0.8785245418548584\n", + "epoch: 43 step: 139, loss is 0.8547613620758057\n", + "epoch: 43 step: 140, loss is 0.8743089437484741\n", + "epoch: 43 step: 141, loss is 0.8720165491104126\n", + "epoch: 43 step: 142, loss is 0.8220722675323486\n", + "epoch: 43 step: 143, loss is 0.8697701096534729\n", + "epoch: 43 step: 144, loss is 0.8880708813667297\n", + "epoch: 43 step: 145, loss is 0.8196322917938232\n", + "epoch: 43 step: 146, loss is 0.8455183506011963\n", + "epoch: 43 step: 147, loss is 0.8588837385177612\n", + "epoch: 43 step: 148, loss is 0.7956787347793579\n", + "epoch: 43 step: 149, loss is 0.8873046636581421\n", + "epoch: 43 step: 150, loss is 0.8527228832244873\n", + "epoch: 43 step: 151, loss is 0.8084015846252441\n", + "epoch: 43 step: 152, loss is 0.9535974264144897\n", + "epoch: 43 step: 153, loss is 0.8968366384506226\n", + "epoch: 43 step: 154, loss is 0.8650041818618774\n", + "epoch: 43 step: 155, loss is 0.8163757920265198\n", + "epoch: 43 step: 156, loss is 0.8521450161933899\n", + "epoch: 43 step: 157, loss is 0.8719179034233093\n", + "epoch: 43 step: 158, loss is 0.9201545715332031\n", + "epoch: 43 step: 159, loss is 0.8701194524765015\n", + "epoch: 43 step: 160, loss is 0.9226886034011841\n", + "epoch: 43 step: 161, loss is 0.9018515348434448\n", + "epoch: 43 step: 162, loss is 0.8845228552818298\n", + "epoch: 43 step: 163, loss is 0.8626886606216431\n", + "epoch: 43 step: 164, loss is 0.8402309417724609\n", + "epoch: 43 step: 165, loss is 0.8505687713623047\n", + "epoch: 43 step: 166, loss is 0.8782742023468018\n", + "epoch: 
43 step: 167, loss is 0.8257796168327332\n", + "epoch: 43 step: 168, loss is 0.904555082321167\n", + "epoch: 43 step: 169, loss is 0.8722604513168335\n", + "epoch: 43 step: 170, loss is 0.9047020077705383\n", + "epoch: 43 step: 171, loss is 0.9038271307945251\n", + "epoch: 43 step: 172, loss is 0.855259895324707\n", + "epoch: 43 step: 173, loss is 0.9248632788658142\n", + "epoch: 43 step: 174, loss is 0.867730975151062\n", + "epoch: 43 step: 175, loss is 0.8242920637130737\n", + "epoch: 43 step: 176, loss is 0.8442249298095703\n", + "epoch: 43 step: 177, loss is 0.815618634223938\n", + "epoch: 43 step: 178, loss is 0.8471901416778564\n", + "epoch: 43 step: 179, loss is 0.8275121450424194\n", + "epoch: 43 step: 180, loss is 0.8582911491394043\n", + "epoch: 43 step: 181, loss is 0.8711225986480713\n", + "epoch: 43 step: 182, loss is 0.8279826641082764\n", + "epoch: 43 step: 183, loss is 0.8686833381652832\n", + "epoch: 43 step: 184, loss is 0.8763387203216553\n", + "epoch: 43 step: 185, loss is 0.8577327132225037\n", + "epoch: 43 step: 186, loss is 0.8541305661201477\n", + "epoch: 43 step: 187, loss is 0.8131529092788696\n", + "epoch: 43 step: 188, loss is 0.8944928646087646\n", + "epoch: 43 step: 189, loss is 0.8394546508789062\n", + "epoch: 43 step: 190, loss is 0.8644832372665405\n", + "epoch: 43 step: 191, loss is 0.8668205738067627\n", + "epoch: 43 step: 192, loss is 0.9222733974456787\n", + "epoch: 43 step: 193, loss is 0.8304741382598877\n", + "epoch: 43 step: 194, loss is 0.8426336050033569\n", + "epoch: 43 step: 195, loss is 0.8396852016448975\n", + "Train epoch time: 95011.991 ms, per step time: 487.241 ms\n", + "epoch: 44 step: 1, loss is 0.8142489790916443\n", + "epoch: 44 step: 2, loss is 0.8170884847640991\n", + "epoch: 44 step: 3, loss is 0.8462104797363281\n", + "epoch: 44 step: 4, loss is 0.8292887210845947\n", + "epoch: 44 step: 5, loss is 0.9553828239440918\n", + "epoch: 44 step: 6, loss is 0.8571553230285645\n", + "epoch: 44 step: 7, loss is 0.80592942237854\n", + "epoch: 44 step: 8, loss is 0.7866925001144409\n", + "epoch: 44 step: 9, loss is 0.7809967994689941\n", + "epoch: 44 step: 10, loss is 0.8245784044265747\n", + "epoch: 44 step: 11, loss is 0.83949214220047\n", + "epoch: 44 step: 12, loss is 0.8279667496681213\n", + "epoch: 44 step: 13, loss is 0.8498185873031616\n", + "epoch: 44 step: 14, loss is 0.8215806484222412\n", + "epoch: 44 step: 15, loss is 0.824792742729187\n", + "epoch: 44 step: 16, loss is 0.8725595474243164\n", + "epoch: 44 step: 17, loss is 0.8887865543365479\n", + "epoch: 44 step: 18, loss is 0.8671740889549255\n", + "epoch: 44 step: 19, loss is 0.7730766534805298\n", + "epoch: 44 step: 20, loss is 0.8838001489639282\n", + "epoch: 44 step: 21, loss is 0.884152889251709\n", + "epoch: 44 step: 22, loss is 0.8491542935371399\n", + "epoch: 44 step: 23, loss is 0.822620689868927\n", + "epoch: 44 step: 24, loss is 0.7921663522720337\n", + "epoch: 44 step: 25, loss is 0.8183584809303284\n", + "epoch: 44 step: 26, loss is 0.8060388565063477\n", + "epoch: 44 step: 27, loss is 0.8672155737876892\n", + "epoch: 44 step: 28, loss is 0.8814725279808044\n", + "epoch: 44 step: 29, loss is 0.8305840492248535\n", + "epoch: 44 step: 30, loss is 0.8371352553367615\n", + "epoch: 44 step: 31, loss is 0.8556675910949707\n", + "epoch: 44 step: 32, loss is 0.8637475371360779\n", + "epoch: 44 step: 33, loss is 0.7981750965118408\n", + "epoch: 44 step: 34, loss is 0.8224976062774658\n", + "epoch: 44 step: 35, loss is 0.860569953918457\n", + "epoch: 44 step: 36, loss is 
0.819395899772644\n", + "epoch: 44 step: 37, loss is 0.7868366241455078\n", + "epoch: 44 step: 38, loss is 0.7759487628936768\n", + "epoch: 44 step: 39, loss is 0.8153588771820068\n", + "epoch: 44 step: 40, loss is 0.8237171769142151\n", + "epoch: 44 step: 41, loss is 0.8998697400093079\n", + "epoch: 44 step: 42, loss is 0.9294415712356567\n", + "epoch: 44 step: 43, loss is 0.8146522641181946\n", + "epoch: 44 step: 44, loss is 0.7860899567604065\n", + "epoch: 44 step: 45, loss is 0.860426664352417\n", + "epoch: 44 step: 46, loss is 0.8390341997146606\n", + "epoch: 44 step: 47, loss is 0.7821813821792603\n", + "epoch: 44 step: 48, loss is 0.8017144799232483\n", + "epoch: 44 step: 49, loss is 0.7699670791625977\n", + "epoch: 44 step: 50, loss is 0.8302051424980164\n", + "epoch: 44 step: 51, loss is 0.8739362955093384\n", + "epoch: 44 step: 52, loss is 0.7946998476982117\n", + "epoch: 44 step: 53, loss is 0.839287519454956\n", + "epoch: 44 step: 54, loss is 0.8819726705551147\n", + "epoch: 44 step: 55, loss is 0.8328270316123962\n", + "epoch: 44 step: 56, loss is 0.8275318741798401\n", + "epoch: 44 step: 57, loss is 0.8045562505722046\n", + "epoch: 44 step: 58, loss is 0.9097446799278259\n", + "epoch: 44 step: 59, loss is 0.8522425889968872\n", + "epoch: 44 step: 60, loss is 0.8085294961929321\n", + "epoch: 44 step: 61, loss is 0.8435872793197632\n", + "epoch: 44 step: 62, loss is 0.8254014253616333\n", + "epoch: 44 step: 63, loss is 0.8339152336120605\n", + "epoch: 44 step: 64, loss is 0.7841963768005371\n", + "epoch: 44 step: 65, loss is 0.8264789581298828\n", + "epoch: 44 step: 66, loss is 0.8609036207199097\n", + "epoch: 44 step: 67, loss is 0.8801912665367126\n", + "epoch: 44 step: 68, loss is 0.7370222806930542\n", + "epoch: 44 step: 69, loss is 0.8203862309455872\n", + "epoch: 44 step: 70, loss is 0.8418514728546143\n", + "epoch: 44 step: 71, loss is 0.9002611637115479\n", + "epoch: 44 step: 72, loss is 0.866855800151825\n", + "epoch: 44 step: 73, loss is 0.8848982453346252\n", + "epoch: 44 step: 74, loss is 0.8895965218544006\n", + "epoch: 44 step: 75, loss is 0.8106572031974792\n", + "epoch: 44 step: 76, loss is 0.8198436498641968\n", + "epoch: 44 step: 77, loss is 0.8405521512031555\n", + "epoch: 44 step: 78, loss is 0.8685513138771057\n", + "epoch: 44 step: 79, loss is 0.8503077030181885\n", + "epoch: 44 step: 80, loss is 0.8165074586868286\n", + "epoch: 44 step: 81, loss is 0.9000285267829895\n", + "epoch: 44 step: 82, loss is 0.8580031394958496\n", + "epoch: 44 step: 83, loss is 0.8370569944381714\n", + "epoch: 44 step: 84, loss is 0.8794094324111938\n", + "epoch: 44 step: 85, loss is 0.9102057814598083\n", + "epoch: 44 step: 86, loss is 0.9289959073066711\n", + "epoch: 44 step: 87, loss is 0.8314516544342041\n", + "epoch: 44 step: 88, loss is 0.8374757766723633\n", + "epoch: 44 step: 89, loss is 0.8295714259147644\n", + "epoch: 44 step: 90, loss is 0.82182776927948\n", + "epoch: 44 step: 91, loss is 0.8580061793327332\n", + "epoch: 44 step: 92, loss is 0.7966165542602539\n", + "epoch: 44 step: 93, loss is 0.8688998222351074\n", + "epoch: 44 step: 94, loss is 0.8214893341064453\n", + "epoch: 44 step: 95, loss is 0.8485841155052185\n", + "epoch: 44 step: 96, loss is 0.9792789220809937\n", + "epoch: 44 step: 97, loss is 0.8300095796585083\n", + "epoch: 44 step: 98, loss is 0.862638533115387\n", + "epoch: 44 step: 99, loss is 0.8509677648544312\n", + "epoch: 44 step: 100, loss is 0.8321399688720703\n", + "epoch: 44 step: 101, loss is 0.8707855939865112\n", + "epoch: 44 step: 102, 
loss is 0.9088457822799683\n", + "epoch: 44 step: 103, loss is 0.8593429923057556\n", + "epoch: 44 step: 104, loss is 0.8398452997207642\n", + "epoch: 44 step: 105, loss is 0.8453642725944519\n", + "epoch: 44 step: 106, loss is 0.8299462795257568\n", + "epoch: 44 step: 107, loss is 0.8692185878753662\n", + "epoch: 44 step: 108, loss is 0.8149436116218567\n", + "epoch: 44 step: 109, loss is 0.8063647150993347\n", + "epoch: 44 step: 110, loss is 0.8874669075012207\n", + "epoch: 44 step: 111, loss is 0.8322079181671143\n", + "epoch: 44 step: 112, loss is 0.9100780487060547\n", + "epoch: 44 step: 113, loss is 0.7961269617080688\n", + "epoch: 44 step: 114, loss is 0.8677315711975098\n", + "epoch: 44 step: 115, loss is 0.8268213272094727\n", + "epoch: 44 step: 116, loss is 0.8606390953063965\n", + "epoch: 44 step: 117, loss is 0.8257926106452942\n", + "epoch: 44 step: 118, loss is 0.8364943265914917\n", + "epoch: 44 step: 119, loss is 0.8298732042312622\n", + "epoch: 44 step: 120, loss is 0.8452285528182983\n", + "epoch: 44 step: 121, loss is 0.7901890277862549\n", + "epoch: 44 step: 122, loss is 0.8400620818138123\n", + "epoch: 44 step: 123, loss is 0.7878503799438477\n", + "epoch: 44 step: 124, loss is 0.8462478518486023\n", + "epoch: 44 step: 125, loss is 0.8323038816452026\n", + "epoch: 44 step: 126, loss is 0.8654007911682129\n", + "epoch: 44 step: 127, loss is 0.863358736038208\n", + "epoch: 44 step: 128, loss is 0.865913987159729\n", + "epoch: 44 step: 129, loss is 0.9079986810684204\n", + "epoch: 44 step: 130, loss is 0.8633905053138733\n", + "epoch: 44 step: 131, loss is 0.8593575954437256\n", + "epoch: 44 step: 132, loss is 0.8249156475067139\n", + "epoch: 44 step: 133, loss is 0.8859285712242126\n", + "epoch: 44 step: 134, loss is 0.8362003564834595\n", + "epoch: 44 step: 135, loss is 0.8000448346138\n", + "epoch: 44 step: 136, loss is 0.8666777610778809\n", + "epoch: 44 step: 137, loss is 0.8335715532302856\n", + "epoch: 44 step: 138, loss is 0.8251416683197021\n", + "epoch: 44 step: 139, loss is 0.8388418555259705\n", + "epoch: 44 step: 140, loss is 0.7969424724578857\n", + "epoch: 44 step: 141, loss is 0.8372577428817749\n", + "epoch: 44 step: 142, loss is 0.7808161973953247\n", + "epoch: 44 step: 143, loss is 0.829387903213501\n", + "epoch: 44 step: 144, loss is 0.8327816724777222\n", + "epoch: 44 step: 145, loss is 0.8313049077987671\n", + "epoch: 44 step: 146, loss is 0.8308315277099609\n", + "epoch: 44 step: 147, loss is 0.8158052563667297\n", + "epoch: 44 step: 148, loss is 0.8457467555999756\n", + "epoch: 44 step: 149, loss is 0.8585597276687622\n", + "epoch: 44 step: 150, loss is 0.9038529992103577\n", + "epoch: 44 step: 151, loss is 0.8550215363502502\n", + "epoch: 44 step: 152, loss is 0.8189994096755981\n", + "epoch: 44 step: 153, loss is 0.8276633024215698\n", + "epoch: 44 step: 154, loss is 0.9072308540344238\n", + "epoch: 44 step: 155, loss is 0.823759913444519\n", + "epoch: 44 step: 156, loss is 0.7864620685577393\n", + "epoch: 44 step: 157, loss is 0.9005347490310669\n", + "epoch: 44 step: 158, loss is 0.8307033777236938\n", + "epoch: 44 step: 159, loss is 0.8202246427536011\n", + "epoch: 44 step: 160, loss is 0.870511531829834\n", + "epoch: 44 step: 161, loss is 0.8957492113113403\n", + "epoch: 44 step: 162, loss is 0.8554776906967163\n", + "epoch: 44 step: 163, loss is 0.8066375255584717\n", + "epoch: 44 step: 164, loss is 0.8552772998809814\n", + "epoch: 44 step: 165, loss is 0.878267765045166\n", + "epoch: 44 step: 166, loss is 0.8203029632568359\n", + "epoch: 44 
step: 167, loss is 0.8404638767242432\n", + "epoch: 44 step: 168, loss is 0.8673349618911743\n", + "epoch: 44 step: 169, loss is 0.8546375036239624\n", + "epoch: 44 step: 170, loss is 0.8213407397270203\n", + "epoch: 44 step: 171, loss is 0.911909818649292\n", + "epoch: 44 step: 172, loss is 0.7917290329933167\n", + "epoch: 44 step: 173, loss is 0.8701563477516174\n", + "epoch: 44 step: 174, loss is 0.8724896907806396\n", + "epoch: 44 step: 175, loss is 0.8261943459510803\n", + "epoch: 44 step: 176, loss is 0.8547672033309937\n", + "epoch: 44 step: 177, loss is 0.8267720341682434\n", + "epoch: 44 step: 178, loss is 0.869617760181427\n", + "epoch: 44 step: 179, loss is 0.7969815731048584\n", + "epoch: 44 step: 180, loss is 0.8590774536132812\n", + "epoch: 44 step: 181, loss is 0.8498544692993164\n", + "epoch: 44 step: 182, loss is 0.8194735050201416\n", + "epoch: 44 step: 183, loss is 0.8355236053466797\n", + "epoch: 44 step: 184, loss is 0.831017255783081\n", + "epoch: 45 step: 166, loss is 0.7727643847465515\n", + "epoch: 45 step: 167, loss is 0.8448013067245483\n", + "epoch: 45 step: 168, loss is 0.8021665215492249\n", + "epoch: 45 step: 169, loss is 0.8692658543586731\n", + "epoch: 45 step: 170, loss is 0.835612952709198\n", + "epoch: 45 step: 171, loss is 0.8776623010635376\n", + "epoch: 45 step: 172, loss is 0.8394273519515991\n", + "epoch: 45 step: 173, loss is 0.8614158630371094\n", + "epoch: 45 step: 174, loss is 0.8525674343109131\n", + "epoch: 45 step: 175, loss is 0.9271657466888428\n", + "epoch: 45 step: 176, loss is 0.847465991973877\n", + "epoch: 45 step: 177, loss is 0.8388426303863525\n", + "epoch: 45 step: 178, loss is 0.819650411605835\n", + "epoch: 45 step: 179, loss is 0.8752077221870422\n", + "epoch: 45 step: 180, loss is 0.8301194906234741\n", + "epoch: 45 step: 181, loss is 0.8579357862472534\n", + "epoch: 45 step: 182, loss is 0.8102155327796936\n", + "epoch: 45 step: 183, loss is 0.833410918712616\n", + "epoch: 45 step: 184, loss is 0.796221137046814\n", + "epoch: 45 step: 185, loss is 0.8347312211990356\n", + "epoch: 45 step: 186, loss is 0.8865641951560974\n", + "epoch: 45 step: 187, loss is 0.7869356870651245\n", + "epoch: 45 step: 188, loss is 0.8568893671035767\n", + "epoch: 45 step: 189, loss is 0.8813834190368652\n", + "epoch: 45 step: 190, loss is 0.792428731918335\n", + "epoch: 45 step: 191, loss is 0.8332685232162476\n", + "epoch: 45 step: 192, loss is 0.896716833114624\n", + "epoch: 45 step: 193, loss is 0.8637272119522095\n", + "epoch: 45 step: 194, loss is 0.8630141615867615\n", + "epoch: 45 step: 195, loss is 0.8331015110015869\n", + "Train epoch time: 91826.418 ms, per step time: 470.905 ms\n", + "epoch: 46 step: 1, loss is 0.8138159513473511\n", + "epoch: 46 step: 2, loss is 0.8478540182113647\n", + "epoch: 46 step: 3, loss is 0.7949247360229492\n", + "epoch: 46 step: 4, loss is 0.7986679077148438\n", + "epoch: 46 step: 5, loss is 0.7713766098022461\n", + "epoch: 46 step: 6, loss is 0.8418244123458862\n", + "epoch: 46 step: 7, loss is 0.823578417301178\n", + "epoch: 46 step: 8, loss is 0.8573248386383057\n", + "epoch: 46 step: 9, loss is 0.7945516705513\n", + "epoch: 46 step: 10, loss is 0.7760710120201111\n", + "epoch: 46 step: 11, loss is 0.8128289580345154\n", + "epoch: 46 step: 12, loss is 0.903236985206604\n", + "epoch: 46 step: 13, loss is 0.8691461682319641\n", + "epoch: 46 step: 14, loss is 0.826056957244873\n", + "epoch: 46 step: 15, loss is 0.7746378779411316\n", + "epoch: 46 step: 16, loss is 0.7966079711914062\n", + "epoch: 46 step: 17, 
loss is 0.806016206741333\n", + "epoch: 46 step: 18, loss is 0.8240984678268433\n", + "epoch: 46 step: 19, loss is 0.8060644865036011\n", + "epoch: 46 step: 20, loss is 0.8018324375152588\n", + "epoch: 46 step: 21, loss is 0.875568687915802\n", + "epoch: 46 step: 22, loss is 0.8065637350082397\n", + "epoch: 46 step: 23, loss is 0.8130731582641602\n", + "epoch: 46 step: 24, loss is 0.7870255708694458\n", + "epoch: 46 step: 25, loss is 0.8116318583488464\n", + "epoch: 46 step: 26, loss is 0.8017151355743408\n", + "epoch: 46 step: 27, loss is 0.8525790572166443\n", + "epoch: 46 step: 28, loss is 0.8707572221755981\n", + "epoch: 46 step: 29, loss is 0.8131945133209229\n", + "epoch: 46 step: 30, loss is 0.8563050627708435\n", + "epoch: 46 step: 31, loss is 0.8267723321914673\n", + "epoch: 46 step: 32, loss is 0.8289658427238464\n", + "epoch: 46 step: 33, loss is 0.8276656866073608\n", + "epoch: 46 step: 34, loss is 0.8522688746452332\n", + "epoch: 46 step: 35, loss is 0.8059682846069336\n", + "epoch: 46 step: 36, loss is 0.8218958377838135\n", + "epoch: 46 step: 37, loss is 0.8163431882858276\n", + "epoch: 46 step: 38, loss is 0.7910774946212769\n", + "epoch: 46 step: 39, loss is 0.8619789481163025\n", + "epoch: 46 step: 40, loss is 0.8137123584747314\n", + "epoch: 46 step: 41, loss is 0.8859765529632568\n", + "epoch: 46 step: 42, loss is 0.9086934328079224\n", + "epoch: 46 step: 43, loss is 0.7847369909286499\n", + "epoch: 46 step: 44, loss is 0.7940647602081299\n", + "epoch: 46 step: 45, loss is 0.8475504517555237\n", + "epoch: 46 step: 46, loss is 0.7993408441543579\n", + "epoch: 46 step: 47, loss is 0.7804733514785767\n", + "epoch: 46 step: 48, loss is 0.8068385720252991\n", + "epoch: 46 step: 49, loss is 0.7529829144477844\n", + "epoch: 46 step: 50, loss is 0.8380992412567139\n", + "epoch: 46 step: 51, loss is 0.8176077008247375\n", + "epoch: 46 step: 52, loss is 0.8259710669517517\n", + "epoch: 46 step: 53, loss is 0.7978577613830566\n", + "epoch: 46 step: 54, loss is 0.8327544331550598\n", + "epoch: 46 step: 55, loss is 0.7925217747688293\n", + "epoch: 46 step: 56, loss is 0.7806233167648315\n", + "epoch: 46 step: 57, loss is 0.7758333683013916\n", + "epoch: 46 step: 58, loss is 0.8145202398300171\n", + "epoch: 46 step: 59, loss is 0.8004668354988098\n", + "epoch: 46 step: 60, loss is 0.7940555810928345\n", + "epoch: 46 step: 61, loss is 0.8146576285362244\n", + "epoch: 46 step: 62, loss is 0.8213710188865662\n", + "epoch: 46 step: 63, loss is 0.8510961532592773\n", + "epoch: 46 step: 64, loss is 0.7625746726989746\n", + "epoch: 46 step: 65, loss is 0.8438282012939453\n", + "epoch: 46 step: 66, loss is 0.7938213348388672\n", + "epoch: 46 step: 67, loss is 0.7920184135437012\n", + "epoch: 46 step: 68, loss is 0.7932766675949097\n", + "epoch: 46 step: 69, loss is 0.8000260591506958\n", + "epoch: 46 step: 70, loss is 0.8015080690383911\n", + "epoch: 46 step: 71, loss is 0.8700251579284668\n", + "epoch: 46 step: 72, loss is 0.8244695663452148\n", + "epoch: 46 step: 73, loss is 0.7812405824661255\n", + "epoch: 46 step: 74, loss is 0.814933180809021\n", + "epoch: 46 step: 75, loss is 0.868402361869812\n", + "epoch: 46 step: 76, loss is 0.8071072697639465\n", + "epoch: 46 step: 77, loss is 0.8321694135665894\n", + "epoch: 46 step: 78, loss is 0.8233215808868408\n", + "epoch: 46 step: 79, loss is 0.7774689197540283\n", + "epoch: 46 step: 80, loss is 0.8151963949203491\n", + "epoch: 46 step: 81, loss is 0.8039705157279968\n", + "epoch: 46 step: 82, loss is 0.7709826231002808\n", + "epoch: 46 
step: 83, loss is 0.817217230796814\n", + "epoch: 46 step: 84, loss is 0.7725989818572998\n", + "epoch: 46 step: 85, loss is 0.8431345224380493\n", + "epoch: 46 step: 86, loss is 0.8639728426933289\n", + "epoch: 46 step: 87, loss is 0.8524305820465088\n", + "epoch: 46 step: 88, loss is 0.7952175736427307\n", + "epoch: 46 step: 89, loss is 0.8290038704872131\n", + "epoch: 46 step: 90, loss is 0.8577883243560791\n", + "epoch: 46 step: 91, loss is 0.7893970012664795\n", + "epoch: 46 step: 92, loss is 0.8706142902374268\n", + "epoch: 46 step: 93, loss is 0.7797247171401978\n", + "epoch: 46 step: 94, loss is 0.813592255115509\n", + "epoch: 46 step: 95, loss is 0.8339884281158447\n", + "epoch: 46 step: 96, loss is 0.856265664100647\n", + "epoch: 46 step: 97, loss is 0.8888800144195557\n", + "epoch: 46 step: 98, loss is 0.803231418132782\n", + "epoch: 46 step: 99, loss is 0.7825334072113037\n", + "epoch: 46 step: 100, loss is 0.7737492322921753\n", + "epoch: 46 step: 101, loss is 0.8236273527145386\n", + "epoch: 46 step: 102, loss is 0.9094458818435669\n", + "epoch: 46 step: 103, loss is 0.7608277797698975\n", + "epoch: 46 step: 104, loss is 0.8025792837142944\n", + "epoch: 46 step: 105, loss is 0.8181049823760986\n", + "epoch: 46 step: 106, loss is 0.8562993407249451\n", + "epoch: 46 step: 107, loss is 0.8569117188453674\n", + "epoch: 46 step: 108, loss is 0.8253383636474609\n", + "epoch: 46 step: 109, loss is 0.8232929706573486\n", + "epoch: 46 step: 110, loss is 0.8512239456176758\n", + "epoch: 46 step: 111, loss is 0.7878513336181641\n", + "epoch: 46 step: 112, loss is 0.7873772978782654\n", + "epoch: 46 step: 113, loss is 0.7628195285797119\n", + "epoch: 46 step: 114, loss is 0.7824795246124268\n", + "epoch: 46 step: 115, loss is 0.860386848449707\n", + "epoch: 46 step: 116, loss is 0.7953188419342041\n", + "epoch: 46 step: 117, loss is 0.8212662935256958\n", + "epoch: 46 step: 118, loss is 0.8155431151390076\n", + "epoch: 46 step: 119, loss is 0.8367188572883606\n", + "epoch: 46 step: 120, loss is 0.8165557384490967\n", + "epoch: 46 step: 121, loss is 0.852245569229126\n", + "epoch: 46 step: 122, loss is 0.8302613496780396\n", + "epoch: 46 step: 123, loss is 0.7959674596786499\n", + "epoch: 46 step: 124, loss is 0.8146615028381348\n", + "epoch: 46 step: 125, loss is 0.7796545028686523\n", + "epoch: 46 step: 126, loss is 0.8157024383544922\n", + "epoch: 46 step: 127, loss is 0.837139904499054\n", + "epoch: 46 step: 128, loss is 0.8203567266464233\n", + "epoch: 46 step: 129, loss is 0.8578354716300964\n", + "epoch: 46 step: 130, loss is 0.841575026512146\n", + "epoch: 46 step: 131, loss is 0.7982099652290344\n", + "epoch: 46 step: 132, loss is 0.7722775340080261\n", + "epoch: 46 step: 133, loss is 0.8269907236099243\n", + "epoch: 46 step: 134, loss is 0.8482730388641357\n", + "epoch: 46 step: 135, loss is 0.8106130361557007\n", + "epoch: 46 step: 136, loss is 0.7927663326263428\n", + "epoch: 46 step: 137, loss is 0.8688241839408875\n", + "epoch: 46 step: 138, loss is 0.846591591835022\n", + "epoch: 46 step: 139, loss is 0.861875057220459\n", + "epoch: 46 step: 140, loss is 0.8291619420051575\n", + "epoch: 46 step: 141, loss is 0.8537880778312683\n", + "epoch: 46 step: 142, loss is 0.8655561804771423\n", + "epoch: 46 step: 143, loss is 0.7859200239181519\n", + "epoch: 46 step: 144, loss is 0.8245643973350525\n", + "epoch: 46 step: 145, loss is 0.8259025812149048\n", + "epoch: 46 step: 146, loss is 0.8663377165794373\n", + "epoch: 46 step: 147, loss is 0.8677432537078857\n", + "epoch: 46 step: 
148, loss is 0.8131957054138184\n", + "epoch: 46 step: 149, loss is 0.8035308122634888\n", + "epoch: 46 step: 150, loss is 0.811397135257721\n", + "epoch: 46 step: 151, loss is 0.8148013353347778\n", + "epoch: 46 step: 152, loss is 0.8453505039215088\n", + "epoch: 46 step: 153, loss is 0.808722972869873\n", + "epoch: 46 step: 154, loss is 0.7804681658744812\n", + "epoch: 46 step: 155, loss is 0.8089240789413452\n", + "epoch: 46 step: 156, loss is 0.8005784749984741\n", + "epoch: 46 step: 157, loss is 0.853817343711853\n", + "epoch: 46 step: 158, loss is 0.7814025282859802\n", + "epoch: 46 step: 159, loss is 0.8326728940010071\n", + "epoch: 46 step: 160, loss is 0.8707231283187866\n", + "epoch: 46 step: 161, loss is 0.8172518014907837\n", + "epoch: 46 step: 162, loss is 0.8233350515365601\n", + "epoch: 46 step: 163, loss is 0.8056081533432007\n", + "epoch: 46 step: 164, loss is 0.8081749677658081\n", + "epoch: 46 step: 165, loss is 0.8176319003105164\n", + "epoch: 46 step: 166, loss is 0.8349081873893738\n", + "epoch: 46 step: 167, loss is 0.8767983913421631\n", + "epoch: 46 step: 168, loss is 0.8229296207427979\n", + "epoch: 46 step: 169, loss is 0.8257471323013306\n", + "epoch: 46 step: 170, loss is 0.8255093693733215\n", + "epoch: 46 step: 171, loss is 0.7354624271392822\n", + "epoch: 46 step: 172, loss is 0.793816328048706\n", + "epoch: 46 step: 173, loss is 0.7577114701271057\n", + "epoch: 46 step: 174, loss is 0.7796987295150757\n", + "epoch: 46 step: 175, loss is 0.798031210899353\n", + "epoch: 46 step: 176, loss is 0.8483534455299377\n", + "epoch: 46 step: 177, loss is 0.8027951717376709\n", + "epoch: 46 step: 178, loss is 0.8099548816680908\n", + "epoch: 46 step: 179, loss is 0.8113390803337097\n", + "epoch: 46 step: 180, loss is 0.784812331199646\n", + "epoch: 46 step: 181, loss is 0.8548978567123413\n", + "epoch: 46 step: 182, loss is 0.8019320964813232\n", + "epoch: 46 step: 183, loss is 0.8407455086708069\n", + "epoch: 46 step: 184, loss is 0.8260337114334106\n", + "epoch: 46 step: 185, loss is 0.7906172275543213\n", + "epoch: 46 step: 186, loss is 0.8146207332611084\n", + "epoch: 46 step: 187, loss is 0.7889197468757629\n", + "epoch: 46 step: 188, loss is 0.797439694404602\n", + "epoch: 46 step: 189, loss is 0.8293863534927368\n", + "epoch: 46 step: 190, loss is 0.7862901091575623\n", + "epoch: 46 step: 191, loss is 0.7921267747879028\n", + "epoch: 46 step: 192, loss is 0.772505521774292\n", + "epoch: 46 step: 193, loss is 0.8160322308540344\n", + "epoch: 46 step: 194, loss is 0.801246166229248\n", + "epoch: 46 step: 195, loss is 0.8560971021652222\n", + "Train epoch time: 87201.970 ms, per step time: 447.190 ms\n", + "epoch: 47 step: 1, loss is 0.7880914807319641\n", + "epoch: 47 step: 2, loss is 0.7434355616569519\n", + "epoch: 47 step: 3, loss is 0.7812474966049194\n", + "epoch: 47 step: 4, loss is 0.7643132209777832\n", + "epoch: 47 step: 5, loss is 0.7617680430412292\n", + "epoch: 47 step: 6, loss is 0.7677497863769531\n", + "epoch: 47 step: 7, loss is 0.7832505702972412\n", + "epoch: 47 step: 8, loss is 0.7924575805664062\n", + "epoch: 47 step: 9, loss is 0.8511538505554199\n", + "epoch: 47 step: 10, loss is 0.807483971118927\n", + "epoch: 47 step: 11, loss is 0.8648809194564819\n", + "epoch: 47 step: 12, loss is 0.8065800666809082\n", + "epoch: 47 step: 13, loss is 0.7486437559127808\n", + "epoch: 47 step: 14, loss is 0.8188698887825012\n", + "epoch: 47 step: 15, loss is 0.7709288597106934\n", + "epoch: 47 step: 16, loss is 0.7877346873283386\n", + "epoch: 47 step: 17, 
loss is 0.7664358615875244\n", + "epoch: 47 step: 18, loss is 0.8223596215248108\n", + "epoch: 47 step: 19, loss is 0.82913738489151\n", + "epoch: 47 step: 20, loss is 0.7701148986816406\n", + "epoch: 47 step: 21, loss is 0.7898533344268799\n", + "epoch: 47 step: 22, loss is 0.8048619627952576\n", + "epoch: 47 step: 23, loss is 0.8741567134857178\n", + "epoch: 47 step: 24, loss is 0.7893668413162231\n", + "epoch: 47 step: 25, loss is 0.7682672739028931\n", + "epoch: 47 step: 26, loss is 0.8187801837921143\n", + "epoch: 47 step: 27, loss is 0.8490036725997925\n", + "epoch: 47 step: 28, loss is 0.8307133316993713\n", + "epoch: 47 step: 29, loss is 0.7628856897354126\n", + "epoch: 47 step: 30, loss is 0.8846727609634399\n", + "epoch: 47 step: 31, loss is 0.8092058300971985\n", + "epoch: 47 step: 32, loss is 0.7744818329811096\n", + "epoch: 47 step: 33, loss is 0.7987645268440247\n", + "epoch: 47 step: 34, loss is 0.7814244031906128\n", + "epoch: 47 step: 35, loss is 0.7907752990722656\n", + "epoch: 47 step: 36, loss is 0.812104344367981\n", + "epoch: 47 step: 37, loss is 0.7509253025054932\n", + "epoch: 47 step: 38, loss is 0.8232476115226746\n", + "epoch: 47 step: 39, loss is 0.7955788373947144\n", + "epoch: 47 step: 40, loss is 0.7972939014434814\n", + "epoch: 47 step: 41, loss is 0.7882742881774902\n", + "epoch: 47 step: 42, loss is 0.8122217059135437\n", + "epoch: 47 step: 43, loss is 0.7234066128730774\n", + "epoch: 47 step: 44, loss is 0.782073974609375\n", + "epoch: 47 step: 45, loss is 0.8593810200691223\n", + "epoch: 47 step: 46, loss is 0.8432165384292603\n", + "epoch: 47 step: 47, loss is 0.7858246564865112\n", + "epoch: 47 step: 48, loss is 0.8158643245697021\n", + "epoch: 47 step: 49, loss is 0.8046214580535889\n", + "epoch: 47 step: 50, loss is 0.7769066095352173\n", + "epoch: 47 step: 51, loss is 0.7669296860694885\n", + "epoch: 47 step: 52, loss is 0.816792368888855\n", + "epoch: 47 step: 53, loss is 0.8004930019378662\n", + "epoch: 47 step: 54, loss is 0.7919777035713196\n", + "epoch: 47 step: 55, loss is 0.7715615630149841\n", + "epoch: 47 step: 56, loss is 0.8186911940574646\n", + "epoch: 47 step: 57, loss is 0.7675042152404785\n", + "epoch: 47 step: 58, loss is 0.8154523968696594\n", + "epoch: 47 step: 59, loss is 0.88620924949646\n", + "epoch: 47 step: 60, loss is 0.8339549899101257\n", + "epoch: 47 step: 61, loss is 0.7824723124504089\n", + "epoch: 47 step: 62, loss is 0.7932273745536804\n", + "epoch: 47 step: 63, loss is 0.7842769622802734\n", + "epoch: 47 step: 64, loss is 0.8338103294372559\n", + "epoch: 47 step: 65, loss is 0.7686444520950317\n", + "epoch: 47 step: 66, loss is 0.8210930824279785\n", + "epoch: 47 step: 67, loss is 0.8144559860229492\n", + "epoch: 47 step: 68, loss is 0.8256890773773193\n", + "epoch: 47 step: 69, loss is 0.7978771924972534\n", + "epoch: 47 step: 70, loss is 0.8530007600784302\n", + "epoch: 47 step: 71, loss is 0.8204925060272217\n", + "epoch: 47 step: 72, loss is 0.8451026082038879\n", + "epoch: 47 step: 73, loss is 0.8673055171966553\n", + "epoch: 47 step: 74, loss is 0.8305833339691162\n", + "epoch: 47 step: 75, loss is 0.8051018118858337\n", + "epoch: 47 step: 76, loss is 0.8387352228164673\n", + "epoch: 47 step: 77, loss is 0.7785453200340271\n", + "epoch: 47 step: 78, loss is 0.8026903867721558\n", + "epoch: 47 step: 79, loss is 0.8314132690429688\n", + "epoch: 47 step: 80, loss is 0.8275285959243774\n", + "epoch: 47 step: 81, loss is 0.8412928581237793\n", + "epoch: 47 step: 82, loss is 0.7940285801887512\n", + "epoch: 47 step: 
83, loss is 0.778278112411499\n", + "epoch: 47 step: 84, loss is 0.8384793996810913\n", + "epoch: 47 step: 85, loss is 0.7976016998291016\n", + "epoch: 47 step: 86, loss is 0.7830173969268799\n", + "epoch: 47 step: 87, loss is 0.8255695700645447\n", + "epoch: 47 step: 88, loss is 0.8390778303146362\n", + "epoch: 47 step: 89, loss is 0.7655835151672363\n", + "epoch: 47 step: 90, loss is 0.8895449638366699\n", + "epoch: 47 step: 91, loss is 0.8128546476364136\n", + "epoch: 47 step: 92, loss is 0.759125828742981\n", + "epoch: 47 step: 93, loss is 0.7894231081008911\n", + "epoch: 47 step: 94, loss is 0.8037126064300537\n", + "epoch: 47 step: 95, loss is 0.8052518367767334\n", + "epoch: 47 step: 96, loss is 0.8366881608963013\n", + "epoch: 47 step: 97, loss is 0.8051090240478516\n", + "epoch: 47 step: 98, loss is 0.8281242847442627\n", + "epoch: 47 step: 99, loss is 0.7763257026672363\n", + "epoch: 47 step: 100, loss is 0.8213671445846558\n", + "epoch: 47 step: 101, loss is 0.7937126755714417\n", + "epoch: 47 step: 102, loss is 0.7443878650665283\n", + "epoch: 47 step: 103, loss is 0.7796142101287842\n", + "epoch: 47 step: 104, loss is 0.8237670660018921\n", + "epoch: 47 step: 105, loss is 0.785972535610199\n", + "epoch: 47 step: 106, loss is 0.8078303933143616\n", + "epoch: 47 step: 107, loss is 0.7917026877403259\n", + "epoch: 47 step: 108, loss is 0.8473025560379028\n", + "epoch: 47 step: 109, loss is 0.8534147143363953\n", + "epoch: 47 step: 110, loss is 0.7920291423797607\n", + "epoch: 47 step: 111, loss is 0.7973663806915283\n", + "epoch: 47 step: 112, loss is 0.7895572781562805\n", + "epoch: 47 step: 113, loss is 0.782160758972168\n", + "epoch: 47 step: 114, loss is 0.8339346647262573\n", + "epoch: 47 step: 115, loss is 0.8598023653030396\n", + "epoch: 47 step: 116, loss is 0.881176233291626\n", + "epoch: 47 step: 117, loss is 0.8063434362411499\n", + "epoch: 47 step: 118, loss is 0.7916606068611145\n", + "epoch: 47 step: 119, loss is 0.751764178276062\n", + "epoch: 47 step: 120, loss is 0.8335962295532227\n", + "epoch: 47 step: 121, loss is 0.7951281070709229\n", + "epoch: 47 step: 122, loss is 0.8058286905288696\n", + "epoch: 47 step: 123, loss is 0.7980879545211792\n", + "epoch: 47 step: 124, loss is 0.8472440838813782\n", + "epoch: 47 step: 125, loss is 0.8078110218048096\n", + "epoch: 47 step: 126, loss is 0.763114333152771\n", + "epoch: 47 step: 127, loss is 0.7993894815444946\n", + "epoch: 47 step: 128, loss is 0.8254684209823608\n", + "epoch: 47 step: 129, loss is 0.8142693042755127\n", + "epoch: 47 step: 130, loss is 0.7754933834075928\n", + "epoch: 47 step: 131, loss is 0.8407120108604431\n", + "epoch: 47 step: 132, loss is 0.8223428726196289\n", + "epoch: 47 step: 133, loss is 0.7934738397598267\n", + "epoch: 47 step: 134, loss is 0.7830476760864258\n", + "epoch: 47 step: 135, loss is 0.7769019603729248\n", + "epoch: 47 step: 136, loss is 0.8153979778289795\n", + "epoch: 47 step: 137, loss is 0.8242188096046448\n", + "epoch: 47 step: 138, loss is 0.8133212327957153\n", + "epoch: 47 step: 139, loss is 0.8038349151611328\n", + "epoch: 47 step: 140, loss is 0.8014612793922424\n", + "epoch: 47 step: 141, loss is 0.7764590978622437\n", + "epoch: 47 step: 142, loss is 0.7954175472259521\n", + "epoch: 47 step: 143, loss is 0.8041995763778687\n", + "epoch: 47 step: 144, loss is 0.7969920635223389\n", + "epoch: 47 step: 145, loss is 0.8611563444137573\n", + "epoch: 47 step: 146, loss is 0.7898956537246704\n", + "epoch: 47 step: 147, loss is 0.8311989307403564\n", + "epoch: 47 step: 
148, loss is 0.7853360176086426\n", + "epoch: 47 step: 149, loss is 0.8180839419364929\n", + "epoch: 47 step: 150, loss is 0.7750321626663208\n", + "epoch: 47 step: 151, loss is 0.8160470128059387\n", + "epoch: 47 step: 152, loss is 0.7864341735839844\n", + "epoch: 47 step: 153, loss is 0.8420885801315308\n", + "epoch: 47 step: 154, loss is 0.849713146686554\n", + "epoch: 47 step: 155, loss is 0.8107603788375854\n", + "epoch: 47 step: 156, loss is 0.8033335208892822\n", + "epoch: 47 step: 157, loss is 0.821945309638977\n", + "epoch: 47 step: 158, loss is 0.798574686050415\n", + "epoch: 47 step: 159, loss is 0.8141623735427856\n", + "epoch: 47 step: 160, loss is 0.801826000213623\n", + "epoch: 47 step: 161, loss is 0.8512998819351196\n", + "epoch: 47 step: 162, loss is 0.7924254536628723\n", + "epoch: 47 step: 163, loss is 0.8238711953163147\n", + "epoch: 47 step: 164, loss is 0.8007253408432007\n", + "epoch: 47 step: 165, loss is 0.8220685720443726\n", + "epoch: 47 step: 166, loss is 0.8188948035240173\n", + "epoch: 47 step: 167, loss is 0.8448215126991272\n", + "epoch: 47 step: 168, loss is 0.8026089668273926\n", + "epoch: 47 step: 169, loss is 0.8621459007263184\n", + "epoch: 47 step: 170, loss is 0.7696868181228638\n", + "epoch: 47 step: 171, loss is 0.8735466003417969\n", + "epoch: 47 step: 172, loss is 0.8163352012634277\n", + "epoch: 47 step: 173, loss is 0.8575673699378967\n", + "epoch: 47 step: 174, loss is 0.817703366279602\n", + "epoch: 47 step: 175, loss is 0.7867600917816162\n", + "epoch: 47 step: 176, loss is 0.7883313894271851\n", + "epoch: 47 step: 177, loss is 0.882347583770752\n", + "epoch: 47 step: 178, loss is 0.8636192083358765\n", + "epoch: 47 step: 179, loss is 0.8560759425163269\n", + "epoch: 47 step: 180, loss is 0.8261744379997253\n", + "epoch: 47 step: 181, loss is 0.7766510248184204\n", + "epoch: 47 step: 182, loss is 0.7933776378631592\n", + "epoch: 47 step: 183, loss is 0.8108515739440918\n", + "epoch: 47 step: 184, loss is 0.8135684728622437\n", + "epoch: 47 step: 185, loss is 0.8664296865463257\n", + "epoch: 47 step: 186, loss is 0.7638518810272217\n", + "epoch: 47 step: 187, loss is 0.8594498634338379\n", + "epoch: 47 step: 188, loss is 0.8303490877151489\n", + "epoch: 47 step: 189, loss is 0.8047741055488586\n", + "epoch: 47 step: 190, loss is 0.7763839960098267\n", + "epoch: 47 step: 191, loss is 0.8575188517570496\n", + "epoch: 47 step: 192, loss is 0.764435887336731\n", + "epoch: 47 step: 193, loss is 0.8561575412750244\n", + "epoch: 47 step: 194, loss is 0.8355329036712646\n", + "epoch: 47 step: 195, loss is 0.8051666617393494\n", + "Train epoch time: 87127.208 ms, per step time: 446.806 ms\n", + "epoch: 48 step: 1, loss is 0.8070216178894043\n", + "epoch: 48 step: 2, loss is 0.7383114099502563\n", + "epoch: 48 step: 3, loss is 0.7694671154022217\n", + "epoch: 48 step: 4, loss is 0.7970733642578125\n", + "epoch: 48 step: 5, loss is 0.7934207320213318\n", + "epoch: 48 step: 6, loss is 0.7954100966453552\n", + "epoch: 48 step: 7, loss is 0.7978282570838928\n", + "epoch: 48 step: 8, loss is 0.8255637288093567\n", + "epoch: 48 step: 9, loss is 0.761204719543457\n", + "epoch: 48 step: 10, loss is 0.8262426257133484\n", + "epoch: 48 step: 11, loss is 0.7676242589950562\n", + "epoch: 48 step: 12, loss is 0.7837436199188232\n", + "epoch: 48 step: 13, loss is 0.7927272319793701\n", + "epoch: 48 step: 14, loss is 0.8203850388526917\n", + "epoch: 48 step: 15, loss is 0.7809609174728394\n", + "epoch: 48 step: 16, loss is 0.7672886848449707\n", + "epoch: 48 step: 
17, loss is 0.8050164580345154\n", + "epoch: 48 step: 18, loss is 0.8137116432189941\n", + "epoch: 48 step: 19, loss is 0.7847999334335327\n", + "epoch: 48 step: 20, loss is 0.7757312655448914\n", + "epoch: 48 step: 21, loss is 0.773746907711029\n", + "epoch: 48 step: 22, loss is 0.7689797282218933\n", + "epoch: 48 step: 23, loss is 0.7439678311347961\n", + "epoch: 48 step: 24, loss is 0.7561413049697876\n", + "epoch: 48 step: 25, loss is 0.7485251426696777\n", + "epoch: 48 step: 26, loss is 0.8079710006713867\n", + "epoch: 48 step: 27, loss is 0.8330177068710327\n", + "epoch: 48 step: 28, loss is 0.8482803106307983\n", + "epoch: 48 step: 29, loss is 0.8603761792182922\n", + "epoch: 48 step: 30, loss is 0.7765137553215027\n", + "epoch: 48 step: 31, loss is 0.7713529467582703\n", + "epoch: 48 step: 32, loss is 0.7585725784301758\n", + "epoch: 48 step: 33, loss is 0.7425748109817505\n", + "epoch: 48 step: 34, loss is 0.7865797281265259\n", + "epoch: 48 step: 35, loss is 0.7451978921890259\n", + "epoch: 48 step: 36, loss is 0.7522730827331543\n", + "epoch: 48 step: 37, loss is 0.7701215147972107\n", + "epoch: 48 step: 38, loss is 0.7641079425811768\n", + "epoch: 48 step: 39, loss is 0.7828800082206726\n", + "epoch: 48 step: 40, loss is 0.7781181335449219\n", + "epoch: 48 step: 41, loss is 0.7837473750114441\n", + "epoch: 48 step: 42, loss is 0.7544150352478027\n", + "epoch: 48 step: 43, loss is 0.8080761432647705\n", + "epoch: 48 step: 44, loss is 0.7653207182884216\n", + "epoch: 48 step: 45, loss is 0.7664883136749268\n", + "epoch: 48 step: 46, loss is 0.799407958984375\n", + "epoch: 48 step: 47, loss is 0.8000745177268982\n", + "epoch: 48 step: 48, loss is 0.7547155618667603\n", + "epoch: 48 step: 49, loss is 0.7864343523979187\n", + "epoch: 48 step: 50, loss is 0.9038225412368774\n", + "epoch: 48 step: 51, loss is 0.803726077079773\n", + "epoch: 48 step: 52, loss is 0.8004841804504395\n", + "epoch: 48 step: 53, loss is 0.7818324565887451\n", + "epoch: 48 step: 54, loss is 0.7767124176025391\n", + "epoch: 48 step: 55, loss is 0.7504115700721741\n", + "epoch: 48 step: 56, loss is 0.8216580152511597\n", + "epoch: 48 step: 57, loss is 0.8006070852279663\n", + "epoch: 48 step: 58, loss is 0.7855820655822754\n", + "epoch: 48 step: 59, loss is 0.7788699865341187\n", + "epoch: 48 step: 60, loss is 0.7554970979690552\n", + "epoch: 48 step: 61, loss is 0.7795459032058716\n", + "epoch: 48 step: 62, loss is 0.7916252613067627\n", + "epoch: 48 step: 63, loss is 0.8039137125015259\n", + "epoch: 48 step: 64, loss is 0.7620199918746948\n", + "epoch: 48 step: 65, loss is 0.7218988537788391\n", + "epoch: 48 step: 66, loss is 0.8028159141540527\n", + "epoch: 48 step: 67, loss is 0.819689154624939\n", + "epoch: 48 step: 68, loss is 0.7572978734970093\n", + "epoch: 48 step: 69, loss is 0.826836109161377\n", + "epoch: 48 step: 70, loss is 0.822571873664856\n", + "epoch: 48 step: 71, loss is 0.8449804186820984\n", + "epoch: 48 step: 72, loss is 0.7824056148529053\n", + "epoch: 48 step: 73, loss is 0.753966212272644\n", + "epoch: 48 step: 74, loss is 0.7949811816215515\n", + "epoch: 48 step: 75, loss is 0.7158651947975159\n", + "epoch: 48 step: 76, loss is 0.7615968585014343\n", + "epoch: 48 step: 77, loss is 0.7499538064002991\n", + "epoch: 48 step: 78, loss is 0.8040461540222168\n", + "epoch: 48 step: 79, loss is 0.7614387273788452\n", + "epoch: 48 step: 80, loss is 0.8040701746940613\n", + "epoch: 48 step: 81, loss is 0.8177032470703125\n", + "epoch: 48 step: 82, loss is 0.7904621362686157\n", + "epoch: 48 
step: 83, loss is 0.7471120357513428\n", + "epoch: 48 step: 84, loss is 0.7990197539329529\n", + "epoch: 48 step: 85, loss is 0.8157428503036499\n", + "epoch: 48 step: 86, loss is 0.7961019277572632\n", + "epoch: 48 step: 87, loss is 0.7584925293922424\n", + "epoch: 48 step: 88, loss is 0.8274255990982056\n", + "epoch: 48 step: 89, loss is 0.7735026478767395\n", + "epoch: 48 step: 90, loss is 0.7706420421600342\n", + "epoch: 48 step: 91, loss is 0.7934364676475525\n", + "epoch: 48 step: 92, loss is 0.8163866996765137\n", + "epoch: 48 step: 93, loss is 0.780390202999115\n", + "epoch: 48 step: 94, loss is 0.8489292860031128\n", + "epoch: 48 step: 95, loss is 0.8141554594039917\n", + "epoch: 48 step: 96, loss is 0.8475919365882874\n", + "epoch: 48 step: 97, loss is 0.7738040089607239\n", + "epoch: 48 step: 98, loss is 0.8255811929702759\n", + "epoch: 48 step: 99, loss is 0.8174341917037964\n", + "epoch: 48 step: 100, loss is 0.7785818576812744\n", + "epoch: 48 step: 101, loss is 0.7654615640640259\n", + "epoch: 48 step: 102, loss is 0.7834342122077942\n", + "epoch: 48 step: 103, loss is 0.8633512258529663\n", + "epoch: 48 step: 104, loss is 0.7659666538238525\n", + "epoch: 48 step: 105, loss is 0.8041815757751465\n", + "epoch: 48 step: 106, loss is 0.8190065026283264\n", + "epoch: 48 step: 107, loss is 0.8386857509613037\n", + "epoch: 48 step: 108, loss is 0.7819321751594543\n", + "epoch: 48 step: 109, loss is 0.794692873954773\n", + "epoch: 48 step: 110, loss is 0.8100134134292603\n", + "epoch: 48 step: 111, loss is 0.8046061992645264\n", + "epoch: 48 step: 112, loss is 0.8302455544471741\n", + "epoch: 48 step: 113, loss is 0.8583369255065918\n", + "epoch: 48 step: 114, loss is 0.7997075319290161\n", + "epoch: 48 step: 115, loss is 0.7669072151184082\n", + "epoch: 48 step: 116, loss is 0.8360196352005005\n", + "epoch: 48 step: 117, loss is 0.8183032274246216\n", + "epoch: 48 step: 118, loss is 0.7804214954376221\n", + "epoch: 48 step: 119, loss is 0.8184652328491211\n", + "epoch: 48 step: 120, loss is 0.7673002481460571\n", + "epoch: 48 step: 121, loss is 0.7901793718338013\n", + "epoch: 48 step: 122, loss is 0.8326337337493896\n", + "epoch: 48 step: 123, loss is 0.7798853516578674\n", + "epoch: 48 step: 124, loss is 0.7440391778945923\n", + "epoch: 48 step: 125, loss is 0.8279293775558472\n", + "epoch: 48 step: 126, loss is 0.7993245124816895\n", + "epoch: 48 step: 127, loss is 0.8425076007843018\n", + "epoch: 48 step: 128, loss is 0.8147546052932739\n", + "epoch: 48 step: 129, loss is 0.7711003422737122\n", + "epoch: 48 step: 130, loss is 0.7690991759300232\n", + "epoch: 48 step: 131, loss is 0.8076768517494202\n", + "epoch: 48 step: 132, loss is 0.8219443559646606\n", + "epoch: 48 step: 133, loss is 0.8148748874664307\n", + "epoch: 48 step: 134, loss is 0.7581009864807129\n", + "epoch: 48 step: 135, loss is 0.7835977077484131\n", + "epoch: 48 step: 136, loss is 0.8352409601211548\n", + "epoch: 48 step: 137, loss is 0.8085135221481323\n", + "epoch: 48 step: 138, loss is 0.8589401245117188\n", + "epoch: 48 step: 139, loss is 0.8518928289413452\n", + "epoch: 48 step: 140, loss is 0.781096339225769\n", + "epoch: 48 step: 141, loss is 0.8151828050613403\n", + "epoch: 48 step: 142, loss is 0.8385299444198608\n", + "epoch: 48 step: 143, loss is 0.8584417700767517\n", + "epoch: 48 step: 144, loss is 0.8252860903739929\n", + "epoch: 48 step: 145, loss is 0.7989709973335266\n", + "epoch: 48 step: 146, loss is 0.7978705167770386\n", + "epoch: 48 step: 147, loss is 0.7945955991744995\n", + "epoch: 48 
step: 148, loss is 0.7684528827667236\n", + "epoch: 48 step: 149, loss is 0.7810595035552979\n", + "epoch: 48 step: 150, loss is 0.7880604863166809\n", + "epoch: 48 step: 151, loss is 0.8927369713783264\n", + "epoch: 48 step: 152, loss is 0.803512454032898\n", + "epoch: 48 step: 153, loss is 0.8033830523490906\n", + "epoch: 48 step: 154, loss is 0.8252488970756531\n", + "epoch: 48 step: 155, loss is 0.9004841446876526\n", + "epoch: 48 step: 156, loss is 0.7582592368125916\n", + "epoch: 48 step: 157, loss is 0.8372893333435059\n", + "epoch: 48 step: 158, loss is 0.8276265263557434\n", + "epoch: 48 step: 159, loss is 0.8067104816436768\n", + "epoch: 48 step: 160, loss is 0.8375316858291626\n", + "epoch: 48 step: 161, loss is 0.8086793422698975\n", + "epoch: 48 step: 162, loss is 0.8366947174072266\n", + "epoch: 48 step: 163, loss is 0.8208978176116943\n", + "epoch: 48 step: 164, loss is 0.7553967237472534\n", + "epoch: 48 step: 165, loss is 0.7983588576316833\n", + "epoch: 48 step: 166, loss is 0.8199363350868225\n", + "epoch: 48 step: 167, loss is 0.7933790683746338\n", + "epoch: 48 step: 168, loss is 0.8488814830780029\n", + "epoch: 48 step: 169, loss is 0.8159921169281006\n", + "epoch: 48 step: 170, loss is 0.8281253576278687\n", + "epoch: 48 step: 171, loss is 0.8104822635650635\n", + "epoch: 48 step: 172, loss is 0.7942297458648682\n", + "epoch: 48 step: 173, loss is 0.851560115814209\n", + "epoch: 48 step: 174, loss is 0.831922173500061\n", + "epoch: 48 step: 175, loss is 0.7705106735229492\n", + "epoch: 48 step: 176, loss is 0.8042521476745605\n", + "epoch: 48 step: 177, loss is 0.8038192987442017\n", + "epoch: 48 step: 178, loss is 0.8659136295318604\n", + "epoch: 48 step: 179, loss is 0.7971333265304565\n", + "epoch: 48 step: 180, loss is 0.7992852926254272\n", + "epoch: 48 step: 181, loss is 0.8174002170562744\n", + "epoch: 48 step: 182, loss is 0.8013997077941895\n", + "epoch: 48 step: 183, loss is 0.8327763080596924\n", + "epoch: 48 step: 184, loss is 0.9013698101043701\n", + "epoch: 48 step: 185, loss is 0.8115021586418152\n", + "epoch: 48 step: 186, loss is 0.8359056115150452\n", + "epoch: 48 step: 187, loss is 0.809033989906311\n", + "epoch: 48 step: 188, loss is 0.8192574977874756\n", + "epoch: 48 step: 189, loss is 0.7850028276443481\n", + "epoch: 48 step: 190, loss is 0.7957602739334106\n", + "epoch: 48 step: 191, loss is 0.7984716892242432\n", + "epoch: 48 step: 192, loss is 0.819485068321228\n", + "epoch: 48 step: 193, loss is 0.780922532081604\n", + "epoch: 48 step: 194, loss is 0.8633145093917847\n", + "epoch: 48 step: 195, loss is 0.7823758721351624\n", + "Train epoch time: 89758.986 ms, per step time: 460.302 ms\n", + "epoch: 49 step: 1, loss is 0.740123987197876\n", + "epoch: 49 step: 2, loss is 0.7667059898376465\n", + "epoch: 49 step: 3, loss is 0.7389520406723022\n", + "epoch: 49 step: 4, loss is 0.729182243347168\n", + "epoch: 49 step: 5, loss is 0.8091617822647095\n", + "epoch: 49 step: 6, loss is 0.7569695711135864\n", + "epoch: 49 step: 7, loss is 0.8119943737983704\n", + "epoch: 49 step: 8, loss is 0.8093556761741638\n", + "epoch: 49 step: 9, loss is 0.7789075374603271\n", + "epoch: 49 step: 10, loss is 0.7636571526527405\n", + "epoch: 49 step: 11, loss is 0.8112369179725647\n", + "epoch: 49 step: 12, loss is 0.7956259250640869\n", + "epoch: 49 step: 13, loss is 0.8267476558685303\n", + "epoch: 49 step: 14, loss is 0.798517644405365\n", + "epoch: 49 step: 15, loss is 0.792787492275238\n", + "epoch: 49 step: 16, loss is 0.8138505220413208\n", + "epoch: 49 
step: 17, loss is 0.7759577035903931\n", + "epoch: 49 step: 18, loss is 0.8438122272491455\n", + "epoch: 49 step: 19, loss is 0.7722023725509644\n", + "epoch: 49 step: 20, loss is 0.7665164470672607\n", + "epoch: 49 step: 21, loss is 0.7476140260696411\n", + "epoch: 49 step: 22, loss is 0.7764071226119995\n", + "epoch: 49 step: 23, loss is 0.7750494480133057\n", + "epoch: 49 step: 24, loss is 0.8130207061767578\n", + "epoch: 49 step: 25, loss is 0.7358185648918152\n", + "epoch: 49 step: 26, loss is 0.7529335021972656\n", + "epoch: 49 step: 27, loss is 0.7870751619338989\n", + "epoch: 49 step: 28, loss is 0.7971776723861694\n", + "epoch: 49 step: 29, loss is 0.7450048923492432\n", + "epoch: 49 step: 30, loss is 0.8066275119781494\n", + "epoch: 49 step: 31, loss is 0.7978290319442749\n", + "epoch: 49 step: 32, loss is 0.784147322177887\n", + "epoch: 49 step: 33, loss is 0.7757874727249146\n", + "epoch: 49 step: 34, loss is 0.8097735643386841\n", + "epoch: 49 step: 35, loss is 0.7783631086349487\n", + "epoch: 49 step: 36, loss is 0.7730857133865356\n", + "epoch: 49 step: 37, loss is 0.7591262459754944\n", + "epoch: 49 step: 38, loss is 0.7891271114349365\n", + "epoch: 49 step: 39, loss is 0.7082834839820862\n", + "epoch: 49 step: 40, loss is 0.7279878854751587\n", + "epoch: 49 step: 41, loss is 0.7381035089492798\n", + "epoch: 49 step: 42, loss is 0.8695238828659058\n", + "epoch: 49 step: 43, loss is 0.7745414972305298\n", + "epoch: 49 step: 44, loss is 0.7482977509498596\n", + "epoch: 49 step: 45, loss is 0.7395782470703125\n", + "epoch: 49 step: 46, loss is 0.8277543783187866\n", + "epoch: 49 step: 47, loss is 0.7713267803192139\n", + "epoch: 49 step: 48, loss is 0.775800347328186\n", + "epoch: 49 step: 49, loss is 0.7304474711418152\n", + "epoch: 49 step: 50, loss is 0.7464207410812378\n", + "epoch: 49 step: 51, loss is 0.8342118263244629\n", + "epoch: 49 step: 52, loss is 0.7877765893936157\n", + "epoch: 49 step: 53, loss is 0.7612963914871216\n", + "epoch: 49 step: 54, loss is 0.7526565790176392\n", + "epoch: 49 step: 55, loss is 0.8115242719650269\n", + "epoch: 49 step: 56, loss is 0.7682759165763855\n", + "epoch: 49 step: 57, loss is 0.8205258846282959\n", + "epoch: 49 step: 58, loss is 0.8263794183731079\n", + "epoch: 49 step: 59, loss is 0.8179684281349182\n", + "epoch: 49 step: 60, loss is 0.7485620379447937\n", + "epoch: 49 step: 61, loss is 0.7534496188163757\n", + "epoch: 49 step: 62, loss is 0.7808990478515625\n", + "epoch: 49 step: 63, loss is 0.7670007348060608\n", + "epoch: 49 step: 64, loss is 0.7705181837081909\n", + "epoch: 49 step: 65, loss is 0.801278829574585\n", + "epoch: 49 step: 66, loss is 0.7948403358459473\n", + "epoch: 49 step: 67, loss is 0.7904521226882935\n", + "epoch: 49 step: 68, loss is 0.7661962509155273\n", + "epoch: 49 step: 69, loss is 0.8414790630340576\n", + "epoch: 49 step: 70, loss is 0.758037805557251\n", + "epoch: 49 step: 71, loss is 0.7919736504554749\n", + "epoch: 49 step: 72, loss is 0.7926309108734131\n", + "epoch: 49 step: 73, loss is 0.7852786779403687\n", + "epoch: 49 step: 74, loss is 0.8184328675270081\n", + "epoch: 49 step: 75, loss is 0.8171156644821167\n", + "epoch: 49 step: 76, loss is 0.7803730964660645\n", + "epoch: 49 step: 77, loss is 0.8132398128509521\n", + "epoch: 49 step: 78, loss is 0.7787712216377258\n", + "epoch: 49 step: 79, loss is 0.7657983899116516\n", + "epoch: 49 step: 80, loss is 0.7684045433998108\n", + "epoch: 49 step: 81, loss is 0.78287672996521\n", + "epoch: 49 step: 82, loss is 0.7817456722259521\n", + "epoch: 
49 step: 83, loss is 0.8048697710037231\n", + "epoch: 49 step: 84, loss is 0.7586679458618164\n", + "epoch: 49 step: 85, loss is 0.7393200397491455\n", + "epoch: 49 step: 86, loss is 0.8005464673042297\n", + "epoch: 49 step: 87, loss is 0.7937297821044922\n", + "epoch: 49 step: 88, loss is 0.749962329864502\n", + "epoch: 49 step: 89, loss is 0.7832708358764648\n", + "epoch: 49 step: 90, loss is 0.7430402636528015\n", + "epoch: 49 step: 91, loss is 0.7454699277877808\n", + "epoch: 49 step: 92, loss is 0.8044520616531372\n", + "epoch: 49 step: 93, loss is 0.7729016542434692\n", + "epoch: 49 step: 94, loss is 0.8143540620803833\n", + "epoch: 49 step: 95, loss is 0.8120086193084717\n", + "epoch: 49 step: 96, loss is 0.7648836970329285\n", + "epoch: 49 step: 97, loss is 0.7782962322235107\n", + "epoch: 49 step: 98, loss is 0.7711482644081116\n", + "epoch: 49 step: 99, loss is 0.7862786054611206\n", + "epoch: 49 step: 100, loss is 0.7757799625396729\n", + "epoch: 49 step: 101, loss is 0.7998440265655518\n", + "epoch: 49 step: 102, loss is 0.8050569891929626\n", + "epoch: 49 step: 103, loss is 0.7811152935028076\n", + "epoch: 49 step: 104, loss is 0.8149724006652832\n", + "epoch: 49 step: 105, loss is 0.767202615737915\n", + "epoch: 49 step: 106, loss is 0.8448664546012878\n", + "epoch: 49 step: 107, loss is 0.836307168006897\n", + "epoch: 49 step: 108, loss is 0.777419924736023\n", + "epoch: 49 step: 109, loss is 0.771072506904602\n", + "epoch: 49 step: 110, loss is 0.8446239233016968\n", + "epoch: 49 step: 111, loss is 0.7520431876182556\n", + "epoch: 49 step: 112, loss is 0.762545108795166\n", + "epoch: 49 step: 113, loss is 0.7363244295120239\n", + "epoch: 49 step: 114, loss is 0.7421356439590454\n", + "epoch: 49 step: 115, loss is 0.7760366201400757\n", + "epoch: 49 step: 116, loss is 0.8000571727752686\n", + "epoch: 49 step: 117, loss is 0.7952170372009277\n", + "epoch: 49 step: 118, loss is 0.7462528944015503\n", + "epoch: 49 step: 119, loss is 0.7862237691879272\n", + "epoch: 49 step: 120, loss is 0.7863883376121521\n", + "epoch: 49 step: 121, loss is 0.8537074327468872\n", + "epoch: 49 step: 122, loss is 0.8045443296432495\n", + "epoch: 49 step: 123, loss is 0.7128046751022339\n", + "epoch: 49 step: 124, loss is 0.8018407821655273\n", + "epoch: 49 step: 125, loss is 0.7810943722724915\n", + "epoch: 49 step: 126, loss is 0.7756640315055847\n", + "epoch: 49 step: 127, loss is 0.7836350202560425\n", + "epoch: 49 step: 128, loss is 0.8029485940933228\n", + "epoch: 49 step: 129, loss is 0.7903884649276733\n", + "epoch: 49 step: 130, loss is 0.7691841125488281\n", + "epoch: 49 step: 131, loss is 0.7838733196258545\n", + "epoch: 49 step: 132, loss is 0.8132412433624268\n", + "epoch: 49 step: 133, loss is 0.7761106491088867\n", + "epoch: 49 step: 134, loss is 0.8090250492095947\n", + "epoch: 49 step: 135, loss is 0.8557419180870056\n", + "epoch: 49 step: 136, loss is 0.8176759481430054\n", + "epoch: 49 step: 137, loss is 0.7827292680740356\n", + "epoch: 49 step: 138, loss is 0.7627129554748535\n", + "epoch: 49 step: 139, loss is 0.8127715587615967\n", + "epoch: 49 step: 140, loss is 0.7992714047431946\n", + "epoch: 49 step: 141, loss is 0.7703901529312134\n", + "epoch: 49 step: 142, loss is 0.7580201029777527\n", + "epoch: 49 step: 143, loss is 0.7465653419494629\n", + "epoch: 49 step: 144, loss is 0.7743222713470459\n", + "epoch: 49 step: 145, loss is 0.7838677167892456\n", + "epoch: 49 step: 146, loss is 0.8113902807235718\n", + "epoch: 49 step: 147, loss is 0.7909528613090515\n", + "epoch: 49 
step: 148, loss is 0.8403122425079346\n", + "epoch: 49 step: 149, loss is 0.8453036546707153\n", + "epoch: 49 step: 150, loss is 0.860937237739563\n", + "epoch: 49 step: 151, loss is 0.8333653807640076\n", + "epoch: 49 step: 152, loss is 0.8258075714111328\n", + "epoch: 49 step: 153, loss is 0.812757134437561\n", + "epoch: 49 step: 154, loss is 0.8155959844589233\n", + "epoch: 49 step: 155, loss is 0.7819238305091858\n", + "epoch: 49 step: 156, loss is 0.7780805826187134\n", + "epoch: 49 step: 157, loss is 0.8239634037017822\n", + "epoch: 49 step: 158, loss is 0.8003720045089722\n", + "epoch: 49 step: 159, loss is 0.8503465056419373\n", + "epoch: 49 step: 160, loss is 0.8249146342277527\n", + "epoch: 49 step: 161, loss is 0.8512384295463562\n", + "epoch: 49 step: 162, loss is 0.7661252617835999\n", + "epoch: 49 step: 163, loss is 0.7996624708175659\n", + "epoch: 49 step: 164, loss is 0.7521639466285706\n", + "epoch: 49 step: 165, loss is 0.7496659755706787\n", + "epoch: 49 step: 166, loss is 0.8034989833831787\n", + "epoch: 49 step: 167, loss is 0.7905418872833252\n", + "epoch: 49 step: 168, loss is 0.8356937170028687\n", + "epoch: 49 step: 169, loss is 0.7580307722091675\n", + "epoch: 49 step: 170, loss is 0.7773733735084534\n", + "epoch: 49 step: 171, loss is 0.8221913576126099\n", + "epoch: 49 step: 172, loss is 0.7696707248687744\n", + "epoch: 49 step: 173, loss is 0.8158860206604004\n", + "epoch: 49 step: 174, loss is 0.8704688549041748\n", + "epoch: 49 step: 175, loss is 0.8103585839271545\n", + "epoch: 49 step: 176, loss is 0.8341559171676636\n", + "epoch: 49 step: 177, loss is 0.8307667970657349\n", + "epoch: 49 step: 178, loss is 0.7781575918197632\n", + "epoch: 49 step: 179, loss is 0.7989760637283325\n", + "epoch: 49 step: 180, loss is 0.8366765975952148\n", + "epoch: 49 step: 181, loss is 0.8158066272735596\n", + "epoch: 49 step: 182, loss is 0.829413115978241\n", + "epoch: 49 step: 183, loss is 0.8240547180175781\n", + "epoch: 49 step: 184, loss is 0.8007907867431641\n", + "epoch: 49 step: 185, loss is 0.7898519039154053\n", + "epoch: 49 step: 186, loss is 0.791805624961853\n", + "epoch: 49 step: 187, loss is 0.8016554117202759\n", + "epoch: 49 step: 188, loss is 0.8295035362243652\n", + "epoch: 49 step: 189, loss is 0.840206503868103\n", + "epoch: 49 step: 190, loss is 0.8235992193222046\n", + "epoch: 49 step: 191, loss is 0.7902940511703491\n", + "epoch: 49 step: 192, loss is 0.803874135017395\n", + "epoch: 49 step: 193, loss is 0.816228985786438\n", + "epoch: 49 step: 194, loss is 0.7717530131340027\n", + "epoch: 49 step: 195, loss is 0.7719392776489258\n", + "Train epoch time: 91293.980 ms, per step time: 468.174 ms\n", + "epoch: 50 step: 1, loss is 0.7868151068687439\n", + "epoch: 50 step: 2, loss is 0.7734774351119995\n", + "epoch: 50 step: 3, loss is 0.7394061088562012\n", + "epoch: 50 step: 4, loss is 0.786292314529419\n", + "epoch: 50 step: 5, loss is 0.7376856207847595\n", + "epoch: 50 step: 6, loss is 0.7751009464263916\n", + "epoch: 50 step: 7, loss is 0.7576465010643005\n", + "epoch: 50 step: 8, loss is 0.7216310501098633\n", + "epoch: 50 step: 9, loss is 0.7825462222099304\n", + "epoch: 50 step: 10, loss is 0.7634239196777344\n", + "epoch: 50 step: 11, loss is 0.7699771523475647\n", + "epoch: 50 step: 12, loss is 0.7857254147529602\n", + "epoch: 50 step: 13, loss is 0.774795651435852\n", + "epoch: 50 step: 14, loss is 0.8171766400337219\n", + "epoch: 50 step: 15, loss is 0.7832989692687988\n", + "epoch: 50 step: 16, loss is 0.8110255002975464\n", + "epoch: 50 
step: 17, loss is 0.7639260292053223\n", + "epoch: 50 step: 18, loss is 0.7681317329406738\n", + "epoch: 50 step: 19, loss is 0.7983493804931641\n", + "epoch: 50 step: 20, loss is 0.7722886800765991\n", + "epoch: 50 step: 21, loss is 0.7853327989578247\n", + "epoch: 50 step: 22, loss is 0.7845486402511597\n", + "epoch: 50 step: 23, loss is 0.7694487571716309\n", + "epoch: 50 step: 24, loss is 0.7928953766822815\n", + "epoch: 50 step: 25, loss is 0.7474392652511597\n", + "epoch: 50 step: 26, loss is 0.7706735730171204\n", + "epoch: 50 step: 27, loss is 0.7919335961341858\n", + "epoch: 50 step: 28, loss is 0.8169548511505127\n", + "epoch: 50 step: 29, loss is 0.8084007501602173\n", + "epoch: 50 step: 30, loss is 0.7302160859107971\n", + "epoch: 50 step: 31, loss is 0.7972997426986694\n", + "epoch: 50 step: 32, loss is 0.8067298531532288\n", + "epoch: 50 step: 33, loss is 0.7702917456626892\n", + "epoch: 50 step: 34, loss is 0.7777935862541199\n", + "epoch: 50 step: 35, loss is 0.7490803003311157\n", + "epoch: 50 step: 36, loss is 0.7266936302185059\n", + "epoch: 50 step: 37, loss is 0.758266806602478\n", + "epoch: 50 step: 38, loss is 0.820824146270752\n", + "epoch: 50 step: 39, loss is 0.7912280559539795\n", + "epoch: 50 step: 40, loss is 0.7438652515411377\n", + "epoch: 50 step: 41, loss is 0.7782835960388184\n", + "epoch: 50 step: 42, loss is 0.7753317356109619\n", + "epoch: 50 step: 43, loss is 0.8091133832931519\n", + "epoch: 50 step: 44, loss is 0.7454946637153625\n", + "epoch: 50 step: 45, loss is 0.8425624370574951\n", + "epoch: 50 step: 46, loss is 0.7887136340141296\n", + "epoch: 50 step: 47, loss is 0.7789653539657593\n", + "epoch: 50 step: 48, loss is 0.7822670936584473\n", + "epoch: 50 step: 49, loss is 0.7569042444229126\n", + "epoch: 50 step: 50, loss is 0.8066112995147705\n", + "epoch: 50 step: 51, loss is 0.7610266208648682\n", + "epoch: 50 step: 52, loss is 0.7930648326873779\n", + "epoch: 50 step: 53, loss is 0.7628129720687866\n", + "epoch: 50 step: 54, loss is 0.7521274089813232\n", + "epoch: 50 step: 55, loss is 0.7797892093658447\n", + "epoch: 50 step: 56, loss is 0.8454664945602417\n", + "epoch: 50 step: 57, loss is 0.8189269304275513\n", + "epoch: 50 step: 58, loss is 0.7943598031997681\n", + "epoch: 50 step: 59, loss is 0.832430899143219\n", + "epoch: 50 step: 60, loss is 0.7778985500335693\n", + "epoch: 50 step: 61, loss is 0.8309261798858643\n", + "epoch: 50 step: 62, loss is 0.8002636432647705\n", + "epoch: 50 step: 63, loss is 0.7832939624786377\n", + "epoch: 50 step: 64, loss is 0.7631120085716248\n", + "epoch: 50 step: 65, loss is 0.795427680015564\n", + "epoch: 50 step: 66, loss is 0.750333309173584\n", + "epoch: 50 step: 67, loss is 0.7631804943084717\n", + "epoch: 50 step: 68, loss is 0.7703506350517273\n", + "epoch: 50 step: 69, loss is 0.778783917427063\n", + "epoch: 50 step: 70, loss is 0.7657511830329895\n", + "epoch: 50 step: 71, loss is 0.7472816705703735\n", + "epoch: 50 step: 72, loss is 0.7516396045684814\n", + "epoch: 50 step: 73, loss is 0.7871465086936951\n", + "epoch: 50 step: 74, loss is 0.7911516427993774\n", + "epoch: 50 step: 75, loss is 0.7966976165771484\n", + "epoch: 50 step: 76, loss is 0.8212704658508301\n", + "epoch: 50 step: 77, loss is 0.8013023138046265\n", + "epoch: 50 step: 78, loss is 0.7538939714431763\n", + "epoch: 50 step: 79, loss is 0.8254086971282959\n", + "epoch: 50 step: 80, loss is 0.7536981701850891\n", + "epoch: 50 step: 81, loss is 0.732988715171814\n", + "epoch: 50 step: 82, loss is 0.7595420479774475\n", + "epoch: 
50 step: 83, loss is 0.7498779296875\n", + "epoch: 50 step: 84, loss is 0.75879967212677\n", + "epoch: 50 step: 85, loss is 0.8101364970207214\n", + "epoch: 50 step: 86, loss is 0.7501190900802612\n", + "epoch: 50 step: 87, loss is 0.7442508339881897\n", + "epoch: 50 step: 88, loss is 0.7729294300079346\n", + "epoch: 50 step: 89, loss is 0.7622008919715881\n", + "epoch: 50 step: 90, loss is 0.8038082122802734\n", + "epoch: 50 step: 91, loss is 0.8013412356376648\n", + "epoch: 50 step: 92, loss is 0.7716604471206665\n", + "epoch: 50 step: 93, loss is 0.7979996204376221\n", + "epoch: 50 step: 94, loss is 0.755279004573822\n", + "epoch: 50 step: 95, loss is 0.7878562808036804\n", + "epoch: 50 step: 96, loss is 0.7526587247848511\n", + "epoch: 50 step: 97, loss is 0.8020631074905396\n", + "epoch: 50 step: 98, loss is 0.7655640840530396\n", + "epoch: 50 step: 99, loss is 0.7682890295982361\n", + "epoch: 50 step: 100, loss is 0.8174672722816467\n", + "epoch: 50 step: 101, loss is 0.8175950050354004\n", + "epoch: 50 step: 102, loss is 0.7662414312362671\n", + "epoch: 50 step: 103, loss is 0.748903751373291\n", + "epoch: 50 step: 104, loss is 0.8411457538604736\n", + "epoch: 50 step: 105, loss is 0.7238205671310425\n", + "epoch: 50 step: 106, loss is 0.7835208177566528\n", + "epoch: 50 step: 107, loss is 0.7635159492492676\n", + "epoch: 50 step: 108, loss is 0.7746706008911133\n", + "epoch: 50 step: 109, loss is 0.7491503953933716\n", + "epoch: 50 step: 110, loss is 0.7925606966018677\n", + "epoch: 50 step: 111, loss is 0.7694236040115356\n", + "epoch: 50 step: 112, loss is 0.7721191644668579\n", + "epoch: 50 step: 113, loss is 0.7572339773178101\n", + "epoch: 50 step: 114, loss is 0.7645984888076782\n", + "epoch: 50 step: 115, loss is 0.7783839702606201\n", + "epoch: 50 step: 116, loss is 0.785241961479187\n", + "epoch: 50 step: 117, loss is 0.791893482208252\n", + "epoch: 50 step: 118, loss is 0.7745317220687866\n", + "epoch: 50 step: 119, loss is 0.7271198034286499\n", + "epoch: 50 step: 120, loss is 0.7480241656303406\n", + "epoch: 50 step: 121, loss is 0.7737218141555786\n", + "epoch: 50 step: 122, loss is 0.7858203649520874\n", + "epoch: 50 step: 123, loss is 0.7329063415527344\n", + "epoch: 50 step: 124, loss is 0.7243314981460571\n", + "epoch: 50 step: 125, loss is 0.7590391039848328\n", + "epoch: 50 step: 126, loss is 0.7386910915374756\n", + "epoch: 50 step: 127, loss is 0.8182297348976135\n", + "epoch: 50 step: 128, loss is 0.8105862140655518\n", + "epoch: 50 step: 129, loss is 0.7827305793762207\n", + "epoch: 50 step: 130, loss is 0.7686834335327148\n", + "epoch: 50 step: 131, loss is 0.745132565498352\n", + "epoch: 50 step: 132, loss is 0.7335160970687866\n", + "epoch: 50 step: 133, loss is 0.7750440835952759\n", + "epoch: 50 step: 134, loss is 0.7712717056274414\n", + "epoch: 50 step: 135, loss is 0.7662545442581177\n", + "epoch: 50 step: 136, loss is 0.7510520815849304\n", + "epoch: 50 step: 137, loss is 0.7773119211196899\n", + "epoch: 50 step: 138, loss is 0.7684515714645386\n", + "epoch: 50 step: 139, loss is 0.7682321071624756\n", + "epoch: 50 step: 140, loss is 0.7639929056167603\n", + "epoch: 50 step: 141, loss is 0.7550424337387085\n", + "epoch: 50 step: 142, loss is 0.7775219082832336\n", + "epoch: 50 step: 143, loss is 0.7523344159126282\n", + "epoch: 50 step: 144, loss is 0.8364748954772949\n", + "epoch: 50 step: 145, loss is 0.8016267418861389\n", + "epoch: 50 step: 146, loss is 0.8206592798233032\n", + "epoch: 50 step: 147, loss is 0.760895848274231\n", + "epoch: 50 
step: 148, loss is 0.8097943067550659\n",
+ "epoch: 50 step: 149, loss is 0.8026451468467712\n",
 ... (per-step loss lines for epoch 50, steps 150-194, elided) ...
+ "epoch: 50 step: 195, loss is 0.7810477018356323\n",
+ "Train epoch time: 90000.281 ms, per step time: 461.540 ms\n",
+ "epoch: 51 step: 1, loss is 0.7539764642715454\n",
 ... (steps 2-194 elided) ...
+ "epoch: 51 step: 195, loss is 0.8112229108810425\n",
+ "Train epoch time: 90738.075 ms, per step time: 465.323 ms\n",
+ "epoch: 52 step: 1, loss is 0.7391225099563599\n",
 ... (steps 2-194 elided) ...
+ "epoch: 52 step: 195, loss is 0.792921781539917\n",
+ "Train epoch time: 92361.394 ms, per step time: 473.648 ms\n",
+ "epoch: 53 step: 1, loss is 0.7417248487472534\n",
 ... (steps 2-194 elided) ...
+ "epoch: 53 step: 195, loss is 0.7601263523101807\n",
+ "Train epoch time: 90792.921 ms, per step time: 465.605 ms\n",
+ "epoch: 54 step: 1, loss is 0.7605640888214111\n",
 ... (steps 2-194 elided) ...
+ "epoch: 54 step: 195, loss is 0.7798998355865479\n",
+ "Train epoch time: 88708.398 ms, per step time: 454.915 ms\n",
+ "epoch: 55 step: 1, loss is 0.7165011167526245\n",
 ... (steps 2-194 elided) ...
+ "epoch: 55 step: 195, loss is 0.7574759721755981\n",
+ "Train epoch time: 89928.663 ms, per step time: 461.173 ms\n",
+ "total time:1h 32m 41s\n",
 "============== Train Success ==============\n"
 ]
 }
@@ -11282,16 +11343,25 @@
 },
 {
 "cell_type": "code",
- "execution_count": 5,
+ "execution_count": 11,
 "metadata": {},
 "outputs": [
 {
 "name": "stderr",
 "output_type": "stream",
 "text": [
- "[WARNING] ME(168277:281473695031312,MainProcess):2024-12-19-18:00:03.691.248 [mindspore/run_check/_check_version.py:396] Can not find the tbe operator implementation(need by mindspore-ascend). Please check whether the Environment Variable PYTHONPATH is set. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n",
- "[WARNING] ME(168277:281473695031312,MainProcess):2024-12-19-18:00:03.693.637 [mindspore/run_check/_check_version.py:396] Can not find the tbe operator implementation(need by mindspore-ascend). 
Please check whether the Environment Variable PYTHONPATH is set. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n",
- "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:00:03.698.450 [mindspore/core/utils/ms_context.cc:530] GetJitLevel] Set jit level to O2 for rank table startup method.\n"
+ "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-19:33:40.699.000 [mindspore/run_check/_check_version.py:357] MindSpore version 2.3.1 and Ascend AI software package (Ascend Data Center Solution)version 7.6 does not match, the version of software package expect one of ['7.2', '7.3']. Please refer to the match info on: https://www.mindspore.cn/install\n",
+ "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-19:33:40.700.000 [mindspore/run_check/_check_version.py:375] MindSpore version 2.3.1 and \"te\" wheel package version 7.6 does not match. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n",
+ "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-19:33:40.701.000 [mindspore/run_check/_check_version.py:382] MindSpore version 2.3.1 and \"hccl\" wheel package version 7.6 does not match. For details, refer to the installation guidelines: https://www.mindspore.cn/install\n",
+ "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-19:33:40.702.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 3\n",
+ "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-19:33:41.704.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 2\n",
+ "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-19:33:42.706.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 1\n",
 ... (the same three version-mismatch warnings and countdown repeat once more at 19:33:43-19:33:45) ...
+ "[WARNING] ME(1397:281473339277344,MainProcess):2025-05-12-19:33:45.716.000 [mindspore/run_check/_check_version.py:396] Please pay attention to the above warning, countdown: 1\n"
 ]
 },
 {
@@ -11305,77 +11375,77 @@
 "name": "stderr",
 "output_type": "stream",
 "text": [
- "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:00:17.897.329 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n",
- "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:00:17.897.376 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n",
 ... (this ERROR/WARNING pair repeats about two dozen more times, differing only in timestamps) ...
+ "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.504.663 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n",
+ "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.504.750 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n",
+ "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.504.907 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n",
+ "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.504.933 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n",
+ "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.505.346 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n",
+ "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.505.372 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n",
+ "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.505.859 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, 
path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.505.884 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.506.231 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.506.257 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.506.510 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.506.535 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.506.872 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.506.896 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.507.129 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.507.153 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.507.349 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.507.373 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.507.615 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.507.639 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.507.923 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.507.946 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.508.222 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.508.246 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.508.537 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.508.561 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not 
exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.509.045 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.509.071 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.509.173 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.509.196 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.509.855 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.509.881 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.509.982 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.510.005 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.510.215 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.510.238 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.510.703 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.510.729 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.510.803 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.510.827 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.511.190 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.511.214 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.511.414 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.511.438 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.511.782 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] 
CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.511.806 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.512.043 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.512.067 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.512.542 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.512.566 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.512.786 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.512.810 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.513.010 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.513.034 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.513.631 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.513.657 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.513.941 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.513.965 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.514.100 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.514.123 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.514.370 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.514.394 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.514.747 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:33:55.514.772 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n" ] }, { "name": "stdout", "output_type": 
"stream", "text": [ - "result:{'Loss': 1.0264258415271075, 'Top_1_Acc': 0.8084935897435898, 'Top_5_Acc': 0.9829727564102564}, ckpt:'./shufflenetv1-55_195.ckpt', time: 0h 0m 50s\n" + "......result:{'Loss': 1.0218644340833027, 'Top_1_Acc': 0.8083934294871795, 'Top_5_Acc': 0.9824719551282052}, ckpt:'./shufflenetv1-55_195.ckpt', time: 0h 1m 12s\n" ] } ], @@ -11421,7 +11491,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 12, "metadata": {}, "outputs": [ { @@ -11435,90 +11505,104 @@ "name": "stderr", "output_type": "stream", "text": [ - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.709.415 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/1681751341.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.709.461 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/1681751341.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.711.990 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.006 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.089 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.100 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.265 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/778396864.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.276 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/778396864.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.356 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.366 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.641 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.653 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.868 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.712.879 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.049 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.061 [mindspore/core/utils/info.cc:121] ToString] The file 
'/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.273 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.284 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.409 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.419 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.552 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.562 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.692 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.702 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.888 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.713.898 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.046 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.057 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.240 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.251 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.455 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/778396864.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.467 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/778396864.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.557 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.566 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.624 
[mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.714.633 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.024 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.035 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.094 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.104 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.258 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.267 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.516 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.526 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.580 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.590 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.809 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.819 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.927 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.715.936 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.028 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/778396864.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.038 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/778396864.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.173 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] 
CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.183 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.308 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.318 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.608 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.619 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.735 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.745 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.888 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.716.898 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.256 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.268 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.450 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.460 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.552 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.562 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.701 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.711 [mindspore/core/utils/info.cc:121] ToString] The file '/tmp/ipykernel_168277/3162391481.py' may not exists.\n", - "[ERROR] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.892 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_168277/3162391481.py]\n", - "[WARNING] CORE(168277,ffffb39b2010,python):2024-12-19-18:01:14.717.903 [mindspore/core/utils/info.cc:121] ToString] The file 
'/tmp/ipykernel_168277/3162391481.py' may not exists.\n" + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.637.534 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/1681751341.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.637.623 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/1681751341.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.640.168 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.640.201 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.640.327 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.640.351 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.640.649 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/778396864.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.640.675 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/778396864.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.640.799 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.640.822 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.641.307 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.641.332 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.641.671 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.641.695 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.641.932 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.641.956 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.642.285 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.642.310 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.642.560 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, 
path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.642.585 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.642.780 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.642.804 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.643.043 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.643.068 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.643.354 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.643.378 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.643.654 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.643.678 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.643.961 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.643.985 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.644.356 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/778396864.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.644.380 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/778396864.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.644.508 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.644.530 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.644.630 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.644.652 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.645.312 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.645.336 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not 
exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.645.438 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.645.461 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.645.670 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.645.692 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.646.130 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.646.154 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.646.228 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.646.250 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.646.645 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.646.671 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.646.865 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.646.889 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.647.063 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/778396864.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.647.086 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/778396864.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.647.280 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.647.303 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.647.531 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.647.555 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.648.025 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] 
CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.648.050 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.648.266 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.648.289 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.648.488 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.648.511 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.649.103 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.649.128 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.649.410 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.649.434 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.649.566 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.649.589 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.649.835 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.649.858 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "[ERROR] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.650.187 [mindspore/core/utils/file_utils.cc:253] GetRealPath] Get realpath failed, path[/tmp/ipykernel_1397/3162391481.py]\n", + "[WARNING] CORE(1397,ffff9e66c020,python):2025-05-12-19:35:25.650.212 [mindspore/core/utils/info.cc:120] ToString] The file '/tmp/ipykernel_1397/3162391481.py' may not exists.\n", + "/usr/local/Ascend/ascend-toolkit/8.0.0/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[16, 31], [48, 48], [16, 63], [16, 63]] to [[16, 31], [48, 48], [16, 63], (16, 63)].\n", + " warnings.warn(to_print)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "-\r" + "." 
+ ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/usr/local/Ascend/ascend-toolkit/8.0.0/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[16, 31], [480, 480], [16, 63], [16, 63]] to [[16, 31], [480, 480], [16, 63], (16, 63)].\n", + " warnings.warn(to_print)\n", + "/usr/local/Ascend/ascend-toolkit/8.0.0/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[16, 31], [960, 960], [4, 15], [4, 15]] to [[16, 31], [960, 960], [4, 15], (4, 15)].\n", + " warnings.warn(to_print)\n", + "/usr/local/Ascend/ascend-toolkit/8.0.0/opp/built-in/op_impl/ai_core/tbe/impl/util/util_conv2d_dynamic.py:130: UserWarning: conv2d fmap ori_range changed from [[16, 31], [1920, 1920], [7, 15], [7, 15]] to [[16, 31], [1920, 1920], [7, 15], (7, 15)].\n", + " warnings.warn(to_print)\n" ] }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABOwAAAGACAYAAAAeS/0DAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9eZhdZZnvjd9r7XmqvWueUklVKgMhTBImEQhDK6JIo0Ja7WODimDj0JxW+/XorwWcON32aTxNH9tGu3F8+21H2hEEmUcRCAQyJ5Wpkppr156HNfz+4Eodv/ezhaCksiXfz3V5yb2HtZ/1rPu5n2et1Posy/d9XwghhBBCCCGEEEIIIU2BfaQbQAghhBBCCCGEEEII+b/wgh0hhBBCCCGEEEIIIU0EL9gRQgghhBBCCCGEENJE8IIdIYQQQgghhBBCCCFNBC/YEUIIIYQQQgghhBDSRPCCHSGEEEIIIYQQQgghTQQv2BFCCCGEEEIIIYQQ0kTwgh0hhBBCCCGEEEIIIU0EL9gRQgghhBBCCCGEENJEHLUX7J544gk588wzJZFIiGVZsn79+iPdJEJeMW644QaxLEumpqaOdFPIUQZzj/wuDuYGIa92BgcH5eKLL37Jz913331iWZbcd999869deeWVMjg4ePgaR17VMPeOPg513TU4OChXXnnlH/Rb5557rpx77rl/0DbIHw9c0zcHR+UFu3q9LpdffrnMzMzIzTffLN/61rdkyZIlR7pZ5FXEI488IjfccINks9kj3RRylMHcI4S8Gvnyl78sX//61490M8hRCHOPEELIkSJ4pBtwJNixY4fs3r1bvvrVr8pVV111pJtDXoU88sgjcuONN8qVV14pmUzmSDeHHEUw9wghr0a+/OUvS0dHxx/8FyLNxDnnnCPlclnC4fCRbgp5EZh75NXAli1bxLaPyr/VIeSPmqNy1E5MTIiIvOTJbLFYXIDWkKMZz/OkUqkc6WaQoxDmHjka4DxOmhnbtiUajfIkmiw4zL2jj0gkIqFQ6EU/wzmTLCS+70u5XD7SzWh6jroqfeWVV8ratWtFROTyyy8Xy7Lk3HPPlSuvvFKSyaTs2LFD3vSmN0kqlZI///M/F5EXitdHP/pRGRgYkEgkIitXrpR/+Id/EN/3Ydvlclk+8pGPSEdHh6RSKbnkkktkdHRULMuSG264YaF3lRwhbrjhBvn4xz8uIiJDQ0NiWZZYliW7du0Sy7LkQx/6kHznO9+R1atXSyQSkTvuuKOhS0RE5r+jb8XYvHmzrFu3Tjo7OyUWi8nKlSvlU5/61Iu2a/fu3bJs2TI57rjjZHx8/JXcZdIkMPdIs/DQQw/JqaeeKtFoVIaHh+Vf//VfG37u29/+tqxZs0ZisZi0tbXJO97xDtm7d6/xuccff1ze+MY3Sjqdlng8LmvXrpWHH34YPnPQtbJx40Z517veJa2trXLWWWcdlv0jL87u3bvl2muvlZUrV0osFpP29na5/PLLZdeuXfC53+U1/PrXvz5fu0RecC89//zzcv/998/Xtd/2KO3cuVMuv/xyaWtrk3g8LmeccYb87Gc/g20erHXf/e535cYbb5T+/n5JpVJy2WWXydzcnFSrVbnuuuukq6tLksmkvOc975FqtQrbcBxHPvvZz8rw8LBEIhEZHByUT37yk8bnDvLLX/5STjrpJIlGo3LsscfKD3/4w4Zt0vVX43mefOlLX5LVq1dLNBqV7u5uueaaa2R2dvZFv3c0wtx7Aebe0cfU1JSsW7dOWlpapL29Xf7qr/4K/mFWO+wO5vr9998v1157rXR1dcmiRYvm37/11ltleHhYYrGYnHbaafLggw8u5O6QJiKbzc7fuZNOp+U973mPlEql+fcPtT4ddGzeeeedcsopp0gsFptfH951111y1llnSSaTkWQyKStXrpRPfvKT8P1qtSrXX3+9LFu2TCKRiAwMDMjf/M3f/M46+GrhqLsl9pprrpH+/n75whe+IB/5yEfk1FNPle7ubvnOd74jjuPIhRdeKGeddZb8wz/8g8TjcfF9Xy655BK599575X3ve5+cdNJJcuedd8rHP/5xGR0dlZtvvnl+21deeaV897vflXe/+91yxhlnyP333y9vfvObj+DekiPB2972Ntm6dav8x3/8h9x8883S0dEhIiKdnZ0iInLPPffId7/7XfnQhz4kHR0dMjg4+LJ8Y88++6ycffbZEgqF5Oqrr5bBwUHZsWOH/OQnP5HPf/7zDb+zY8cOOf/886WtrU3uuuuu+TaRVxfMPdIMbNiwQd7whjdIZ2en3HDDDeI4jlx//fXS3d0Nn/v85z8vf/u3fyvr1q2Tq666SiYnJ+WWW26Rc845R55++un5v4K/55575KKLLpI1a9bI9ddfL7Zty2233Sbnn3++PPjgg3LaaafBdi+//HJZvny5fOELXzD+YY0sDE888YQ88sgj8o53vEMWLVoku3btkn/5l3+Rc889VzZu3CjxePxlbe9LX/qSfPjDH5ZkMjn/DwQH82l8fFzOPPNMKZVK8pGPfETa29vlG9/4hlxyySXy/e9/X9761rfCtm666SaJxWLyiU98Qr
Zv3y633HKLhEIhsW1bZmdn5YYbbpDHHntMvv71r8vQ0JB8+tOfnv/uVVddJd/4xjfksssuk49+9KPy+OOPy0033SSbNm2SH/3oR/A727Ztkz/7sz+TD3zgA3LFFVfIbbfdJpdffrnccccd8vrXv/5l7f8111wjX//61+U973mPfOQjH5GRkRH553/+Z3n66afl4Ycffsm/mjmaYO4x945W1q1bJ4ODg3LTTTfJY489Jv/0T/8ks7Oz8s1vfvNFv3fttddKZ2enfPrTn57/C7t/+7d/k2uuuUbOPPNMue6662Tnzp1yySWXSFtbmwwMDCzE7pAmYt26dTI0NCQ33XSTPPXUU/K1r31Nurq65O/+7u9E5OXVpy1btsg73/lOueaaa+T973+/rFy5Up5//nm5+OKL5YQTTpDPfOYzEolEZPv27fAPs57nySWXXCIPPfSQXH311bJq1SrZsGGD3HzzzbJ161a5/fbbF7JLFhb/KOTee+/1RcT/3ve+N//aFVdc4YuI/4lPfAI+e/vtt/si4n/uc5+D1y+77DLfsix/+/btvu/7/pNPPumLiH/dddfB56688kpfRPzrr7/+8OwMaUq++MUv+iLij4yMwOsi4tu27T///PPw+sGcvPfee+H1kZERX0T82267bf61c845x0+lUv7u3bvhs57nzf/39ddf74uIPzk56W/atMnv6+vzTz31VH9mZuYV2T/SvDD3yJHm0ksv9aPRKOTJxo0b/UAg4B9cduzatcsPBAL+5z//efjuhg0b/GAwOP+653n+8uXL/QsvvBDyrFQq+UNDQ/7rX//6+dcO5t473/nOw7l75BAolUrGa48++qgvIv43v/nN+dcOHjPNbbfdZtSx1atX+2vXrjU+e9111/ki4j/44IPzr+XzeX9oaMgfHBz0Xdf1ff//1rrjjjvOr9Vq85995zvf6VuW5V900UWw3de+9rX+kiVL5uP169f7IuJfddVV8LmPfexjvoj499xzz/xrS5Ys8UXE/8EPfjD/2tzcnN/b2+u/5jWvmX+tUf294oor4HcffPBBX0T873znO/C7d9xxR8PXj3aYe8y9o42DuXzJJZfA69dee60vIv4zzzzj+/4LuXHFFVfMv38w18866yzfcZz512u1mt/V1eWfdNJJfrVanX/91ltv9UWk4Vggr04O5tZ73/teeP2tb32r397e7vv+71ef7rjjDvjszTffPH/u8Lv41re+5du2DfXW933/K1/5ii8i/sMPP/x77eMfA0fdLbEvxV/+5V9C/POf/1wCgYB85CMfgdc/+tGPiu/78otf/EJERO644w4ReeFfKX6bD3/4w4exteSPkbVr18qxxx77e313cnJSHnjgAXnve98rixcvhvca3drx3HPPydq1a2VwcFDuvvtuaW1t/b1+l7w6YO6Rw43runLnnXfKpZdeCnmyatUqufDCC+fjH/7wh+J5nqxbt06mpqbm/9fT0yPLly+Xe++9V0RE1q9fL9u2bZN3vetdMj09Pf+5YrEoF1xwgTzwwAPieR604QMf+MDC7Cz5ncRisfn/rtfrMj09LcuWLZNMJiNPPfXUK/pbP//5z+W0006D25+TyaRcffXVsmvXLtm4cSN8/i/+4i/gr4JOP/108X1f3vve98LnTj/9dNm7d684jjP/OyIif/3Xfw2f++hHPyoiYtwG2dfXB39h1dLSIn/xF38hTz/9tIyNjR3y/n3ve9+TdDotr3/962GsrFmzRpLJ5PxYIS/A3GPuHa188IMfhPjgOejB/PldvP/975dAIDAf/+Y3v5GJiQn5wAc+AA8lufLKKyWdTr+CLSZ/LOh11dlnny3T09OSy+Vedn0aGhqC9aDI/32uwH/9138Za7qDfO9735NVq1bJMcccA/Xo/PPPFxF5Vdejo+6W2BcjGAzCvfsiL7gw+vr6JJVKweurVq2af//g/9u2LUNDQ/C5ZcuWHcYWkz9GdI68HHbu3CkiIscdd9whff4tb3mLdHd3y5133inJZPL3/l3y6oC5Rw43k5OTUi6XZfny5cZ7K1eunF/Ybdu2TXzfb/g5EZk/qd22bZuIiFxxxRW/8zfn5ubggvAfkufklaFcLstNN90kt912m4yOjsKtyXNzc6/ob+3evVtOP/104/XfXqf9dt3S/+Bw8ARU3+aVTqfF8zyZm5uT9vb2+XWeXtf19PRIJpOZXw8eZNmyZcY/ZqxYsUJEXnCE9vT0HNL+bdu2Tebm5qSrq6vh+wcfpEZegLnH3Dta0fPp8PCw2LZt+Bs1es48mE96e6FQSJYuXfqHN5T80aFr18E11+zs7MuuT43WaH/2Z38mX/va1+Sqq66ST3ziE3LBBRfI2972NrnsssvmH4yzbds22bRp07zmR/Nqrke8YPdbRCIRPi2JHHZ++19/D9LoL5REXvhrlT+Et7/97fKNb3xDvvOd78g111zzB22L/PHD3CPNgud5YlmW/OIXv4B/2T/IwYu8B/+l9Ytf/KKcdNJJDbelLwg3ynOysHz4wx+W2267Ta677jp57WtfK+l0WizLkne84x3wr+eHq/68GI3y7cVe/+0LPiK/u82HC8/zpKurS77zne80fP93nbwcrTD3XjmYe3/cHGq+cM4kL8Wh1Kg/JN9isZg88MADcu+998rPfvYzueOOO+Q///M/5fzzz5df/vKXEggExPM8Of744+Uf//EfG2731exW5AW7l2DJkiVy9913Sz6fh7+y27x58/z7B//f8zwZGRmBf5HYvn37wjaYNAUvd1F18F8q9AMA9L9KHPyXreeee+6QtvvFL35RgsGgXHvttZJKpeRd73rXy2oX+eODuUeOJAefHnzwL+N+my1btsz/9/DwsPi+L0NDQ/N/+dGI4eFhEXnhlq4/+ZM/eeUbTA4L3//+9+WKK66Q//W//tf8a5VKxagzv11/Dt4SI2LWH5HfXduWLFkCuXUQvU77Qzm4ztu2bdv8X1CJvPDggWw2a/zO9u3bxfd9aPfWrVtF5IUn5R0qw8PDcvfdd8vrXvc6nlgfAsw95t7RyrZt2+Cvl7Zv3y6e572sYy7yf/N227Zt87cbirxwi/nIyIiceOKJr0h7yauDl1uffhe2bcsFF1wgF1xwgfzjP/6jfOELX5BPfepTcu+998qf/MmfyPDwsDzzzDNywQUXLPg/Xhxp+OdkL8Gb3vQmcV1X/vmf/xlev/nmm8WyLLnoootERObvxf7yl78Mn7vlllsWpqGkqUgkEiJiXgT5XSxZskQCgYA88MAD8LrOp87OTjnnnHPk3//932XPnj3wnv6XWJEXFpm33nqrXHbZZXLFFVfIj3/845exF+SPEeYeOZIEAgG58MIL5fbbb4c82bRpk9x5553z8dve9jYJBAJy4403Gvnj+75MT0+LiMiaNWtkeHhY/uEf/kEKhYLxe5OTk4dpT8gfQiAQMI7rLbfcYvz10sELsr9df4rFonzjG98wtplIJBrWtTe96U3y61//Wh599FHYxq233iqDg4O/t7ez0e+IvPDU0N/m4L/2v/nNb4bX9+/fD
0/Hy+Vy8s1vflNOOumkQ74lUeSFp/O5riuf/exnjfccx3lZT/o+GmDuMfeOVv7P//k/EB88Bz14rnqonHLKKdLZ2Slf+cpXpFarzb/+9a9/ncecGLzc+tSImZkZ47WDd1VUq1UReaEejY6Oyle/+lXjs+Vyef4Jx69G+Bd2L8Fb3vIWOe+88+RTn/qU7Nq1S0488UT55S9/Kf/1X/8l11133fyEv2bNGnn7298uX/rSl2R6elrOOOMMuf/+++f/RetouxJ8tLNmzRoREfnUpz4l73jHOyQUCslb3vKW3/n5dDotl19+udxyyy1iWZYMDw/LT3/604b34//TP/2TnHXWWXLyySfL1VdfLUNDQ7Jr1y752c9+JuvXrzc+b9u2fPvb35ZLL71U1q1bJz//+c/hX8zIqwvmHjnS3HjjjXLHHXfI2WefLddee604jiO33HKLrF69Wp599lkReeFk+XOf+5z8j//xP2TXrl1y6aWXSiqVkpGREfnRj34kV199tXzsYx8T27bla1/7mlx00UWyevVqec973iP9/f0yOjoq9957r7S0tMhPfvKTI7zHRHPxxRfLt771LUmn03LsscfKo48+Knfffbe0t7fD597whjfI4sWL5X3ve598/OMfl0AgIP/+7/8unZ2dxj8MrFmzRv7lX/5FPve5z8myZcukq6tLzj//fPnEJz4h//Ef/yEXXXSRfOQjH5G2tjb5xje+ISMjI/KDH/zgFVOdnHjiiXLFFVfIrbfeKtlsVtauXSu//vWv5Rvf+IZceumlct5558HnV6xYIe973/vkiSeekO7ubvn3f/93GR8fl9tuu+1l/e7atWvlmmuukZtuuknWr18vb3jDGyQUCsm2bdvke9/7nvzv//2/5bLLLntF9vHVAHOPuXe0MjIyIpdccom88Y1vlEcffVS+/e1vy7ve9a6X/RdxoVBIPve5z8k111wj559/vvzZn/2ZjIyMyG233UaHHTF4ufWpEZ/5zGfkgQcekDe/+c2yZMkSmZiYkC9/+cuyaNGi+Yf6vPvd75bvfve78oEPfEDuvfdeed3rXieu68rmzZvlu9/9rtx5551yyimnHO7dPTIs9GNpm4GDjzL/3ve+N//aFVdc4ScSiYafz+fz/n//7//d7+vr80OhkL98+XL/i1/8ou95HnyuWCz6H/zgB/22tjY/mUz6l156qb9lyxZfRPz/+T//52HdJ9J8fPazn/X7+/t927Z9EfFHRkZ8EfE/+MEPNvz85OSk//a3v92Px+N+a2urf8011/jPPfecLyL+bbfdBp997rnn/Le+9a1+JpPxo9Gov3LlSv9v//Zv598/+Bju3348dqlU8teuXesnk0n/scceOyz7TJoD5h450tx///3+mjVr/HA47C9dutT/yle+Mp8bv80PfvAD/6yzzvITiYSfSCT8Y445xv/gBz/ob9myBT739NNP+29729v89vZ2PxKJ+EuWLPHXrVvn/+pXv5r/TKPcI0eG2dlZ/z3veY/f0dHhJ5NJ/8ILL/Q3b97sL1myxL/iiivgs08++aR/+umn++Fw2F+8eLH/j//4j/5tt902X7sOMjY25r/5zW/2U6mULyL+2rVr59/bsWOHf9lll83XpdNOO83/6U9/Cr/TaO3n+/78bz3xxBPweqN8qtfr/o033ugPDQ35oVDIHxgY8P/H//gffqVSge8uWbLEf/Ob3+zfeeed/gknnOBHIhH/mGOOMX77YJvuvffe+deuuOIKf8mSJUaf3nrrrf6aNWv8WCzmp1Ip//jjj/f/5m/+xt+/f7/x2aMZ5h5z72jjYL5s3LjRv+yyy/xUKuW3trb6H/rQh/xyuTz/OT0Gflf+HeTLX/6yPzQ05EciEf+UU07xH3jgAX/t2rWQ/+TVze9aV+k6+XLrk+ZXv/qV/6d/+qd+X1+fHw6H/b6+Pv+d73ynv3XrVvhcrVbz/+7v/s5fvXq1H4lE/NbWVn/NmjX+jTfe6M/Nzb2yO99EWL7f4F4m8oqxfv16ec1rXiPf/va35c///M+PdHMIIYQQQgghhBBCSJNDh90rSLlcNl770pe+JLZtyznnnHMEWkQIIYQQQgghhBBC/tigw+4V5O///u/lySeflPPOO0+CwaD84he/kF/84hdy9dVXv6ofNUwIIYQQQgghhBBCXjl4S+wryF133SU33nijbNy4UQqFgixevFje/e53y6c+9SkJBnltlBBCCCGEEEIIIYS8NLxgRwghhBBCCCGEEEJIE0GHHSGEEEIIIYQQQgghTQQv2BFCCCGEEEIIIYQQ0kTwgh0hhBBCCCGEEEIIIU3EIT8J4ay150Kczc5AHLE9iNvCqMZb3B43ttnZloC4I5OEOBwIQRyMxHADAWz+zGwW4pqDbWjNpI022G4d4mq1CnGlUoE4GotC7IoLcalcgDidacEf9PHzIiK1ag3igOB+BwIBiFNJ7KdEAvsxFMI2ltX2favBdVob+1K3yfEtiD/42a+Y2zgMfPXHd0O8b/OTEE+ObILYdXE/uhcfY2xz8fAqiFt7FkMcjeE2tj7/CMS7tz8LcT2Pxzyg2tDSauZdMIrj4bTXnQPxshXY7socjrfnn3saYs/D41WrY95ufH6D0YZcdgriag1zv17DvJuZLkFcKOFvOC5+v7OzDeLWNsxbERHXz+M2cDhKpYxj+PYf3mlsY6HwPO+lP/THiLKYWhaO9XIRj/v0DOZNW1srxG4N8yIWN2t/IBzBJqia5Am2ATPxyGDbC/PvWwN9WM9jMZz39PEJ2tg7jdrpeGreUdvIzuUgjtphiBNqfshXy/ibcTyesQh+X8Scp9LpDMSzs1jjakWsJ1q2W6+pYoG7JIGgmTXhEPZNOoFzZW8n5vLo+DjExRr2Y0sLft6pYyuLxTmjDYv6cU0QCmHf6gdUffcn641tHA6+97NHIdb1LhbBYxyOYt95AXxfRMTxsb+DaiQHVFqGdIlVimU/iNurW+p9owUitqte9XF9pY+Za+ux0mCj0ET/ReNG2/A89ZvqA3oLepv62Liuua402qlix2g3bvO9l6x+yW2+Enz9mqshLhfVelgdc2ugF+JsXJ0XiMgJaaw/e57F9dJPHl2P26hiLQkE1G+qehmKYO63dXYYbWiJ4TaWL+6E+NzXnQaxU8c2TM3hujKUwlqzaftuiH91H45fERFRfRfR9S+EYyEcxDyqqTY5dZXIKmciDWpAycfjOVvBvLNVGf/Jw48Z2zhcfOfpyyF++B6s96korsMTcVW7LfMUOpnAPu1I90HcGl8EcSaN5wcHpvZAvHPyGYhb+jEv2vuLRhtCEVyzlYtZiKNRHB8BKwOx5zoQuy6u01tbcB8iEXONFxT8zlwO5/Ppcey7SgH7oVTF8wVfVbDZmQP4+RJuX0QkV8D51xfcr9kZ7MtvfxrP9Q4XA8swr2w1JwXiOE8OrMSaZzWYk3bt2A+x52H/ptIpFWMNS4bxN3t7eyDOFvB4TmdnjTa0tWMdrM3iOrEwPg1xawrb1LOkHz/v4LnE3DR+v5A3cz+gLmvVq1jT5nKYE7FW
nD/q6ppQXdVAV62lfb22FpGwWtPF1FqpVsOa+MzD641tNIJ/YUcIIYQQQgghhBBCSBPBC3aEEEIIIYQQQgghhDQRvGBHCCGEEEIIIYQQQkgTccgOu+c3Pg9xdkp5jPAWXbHa8YUOF+9VFhGxYl0QFz102BSUe8S38L77UgXvAy6VlYPLRb/CVMC88TsaxN9wHPxOQLl7IsrjUqrgPdSOcolZlXaI7QYyprry5sWC2HcF5ZObUX6BeBzdQJaN98NbygUoDRxHpYp2VSinR9B0UywEOeU1as+gF83v7MY4iI6J3sVLjW26Hu6b7aHvwSth/1Zm8b55v4z31fd3YB4vHlgG8cCyJUYb+vrRAdHVhfsRCmF/Oxl0RAwsQr+A42COVCroDsjOoqtBRGRqCvs2GNaDGJO1tR3bFE3gb8zl0GkQieLY8XzsVxGRkMqr3FwW4lq1kZXoyLBQDrNmo1pC58PMvp0Q792E78/lsCa+7vwLjG22KBeo/rcjSzmdjqaeDylnqavEjp6a16wwzotVxxxnhs9NSVAyKawvLco3V1OuEK+M9SYeQg9IuoFXKh7TzhScl6bU/O35GEejWCs6lTdqdhbrj/bNioj09WKtDigvTlcXzi8htY2RveiJCYdUP2aw35IYiohIu3IW6Vwvlkwvy0LgqeVRMILHp6ZcLcU5dNqEEub6KqDyQpQLV7sqHeWkc9W6pDKHc05Y5YQrpme0oLzCtoXfSSbwePhqG57yw2mf2Uv55kRE1G4ZDjvdD3oT2lmnf0M77HQbX/gN9Zsv4cVbKGZHRyAOqvoWUmv0UVUXtpWVBE1ETliF6z5P+Xm7O7B2xIxt4G/q/iypNfvcjOlzKlh4TKpqTXbiyadDXFdO4Klp3GZ3FMeSV0PvaCxi5p2ncrkrhV6w45biWnVyYhTichnHeKGg1pHqXCMSNOeevh4cX/Uw1uDtG3cZ31kotHIv0YH79+yT6DQb6DkZ4lTCnOcqyv1czuNxKWd0zcPzj9Y+XDcvH8C4HEXPXt7LGm3wcrgmiLg4EfkqV+outiEYwDxpa8HxEleO+nrRPL/PFdG7lp/GfN2zFR2MgYiqPyEck/tGxyBOJXEfC3nTJeY42qWra57xlQXB195UNbeWlUdt7ADWgq4Oc2ERVb5K28LcDHmYl9VZlXeduAZc1I3XLhLK7V7K4fnjCxvF8bNqFTrpes5Ed18yhgMwksS4qq6nVKt43pzLYn0SMb2Sk/snIR7ZjQc93IbXDAJRtf62sA2xFlwTRhv4mlNR9VwB5SXW8/+hcjSdBxFCCCGEEEIIIYQQ0vTwgh0hhBBCCCGEEEIIIU0EL9gRQgghhBBCCCGEENJEHLLDLhZUTgx17/8S5awb7EZvQVcnumFERGLavaY8EeUqOh0qdfRG+Orz4ZjyCTjKgefh90VE0m1437aj7i0PKweLUoVIIKzuua5hm+sOtjEeNl1wQeVBiKrPOBY6bWwf78F2lP9Eq/qSCdzHQhHvXX+hncrrpraRz6GnasFQLr1aFeNSCe8vH1yB98wXiqYPqFbHY9TWgbkaDOF17OXLV0B85hmnQNzfjffVp9OdENeDplshrtw7StMilvJQlYvoBqiqfonH8Bi3ZtATMrz0WKMNmzZtUT+K26xWMU/SLa0Qh9St+3M5dGv4gsem0X37s7N4fMolNcabR2HX0E30akDvl62ES2N70TH07KMPQFwvY56Ekpgn5Qa1o6UN5wPD4WThGGyGnm/khTochJWLxFJ90dqBbpGi7n/XFKU6qp5Y6pj39mC96OnE3xjZvgPijiDWzJ4+dGrajvlvgbbqP+0xbE+jB8cPKC+ecr/F1bwWsHEfO7vRuyMiElXePD2vOT7WwHQGf7NfrSkCagUVDOH7ES1JEhGvhvNBSwodKn79yIh1cmqOqas5ZmoSXa77RicgDkRNr04yhbUgYmN/KKWd1LSvsY7HtJTHNsaU61Vss+/yNfTc1Gr4o0uHlkO8bBids7Eo5ql2vRnutwZlwlcvelpqp0M1Pl/u3NOoVtm6DQ18f0eCkYryQpdxTIYtXK+Ji2PStkyH0NRuXIs8uX8fxJsn0AnlV1V9VP0XVTlQd9SaroHfNqr8TNky9vevN2yDuLcd96vq6GOoaouqPaFQg8RTh3jl8DDEg4sx17XLdOzALtycWjsnW9FT5mpnpYjEIzhm+zrQj7Y3gL+5kIxOYE3rG8J6FQjgnNSW1E5s0584OoKO35HRAxD39+F8XfTxN1qDmJtOy2aI7SS2uVpXjnIRyWcxn9uC2Mdh5aBrSeMxScXwnEafb9Qc9NGJY9aSuXE8D5rdiQm79TfrIU4MYJv7l+GaJJrA/czlsQ3ViulPFEt5cqfRZ6bPBReKSBj7wndx7LrK3y8Orum6Ws21TWUG86pcwP6IqvVUPI45sWol+iyXrxiEeK6gnLXRBn/vZWO7jz0etzE02AdxrYrnf75aw2nnfzCEx1OvpURE6kU876wVcW16RmUVxFYIa7sdVw67sLo2osqV3aDuhlXe6bXv73suyb+wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIk4ZIdd1MJ7i1Mp/OqKfrz3vz2G9wGHPPNe8cIM3mvsenj9sFxS9zMrVUVLBu+7Dyr3W3YO77kONtjbNuVsyOfwnupaRTm2Kng/s3aTJBPocanXyhDbrtmIUATb7br4G0Elpasqj1tYycRsD/utWkAnguj740Ukou4Vd5SXZa5o+v8WAqeC/Wcpd0gkjPflz01NQdzegy4GEZHFq/Fe/a4BvK8+pOVsyqtTdzCXNx9Ap0RpJ3oS6jbmuYjIlg3PQHzqKnTMnXPaqRDre95zyr20Z/d+iMPqvvxwGD1JIiIdnej727MXnSrhqHIflnEs5HLY10F1L39LC36/XDbdia7STjjKhRGJmH6aI8VCOcwWGl+JburKXbh/726IW+LKhZFBB8vELNbd6QOjxm92DyzGF5SsQlcoS0s1X8WkW7A/o8r11tWFbpeJaaw/0YjpTZubzULc3YF+mYiaAGIxdHD0D6AHJGHMcziQw2KO20hYu6qwtg/04X75IczLsKoFtRrW1Q7lgAo28JlVlTMlpWtUFduUn8O5s1rF+ae9A49VLIHze9AyHSvBGu5HpYi/6VRNL9JC8Mhjj0JcUE47WzAnylUcpRUX81BEJBTG1wJqjafUPVLxHfU+/kYijGMhZmF/R/VCRkRcNf8Wi9i/v3n2aYgnpnAuXTo0BHFHB/qDYsoF5DdwtbpKfuwpD7Gl+uUPlbf62qsnpvNZrykMF98CUVbr2xkb+8pyce3ZrhbySeXWFRGpFHF9lM3jNnJ6Ha9+Ux+vgPp8UP+tQ908XsUa/mZS9fevn3kW4hXLcF16zDDOkcEw5tngIProip7pMhs/gGvRXB5rjSjv5CnnnADx+ifuh7isXKj5OrZpumgei7Yyrpf7A7g+qBSO3Ny+dSu2ZXApzotDK/EY7Ny2HeJiCWukiEhCn1MqJ+NzWzZAnOxDh2Z7CuuVo+a
xfTtVnfVNB2BrGM9pfFE+szDuZ1u6G+LCHM5Rmzfh91sTuB5ItZh/+1Nvx1pcHMXvjI1nIB5ahJ+PJ3Gbjof7Watg3wfDZhtmZ/D4loqYi5Y5XSwIiYxaJ6j6n3LVPBfB2DJPKSUexM9UKuj4KxXwfM2P429O7MfvP+3ieUBF1bN2tQ4VEeldhMe4t0/NlRl1Xqq+r0/3omF1XqCuXdQbXZeI4UaqKi/8Ko4n45pMBOtRrAvXlU4M21BtcDB868XnVj3/Hyr8CztCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaiEN22LVG8KMx5clJJ9Br1NmCPgXXM10u+pVAUN1QbuP1xKqnPBLKZRFU9wW7ykfjB8zrkxMTWfxOHVuVL+F93CUX71dOxpQbTDluAsoPZVum6yKg7k8vq/vs4yH8jaByYVQq2KZyHX0DnjJCZQumTzBbwr4tKH9gpX5kru1WS+gcSiqfU0sbuhhOPvEkiAeWoh9CRCSvPBxbdu6FOKeOeSGbhXg6iw6JA2PoOWpJY5vENu+z/+l//gDi0Drs37WvPQvfD+Hx6elBR4X46CfIKo/YU0+jL0VEJBjCMZxIYZ45yhdQK2Qh1sOps7MNYleNlekZbKOIiC3opdBjOpNBfwD5w9HuIl2TJmcwv3ft2gNxVb2fiqIzolRAd8bmZ9ARJSLSo/w7mR70KWqHk1Y6vVp9giIiHR3tEGv/Ra2C9bu7B10i8SjOxSIikQDOrb2dWKPqdax501MTEKeUVy8YwsHv1bCNoaB5fGwbD2K5hHmidLBiR7HNVeWDrSqnSkStSQo5rIEiIokk1hvtqpqewVoeCaHjSaddTbUhX9DeN7Mfajn8zVoNa7v24C4U2YJaL/nYdkutI4JhXOPFLXM5GbDxNe02rKhVoKP+DTmv5v9yEeOIkhAlfdPfGFDNCkVwfFTUemjHXnRu7j4wBnGmBeekgUXoye1U41dEJNOKbq+gcnYG1NpV12iN1hB78uJ+uhde0x4d7dn5w7x5vy8Rawbi3jiuzzLKndjWisdvxG8wzmPKhavmOJ2r9QTmTV15iytVHOeuylPtMRQRCUew3T0DvRD3LRqAeErl4VgOx+Ppp58G8cw45uXb3v46ow0//+mdED/6yGMQLz7uZIjPP2ENxDtGd0I88vATEM/VcF4oOKabadWp+BvlOtbYjg5c0y8ke/dg/fEF+zzXjucGNRt9dG7Q9I1mWnEdvHwlOjDHJ3AbxToe92efx/WVo/yKmQ51TtMg/0MR3GZrG7YpGUe3WD6H9WNqHPPdqylXqFoP5Gqmu3BDZSnE1Tasi3YXupHjUdzv2SzWhQP7cT+dKtaJerWBJ7+IawzH0S4/c75YCAZXozMwUsFx4+SxXo2OZiHe8qzpi7V9PEbVHK7pLEf59JXLbeQ3yosexu05av7o6DYddrPKYZfw0InZ1bIK4p5e/Hw8gvut63ZNOTgLyp0sIlLL4XlnYZfyeE5g/anlMW/KgmO6YwXWaVvNP9EufJaCiIiVwfldO7hD9u8nT+Rf2BFCCCGEEEIIIYQQ0kTwgh0hhBBCCCGEEEIIIU0EL9gRQgghhBBCCCGEENJE8IIdIYQQQgghhBBCCCFNxCE/dKIzg2LQVAileVEliLYDKAuMxUwRdt1BmaYpzkV5YM3BbbpK1uz5GPtKeu8HUXgsIpKvocTYdXE/Sq6SQao4X8TfHJ3B7YVs/HxLwZRQ18dQxl+eQ1nk4o5lEHd1oeTYSqEssjqLQspCAds0lzflnFNzKHPctVfJVbW5eYGIKHFvPYCy03IMhY8jStS7/qFfG9ucmUYx+Oj+cYhDAS2IxGNYdTCv9EM/ejuxrybGUK4qItISwVzMZ1GOunVkBLfZi5LYUAh/o3cA5Z19Kt4zhvJcEZEtG/C1rl4U0e/aox4SUVfSaiWad4M4nrXQNRLEYykiUq7gd1pa1ANWgkdGCvvqRj/QAY/B6L59EI/swXjvdhRRd6RwDC7qQGn+gT1m/m/4DcqrTzk3A3Fcid0buPtftdjqQUU1JVN21YMOHF2fKjh/iIgE1RNickrobCn5v68exjB64ADE6STW4biaW3NVnD9ETBF+OKrE70r0Xlf7aamHUHl6/RDAOBI2641KfSmV8TfCEZTHh9WDeeJRTMSIquNz6gFFc1mzH5JRzG1LPRDEyP0FoqwfHBLSc75an7lqvSWmgN1Sx0Q/c6umhOt19ZOpONaWvBJp5/SDSDxTeh8O4zFKhbERgQC+X3QwJwKeevjZFB7TbBbXE4mkudbt7cWHRA0PoZA9qedK1eZ6XY0NtZu+YA55vtkPevzp51LoB1ksFOEEHvSlKZSZDymZejqsHlIwh/OTiEg8g/1ZDGPeeCHMy1NOwgcjdHdhG3Zu3w7x3j34YBI7YNYa38HcjqqHB7z2dPzNSVW2f33/fRBv2bIYYresvpAwxf/ZIuZyQT08bvsBPFcoephHRQc/P5HF7VWjOD6XL8G8FhHJdGPuT07jb55//mrjOwuFU8Xjlp3AtXy9pB5ClMBB0tqDD3MQEfEjKMLvWoZ9lPOwXhTK+JsxwW1OT2MepcI4P/QtyhhtqAs+NGrOw20U1QPgogHcpnr+kKRacDw5YeyXiaL5AIKf/wj3y/P3Qzwcxu8EfMy9qf14TlSrqLqtHmxVqZvzj6+eEpVMqbnXPzILyzdeejbExV14vB79BT4cJlDFc/iSenCViIjrqgfhqMVOOo65nlA1sD2Aa59MXK1D9ENB6+aDE+xRPGbrf/owxLvXb4T43DecCfFxxwyqNuJvhOdwbFlTZj9M78G1bWUzrl2LY/gQiop6OOn+XBbbvA3Pk4Pt2C/xxWbdPfb1x0MciquHGrnm/Hwo8C/sCCGEEEIIIYQQQghpInjBjhBCCCGEEEIIIYSQJoIX7AghhBBCCCGEEEIIaSIOWUzW14leopYw3kucjKNzw/L1/eSmIMNSno2qcjLYypnSru4/TyTQZZGbw/vy08qHla+Y97jvHsXvFKrqnml1q3F/HLssGFLut+ksxFV1X35IS1xEJN2CPqAzjz0F4twB5RcqqXvTO/De9GoJ21go4HXZSMj0bQz0YBu6urohHs+Z3ruFIB7HdkxkMe+278X7yzc+/xzEtuHhEXGrmAflPPoBAsoJVa7iffnZPMb5Ijopdu3bBHEihn0rIrJyeCW+oLx4Dz94H8RLhoYgXrFyBcTt6r76iPJDpVtMF5ztoIunWMU8KZfQV1LO5iF2XeVoiWFeFXL4+ZYUjscX2onjo6a8lKWS6eM6cmjvwEv5L34PP4avQ/2CaoNydFiH9G8w+B3PwzGlXWL5Eh7nfePoiBhXseuim2RRl9mmzU+gW7KrpxfiFaeepr6B+Wwr94hRVtVPNlKV6PnnJbEW5t+3LHXMw2Hcd+2icpRLrFpRAhoRaY3h/B2ysUOCNo7dSk3NgxGca2tV5ZfNYQ0NN/B4aZeYpfwkrnKHxaK4jbqqDamWDMTRKLbRsky/Sb6AtbpeU4415azT2xTlya
mqGunWMEfCQfQXiYi0tKGjqF7H8ZcrHpmaV1auxKryXVmq1ui+aaRA0+POUwNVx0U1l0Zjyhmoc6auPEZVM/cdC8e5r34zbCsXjzHM8fNB5fLR28uXcB9EROa24ZpgahrXnSnlNVzUj57i1lb05IQjenypmu44onFUuXPUjrq+OV4WgkINa086gLWqPoW+rL1Z9MeddeIxxjbLyk3dr/Y9GsdjdkYGf/PYTnQGlzz8/FQE60RpDtsoIqIU2hKs4XpoyR70FMfU2ratMwNx/bmnIdbevEc3Yo6JiGzZj96wiqqxo8pPOzGNfqfTXnMGtjkzAPE//b+3Q1wrjxltePIJzPXx8R0Qn3yBefwWioilHNllrIGtPeiCHh1H33WugrkoIuLbWyE+8Thcq7/2QtxmIoznB/USxlu3Yk3LzeIxisXMtb0bxrG8L7cH4vYUzmN9rcrz2Yb1JaxqRVH55HfsMz3FOx/C84taHo+7NYDvlybwvKp3CTrVYhnloLfxWNkB01EfV962mvIFhmz8jYXiuJP6Id6uXLpzs7gGaI9jTjgNfH1TeVyH96r+WpbBbQSVtzhk4TqztQXn97BaQ7oNzjWias2WSOC8NDeBbdzy03shzoydAHFXK54zOsoX79XMhX2ojLkZUbW7lFVudjU3uOoZAtkprNvxSZxb6uq8WESk+hp0eQYGsW9d8/AdEvwLO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpqIQ3bYtaXw3uRgLQtxRLnC4hG8N7xaNm/arSt3UiaDng7t6qm5eH2xXsd72ONJ9MXsn8T7wnfsxnvmRUQm89iGklJ/LImhr+TSs0+CeFEv/ub3n9wJ8aPb0engeEpsISJBWzlQsugoKBVwP1Ip5aBztVsG3w8rT1jcMh12jos7vnigD39zxrxPeyHItKFLZPte9EMc2IUekHhIuQCKplukkJuA2PLwJvZsHh00WeW1CEaw/zq60dkVU67F/sETjTYMqGMy8syjEAcszJO6i76ByalpiI8/fhXEy5bjPfQDvZ1GG5JnvAbiZzej56JaQYdBNYT95An6BTwfc2hsDP0p4Yjp2ki3dqlX0A9QLptOoiNHI0vTi336EBx2epOq5uka6Av2seGsM5x2Zhte6pXFg4MQx5V7MFdUx0S53Z7bi+MrFjSPe1C5KJ5/5H6I2/vRXdm6CPPZUg4VS8mydN97tnnsGrz0oli/h5Lw98G2sT995eCIJXAurihHVziBrhEREbeIdVGUr6SnG/vbmVadoxybiTAe06qqmeke9LSJvLSPsqMba1S1gL8ZUPNWSPvmlNerUjZdYpEwfsYO4/w9p/qpXse6G1DzZEV7cT2s6zHtwBORoHL5Veq4n5NTOP8vFDXldLRcVe/VPOnZhzAgImpcBjC3PRv7M6hWpPUa1ppwEPszGcO+LNVM166jamZVpXZV1ZKIjY0IiHLWqZqr17GOmC44PabHZrBG7q/ifL59N87Fncqp1teHLrFkEv1E0YiZd75y9dV95bBzj4zDrjOAbe1X/d2iHM/rZ9G7Nls11/VLlBP1sgl0AIeUc7N9G24zsuMAxK6H43xQpX7INceCrXLVVfWr+uunIE4rv5zXoZxRWkKYw+PVEjB9mdUi7meb0jXGfeVHG0MXWf8q9K+llDf8tGF0cU3Mmec3YwWs+6USeqx2bttmfGehyM/iHNHSgbVgOod5EE3icS4UTVdk3cHjsnkjnqMcGMWxnUphn3Z349juGlQ1bjce072T6IYTEYmlMFfaO3EN19qi/G825n8wrPxlNp7TODWsR169wVzg4bnXquNxnB4zhHEqjvnf2on7UCrheKjVsF/y0+gXFBFxa7iNWFg569yXuQh8hUinsRZMqfO5kI37mlQ1ctZrcF7k4zENqzXx4hRuMxZR/nB1KlFVc29eud3CDdzsfgh/M25hu7s6MG/CQeWX24vXSw5M4FrIUWJQ2zZdyaKeGxBUaxDtZ6zmMO/iau6cKSjXonJ2p1NmG5IWrk1dtc6p/Z5px7+wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIk4ZIddV1s7xOUZdQ+8cuIUSuh8KNfMe/2DFt5rXFK+GH01saxcL5lWvC+/pu5H37kPHVozOdPR4QfxPviAcqy0RPE7XUF0uUVn8P7n5S09EB9ow+2NZ9FdIiJSLeF+Pb0VPW22clfUE7jfkkb/kCgHSzqN9+2nPPMG6koNj5dfy0E82Gl6kRaCHTt+DfHmHdsh3n8A/Q1uHv0OqbTZ7pXLByE+btVxEB+YxHv3d0/iNjt7sL+XDKMfJdWOXrbxWfy+iIg/hV6LPcpZM5lFp8GqY/H7r1+BzrpiAdvsqVT3a6Zb5PnH0Ju3fOVJEHf3ZyB+7NcPQDw2jjlSryu/Uxl/c3bW9CDGkvgbnvIoFUtm3x05Xt6/b1iH4CnQjjpRY9Pz8UDWlUssrHxYlvGjplvEaJaqw62t6Jk465xzId6wfjPEu0bQe+Mqh8v2AHopRESig+jIdLegx2bD/Q9DfPpb0G8Wi6OvRyuEtG+ukW3LeQknofb/HfJk+QcyOonODJ0jiSqOkaSqcZWaOc9pB0p/L/piI3Hc14BSf7bGMc8ycdxeqgdzptpAELhVOS0zGZzHqso3WlFC2ZDah3pO1ZuqckCpvBYRCYTwtUIBa5Kj1DB6TdGZwbm0rQX7cVseHbbtrfi+iDHcpEU5Cb266YZZCBxVezWucrVVVN8FtYBOzHEZtLF++doFFtJjTm1TefR0kU2GG/h5Vdn2VFxX2zQ8OcrR6av1mKucdW6gQV3R87H6iKX8Zk4dfyO3H8fG7gO7II4o51Q8rlxNIhJVPsWImjtCId13JxjbOBwck8K2JqanIA7Y2BcrFi2COD/ewPmoEqtf5Uk8rOqd8qpZah7Wq6eqchJK2PS0htRBDqq8Cdm45q6nlCuxhPXMUfJFV81P3ba5xjs/prxfFh5ztw/XstFduyAu4cdFlE9w9THLIO4tmW3oVevCFcM49y/rMN17C4XlYR/aQeWoK2ch7la+6oCg201EZP9+PK45H8ddbhb7KBjF/J0uYpxO4RwSTeJ80dKO40FEJBbButnd2qve13OjykV1Ll6v4/mIH8L8z82ajuwWdZp67uvxGkJE8Fy4twfzIKzauHUDjp+ZWXSqVXKm181Xa9G0yjW9Vl0oYqpeWKod+dksxLZa+wQt85kAvproHAf3tV7H+p6Iq3qkrn3k1bl0OIp5l0qantRQGI9Zsag8wi7mZVtGrV3VGk5rVetVdcyLWLdFRPJ5/Ew8gUWsVT3rYCKH4zEaxfnI93Cdo6+V7N1jnt8M7cUx3DWIY9T1lFP6EOFf2BFCCCGEEEIIIYQQ0kTwgh0hhBBCCCGEEEIIIU0EL9gRQgghhBBCCCGEENJEHLKWp7UD71FvVffR2zbeH53NoXOjru9lFhFb3aDsCd5T7YeweUl1z3RdMN60E91vxSregx2Nmp6JaBh/I5bA+
5dbA+hfeHL7OMRODb9fTaPDrrMV22iJurFfROoO+gBLNbwXv1hSPg0H22Qpt58WNoVsfMG3TbdPSPlnHHUvue8egpDrMPDYA3dBHOxeCfHwquMhjtUwh1Ydu9zY5soV6n7yinKH2Kr/BZ0qwRAe00AgA3HdwTwr5s377NPK6eio/t0zgeMnmhzF7yt30tLhQYh9dS2+nMX7+kVENj++Hr9Txr477sI3Qnz8CUtxm79Bh92O7bsgjivPWDqDDosXwBqQU3WjWjXbfcTQwqVGYjT4vDlmfOVN05twfMyLbdvR7VYuY007ZhW6DCPK+2FrmVsDPB+/46lp4czXnQ3xnhHMxa995WsQO8pduGcya/xmJI5jZLlyfW558DcQdy7C3DvmdadBXBLlO1OiqnCDfpgpoSuuWlP+DOUVGepGV+XhoqpcRzMzWD/iJZwv2lT9DzWY1qNJ5Qop4dgtKF+cTsyAmnOqeeyrzhSO9S3b0NEpIpJUbpBkDNcQ1SrW3dbeNmySqzxfyvEUVbudr5h+mkgEa/fYOHr1xMM2JdMZiCtlrEdOHX0msSiOpVRCS6BEZvK4FqpU8XimkkfG6VRVeWSpMeMpr5d2KzpV0yFUVvU7pBxzAeWHiwTxfd/CsWDpWqX8c76Wt4qhBZWSi7lcU+tOW62PaqofQr5eTynHsN3AlazaYAfUGsxSTmj1T+l6JvFUfauVMadyxQZuJuXmkyp+Rx9vkXeb2zgMzOxH72PVwXaUA9i/pTSOj1jJ9DlVNim3cQD7w0lgsbAD2DcRVYMtda7hqBxwtVtRRHzlBNTHUMfBLpzjUlk8xhWljKotwTVgq2OeYyUquF9OFnO/MIFzYGk/umMP/OYZiFtWr4B4egxdTbU41mwR0wtamsY1Xi5keu8WikIe/VSBIvZ5Sp2D1ktYz2wx16exCM5LtqV8r60ZiF11jlmuYZ+WxrF/hvpXQ5yOmf44qWN21edwzLSq81xRx6BUUe7oILbRC2C/7NxuukNbu3GNd/IaXP/HBM/N6q6aF4s4xpw6nnvXynjsIgHz/D6WwNeMsmu/uLf1sKG8jiFVrkPq/C2TRndk3DP9cXtzeMyqyhen10OhEOZuMIJ95aj1wKIBPG9Ot5tjfWoaXYd1tQ1HrdHqyq0eCeF6qVJWa3K1/irlTBdcbgbXtr6j1nSdWDe1e71QxPmkVNUecRxblSnTzT6ydS/EHa9Fb2cwZF6DORT4F3aEEEIIIYQQQgghhDQRvGBHCCGEEEIIIYQQQkgTwQt2hBBCCCGEEEIIIYQ0EYfssBPlqLNC5j3rv00kiu/HJWF8JqiuF9pK3FFXbpFILA3x1BjeO1yaQjfC0ja8z1upYkREJKru5V853I9tUl9yArhf2rkVDKATIhXG/W5vHTbaMLx8McQje56AePNWdEaFg8ov5+O9/466UdwO4n3h2iMjYrpgPCUxsqwjc213Yi/6415z4pshjkTQ39Cmbg3v7TOdgTNZzJu929ERVfPwXn7bwnvYA0HsK9dX99Gr/ncbuH18F7eRTHdAPF1AH4Gt8sgz/GgqVmqGZNTsh8G+AYijAdyGLZhXxx+HDq9MJgPxj8u/hHjsAI6N/i68j19ExFXunpByhuRy6CM4kug+t1SXa6eTr1xJIiLGMFLeoL2jeyD+yc9/CnEuh/XlzKkJiM9bez7EkYjp9dD7oS0ejs7NFPozLv7TiyHevgXdoXf/Ar2TubrZD5tHxyButdAzEa1gRz12B+ZWsB2dLHZ3BuJiFvsp1MBtdSC3D+K5PH6nUsHcHHrT1cY2DgddbdjfTgXHYSqJx9R30AMSCJq1OhbDOUCXj5LyDtYc5RZTgrhVK5dBPDaGfplq1fQ3dnRirXZcdIV4otYMyrtXK2FeBmI4dgLKHVacweMpIjKnvIXpFqyLBeWLdT1sY0Ste+rK7de/GGuqnkdFRGZzeDz13Jtpa+AkWgBKKt+DWqTmqeWiane5iDkgIhIOY3+2daMHJ6aGpa1qZkDnrY3HY24WnTnlgjlfLBlC722+jnk1O4s5EYngmlB7eCzlXTXmYrPcGZ/RSuCw4H7Zymvl1LUzTR0bNbH4yt8sIuJl0aszPYruOPGPzBpvupCFeG9Rrbk97IuwhZ7oeCuunUREppXfqkf5rWJqfnFz2P/VmvLideBvJFZg/as08McVpjAXI56qV8oTXZ1ULqQIupasDM55QbUA8XLmCU5sNXrxJIzbiE8oX/MonmtkN2/H39iDYzyl5qqZjOkEmx7DvjkwgfPuULjX+M5CEYgo13MFj3thNx6T6hT2V1efOc8lYphrc+UsxCl1/tbWjSctk5PKu+biMXOr+PlKwfToRSyscbbybM9MKX9ZAmvatHLUlgsqv4O4vb2j5qWE3kVYV6NJHA9B5Vcsl7Hu+lX8jUX9+Pm0Oncf223WvERSbdNWtfzFL2McNnLK41hUcWscx1U0jDlRq5reTi+Ix7BkYa7OVpWfsQV3PqTORVoSuCbPpLEvU0nTzzuXVXmkzlcCgrncqeqHplJR59Y15fOvmfWmUMA6WFDPT4hEsN2ucvxPKa/lrGpDpe6p2PTo7R/F6xb6eHnB3++ZAPwLO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpqIQ3bY6Xv7rbr2cqFnoljE+9VrdfPaoGOjY65QwnuHcyruH8Dm+g6+v6QD70Ue7sN7tEsV0yfTv+JEiMM+3v88O4f7Hcu04wam0Scw0IM+hmwR76tfesxyow0trXEVr8I2KLfF7JzyMym/me3j/e515W/yzNu+xVWeKXVbt+HnWijiyTaIQ6oZ2Sw6vCJtGYhLjrmzStUjsVa8j167RqSC/eerUVOpow8iGlMOQQu9CSIino2fSbaj3y3so1cvEEOfiR/GvPMsbIPlaoeFOdRDCbyXP6acBE4V8256FP0l7Ql0Lf3pmy6E+DfP7IK4UDb7oVKdhLhaxrqSSWWM7xw5lHBJeYNmlU9pbhaPoYiIFcDcGpvE/H30N7+G+Mnnn4E4N5OFuKr8SquPPw7irk7T7xNQuZDLY+5ks/gbg4vQO9W3qAviK9//3yDeO7oD4sefedZoQ7WI+bttHzrt4j34/vRzz0Fc+iFub/h1J0M8W1B+05LptqpaWYhrykXheUem5iUjuO+rhtFxGovjfKHH9tjeA8Y2HQf3LZHEY5hV3o+AhbXAUi62/Bz27+QEOjvqpmJFRDnqCsqL4/n4pVIJ586CcjS1KM9LTXnAfMuUiQWUl61F+RljcezLYBCPRSqFa5aAreqwmlxH9qA3TETEUk7ZcAC3kS81kO0uAK7y8WktamsEnTYtyiFUjjdYTqq5L1TA+h5VrsSuLszLSgz7u+ao9VgU2xCIYxtFROLKU5hJ4Bqtp0OPe+WoUWufknp/bBLnxXoxa7QhpHI76Kjx5mE/1es4voIB3E9PsF/0ekKUw01EJLd/F8TVWWx3oWC6eBaCWbUgGythXajnsA50dOO6wx/AnBERieg1XQ5zO7gf1x015QErKLOrm8S8
Ci3Bmhy0TEdqIoPbrG9FP21defIqys+YOudYiEtZrLGyZTPGToO/vziA36l6WYhDPbju7Fl7BsSRGNamma04t2dK+H56ienM3aP8pjHlSg6FTBfWQmH5mBe+Wut3tuD6KVBWbsl8Ax94BMdirYJjcWoK89kP4dyaCOHavVO5n7vasU2dGTP/pY7HJRQIq7dxjOWKOB72jY9APLYPj+GM0pU61ROMJqQyuM2xqY0Qpy2safEw5ntX3wqI+/pxTFsO1sD8KrP215Rb0lXnSaUGfvGFwFNjv67W4G1J3Ne5LK5fJ8umn7djCZ4jtiYwN8fUGrulgvNgJIifb1fn0sk49ncwYJ5bt7TgZ/bvwdpeLGKu67m2oOpwpYSxmiZltoG3M5vHD3k+xsExrInhFI63gnKmzql1UdXHfajq6wUiUvFw/DnqXMKtm+fChwL/wo4QQgghhBBCCCGEkCaCF+wIIYQQQgghhBBCCGkieMGOEEIIIYQQQgghhJAm4pAddq5yNPiuuvdfeT5iUbyfPJnC+9VFRPZP4v3jI/vwnvegEpaFx/dDXBnHzy/vwnuwLzgXfXE7Rk2nVKoffRgd7T0QTyg/SSaj3GAe/mZYOW0mJkchDkazRhsms+gcGj2A992HQth3mRa877tcxn7yg3gd1rL1feOmb8O28DOWcv24R0bnJL2LhyDW7apU8N7+8RymdDhjOrzqjvIzhfAYlpVbqe7jbwaD6OlwAhhrZ05Xe9Zogz+DuV9TDkHLw9+MxXA8qTQTT7k4XBePsR1SXxARP4C/USiia8NSfoGI6vucGhuxOPoGz3ktei227NhttOG5jehVKChfTTiEToSFRbuNtMMOw7kcuhEefOQhY4u79++DeCqXhXhWHQNbeQajVaw/E9P6Nx+EeHBwwGhDJIL5Oqrqbr2GfoVyCdtYyGMcUrPIqlOXQrx++wajDbU8FpR9ytERD2MbF6UxD0Z+8xTEgQjmpt2HuTjnoAtDRMQYET72dbV6ZJxOSeWnTMTxmIfCWK/SGdzXmKnUkNlp9Cs+v2krxI6qN5FwEuK2BPpR9o/ivDY9hXlYccxxm1PeO+2A9JUSJZudhVhrP2pVfCEex35ra08bbbDUb1Ydta5RrpFyBeu0r2qCo/0mKmfcBnNtTB1PTfBIOZ0c7M+0cgRmlKNu9AA6ucph019VVetEawzngKF29C91DfRDvHk/rvl85YuJF/H4pBNm3m3Yix7QZA/OMckIjqeRrehaclXuZ5bjvJbsWwZxcfcmow2BAta3Fh/XGKVCFuM8uk3DIRyPuQrmeiyD69j2BkWgoByPev7Sa6uFYmAAHan2CNaWmNJMuTUcoxHL9IjNKof2I3tx3u1TXrFjBH+kqhxTZVXvak9hjpS18FFErH7M5coKPLcoObiuP2EYHV5FG495WTkIw3Pob3JazLpR26O8eeOY+6EuzLNSN47HUBvW0NYL0BWbVb7UTIe5zjw5uQTiux7Cuh5Rubug1LEPw8rjlVQ1LeRiDXRqZn23IrjNeBS3MT2BueUqDdeqpbhm62/Hc6CgcqBWimb+hwTPF7Q7uaDG0JYRzJMDWYztuvKgZ/E323zTBbeiFeuJo9ystaDywdZxDaHrUTiG3+/uwPP7jhb0SoqI5IqYa1XlKU4ElZN+gQiqv5UKWcp7WMZ25vI4X5R9UxJ81uvPhHj1seioe+g7P4d4ahSPWW8az1vTKaw/tRr2f1U7b0XEc7FdVbVGExcXedMz6pqMh/vtq/VTsYDfz86ZDjvXwvFmqzE9No1zQ28G91uUBzfv4VxRVWtlxzJrXiCOfefqudb6/S6o8C/sCCGEEEIIIYQQQghpInjBjhBCCCGEEEIIIYSQJoIX7AghhBBCCCGEEEIIaSJ4wY4QQgghhBBCCCGEkCbikB86kcmgRM8JonCwUED5n68klXP5OWObu/egtL6gZP+xKF5PPDCCssDuKMo3+/tRbprpQ1lnKK+s1iIiURQSLjrxNHx7TAlwHRS0u4L7XSxi3BtHoWrNNdtgJbBvFyX6IE5lUFabn0ZR/8Q4CsXrSsJbqSl5um0KDxMRFIDWyurBF2FTbLoQ+EroWFcPZyjlUQgZUQ9nyOfMB43UKtgfpRxuI6QEkakESiw7W1Hy3tKGEvHODLbBDZry83IE92NmCR7zqosyX6mjNN9VgnBPybhdG/PMavDQiUwbyrQ9V/2G6ut0GvcrrMSZWfUwAr+OOXTSKsxjEZFMCvv2pz/9JcST4yiiXUie34Sy8qCSl+qHM8xmsxBnC2bN23MA60m6C6W3baqP2zuwfkzuwLzY9Bw+0OGuu+/C7bfg9kREAkHMhaoSENeqWMPuuBPjkPpnnr5FKKqOd2A/nXjSMUYbnn5oC8QlwXzdOq0eaOLiGGt1UIi//bEnIc52Yj2bsc26G6rhZxxdW0rqQRV/aWzisLCoB/tTP7igNYPjNqBqZKgD3xcR6enEPPvVvfdD7Hm4jUwK68nYASV8bsW+y6RxDstOmBLqqQmctzKtKPtNqAespNX7qQTW3VQa62oiiXnnlM027NyODz0IKIF3SUmSa2qM16p4LALqwT2WyuNY1HwQg6vm53odRc31qilSXghsJYzuSeIxHZ9FQX1d5UgwhWNSRMRWuenUUQK+5OTVEM+q/qu1opg/oOTcdgvmYVbN5SIiefXgEE89RKdaUfOc2uZetS4tTuJ6a0kmA3HfSnwohYhIdqNaJ45iHs6OY5wr4m+4DubZXBn7PtaK80RqwBT5OyVcP1eU2NzWT7JaIHr6uiHOj+KcH2/Vxm71IADbfMDGgSnsv6898zzEK9sxtz8SxfklruY4v4g5MLMBHzox02mu8XZW8QEPNfVgir4VuOZb3IrbqB3AOTCpHvBgeUronjf7IWLj/J8rqzXezp0Q+/uxRs+q9VliJT4gpG9oGOLKGLZZRKRTPWTnNcfhQ1oGhnCbC0lLGutLNIH95QexTxP6PNg1H0zlOHjcC3PY54GCemhKUK3Ryupcq4wPzrOCOLZdB9skIhJRD6mpq9o+h2VY/NwqiGN19SArH9sUCeADVcayvzHaMBjEdcyi6HHYJls92KWEY2yuhvnuzeB62vKwnmUSGIuIeDbmbz6H83c4Ya6VFoKIj3nX04nj6EkXx9GsYA71rca+FRE581x8aM0xq7C+tKuHRt3xH7+COJdVD0Iq4ridmcL+rdXN3NcPvMxX9cNO8Ji3qjkoInh8XPVgi2we+6HmmNcyQmGcvytqfTVbwTVGSJ3/lAPqYT+i6zh+v+Rgv4mIBFTdjKuHYbk+HzpBCCGEEEIIIYQQQsgfPbxgRwghhBBCCCGEEEJIE8ELdoQQQgghhBBCCCGENBGH7LDLZ9EJEaxp75e69qd0GMGA6ccoKcdTawrvmc6o+37Ls3gPdVcfenn6T1gL8XP70PGwdbtyPojImb14r342i5/pHj4RYlvdS16rotMu4+P9zbkJ7LeYuodbRKS3TbXBVY6OE/A++3IW7+1/+Oc/hnjfXmxTwPDPma6Lsrqluq6u5dp1s90LgnK1BZW
3I40pIgNp3LdjlmaMTSaj6IwIqNwt5rIQV0qYp7EE9sXK5Xj8Bpagk8MOoVtRRKSgfGcDvb24zRH0BbW04Y62Kb9TULmYPHU8/QZ6mmgCPQqOcvlo1WHIxn6qCPoH2jvw3v+CcoAVs+hHERHp70Qfx6VveQPEt//sbuM7C8Ujv34E4nIOXQYJ5b25+OI/hdjxTX/Vkxs2Q5xOqbHtoeuorwv9PvVx9DHNFbGPS9vQDdcaMf9NJpHGdieV/yiawBqWzmDypFsw91pa8LjHkphX555/utGGuSkcU889hy4dt47jeE9WefRCWNOCY5i7+VmMnZTp8rNj6IYZVY6gnDreC4Wv5pCIqt/am1YvYjsjAbO++0rM6Xqqvtv4G0bWeFjzlixBP2yHGseLDphej0gEf6NF5WFAtXtiAn2PZ56OftmePnS0OD7mSG4a50ERkdkplPdMZ7HvggEsep0d6JXyVGH1XHSupJX3bXbOdKr5yrlVK2O7tTt0oWhrQQddRxLj7Ax6ddqU/zei5a9ieiG7hldCvLR3AOLn92AdyERwXnPqOP939WQgtjtMn1NReXXsFG5zdhLnpSVdOH+XwspV6mLOzMxintm9i402LDr2DIhH9+E8UFFusZAaC76LeRdQ47GaxfXCpJh556j52FZ1RKXygjHn4pgM+jg3hIJ4mlJTYzTrmK7KGbWgdXzcRi6E88FoCOesjI95W7Mx9n1c+8x5yncqIvsmME9abFzDzaop6cejuI5f2Y+esGG1BmyPoBO4uAvrpYiIW8Y2+C7ux6zKXZ1nNeXgrM+hX7D27DaI42K6maqqTiw5Fr2V9f3ob1xIAlVsr2th/9R9HGcltXulgrlGCIXxQy0W5lZEuSLDjnK5BvB8IVBFv5lXxjVhLJQx2iCucquqwd2bwt/oyWB9KrtYP4ozOMZGJvCYtQbRESkiklaetsVduB+bxnZAbFu4Fg5Z2PfaH1spY1xOPm60wQ0rh2MFx1BenUvL8W82tnE4KOVw3+wI5kBV1Ya+JThPvvHP8HiJiCxbievZcAzzcPVZ6Lhz1NWfh776E4jX78C52KriF1ynwTMBwpjbM8pR16bcx8EYzsVl5aDNzyl/rLqEEwiYl7Cq6prBXAXXVyU1/jaNYg3cM4Xfz6vnDnjKP1dtcD2lRa0bk+pce6ZB3TgU+Bd2hBBCCCGEEEIIIYQ0EbxgRwghhBBCCCGEEEJIE8ELdoQQQgghhBBCCCGENBGH7LDTWhy3jPcW++o+XlvQBeBapkRrVmnRcjm8N9iv4r3Evcp5c+p550G8aCXe1/3D2/4d4p6E6TcJ1PDe/NGdeF99z1K87zvavgzihI/3XJdm0CUS8/C+/FrZdF1M5fG1TCf6gdp7BiEuF/B+dxtDccN4z7alnDn1uunysxz0AVg+xo6+4X2BWPvaNRAvPRadgvtH0dvR34c+uRXL0ZsgItLT2QVxwMf+yeezEFfreHx0fyYTygmWxPv0A2HTnxVSLr5yEe+jP/k4dEwMrhiEuK4cNr669u54yrnSwGsVCOExrVeUn0n5h2zlArKiapvq/aryHgYD2qUo4tayEHcqB9FZZ59qfGeh2LkLHQ5zE+jaWT60HOJYDPNg/36sBSIiu0f2QJxMYG4YuZbD+lTOKr+VysVlw0shHu5El4KISEr5DycmlEu0DY9j7wDuVz6HbQwrlUXUw1rf0qANr38j1u4Z5Scd34d9N1XFH4nPKZ+p8uoFLczl/hTWBRGRRDc6gEZ37YK4VjI9UAvBnr37INb1JZ9H/4X2fNXE9I26QRx78RT6yWpl5RrrxHkrYmMeDi9Fv1JEtcEOmTUvrBx2sZjy5qlc9svY/9UcrjnqaWxTey/mmd3AbbVkAP1kkSjmUa6YxTaHsUYGLYwdVeMCQcx9t2rOtQHlvvQd9LwkE2auLgRLevB333bR+RDv3jkIcb6Cx6NaMffVqWJeDfah381XTkC/A8fknFqrFEv4m4s6cC53fNOrUyjieshXXq6kj7ke8HDt053GXC5O4FxdGMV6WK+abUh0Y971rT4bYq+ONXhiP65DSwVVi1QbWxKYd0Exc19p3KRewm3oNfxCEVbHLKjWNh3Kr1kLYE4FG6xnSxXcpnblLhpCJ9RoQfWX8hSFlYfNUuvhmodjWESktx2dUkE1deeUO9GfwTzaP411fi6ONXZxVXmwpkyHnai6bju2eht/o+RiX/rKuxcvY44cGMW5Km6ZOVR0sA0ZVRM6TlhhfGeh8CbUmjeGeVOzsXaElXMrHEKPuoiIXcNt+Mqp5anc6eo7CeKQi57Pyf1Yf7TT0YmZzlO3hvlYLmMbojE8rraqDekMOrXDLcpN1on7GFaOLhGRXAXXy+Pl5yBO9mAuRl2sw9UKngsEXHTW6no1NvO00YZICNc5bW0nQGzXzWsCC8G+aRz7j2xAX3bnMK5l1l39NoiXHou1RUTECmINq1ZxbNdqWO+PW7MK4t1P4Zxz93/eA3G4huuWetWUnnrK/ZlW54gDvbhuFLVOL6i8nVV1PFvFubvRX5yFQrjNfAi3Gcpgru7dh88ZGMvj5zsW4xpj/z6c/526eW3LtrBO5GZx/q445nxxKPAv7AghhBBCCCGEEEIIaSJ4wY4QQgghhBBCCCGEkCaCF+wIIYQQQgghhBBCCGkiDllMpm41Fle5Wywbr/0pnZX4ZdOrYynVR1s73lvcE8f7oU8+BV0Hq85EZ93sBPpNIg56QZYuQo+IiIinGtHTha4Lp4JtKGXRBVBTfoZ6GbvUFbxHfodyPoiIbHjuNxCfeQb+RnsPehJyefQ7hZQ+oGMQ7zX31LFxa+a9545y7cxNZiGu5k1HwUKw5oRjIF79GnTYlY9DR10ijS4r0yYj4ivPhq3cam0J9Oj4Kpf1VW7Pw19xlPtN6mbuV6vKCbUM3T6xMB7DchFz2dfSCeVW8tWA9ZSTRUTEVf3gKZ9QrYxtdD1skx3U3krsmfw0Oll2j+w12vC6s14DcamO9/rHtSdvASnOYZ+XKtgfkTh6QOby+Pnde3cZ28yo/HSVX8mqoNvgwNh2jPdP4edt/Py6t6PrwivMGG2456H7sJ3PovumPY3+hbFteAz6lYdqrj6OPxDC+tTW3m204fiVx0FcuxTz99//7VsQl/PYT/uzWOsliG2uKo9MYQo9FSIifepYhJVTraMrY3xnISiV8Zh6ytVSU77Rtk50j3me6bSpVLAGDQygw2njc1sgDqmx3duD82KnctwF1DwaMnWVEo7gMY6r8RPQns0y1uFyDn1zM5OYZ77yDcUa1A79my0prHm5Eo4X38V+i0XRJ2SpvNN+2JaYOW+6qm9blJsqZCpRFoSWAPbfa0/GcX7aavTP5EuYp3U9UYpI3cH+dUrKyanq3VANf6OkPDmFIn4/pDyssypHRESiQ9i/5Sr+pp9BH9Do2AGItynv6LGt6LTZM6lqrGceQDeKLqXkkpMhPnt4EOKZvegT2vLUkxBPjOF4TVjoixLlLxIRqbjYLkutW4JHKPFiZRwj+x30N3Wpcd1azkIcnMDjJSLi5L
E/Vh2LXujFK9E/O/MM9mevdm4rL1JI5XqsYPZ3UPA78TjWjq07dkHcUcRtLh3Eur4vjLVofDvudyxvzvWWGn+WyoGK8gHW1LlCrYjvz7hqfRbHOTRfM91MxSq2YWYU1wvBxVjnF5JjF6Ej242jI8tVE1mvqhVRtYYQEbE8rO+Tk1g/ZlSfBqLoRa9UMhCX65j/0RiuM2s1fF9EpFzEtXexiPnpuq6KsU0tynEbS2LujqqaVwmY89wB5eVOTmMeBFpxm/XcLojjNtbt1tggxMEw9rNTxc+LiCQieO68qAfHfUiUU22B6BnGaxFOEtcNJ52C57nLTsQx4vpq/SsidRfzoKbWLvpBBOEkzp2Lj8e+KfzoXoiDdbVWKppjPawu/Jx0DDq1B4cwnivifhQncH4fK6maV8I5KxAwr2UEglijkj1Y8173pjNxmz/5NcT76/sh/tM//xOIH7jnUYgfu3+30YZR5bmrV3EtZTV4psOhwL+wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIk4ZIedp7w55SreSxxOoKstGMR7/wM23qMtIrKsBz040RhePxxcgp6dE886D+LelSdAvP7R2yBePIDb71l9vNGGcCc60IJx9GeUKniPdTmH90eP70cv1+w4OurcOroEYil06IiIdHRgX+3d/zTE3b14n71Twjb5ynlkFdHf4fp4X7j2m4mIxCLYhnAPxrnIkXGJxRLoTUtG0TGRiKsUDuK94Z65q2Jph512ufmY215dxcoHp/2NjjLn2Q26zrfwO8kM+kocF7fhai+O8mT4guPT1j/qmo1w1Rj1lXNFHByzloe/EVFtCrm4T4kKvu+PYx6KiEzuRJ/JopXodpiyTVfDQlFTnsGS8gJtH0G/3I9u/wHED91/v7FNy8fjMJ7D/ZvcjfUkpCSMdXUMwj1Yrx5+4EGIqzl03omIbNy2FeLiOPpLspP4G5l2rFmTY/j53Bz2S2sG3SQ1F39PROS++56CONaCrpHWDvRETdXRQVeqYhtGlePOV/UqPmc6hgLKgZZpx74MBA55enxF0U7NagXHYcTw9WH9j0TNf4ezVQ1za5jb+dksxKUCusCGFuM8GVP9m4yj8yat/DQiInUHfSSui/sVCGC7OzpwmxPKb3JAeXSefO5ZiJcpL6iIyMQk7tf+A+gacQT7MtOCbQip2h6J4Nhw1PxTrZh+IVW6Jd6WgThXODI1rzCD64Z9I89BvKgfPWD9veimDKocEBHxlFs1N4X1KJvF32xvwzpQVO7jUhlzpqjcYfkCjmERkZXD6M3RPqeKcrV2xnCNEapiG9acjg6cGeXZ2TWGjikRkZqNeeKWVV60oiOy7wTs684TXg+xM4vz5symxyEeee4Jow1TO7AO22HsBzvYyPh7+JkrYv/dN4e13cGUkNd5mAOxiTFjm1G17n7NmvMh7htAb9hPfr0B21TF4+MGsY115SCK+eb6qrIP2xVowzXe0lb0oVVczJtgAuv8CWedBvGMUkjNPInzmYhIVS2AvSDmdlm1O5FQnR1TLuWwWl+34zlWRUw305iq03NZrAGzm7dBfLGxhcPHCSeeC7GdxhpmJ3H/M1F0tQUi2J8iIgHB+fv5Legon96DY3dkDHM1FFQu1iT2aVh5nv266W4rzmFNc3xMlnAY21gq4DZ37kKHZjKKv+F6WNcLdfP8fjKPa7bh+iDEM6M4pvbs2gRxqIb7nUliv/UNYq2fc0yHo5fB49UWUl69iDlnLQSZXqwFV/33KyEOq2shdRuPjy2mu81Wl3NiMdw338fvOB7mRN8S9OStWIVOu30bsO9813TYBUJq7R/EeW/9DvS9TWSx5o1NqvOhOcyrnKq7dsA8p0xGMa9OP+9siE+76HSIH31mBOLSdjwHS2Qw99/ytnMg3vr8j4w2rP8Nrp3OfQv2Zc8g1s1DhX9hRwghhBBCCCGEEEJIE8ELdoQQQgghhBBCCCGENBG8YEcIIYQQQgghhBBCSBNxyJKekPL5zObxvnu3gi6EWBzvZQ7Ypkysqx3vL997IAvx8MlvhHjR8RiL4H3A9Tw6OdIpvMe9c8VJRhuKQbyX/Pmn0f1RLeM2czls49ToHogDyssTjWK/9Q+hj05E5IQV6NNwAuhNCAUyGIfxHu2g8uSUdo9CrP2DToPLtIUA3hseb8c2dPcpt8UCkUrj8fGV36lUxf72q3hffbVquhW096am/AtV5axxHHS71OvKZ6K+Xyrh2CgV0T8gIuJ4uM1UG+ZqKp2BOJNC30k0rJ0Saj8t9MDYgrGISEr5FKcncBuVMvoEPA/HmyXYBk85DVpS6PdYshidRyIi5RIeC9/DdqZTmIcLSVodk7oaNznl+dq4fj3E4yPoRhAxPRNx5REM29infg2PiS1YZxcpv2VbCo/RbMl0PCwdXAnxbhc9UtkZdI+4kQzE40VVb0pYX7Iz6BqxAqbXpmKp3yyhM8UO4/zhBVS/KJdOSbnFXDVmE2HTqZZMY19ph5rnm56QhaCnA10ikRC2Kx7BvojFMScc16x5IeUyaoniOBvux7GZUfN3X1cG4mQE+78lgbWkYpv9Hfaw3Tnlqoom8DuhOI4N7TfZO4N1dst2zLuxCdMfl5vDbdTrGB+7qhfiZBTb4JaUt0V5PH3lN40qV5CIiKvmY0utrRzXrNULQUb5qvLT6OA6oOasjh7Mu3QD52MilcEXlCMqYOFcmlJpk04qD4+qj46aezdt3Gy0obMT/XDxOLoNS2o9cOIg1tS1p5wMcdnBY1xSh2v5gFk3xqexDu8fQ9/S2Ah6c/a4+BsV5QeMZdD1mjkO18YnrXyt0Yb+EXQ8PvvIzyGeHDPnq4WgltsP8fZpHMdl5ejKLMK10IkhzCERkVQQD8rQAPqwW5K4rqyqmlktYRwO4TGt+Op92/SIhWvYhvIMHnM7iOPFC+AxH1fjb3bTRojjUaw9+Sh6xEVE8jE8x6qq8aR9jvEO7JeZGtbQvKpddl15RcdM/6YdxbqSU2M2kTOdjwvFshNOhdgPKdekchcGA9hfAdd0klsxtTZ5DvtsdC+ur2YqGKeSeBydMWxDPILvd7Wh71dEpL0F164Ftc6uqeNaV57cQhbXthW1LrfV+UahgvVLRKSgvpPz8DzIUtcEQhauQTZuxzVhugO/PxtUftmEWQcKyvc3PYv5OdR9CsRruv+bsY3DQbGK7Uq0YR55gvui/XNWwDyJd9RzBXxffwb7u1bHHMh0Y3++5e0XQfz/jf0Y4lK2kfMUc39aPbugo0vlpYNjv1rH7wcTWL9iAcyprk7znPL01x4L8Rl/sgZiK4P90jeENc/zcM22fTvOi295M7pEV67ENaOIyJNPbYF4364DEC9Z1md851DgX9gRQgghhBBCCCGEENJE8IIdIYQQQgghhBBCCCFNBC/YEUIIIYQQQgghhBDSRByyw65axvud4xH8qqV8CiEb7zX2G3hZYkn8ziV/dgnEZ150AcQtHXi/8vjOTRAH1G9m83h/9OQuvK9YRGR/Hu8Nv+/22yFOxvB+5
koV74Hv6cZ7sluUc2tkH97bX7PNfmjrG4R4xfF4z7W46AKbye6DuKT8gbNl/A3Lx2NVKZv3nheUe8cv4PFelTG+siDc/uNfQOyGHoR4dhZ9J4W5KYgbqBMNr934OG7DVb6ntk50RLR2oM8votw9xZksxFu3YZ6KiOQKmEcDQ0sgDoQw71pS+JtDQ+jhWTSA3quhpcptFsEcERFJKT+Tl27BDyj3WF2N4UAQr/cH1G90DyrvXgvmsYhIXbkZlKpM2tpUmxaQpHLYBdXYrk2jF2RqK471gSR+X0TEUq6bvKqrFVUfrBi6LSIWHpPJcfTiPPn4MxB3p9BLISIyPZuFeK6MHpqCKg/lKfSZiPLoBdVBi4WUf6lmOtUms9gG11YOzSDKrCwbc82Oai+earSPDpBi0XT55XL4Wmt7Rm3SHDMLga/2Nao8RCE17kIRjCt55VkTkXodx1k6hePqpJNwrOpjGArhMQ4GtUNT9b9t+uMiYayTyaTyN6r64Xv4+ZDql42bcT4vlpTDxsXxKWL6ScPKiWrbWKN8C9vk2diPOTV28iXcbz02RERqym3lVPE7tap5/BaCXlXvrBr21cz4BMTPPLsd4qefM9dX3f3oDjt77TkQ93fib1Zm0UsYUHVAbJ2HmCOL+9BLKSISU/NcJIx51BLG8SUp/I26i9vMl7Ffyi7myKZtu4w2zFYnIT55KXr1Cl24HyMH0F+2aTe6+Z7ZiX2fV57Rjha1TyJybDeuCU455/UQP/3oXcZ3FoI3LMF5dXIGHV1PjGBO3LUL1/WxpabnNp7EcZwKYH/U8zjmXAvHdVGNyaha47naIWWZf/vgqXo1U8Q1n1/BOhBWbth6VvmZd6AvO67+3qIWN9dKGxysJbumcAxHVdkOe1jPQsrBbdUx1ytZXH8UfXO9EVR13g3hNpa0ZozvLBTxNNYfx8M+dfUSIITHzPNLoomq89p6Ecf++DZ0EfpJzN/OntUQb9+CjseypdZGRXO+CPbj/G0pf9mBPbsgLpZwjVcqYa4GXOVQ89XcGs0abfDVOczeMVwft6ZxvwcWo5ezWsX9LNewTTV1Lp5qM32xFeV1qylfYkTQkyfHGZs4LDgOjm3P0M1hfweV283xzRNbX13O8dV5f93B+uLb2DdOCPNo4IRBiGM9WF/mNqErX0TEUk7ugdOHIL5k3RsgPjCObreJiSzE+aLyySs3e38vrltFRBYvxvP1mvJQzpbRGbloCTrsgjbm5c6tuJ+Jy7HfTjkZn0EgIvL0U9sgLhfxeLv1Rv6/l4Z/YUcIIYQQQgghhBBCSBPBC3aEEEIIIYQQQgghhDQRvGBHCCGEEEIIIYQQQkgTccgOO89XHiJP3dPuqPuhlUPIssx7rqMR5dFZg+62iLoHfuP6pyGe3Y/3n1eVdyI/i36FvdvRHSAiUvDxPvmQi9tIBvHe8Zao8g20ogPhwDi6R5w69kMpj/fdi4jsHdmjXnke21jIQxwNYl86Ebxne9rBfo0pD1Y8pbwwIhILovMjr5wGjme69xaCu+59BOLMopUQ+y7259OP3AvxkkXoRRAR6WhHH9zoPnXMVG7H2zIQ19S9/+PKU3jBaa+F+KQT0EkhIlJSuWqHlMNmz26It27DXN/wHI6FTBq9L2+/7K0Qv271CqMNYR+v1y/qRd9QTTnsLFv5nJRHoS7Yb3YQ40gG81BEJKY8L14A64xppVg4POU68pXQJKw8NiHlCVvcgm4EERFHudryyoEVaMHjaIexz8rj6OCoZtGhkp/GWjFlyDFEslX8zuDJJ0A8NomOh+ws/mZSOVcqJfSZ1EPY5krVrB1l5XCwVW5F1X77FtZRVznrAsplZTuYm552rInIxGQWYgcPnwTDR8ZhV6tjf+WLeLzsFPqYylk85nVHudxEJB5Dt1BAucCy0yqvlMNuroB5qr1evjrGoaDZdyGV+yVXuXdU/9fK+L725o6Nof+k6mPOVANmP4SVey+gXIilEjbCUf7FSBi/P1fBfhmbnoXYF+1aFBEf+8ZS/qxY5JCXZa8ozz79BMT+NM5B6Xb0rj35PHrVNjdwt73uPPQQf/s734L4LRecBXFrFPMuqvI2GFK5X8Gx0dmOayERES+C9Wr2JRyBlqrrdfXv2paqb9t3o1P45n+82djm1ASuRU8/A/f74svfDXFXD/Z1wsE863Mwh57PYn3zGriSJ9SaYvlidEIvXXms8Z2FYEUf5vt74+jnHYigQ+ieLbjm+9Uuc5yftKQP4sKOEYiz6pgG1PyQram8imMeur7ylHlmGyZ93OZUHOf2ShCPUcrCfkik8Tc95b6UaVyjRyKmy2+fqk/TLo6vHnWOFU9gG1MJ3KavnLtTNdx+MGA63QIz+NpxPtbQZN7su4VCTUmGa71ex/rvqPNDL2zWEk/tj1XA9ZRTQGd2ayd6vqqT+H5xAs8vHOXWrRe0Y1hkWm0jEMEdLZfzKsZt5EvY5oCt5qQA9sOiIXPO6urF89C4Ulj76vyhWMfzsKFBrANBFx2cpRqeJ9tBrMMiIjUXz3UTSTwfbDBsFwRLeaD1dYKguu6gl6+lkpl32lmnvc6uWheGlNu1pk4VYhlsQ7IvA/FYEXNIRCStPOhdw7hOTA9ifYn2obt9mYVxvYzjr1DB/fYaPBvBtrVvEfshEsBE7OjE6wGpFpzfwyGsgfEUXvM58bTlRhtaf3Q/tlPl2e+7xuNf2BFCCCGEEEIIIYQQ0kTwgh0hhBBCCCGEEEIIIU0EL9gRQgghhBBCCCGEENJE8IIdIYQQQgghhBBCCCFNxMsw3ymprYMyQC0DdpXBuyamHLA7jULCO3/8U4jbulEq2aWl+CUUZYdCKBNMJlCAGNSGURFJKOlqTxcKCMt5lEjHlLBwenIK4noN9zsVRellrWA+dGLb07+B+MDmrRBXlXBYQrgfrtqvxCIln03gsbIjKAwVEYmqh0q0CrZ71WoUoy4Ul7/zLyCOdKHgsZRHUem2Dc9A3NuDOSMiYqsHHcSimCc1D/t7xXH4m629KLYudWAeX3zRn0Dc6CEfRfXQCeWRFUeJMisOfn5CSax3j+zH34zjPo3tQ4msiMiu57dBbFfwN3aOTUB82htOgXjJIIqd60oAakdRLiwhZZUXEUs/zEQJ2MOW+bCAhSKrZP7VEo6jRA3HXWcP9sf0buw/EZHtu1D8PVnHPm9rwwdV2Kp+FD2sR25dyWuVjLZSNfvcUQ8AmhzDGlYsoCTar+Pn4xGs9TUlorYiWCOdiinIDWuZtavyvYp97dnYhpqafyIhzLVwVM0FSvgtIhJTr9XVfuo6sVBMzWYh7lNzkn4IheOpHGo3H3aSz6nvOBhX1cMVPPWMqM3bUdpuq3GpH8CyWNUGERE7icekUsTcdFUbHCU0j6jf0A9D2TqKY2uos9doQ5sSBgfbsE4Wi2gHnnXwN4JhXDLlVe7PqtjzzRyy1LIrZGENLDYQSi8Ek+oBNptDkxAHJnAO2XMAH/pxzgXnGtv85P/vUxDf8s9fhvhnP/kxxMf0Y66Hwmptk8Lj5bqYQ21pM/c72/DhCkH1gJqw
epCIreT/BTWv1YJ4TP/lK7dBvHHzBqMNuj796Mffg3jRyuMhPn45PiQqFkERdouPbepT5c0JmnlXVA9N8muYZ0v6UfK+UFTVAx7aotjO167ogHiqiLXnyVEcoyIim8ZxnlyuHr5QU+PYVw9nyqs5y6/i8QtF9ffNh+rpIqqPYd7HWpFTDwFpX30MxAG1FNpwJ4rNBxrMs4ta8eEloubVaBA3OlfHfipO47HpUXNmXweO17B+OIGIhGbw+CxRD94byGSM7ywU5Roeg1oZ60lFzUGuj7Hj4DpcRMQR7OPSHK4j7QjmdzCBfZadwgdATB3AhynUVN44rvmgj2QG5z6noh5ioMZcqYy1vuLi2tUK43lyUD2UqmOROdcuW4HnjGPT+CCMMJZysWx8v1bEvu1pxRopNq4x/KT58I0tm7EO9HbiGEuotexCUa5h/wXU2ias5ihH8PMlNY5FRMoVlWfG+hW3kQjgWHYt/LxtY55levE81wmYjwS01TWYtjb8jj5HrAmut2wHa5il3hf1QIla3ewHSz3Uy1f7HQ6oh960YA1r7cD96u3HPHNtPHdpX2zW/sXDuE39wMKg9fs90I5/YUcIIYQQQgghhBBCSBPBC3aEEEIIIYQQQgghhDQRvGBHCCGEEEIIIYQQQkgTccgOO09JtsJBvCdeuxDEVvcRB5RXTUS8Gt6fPDWFPrLCJMaxOt6j7gm2oa0V7xvO9KG/wXFNx8PofvwNfb+zrZwMNQfvwQ5YeL9zIor3xDuqWwL6BRER5ZRya+h8sFXf50p4X34tgl6FVB/uZzGWhTjvmfd9V4p47ba9ZSnEHcqjtFBEwtiurZufgzg3p46fj31Zr5n7WigUIbbU/eTRCB7TegndAHOT+Bvje/ZC/Is7fwHxbB6/LyIyV8BjnGpBoUO6FV08iRZ0A+zbh866ro5+iKMt6Nl78GfYJhGRmW3PQuyq8bh9DJ0S+4q4H8tXodsv3YK5n25FX1Qsjg4XEZF0Avs6FMUxHY/jfi8oZeVoUOXDsdCFUFSKzAOW6cw8oMZ/oabqwTTmRSCkXCMeft5XtaGs6pPvmw67sPIpjSoPp6N8cpbgb0zOYv0RNX585ZUKxUyHY4vyRmnnqR7HAeVkigkeG1s5QEJqHy31eyIivupLS21Du6wWir37cWyHlLNUu90GBnogbuRAyxW0w071r/KglpQjcNP2nRBrH+z+vegz61DuEhGRdDoD8bZt2yHWc+8lb34txBEfa2RrJgVxLIf1azqbNdrgqfGm+zZXwBpWrOJcUVJ9b4eVl6+uc8rMIU/l3ayaCzoaOE8Xgv7BZRC7gvW+rnyb4QQ6cHoHcA4SEfHV2magbxHEd//XDyDOj2HexGPYvxGjlmDtiQRNr472V8ZjeIx1PYyG8Td85cOcLGO/PL9pI8R/8icXGG048aQTIf7q19B79+gDOD8v7clgG+OYp1NjuO55Zht6j0MJM4e6W3CbrvJ1xcJH5t/v9RixlMeoN4PrhjOHcF2Rq5lO5l3Kx1gKYJ50DaDbOBDGnKio+lhRa7hgXbl2Q2Z/p1XsjKMnrEX5nKrKMzqjakmmFcdGRjmnQhXTZdavXLFh9TcaVgJz2wrh5+0CzgPdQewnpRsUu4Ezt6T6Lh3Adg4vNteFC4Wr1k9aRRgN4xxTV/NBLYvznojITD0Lcbw9A/HaN5wN8X51Prd3ZhTizmE8Rp467m7dPO41QU9gogU9XBNqvq7UMDeXn6RcoDHsmOk59JlmuhrMWercuFzAvm7rxFxzfOyHjm4cQZ2d2rGGbsts2fTRdWbwO5EAfmZiv/LDLxAVrWZTa4K68iDW68rtZpnetHDkxdfUnkpu7YmuqLVRXS1dUmmcRwNh8/wmpJzbkRAeo2oJf8Oxcb+8KuZy0FPuRVVefDFdcE4d62qprHzNNvbTzAyO6bLyO8bVXDqlnJxO3ax5CeVKLipfc6mkEuAQ4V/YEUIIIYQQQgghhBDSRPCCHSGEEEIIIYQQQgghTQQv2BFCCCGEEEIIIYQQ0kQcsqTHtvA++mhEeT4E7xtOKE9IIoX3MouIlJQTpT2F9xYH1TZrc+jU8tS9yKUQ3h/d3T2En2/gM1t5AjpVHrn3V/ibPt7PHFK+prJyA7Wk0LMTDmIXByzTYVeoYD+MHMB7+bNZ5bqw8J7rzhV43bU/g8em5mM/zU6ZzoNwRbn4+tFZVy6Z92kvBPlpdLXc818/g3jv2D6I7To6CZ59Fr2HImI4txzl/RJ1jO766T0Qh0M4Fk56zckQ15T3Ilc1+3vnngmIp6c34TYq2Ib9Y7sgHtmFnz/lNWsg/sgH/xriXz/2qNEGR3koclX0CZSVU2rnb9DV9+CT6MFIBPG+/JByHAQipo8upRx2i5YMQvynb38HxLiXh5egcnDUlVetUMb+mslhrs3UTJeYE8J64DvYR5Uy1gJLeSbqPuaFrVxiiTTWn0DA9EwEVE3y1T/bGP44tQ0d28pXaqvtefoFEbH1NpUD1VWyCl//htEG5eZRY1wssw2e+g1dBoy6sEA4vvbFoDOjRbkgtZ9OH18R0/daVF4PfYh8T3lRY/j9iRn8/voNuyFOxNCJIyJS1eIWwWMeVv7KTdtwm91xXEPo2tHTg+9P78a5Q0TECmJeTExiOxctwnlPO46qym1VUl5PR33e9UxHTqoFXTA15ZYpaq/lAuEIjgdXtSscUWs6LDVGHoqIjE9g/07N4Npm3xjOQb6DOaLXmXXli9Emn0jIzP2EctIGlH85FsXxFFUeYk/5z/ZM4jpUfHz/0re+1WjDmWeeCfHevbhu+dGPfwLx088sgdit4DwwO441oTaN3qugi2sQEZGSg16rnbM4n8cjpudzIfBV//mqLoc9nEePbcNjPNmL40lEpKjWMo6aVzva0W8dTaJzKKtyv678vo6KqwHTo2crh22LqrHa3FbL4TEVdV7gj+GacZHyN4UC5nyVKuM2uwI4nmaV6y+SQk+eV8dGO6UsxHpt20BhJ57yvvUei37locV4LBaSWk37epVP0VMHzcX3Q1FzTRtVbtVkEeP8Thx3p6zG/R9erdZsdje2uYxteuIB3J6IyNQU1rxYCttQKmMtSLfh5084FevPyMQW/IEU5l7fYvToioi0tvZCnEygR6/sYB3NK/eu52Ob9k2hv7wto/1o2hopko5hPteVt7NaMdfoC0GxhmPVqWN9D4bwGOfzWYhTCdP72NmOaxc/hDVMr+vLak4pl3Ct4gb0mhzbbIdNf1y2gOdAu0dwvm/txTwMxDAPfRfrqlfHsZCvYBsrDc6xDI99XdVu1S97lM9xLo/7YKtjkStgm23fnDfLFfyNbdtxfp7L0WFHCCGEEEIIIYQQQsgfPbxgRwghhBBCCCGEEEJIE8ELdoQQQgghhBBCCCGENBGH7LALB/HaXkk5IgLRBMReAO/tL9VNl0tA3UscCaNfIRTCbYbjeI96ugXfH1NukVI/+um6BpYZbRidmIJ49amvg7gwuR/inVufh7hYyEIcDOB+ppVTyhLTT3NgFH9jz270TtgR3M+WbnSsdLa
p31DuC2sGv986ax72/q42iBdlsO+2b0Qf0HmmpuWw0NuNHoTlg+gl9FV/Bm2MA9plJSJ2AHPZ164elcsSQl9AX18/xOdeeCHEqTgen3QUPQoiIhufewbirdt3QNzTPwhxRYnGAsoR+dzWzbj9rVshjg+uMtqwfz+2qzWDcVcY782PJ3F8zoyhY2p6dDvEk1M4HiuuNg6J1JXz6UAWc/PMC8zjt1AU8ugqyOXQxVIs4FgvFtW4a9D0lgyO1UjMdKDANpRcLBbEYxIK4/e1Xy7UwOmkHWeuh2NGOyC0KUq/HdACNAs/4Lqm2Eb74QzvhHrfVW3QHqqg9vKp7UWjpvND+660OynSwLm4ELS2o5ulRc1zUdXumRx61GKqNoiI1Gu4bzUHY+1MCSufVU25RSZm8DcrDn6/LZUx2rBoKe5XvY7HOKc8Lbv2of8s3IlOG9vH7yfj2Gary6y7LTEcf4Us+kp27d4F8fCKxRDXlG+r5ip3lZreteNORGSxmq9jUWx3tWy6dheCqSz65OoO7ltQjXNf5dDTz6JjSETk+BPXqM9swN9Q/2ZcCyr/rnLYHDiA67VKFduoncEiIiGlhNJlORTGvNI101Xe0ILy6LR1oGOqQ7mERETyym/a04vOp5lZzPVf/vLnEFcKOPdMT+PcVFSOzmCDeSWgcre1G91ZXd2mh2oh8FTbXeXbFOU1TCsP5WsGTD/2dH4G4to4eorqRezPcALzrqLaVFfrL9vDNrl1c46zXOVKVtushXQmYj2z1PhyA8qVpLyubgPnqq/OBaIu5rqv3Flj0SzEdTUPeCqtQsojWiqZtSusxk+n8p1Fg0fGnSgi4qp50VX9FQziOsIKKrdrC+aNiIhbzkI8ugd909uew3VyKnoMxJU2PNcqq2PUHsM5yfZMf2Jn6wqIIzFcQ1TreEzSHRmI6w7+Zj6Pdbd/EdYOS8+DInL/PY9DHIrjb3YtVq5Kdc1gbD/WxJqL89NMAZ14bVE8LxMRSSdxrnXUdQzHOzK+2LzyoIVDOAYiQRxXYbXOty1znrPUa7UaHpNSCX2T2gerhbD6LKCu1luBqPn3XtksOut+9vO7IW5pfxPEg0vRP+qK8s25+Jsl5Q3X/Shinlvo+d32MD4wjnllrI0jwRd9323kKld5tX8PXuPR8/ehwr+wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIk4ZIddd6dyOkzjfb9lF+/ZVYoI8W3T8aC9Qy0t6P4Ih/Be43IRPSAx7WeqYfybRx6BeOlKdGqJiOzbh74AW3kh4hFsQ0DdZx9TbgDttSqXMXYc0/GQVL6RM1+D/oFoSt2HH1B+pzrem17ei/eu23n0N3XFU0YbXrNiNX4mg16WJw+MGN9ZCGYm0UVyxulnQnzm2rUQRyLKbRUwr0nbysXjKcdGQDlUtP+pXMP+nt6HfTNTwfvwZ6ZwH0REdipn3f4JzMNkF/oZJILH0Aqjp6rm4H30d93/EMRLho832jDQhs6HqI3jJx7CvKxW0Me0M4c+x6TKU1c5D8Zmzfv2OzoGIS4pt8Y99/8a4qve/xfGNg4XU6rG6TyoVHAs12oYh6JYO154DV0Vuj5ov6JtK5+Pin3lJdLOBzto5n8sjsdVe/K0pE477jSWkvVZhiXKRPs0tOcuqP1yqi7rNus2mB6+Bm1SH4lG0UdzpBx2edU3nvIl9XV3QRxWzrpS1ZxjEnHlOQ1if1sB7IxQGI+5pRx1pbLyz8SwPiXb0U0iIlK3MTedIMbRDO6Hpzwu+QL2y/KlS3B7Y1hfnKLpzZ0rYC1evmw5xPv2bsM2K1+JpZZMhZw6VurfQJNx0yeoXXvFIm4j0GB+XghcSx1z5cwqqLwsK3/M2CTWSxGRL93yzxDv3o7e04KqqdtH0Vuk/bK6TtTVutNyTZ9MQB0TXZ8slcu+pVxieoOqtsQS+JvT02Y/RJQPNjeHa9lqFX9z16592AaVh2qaFD+KeWbaYk1PUiKCY7RUNNfoC0FYraEDal9qWcwz7Yvry5hj7Pg5XANvyuLaf2z/HohzZTweBTXnVdR8E1J56fhm39k+1oqimqNKau4Oqjz1qp6KlSNXzYnimUe9ouq8p/xORfWdSkSNH3XeFlVrQs/FuSbhmeNvWTfWs9Yw/mZpOgvxQla/UAjn1rqaY4JhXG9VXHS57R9/1tjm5t+gpzMVwHGWqONcuem+9RBHBvG4TiuvXnw4A/HgIjP/943jcXBreNyDqh51K5+c5+OY80rKZ21jHoxswXlTROSRx7GGLToWx4OXUmPKwfN/J4e/2daJ3981gudQm+fM86w3nHc2xD2LcI1XdMxavRDElBsyqs4LwsopHG1Ff3+kgfexXMY8mcvOqfcxt5PK76cdznqNrv+8K5E28+41p54M8S61nvrq//kWxGvPOQ3iY04YgDjdjXnm+/r83nRTW4L74ajcn5zLQrx9xy7cgNpPfR7rKu96uWaut2NJldt5NRf8np5i/oUdIYQQQgghhBBCCCFNBC/YEUIIIYQQQgghhBDSRPCCHSGEEEIIIYQQQgghTcQhO+wWD+A902kL7x3evhfvdx6fRE9BzTV9QMmkuq+3hPdcux7eR69dJDPKmZIv4L3GlTpuL+BjLCKSSrZCPD6G98HvK+J94Z7yTnR34n33lvINzWZnIY4kzH7IpNHaEFYeq6ryvIhy+xSr+PlaAd9PePj+soEeow19Pbgfe/eh82N6Ut3PvkAklG9rOofH4+lnn4S4qwuPZ3dXh7HNel0do9ksfkA5I4LqmPYPoV9uoBWP3+jWAxAXC6bXo6sbj0G8PQNxIIp+gZLyE/T2LoZ4bD/6IqamMdd7+5RUUkQs5eIpVHE/JYh9X1eOg4hyz0SUo6U2jT4isU2nW3f/IH5H+bcMFdkCUq8rz4CP4yioxqFWnkVi6MsQEUOIZKkKHAigo0FraVxVf7TTKaAcdwHlYBERsZUfI6z2Q/vf9G+YfjhEpYnhjBQRyWQyEOsxWVVeCNfC33wpZ52jXD2Oo3JbRMTVr734fi8U8QS6QVzlPa2qvgqG8BiHQqbfROeV/rc6PTSDoRf3FlZVTbSCuP142mxDPo8OzJgaH5PKVxoMKvdRDNscz2CNTEbRWdfdid4XEZEpH+fjeBx3vKsL58F8Dt1WeirWGqmWdAbiVItZA3LKoTI1hV4k3zb9fwtBW3ubegWPabmAc0g1ge20LXOcZ9Xc2t6J/sV0WyfEjip4no+579SVm0mN83rdHLNe/cXHdVXNOZ6ub8pxa6uxk1U58vAjDxttOO+88yB+fuMm1Sb8fE31g/bqeqqvtcvP1XO5iEgNt7l39178jciRcSdqL6tl4ZgMqiFUsXHfQmFzPlrcizV0ZJ/yzVYxl10P38+qmjulJuqUqqd6LSVizlFzqqSOqWKix0/Af3EXrB5tITHn+nFVp+eU36mg2tSvClpGjafADNbw7iCeB65pcG4xPIAHMF7G87qq8uAtZBbO1nEM1Ko4hyi9qIxn0U+3f/Z+Y5tTY1mIe0LoB2
+38Djlyvj50BjOa+Ey1rh97laIV56PLlcRkWkPtzm7H/O3sxeP6wmnKmdaAo/r1BSeb+i5OpE0j9qqVYsgblmEnem72NduHds4NopjtDiD79eU0zFbMM/vR1fh+V8ihfPPgSnTQbgQhNQ4tNUYiAZwzPhqbeo38Ep7Ln4morznYeUt1P79fF65Ql08XtE4bs8RzEsRkeGVmIsrjkcX/s/+E8fLj/5fnCvfUEQH3ikX4PY85Vl3Gsz3lqqj2vM9MaGvG2EeDSxZrN7Hmjc2gee1Qdu8jJZux9fsEOZdQT/k4RDhX9gRQgghhBBCCCGEENJE8IIdIYQQQgghhBBCCCFNBC/YEUIIIYQQQgghhBDSRByyw66lFb0SZeU0a+1S/gTl4ZkaNz1eFeUpCobx3n31tnjqfuW6i9ucK6OfJhFDqVSlhPcqi4iUK+iPqanfcOva34T7WchhP7QoZ01LC3p0ymXTBTc1je1OJvHecks5oCwH71UPK8mHunVdwspjNbhs0GhDuYTbfOCBjRA/u3XC+M5CEFEupWolC/Ejj/wKYr+Ox7glbjqE6nXlOiyjSyGormMvGRyA+LgzjoV4eDE67bJ70Sc3Nos5JiISVrk53I7uj8lJ9Akcv/I4iFcfvxLi/+/b34Q4KOgrqBfN3K/V8DXfUT6AKPZTQEnaBoeWQjyxdwt+X7lpYg38jatWrYC4UsL9HujFe/8XkvZ29FnZgjXQVc6IuqM8QpbptalUMNesAPoVtH/BU66KmnIVBTzTWwPvG+4yEc9XdVS129KiPYVS84jnaX8cbl+7NUREAsp5pp1zdR17GNvaIfQSTrtG/WC/hLNO9/1CEY3h2LUtjMs1nPciKgdiEdMfZynfSFh570TlYUsafWaVHPphakE1d0ewr8o1s94EAqomqSVBrYzH44Cam9v6+/H7B3BOiqnxFk2Zx7wzjfVkanoP/kYa1yBa7ldwsNEre7H2e2p9UCqZLrFSEV9rU967uqmGWRBcwWOo8z+o8ioSwTVeMGguJ1tblUNW1wZVO/S4dmq4XvKU68d1X7zNIqYH1VEdXCgqp1YVj7F2mbqOduDh53/6s58ZbXhuI66nfvPkUxBbKs9cVYMd7RVVXj1f1XDPNZNIv2Kr+TnqN/DeLQTKsVxVa2TtcrOUZ82vme1OJnAN3dGCx3BmEmtHfgzjOeWRfkS54FpVTrVYpp83oeakuo1fyql1fEV5rfQsHFDnAWE1VuIN5238TNDCPImrNnlqbNRc3GZMtTGdVFlVR5+jiEhhFn8z14J9ZSm/rGmdPnzMFpRvOjcGsVtG11S2sANiT63nRETScezT0tx2iBNteEzsJM45oSi6QVvqeA5pd2Pdbe1UJ3wi0pLG47ZnSxZiS+XFzLgagw7Ovd096KPbO4pjdHrKdHL5IRxzXaqZkYhe+2JcrWLeHNiKuZUI4QZXnDRktKGgvHZTs3hsQpEj4yl21PrIUX5RtTyWuDqPbegpVi61sPqMXhNXKzhvedqp6eI4darqvEEv4ERkZhb9cK89ZxXEp591CsSP3f88xCO78dy5Zy+eM0aSODbSae3dFamp+TqXw9zMFzB3lx87DHEmg+fiLa14MLJzmIfaGy4isng5rlUrJRxfpRoddoQQQgghhBBCCCGE/NHDC3aEEEIIIYQQQgghhDQRvGBHCCGEEEIIIYQQQkgTccgOu2AUPxptwfuj25J47S9YxvubQzHTLZKbVT/v4jZiUfTNuMpn5lazEIfjuL1QENsYCOC9/yIiVeUC0fc/+9qfodwVvroX3VXqnlBQuS3CpscrO4sOu7JycqQz6DgIKpeFrfazpGwl41N5iGcLpt8kX8R7/e++bzNuw1TvLQgl7fxT+37hRRdD7Kl7wwMNhECe8t74ygUSUP0ZVT7GsSx6K/LZrRDPlPE3rajpmNiyfifE049OQrx0CB11py5bDnGtjIkWU3nl1zGHSmXTKWUHcLx4SoFS1g4j5cVZsggddpUC+guObUGPzK+ffNpow/7d6L0rF/H4+SUcGwtJSwuOO0/5XMRX3g81bnPKxyciElTusICKtUdNKWMkpPLf8bS7SDmi/AaODuXJs3wtpTOdc/i28t7o8aT+HcjzzdpfK2Odrat89ZRfTrS3SLfJ023AT8QbjMGwEoXYyqHSyMm1EISVPykex/qjcySgkiQQMF1Grov96zhqnlO/mc9j35RzytuhfjOq1ge1BnW3rupiaQ7XCNrFmmrL4AZUjauXsA4Hwsrt2sDl54ewnSnlnI2onMi0deL3czMQWzb2QyWP9atcMsdfVB1P7e4xpGsLhGXhvodCqk7ovFL1MBQyPV56oPpqXyPaLaneD6shaAmOY+2jcxt5J/0X9+S1d6AHRztufVW/TG8eHuNi0VwsjY2PQzw4iL6lfFHP19qNpTyhL+W0a9APer9tvY60X9xderhw1Xzjq9hStSms1md+uYF7T+VdVwK/89SG5yCe3o/rL8fCxJtUfricqp9x1+zvuOrOiNoPP6xcpdpVbcxHynOojnnONftB+2R1Lof1n2yo3PcC+lxDzf2Cv5ktZI02BHzcZsROQWx5R2aeFREp59FZZwUwD0IpXDen1UGt7jTPKVOd2Cf1DjVnhLDe9LWhn3rfKLZpbhuemx3bjw7tZNKcLwYWYX5O78c27NyI3ynn1Lo0jjUsHMN61N2H+zC2z/R0Vz3l6VI1y1LO1JYMzu9Dw60QT27fC7FTx7kgN2M61cYO4Lql6mYhbu/IGN9ZCIrKbVt3dIzjrlbDvIvHzGNunDuodX1Ane+5yllXV3W0pK4TjI/i+V13p2mbbFU+3pLy3C05HtdTsxWMw0Hc74JSYtZt5WKOmetMV7lBg8q1292PPsbBpZh3tZo6f1c1slbHsTKn/M4iIokkritjUdWmeIO10iHAv7AjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmohDtn0WCkqSF0hCmEygnDOkpIiJiCn9TqdROlnIlVWMot6CEjjXKxinwu0QR5UE2amaUsqgkhxqCWsogoJBSxkI40nsQlv1qONqSaLZ5S0ZlCLOzOBDIvJKFNvShvtZUgLcbbtQDrl5A8o6u9tQpi8i0r1IyVNt/M2ONIpiF4pEEsW8aeXaTHWugLiqjnG0wTXpsKWkxTElHo/j+14FHx6QzysBexz7s2s4A/Fw3BSybhvZgS9o4XccRZijB/ZA3N7R+qJxrYzC12rVFGMWizhmq+ohCfUqimeDUSXv7ENh6O4DOF7H9+A+VgpmG3Y8vx7i9nYleW9Fue1CYomWQGPy1ZRQtVLF+lVXD7ARMcXf+gEyvpJX1xysH1UlkbaUKNzSInEttBdTbu0pSavW2eotaL22Fslr8btvNRCgB5WwPvDiElb9XAxfS9ddJcPWO9HgwRe2tsmqzzj1Bg/sWAAS6uEKQXUEdEWLqgdqFArmw04CKu/CEfyNmHqwjvG++tHyXBbi7q7FEFf001JEJJPAdoY6VR1Wh6guO
L70XBpL4kNtQqpuG4krInWVqx2duI4JKwF6QIneI2od4/vYxngctxfTbRIRUceirB4woOOFwvexXb56CpGlOlSXFv3gF5EGD6II6vWUym29UfX5gKpdITXQ9cNrRBrIuHUtUdsIWGrdqPJOPydDPwgolsoYbehfrNYU6jfLWgCu5f+qb/WDGHQ9bHQsdA3Q/aLXTguFrXIkpGq3ftCbFdAPqzNrjVvEGtibwvrWHsLvhCo45lpU7lcsPa+qhz8Fzf4uqmNQ1nOSekhEwNFSfjU29IOC1DFvNM/qEhjS60zVlzG1X+pZgpKwVL8ZXW8ei6pai6pDI3HbfHDDQlGewYfrBSI4BqqqT8MprP+9q/uMbdbVusGJqPXWHJ4v5CZwnV3IYlw+gLm54Ql8yF17i3lOaYdwHjrjXOzjwaFuiNs6cb9butT83477bds9EE+N4kN0REQmZrZD7EXwHEbqam7wsEaG1dxpqec1ppL64T943iwiUlAPTnDUQwuiUTz3Wyiycy8+x7sujvVSWa37PfNhC1VVw/RDJiJqnRhW68xCCc8H66oepdrwGsBr164x2rB4sBdiO4TtTLXhmu2kU/EBKvEw5ql+8F9V1D7qCy4iYqlrOhFbTdiqDlfUg0P1GiKqrg+kUtgPeq0sIhJQT8uqqbm10XcOBf6FHSGEEEIIIYQQQgghTQQv2BFCCCGEEEIIIYQQ0kTwgh0hhBBCCCGEEEIIIU3EITvs9u3GuJrF+6FTnere8BjeB5zGW+pFRKStDX++UMR797PqXv7Z6bCKcXsBD+9V9gzPUQMnkYev6SuY2hEVCGKby65yiahby0Me9oNTmjGa4JZxP13lzckW8H2lO5EZ5f7btR07JjuNDola0eyHnjQ6CVYt6Yc4d2S0OlLKo69BPOWwsTCxxsfRk7Zt4y5jm9Eg3pMeTmcg7uhCH1xfRxpi7R1rT6NTUGnIpFKeNdrQ1YX35vf3oavtwNgYxFu3boJ4sIbOCO2fyeexH0ol9MuJiOTm0MWnHXZuTfkCIugfeP65DohrVfQudHWhJ6P/hOOMNnR14mc6OjEPo+o3FxLtAaqq/dOOuppyIej+EBGpaTeREndpb432DkWV+8BWjidXOe+050akgQ9JOR4Md47K97CWOCkqFewHxzF9G9pFpfdTt1vnd6mEualdWNrrpn9PRMSp4Ta1lyga/f08E38oIbXvtvagKjfJSx0vEfOYh7Xf1dHOLDWfq22mU1h31TQp0bDpJfLUxBVP4mfqarxU1Lyo/Y1x5QkJKSdLsYTfFxGJprDulmu4n2XVhpCP/RRQY8UOYJ6p5YCUyqZXKpvF+UD3fTjcwHu3ANSUE1iPKaVNM9xtDb1par1kqXrlK6GMp2LtDLaVXy4Uw9gPmA67iG64Ae6nrj36+NRrmCO6hjeqd6UafkavRSsOtlv3vQRUG9X3fT2+G+RQMPjiy/14/Mi4xGzVroD/4m5RMRx2pv80qApS0sJjdo5yj82V8P2n96B3eKqKx7SiHITVBsJMT7XTU2cXrtqGbencx+3ZtjmX/zYB7WQVkaD6Skw5n+I29l1KuWVTymXdrro+rhoZEjP3w6rdvprPKpUjdHIhIj3KKV6K4P4ERTlLtfO81Wx7bRYdV6UJfH92E56fhQs4l7ZU8XzCCeFvVn1Vf1xz3M6O4xosr9aqS4dw7V5V69KZvdhGu4A7EVVyw6GhE402dPfjedZsBefnyUl0znk1tWYL47E48fRBfN/FedQTc74vO2qdqI6n9RJj6nDhCdbnkDrnFzUuC0XcD7dm+kaLBTzPD6hcbc0oH2xQPVdAnVtE49iGHrXeSnSYruRYStc4jIOe8jO34m8k1PleSM0N9bJas7tm3dXu6Zw6F66qvtPOu6DaTz39RNR5QVB7ekWkWFLttJUvMI/j81DhX9gRQgghhBBCCCGEENJE8IIdIYQQQgghhBBCCCFNBC/YEUIIIYQQQgghhBDSRByyw84N4T3v9fApEFc9dc+ugw6IaNq81zjTifdQt9p4H31bCW8ezs7gPfHZKbwnu1zE3XEd5fHQbgwR8Rz8jUoZ7y3WLpCAcrDkK/j9cgG/H1K+gZSNfgMREc9Gl1i9jvsRSeB99tEQ3g+dCSs/gWQgPv5EvC985Qmmb2Bw2TKITzsDfQD79pv3qy8EnvKC2eoac7COx6MlhMfjycfuN7Y5No65aan+PO20NRCf9VrM9bk5vCf+2aceh7ioHF5b9+w12rBz1y6Iy8q35PvKydXSCXEuh/6H/CzuUzGHfgdz9IkElRcnnUIXRt8QevJa23sh7upD31zfa46HuK0F866R+0y7y8RScYMxu1DU63UV4zgzXEXKfdTQGWT44hDdH9pH5is3T121Qf9mI2+npTxRgQA6GGzdRuvFHU8v5U9q5FR7Kc9dSHkhXqpf9H4aXrAGPrp4BPNdHwvDI7VAxMK473rffOVd1cevpQU9bSINvIVq37RXzVcOu3QM596k4flQc3G1Qd4pZ5NXx5qVSqDLR+sX9RaLykUSqmM/lMum58Wx0QUzNYd1tDCNc3Emg+ue6SL2UzSmxqeP/TI7Y3p18qrWx1Tf6nih0HOOHhGucgiKhXEkYo4xXUNdF+OQynWdp0FRY0G5lhyVIw2dnare2aqG6rFgqdoSiij3Twjrm/5+o5qr96uunHW2Gm+ermcqDqhj5R2Cu7TRa9CGBnV6QQgrl5LyoFm63WqOcxzTW+ipUxvtTetV2q+LT0Rnc7daR24fx7owXsTfnHXMuaKiamJV7YZjqWOmfY1qztNzoP7FkGce36DyLyWUVy+ifjNi4RdaAph3rcpxl1B+yGjIXPOoUyajJpSsBm7xBaLDQV91tRfnzol9WRWjC9qJm3NMsIbOa3sU9y86o9aNym8lDrYhsQyTtX1Yrd/U773Q0CyEYzux3e4szkFdQ6rNKndjVVz7z8yhLy3k7jGa0N6NfuqetmOxDZVRiPeOYhtjynHb2on95FSwbgRDDdZrU8o1OYfHol4xnYsLQa2uPKlqTJTLGBeV3z8SMh2lgWBCxfi+r86ttBO4quTr9RoeY1+twCIt5lh3LOXxVl5ct6q84EUcP7WAcggrt9/UDLoU21ozRhv0swumDkxCXFEO2o5ePI911Xw+o86lRa8ndEeLyIH9yq+oarPr/X41j39hRwghhBBCCCGEEEJIE8ELdoQQQgghhBBCCCGENBG8YEcIIYQQQgghhBBCSBNh+S8ltiCEEEIIIYQQQgghhCwY/As7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGk
ieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wa8C5554rxx133Et+bteuXWJZlnz9618//I0ihJBXgBtuuEEsyzrSzSCvEg7m09TU1JFuCjmKYN6RZuRQ83JwcFCuvPLKP+i3zj33XDn33HP/oG0Q8nJh7SVHgqM973jBjpA/cvbv3y833HCDrF+//kg3hRBCCCGEEHIYeOSRR+SGG26QbDZ7pJtCjiKYd0cWXrD7A1iyZImUy2V597vffaSbQo5i9u/fLzfeeCMv2BFCCCGENDlbtmyRr371q0e6GeSPkEceeURuvPFGXjghCwrz7sjCC3Z/AJZlSTQalUAgcKSbQgghTUuxWDzSTSB/pPi+L+Vy+Ug3gxxlMO/I4SQSiUgoFHrRz3DeJH8InudJpVI50s0gRxnMu8PDUXnBLp/Py3XXXSeDg4MSiUSkq6tLXv/618tTTz0Fn9u4caOcd955Eo/Hpb+/X/7+7/8e3m/ksLvyyislmUzKzp075cILL5REIiF9fX3ymc98RnzfX4jdI39EjI6Oyvve9z7p6+uTSCQiQ0ND8pd/+ZdSq9VkZmZGPvaxj8nxxx8vyWRSWlpa5KKLLpJnnnlm/vv33XefnHrqqSIi8p73vEcsy6JXkczz0EMPyamnnirRaFSGh4flX//1Xxt+7tvf/rasWbNGYrGYtLW1yTve8Q7Zu3ev8bnHH39c3vjGN0o6nZZ4PC5r166Vhx9+GD5z0DOxceNGede73iWtra1y1llnHZb9I81DNpuVK6+8UjKZjKTTaXnPe94jpVJp/n3HceSzn/2sDA8PSyQSkcHBQfnkJz8p1WoVtjM4OCgXX3yx3HnnnXLKKadILBabz9u77rpLzjrrLMlkMpJMJmXlypXyyU9+Er5frVbl+uuvl2XLlkkkEpGBgQH5m7/5G+N3yKsD5h1pRqampmTdunXS0tIi7e3t8ld/9VdwEqsddl//+tfFsiy5//775dprr5Wuri5ZtGjR/Pu33nqrDA8PSywWk9NOO00efPDBhdwd0iTccMMN8vGPf1xERIaGhubX/AfPRz/0oQ/Jd77zHVm9erVEIhG544475L777hPLsuS+++6Dbf0uD/vmzZtl3bp10tnZKbFYTFauXCmf+tSnXrRdu3fvlmXLlslxxx0n4+Pjr+QukyaAeXfkCR7pBhwJPvCBD8j3v/99+dCHPiTHHnusTE9Py0MPPSSbNm2Sk08+WUREZmdn5Y1vfKO87W1vk3Xr1sn3v/99+X/+n/9Hjj/+eLnoootedPuu68ob3/hGOeOMM+Tv//7v5Y477pDrr79eHMeRz3zmMwuxi+SPgP3798tpp50m2WxWrr76ajnmmGNkdHRUvv/970upVJKdO3fK7bffLpdffrkMDQ3J+Pi4/Ou//qusXbtWNm7cKH19fbJq1Sr5zGc+I5/+9Kfl6quvlrPPPltERM4888wjvHfkSLNhwwZ5wxveIJ2dnXLDDTeI4zhy/fXXS3d3N3zu85//vPzt3/6trFu3Tq666iqZnJyUW265Rc455xx5+umnJZPJiIjIPffcIxdddJGsWbNGrr/+erFtW2677TY5//zz5cEHH5TTTjsNtnv55ZfL8uXL5Qtf+AL/seIoYN26dTI0NCQ33XSTPPXUU/K1r31Nurq65O/+7u9EROSqq66Sb3zjG3LZZZfJRz/6UXn88cflpptukk2bNsmPfvQj2NaWLVvkne98p1xzzTXy/ve/X1auXCnPP/+8XHzxxXLCCSfIZz7zGYlEIrJ9+3a4YOx5nlxyySXy0EMPydVXXy2rVq2SDRs2yM033yxbt26V22+/fSG7hCwAzDvSjKxbt04GBwflpptukscee0z+6Z/+SWZnZ+Wb3/zmi37v2muvlc7OTvn0pz89/xd2//Zv/ybXXHONnHnmmXLdddfJzp075ZJLLpG2tjYZGBhYiN0hTcLb3vY22bp1q/zHf/yH3HzzzdLR0SEiIp2dnSLywjrtu9/9rnzoQx+Sjo4OGRwcfFm3MD777LNy9tlnSygUkquvvloGBwdlx44d8pOf/EQ+//nPN/zOjh075Pzzz5e2tja566675ttEXj0w75oA/ygknU77H/zgB3/n+2vXrvVFxP/mN785/1q1WvV7enr8t7/97fOvjYyM+CLi33bbbfOvXXHFFb6I+B/+8IfnX/M8z3/zm9/sh8Nhf3Jy8pXdGfJHy1/8xV/4tm37TzzxhPGe53l+pVLxXdeF10dGRvxIJOJ/5jOfmX/tiSeeMPKQkEsvvdSPRqP+7t2751/buHGjHwgE/IOlf9euXX4gEPA///nPw3c3bNjgB4PB+dc9z/OXL1/uX3jhhb7nefOfK5VK/tDQkP/6179+/rXrr7/eFxH/ne985+HcPdIkHDze733ve+H1t771rX57e7vv+76/fv16X0T8q666Cj7zsY99zBcR/5577pl/bcmSJb6I+HfccQd89uabb/ZF5EXn0G9961u+bdv+gw8+CK9/5Stf8UXEf/jhh3+vfSTNB/OONCMH8/KSSy6B16+99lpfRPxnnnnG9/0X8u2KK66Yf/+2227zRcQ/66yzfMdx5l+v1Wp+V1eXf9JJJ/nVanX+9VtvvdUXEX/t2rWHdX9I8/HFL37RFxF/ZGQEXhcR37Zt//nnn4fX7733Xl9E/HvvvRdeb3QOe8455/ipVArWjb7vw7rvYI5PTk76mzZt8vv6+vxTTz3Vn5mZeUX2jzQ
nzLsjy1F5S2wmk5HHH39c9u/f/zs/k0wm5b/9t/82H4fDYTnttNNk586dh/QbH/rQh+b/++Cfi9ZqNbn77rt//4aTVw2e58ntt98ub3nLW+SUU04x3rcsSyKRiNj2C0PUdV2Znp6evx1H375NyG/juq7ceeedcumll8rixYvnX1+1apVceOGF8/EPf/hD8TxP1q1bJ1NTU/P/6+npkeXLl8u9994rIiLr16+Xbdu2ybve9S6Znp6e/1yxWJQLLrhAHnjgAfE8D9rwgQ98YGF2ljQF+nifffbZMj09LblcTn7+85+LiMhf//Vfw2c++tGPiojIz372M3h9aGgI8lRE5v/S87/+67+MXDvI9773PVm1apUcc8wxkM/nn3++iMh8PpNXD8w70ox88IMfhPjDH/6wiMh8Tv4u3v/+94MX+zf///b+PcqyqzzvRt912/ddtevW1fd769YSrRsQQLIA2cY2vmBMMCFxZDzygT/IOCPnJDnjOB7+EmyHbzgkTkZwGBzHie0TM85nTIwvwYAMlhEgBJKQELp1t7q7+t5dVV21q2rXvq7L+UMHDZ5nLnqXZHVVAc9vDI2hd++115prrjnfOdfqmr/16KM2Oztrv/zLv2yFQuHFz3/xF3/RRkdHX8ESi+8H7rnnHrvpppte1m/n5ubswQcftF/6pV+CeaPZC/ckzFNPPWX33HOP7d271z7/+c/b2NjYyzqu+N5H7e7a8wP5wO7f/bt/Z0899ZTt2rXLXvOa19i/+Tf/xnkQt3PnTqehjI2N2eLi4tD9+75v+/fvh8+uu+46M3th7bYQc3Nztry8bDfffPN33SZNU/uP//E/2qFDh6xYLNrk5KRNTU3Zk08+aUtLS+tYWvG9xtzcnHU6HTt06JDz3fXXX//i/x8/ftyyLLNDhw7Z1NQU/Pfss8/a7Ozsi9uZmd13333Odr/3e79nvV7PaZP79u27hmcoNhs80fr2JGpxcdFOnz5tvu/bwYMHYZutW7dao9Gw06dPw+d5befnf/7n7Q1veIP9k3/yT2x6etre9a532Sc+8Ql4iHL8+HF7+umnnTb67fH32+1ZfP+gdic2Izz2HjhwwHzfH3oPwG3w222U9xdFkXOfIcTfZd717fvgq92XfCc/9VM/ZfV63T73uc/ZyMjIyz6u+N5H7e7a8wPpsHvnO99pd999t33qU5+y+++/3z784Q/bb/3Wb9mf/umfvuin+25vfs3kYhLrxIc+9CH7tV/7NfulX/ol+43f+A0bHx833/ftn/2zf/Zd/6VfiJdCmqbmeZ595jOfyc15tVrtxe3MzD784Q/brbfemruvb2/7bcrl8itbWLGpWcuYmfevpXnktZ1yuWwPPvigPfDAA/bpT3/aPvvZz9of//Ef25vf/Ga7//77LQgCS9PUbrnlFvvt3/7t3P3K9/T9h9qd+F7g79IGhVgree3nu7W9JEn+Tsf6uZ/7OfvDP/xD+/jHP27ve9/7/k77Et/bqN1de34gH9iZmW3bts3e//732/vf/36bnZ2122+/3f7tv/23Q18osRbSNLWTJ0+++K+rZmbHjh0zsxfeDCXE1NSUjYyM2FNPPfVdt/nkJz9pb3rTm+y//bf/Bp83m02Qa651Iih+cPj2W5a+/Zdx38nRo0df/P8DBw5YlmW2b98+yFfMgQMHzMxsZGTEfviHf/iVL7D4vmbPnj2WpqkdP37cbrzxxhc/v3z5sjWbTduzZ8+a9uP7vt17771277332m//9m/bhz70IfvVX/1Ve+CBB+yHf/iH7cCBA/bNb37T7r33XuVFoXYnNozjx4/DX508//zzlqbpS74H+HYbPX78+ItLrM3MBoOBnTp1yo4cOfKKlFd87/BSc8y3/+qYXwLAf2H87b/YvNp9yXfy4Q9/2MIwtPe///1Wr9ft3e9+90sql/jeQu1uY/mBWxKbJImzdGvLli22fft26/V6r9hxfud3fufF/8+yzH7nd37Hoiiye++99xU7hvjexfd9e9vb3mZ/+Zd/aY8++qjzfZZlFgSB8xedf/Inf2Lnz5+Hz6rVqpm5SVH84BIEgb3lLW+xP/uzP7MzZ868+Pmzzz5rn/vc516M3/72t1sQBPbBD37QaWtZltmVK1fMzOyOO+6wAwcO2L//9//eWq2Wc7y5ublrdCbi+4Gf+ImfMDOz//Sf/hN8/u2/SHrrW986dB8LCwvOZ9/+a89vj93vfOc77fz58/Zf/+t/dbbtdDovvnVR/GCgdic2iv/yX/4LxB/5yEfMzF7yHwXceeedNjU1ZR/72Mes3++/+Pkf/MEfaM73A8pLnfPv2bPHgiCwBx98ED7/6Ec/CvHU1JT90A/9kP33//7fYd5olr+6zPM8+93f/V17xzveYffdd5/9xV/8xUs4C/G9htrdxvID9xd2KysrtnPnTnvHO95hR44csVqtZp///OftkUcesf/wH/7DK3KMUqlkn/3sZ+2+++6z1772tfaZz3zGPv3pT9u/+lf/6sVXIAvxoQ99yO6//36755577L3vfa/deOONdvHiRfuTP/kT+/KXv2w/+ZM/ab/+679u73nPe+z1r3+9fetb37KPf/zjjrfkwIED1mg07GMf+5jV63WrVqv22te+Vg6xH3A++MEP2mc/+1m7++677f3vf7/FcWwf+chH7PDhw/bkk0+a2Qtt5zd/8zftV37lV2xmZsbe9ra3Wb1et1OnTtmnPvUpe+9732v/4l/8C/N9337v937PfvzHf9wOHz5s73nPe2zHjh12/vx5e+CBB2xkZMT+8i//coPPWGxWjhw5Yvfdd5/97u/+rjWbTbvnnnvs61//uv3hH/6hve1tb7M3velNQ/fx67/+6/bggw/aW9/6VtuzZ4/Nzs7aRz/6Udu5c6fdddddZmb2C7/wC/aJT3zCfvmXf9keeOABe8Mb3mBJkthzzz1nn/jEJ+xzn/tc7kt+xPcnandiozh16pT99E//tP3Yj/2YffWrX7U/+qM/sne/+90v+S/ioiiy3/zN37T3ve999uY3v9l+/ud/3k6dOmW///u/L4fdDyh33HGHmZn96q/+qr3rXe+yKIrsp37qp77r9qOjo/b3//7ft4985CPmeZ4dOHDA/tf/+l+5bs3//J//s9111112++2323vf+17bt2+fzczM2Kc//Wl74oknnO1937c/+qM/sre97W32zne+0/7qr/4K/hJUfP+gdrfBbMzLaTeOXq+X/ct/+S+zI0eOZPV6PatWq9mRI0eyj370oy9uc88992SHDx92fnvfffdle/bseTHOezXxfffdl1Wr1ezEiRPZj/7oj2aVSiWbnp7O/vW//tdZkiTX8tTE9yCnT5/O/vE//sfZ1NRUViwWs/3792cf+MAHsl6vl3W73eyf//N/nm3bti0rl8vZG97whuyrX/1qds8992T33HMP7OfP//zPs5tuuikLw9Bpk+IHly9+8YvZHXfckRUKhWz//v3Zxz72sRdfjf6d/M//+T+zu+66K6tWq1m1Ws1uuOGG7AMf+EB29OhR2O7xxx/P3v72t2cTEx
NZsVjM9uzZk73zne/MvvCFL7y4zXe+el18//Pdrvfv//7vZ2aWnTp1KsuyLBsMBtkHP/jBbN++fVkURdmuXbuyX/mVX8m63S78bs+ePdlb3/pW5zhf+MIXsp/5mZ/Jtm/fnhUKhWz79u3ZP/gH/yA7duwYbNfv97Pf+q3fyg4fPpwVi8VsbGwsu+OOO7IPfvCD2dLS0it78mLDULsTm5Fvt8tnnnkme8c73pHV6/VsbGws+6f/9J9mnU7nxe327NmT3XfffS/G3263jzzySO5+P/rRj2b79u3LisViduedd2YPPvhg7lxQ/GDwG7/xG9mOHTsy3/dfzHdmln3gAx/I3X5ubi77uZ/7uaxSqWRjY2PZ+973vuypp57KvV946qmnsp/92Z/NGo1GViqVsuuvvz77tV/7tRe/z8u97XY7u+eee7JarZY9/PDD1+ScxcajdrdxeFmmtyi8kvziL/6iffKTn8xdNiaEEEIIIYQQQgghxDB+4Bx2QgghhBBCCCGEEEJsZvTATgghhBBCCCGEEEKITYQe2AkhhBBCCCGEEEIIsYmQw04IIYQQQgghhBBCiE2E/sJOCCGEEEIIIYQQQohNhB7YCSGEEEIIIYQQQgixidADOyGEEEIIIYQQQgghNhHhWjf8P/96BuIkTShOIY7o9wXffTboBQWI+6kH8Uq/A3HAu+i2IRypFDGulSCOY6cItjIIIPY9LMPA8DzTDL/3KL4WsGYws5Q3gDB1tIRrKOMQk6FH9fKvf3zv8H2+Avwf/8e/hHjp0kWIu6tdiMNiFXeQ0+4OHDwA8f4DGHN9nj93FuJnHnkE4pmTJyFO6JB+5HazYrkCcaM+AvHI6OhV47HxMYhHR8chrtTw+3odf29mVq5hGUoVistYl0GhDHFK7YpapWVr+eeAhNou5RGfOv2rj9y4hp2+Mrzmza+B2EuxrH6CZaWvrVyltmhmo3Qd+XxXVlbwGB7utFTAzNpdxRxYLmDOKxTci1CsYnssRvibbjemuI9xD/Oy52M7qFVruP8S7t/MLI4HEPf7eIxiEdvalfkmxJcvz0EchJj7vQDrKQgwz5uZDQZXL8Pi4iLEF8+ec/ZxLfiPH/1/Q7x7GttMGGMbKQd4Hnt2bHf2Wa5OQXx+Ga/Z57/8BMSthSWI6yOYTz4zPwFxcNM9EC8/8v91ynBv+DjEv/iPfgHiTgWPkaYtiEOarizM4vX53Y/9PsRLi02nDP/P/9f/A+J9+/ZC/Oijj0J88LpDEJdL2C5rNWzrCwsLELdaeA5mZlu2bLnqb4olbMu3v/q1zj6uBR9/bhXiNMmZMH0HPCfIm0J4NEZ4Q6Yi3vepUfmaq6Jp92nGo7HZgEbomMYeG+BO3nsn5oxrxXjo5maA644akZ8zx2O4rQ77zbDrxfvjeC0M2wfHdHs0bMr+wjZ0Hi/1mK8EPMfhMiUJ3mPNt3FOfy353z7yRYhTuq91asOj+17PbUcZ3f1GdL7+Kt7DVPvnIb7t+t0QL13Buc6Xv/IViPtdnI+ZmY2P4/1AqcTzwgJ9j2POjTfehNtHOO4FNL/KT9x43Z3xwuljVE/+1dsitytuR3mfOfcXlAfe/e53X/WYrxT/2x9fok+orpznDLh15rv5nfcRpHiNU6r/mNoyP8pwDzE84wzfwrtq6OT6obz0vznLqJTO8xTe3ikSj1c57dR5ZkN5hdr2H7zLnbPnob+wE0IIIYQQQgghhBBiE6EHdkIIIYQQQgghhBBCbCL0wE4IIYQQQgghhBBCiE3Emh12Ga1ZZ38VP/rr9MiDlLjrfAskfeJ1vaGPxfNSdqrgQdkvt9pFF0Lg4ZruF46J58Vr2n0+T1ruzI6WVwJeMs1PVQOqJ5/WRw8GFF99ifYLxxx2GtfAbbEWxqZwbffUxDTEu3fuwe3HJyHue2xTNPNCbAfsUuiSE+L6rXshPnDDqyA+eewYxEuL6CRqkqPIzOzM6VMQnz2DcUjVXSZ3WdJHd1lEHphSCX1QYdH1iJXq6Fgr19HH1JhAh01jHK/FaAOPURtFD1+d4nKt7pQhKKI3Lwixz4c57rH1otdH/1QxwLKxK5K9HuwtMDNbbaN/LIqwLZbJw9ljXxw1jNooXrMC5UxL0cv2wjaYENj12WmhM8XP8DzKZSwjp5d+TMd0i2CVCjpRPJ8lHbjXWh3byfw8uUZJUBpQ1sxzErHDjv0mYbjm4fEV5bYbb4A4ous1ewHHtcb0DojT3GEd62OigX3zJ99yL8SXz12A+NwFdK4cpHzSijDHTe/B/ZuZJRexvr/8dXTxlCd3QnzdgV0Q18YaEH/l2a9D/MUvoo/IY0eXmf31/fdD/PafezvEt9x8GOJuh/ofOY4K5NisU7uuUV8xM6vSNoUAx6zBIKfDrANhwE7SIf+eu4Y5geuA8q76fa6a56o73ACGaHbYkfPCh1cveO5vrgLPOx0vT45A1ufxisqUbtA/3w/1pr2MuSdXtxMPOcSwq8ElWtssha4ZO7qGOSGH9J2Xg1P3JMvyXmJNZDntfNj1zfPLrhdehvMGP8f9iN9z/bikNO9j93qhhHO+KMTx4DP3fwbiE09/E+LVVXSNep473ns5br3vhB3CKc3xtj/8VYjvvfdHIL758BGI+7ljFjkzY3I+0/gcUEXx9jHN8dbiBeW5K1+wfryGm+NrQMDzXX4Wwn45ftbhvCXA1bUnPu7Dp7lLlafc1Ndjuh4JZbk4d4zBa+TmjyF+fceFOGTcfFluWMe2ftWt3fS1hjYzzM33Mp+n6C/shBBCCCGEEEIIIYTYROiBnRBCCCGEEEIIIYQQmwg9sBNCCCGEEEIIIYQQYhOxZknPgNZ6Z8nV1/L7Pq535t+bmaUprqP3eS0xL/5PcB+FAvph4gDj9gDXU5ejnDXXIZ2X46yj750103zmL1GGYeasZ+a1/eyA8L2r+5lcn8nwIgxbC/7y1or/3bnu+hshPn70OMTzS+gEq9RHIS6W3bX+3S66yQoFdEqkffQWrfbQFze1ZRvEr9uxF+LzZ2Ygbi81nTK87g13QXzx8nksU4RtuUH+t6eefATiL37hryBOZk9C7LMvxcwyaldBEeuB6yVIcfuIvg+LWOZKFd0co+QfNDOrj6O3amxsHOKJiQmI77gZ/V7XEnbQUfqxuNeDuFQiH1/qtr1yGZ1zIyPo+mqRn6Qfo6+sWEF3WJnaSUDdtNdxPXo+eSKWmugfSxPMy1GE5zGgpsTuEXbShKHrqOn18bz4mGnCORF/X6S2Gncw13MOzYOdKMPy7nqxaws6zZIEyxl3sO48H9tdkpOqPXJ5Vqn+PGoTo/uxX+7evgXigxH6K5+7gn1hbDe64MzManO4zcWL6EpsL8xDnO3cCnGxiPlk1559EO/ZvRvi3irmeTOzW25B/2i3i7m9XMQpUZ36WxzjOZw99TzEVcrTnEPNzAZd7OMBzRlix9W7PoQ0RrATeD36wxD10qZw2L0Mc9/waaDj/npp8y1n3pqXA6hZeTSnS3PmCJuBV6LdudVxdafQMAeYo37L+duHYaVOhznrnP1xGYf84GXg7
vMlzvuHyrDzjrlx7Y5znnu2VOd8i5rjs/I99Lktz56G+MTJb0F85Rw6sOPlWYjr5LyrVXGM6Q/cOR7fr8UxefXYG0h+s5M0rrX+HMfSbhfHQR5XzVwHMN/v83VPEr7XJo/bkHtQL8c/2KU5wDz5xWdp7rte8JzYKTo/E/Bw+yinnxVj8pr7mPAn6hiPR0sQX76E87Hjl3D70iT64ot1nBOamRm9EyBbwzz870KuM3NI5k2pz2Yezftf4iQjv1nyh3j9PBYOrhH9hZ0QQgghhBBCCCGEEJsIPbATQgghhBBCCCGEEGIToQd2QgghhBBCCCGEEEJsItbssHM8aS/RaeZ57lpmZx+0rp6/5zXvgx66xgqG7oBCiP4Z1yjlMuD1zfT9UN3CS/7BcNitNOB64e0zfg47fB35MI/ExhjszMbq6GvYf/AQxOfOoh9iYeEyxCPktDMzK5bQhVQg8Ve1gPXX6WK7ysivRSosGx1Fv1Of2qmZWZzgPncdOABxudSAuFbBeHIX+pva1Cbu/9QfQxzE7hUsBNgjohTLlHYw9skz1mXnEbWhOe5Lz6N/8IWCkfeN3JdF8uK955/+7+4+rhFlctINyNvh+5g+3T7k9rsgxLaVkqeDXWLlKuawfkw5LmJPCP6+3nDbfxhgg71w/hLExSKet0/txKMyW0AuRHKFDqjMZmarLfJI0nWP2M1H9TYyii7Afoz76/Wx7tnDZ+Y6VnrkJKxT7lkvYnJottsYl0vYJkL/6uOmmZlHbpF+Bz1qSwuLEE9vQT9JqYLHmChh3e0gV2gpZzhJ69dBvHNyF5aB3HxpD9tNTNf08M3ozbn77rshnhxDP6SZ2Vt+7C0QnzyJrp7LFy5CXK9gO+ysojN1YRHrbbSBuZ/9g2ZmYYh1xS7FNnl3Dh2509nHtYCVwexrWo9ZwA/svyC/whqvNCcHpNTO/ITHHtc1uh6kfPJD6sKxRufMXfkzZwtyevpO46cyJtwy2Unk1ndIDlzHM8ieKpov+B5fH3Z8DW80rrt4mJeS3X5X75FDld6WcwtEv8nSjbq7MAv5OnN9UR2zeir0cE5sZnbiqa9BfPLJL0PcpnuUjO4vpsZwbjNNzuyQ7l+iyO23KyvLEMfU1wM6kf4A5xgpbb/YxP395af/AuJzF9DBbWZ25JYjEI+ONiAu0JzMqXoSu7E/drGJDra5y+hgMzObvXQBf0P10k3c67ce+DT3NLqnDDO616D5bTBw3XtjHn5W6mH9XL91B34f4rm3T85AXJjDuU13BdutP4b7MzMrbTmI+6g2IE498sVzunHumYbkhpwcmDkTF+rDw56PDHtm47wzIG+TIWWQw04IIYQQQgghhBBCiO999MBOCCGEEEIIIYQQQohNhB7YCSGEEEIIIYQQQgixiVizw25AK3W9IR41jv2cdcGDATo1goC9EPg8MTFeh4/7q0R4jCou9be43XbK0PPR19Szq3s8HB1Dxmuur70HxPEJDvn+leEVlqyskWe/9U2IRybQrVQmt9XilVmIOx3Xn7WF1vKbj+1qQGvc++R/82jhvU9xRF6xsRyX0le+8gDE9TK6km46/BqIe+R665NGbGRqK8SDEBv/IrmWzMwqIbbdCrnKiuRZ8EIsI7cy9hGwXsDtK2bWX6FtcCcr7Y3zm0RUhynlm+oIft8hL1ini04uM9ct4lEtpuztSDFHVimpZZQTyxX0mwWRm+IT+nea+uQW2gJ/s7KM/ozMJz8cJeJBhmVO2HlnZpPTkxAXKG+mCR4jpcof9OkY5ApLU/ZM5rnE8Dz7fcwVlQr2ufXiiW99A+LOKjkwY6ybchG9ICP1hrPP8Qb6ZDrL6Ds5e+IYxF6MbbdKdVGOWvQ9tssgdPt62JiAOKrhNe2eOQXxhYvnIK6QM2Wxhdfr+uuvh/jHfuSNThlGyKMzMYHt8PK5MxA359CBM0L9z6e23V5uQlymejEz63fQT+OxVyp12+p64JOvKmOHlivoHRIPx50nYux4v4Y6bdb84XeUAY+RvWRX3999bvTS3cj8Nc0JUzfnJn1sd4MetjMvxDyybvBEnnD9c8M8bGY++/jIWedFNE6S7zqoY17YeuT1EFe3oI/z/ELOWD+P+cu//CzE4SLmO6+PzqmB49EjB6HH1zjP5ccecGeTofu4+uZ8L5LXd9gnyC6/4Y7tawU7k3n+xfOSIMGx+OgTDzr7fOrhz0PcbaFbjaciIdnVPR/b4sTUNMT1cXTrFkK3/ywtVyFur+K9b7WG3/P8qUwOYb6lbK7iXLfdwXm8mdnjj38d90lO6JDu7ycnxnF7msvOkvvvwsV5iBeX3Hu9Ls11A7qHKZVdx/N6UAlpfpVifcYrmBtKA7x/K6V4H2FmtmMr1l9vFa9po0z3c1T/hTLOVbZtx/Eg8zFeWkV/vJnZyil0AHdr2yEub8W8WahP4THoeqWsl+NcknNvkfo0rtH9PMeZ4+nkG1kKX9Zwz2PWy3tOpL+wE0IIIYQQQgghhBBiE6EHdkIIIYQQQgghhBBCbCL0wE4IIYQQQgghhBBCiE2EHtgJIYQQQgghhBBCCLGJWPNLJ1i0x3LggMW9zvbus0FHSk92v5CE6T4dIwhw+wGJM7stFGG2LqAQ0cxs8rqbcR/0DJP83paSWZ/PwSPZOcs683yFL9U37Lx0YthLJl6Wt59Nixsj/19oorD1qSe+BnFEF2jrvj0Q9/kCmlmFhKuVyjaIWXzJu2DBKnlrbUBy5+e++ZhThm/87f0QV6tYpm1TWKbpXSgELVDfuOUmlMqHv/B+iM+fdQWhS02Utq6QiL5FAvVVEs12OijgHQwGEHN/9nJyQCHk80IB70aJ/83MEVXXaiSsDfH7iMo+SN2X3EQkCO4PsK2wFTolqWqpjPUx6OLvV+marHbd7FKp1fAYPral1RbuozyCYt72KrYTfttIfQSlyL2+KwPmFzxklEgLBazrHr3Ao0QvaUlJsh7QC1RYqpx3jGIRY27P68WZcygcDikfVUgI3VvFc/fzEr7HYyvJmCmH8QtUjF5ukkV4/UZLPC5i/ZuZZUV6MUUB292uPZi7KyP0sp4Stas5HM9vv/0OiOsjDacMCb2sZPs2FHp3l/dCHJKEvEj1xDmuH2M9RaErF04SEiXz2Jr3cp71gAayjHJPaNSHHBnzS//3X2qWFlAeSDgfvozJjGcpxfw9v3QC4WPySw6cOK/7DS0lQfvgl5s42SzAI7gvJDAb9PBFMX16T0KxtEEvneAX2A15M8Ka6pLaMr9QI6BcXyH5+fX3vhPi0dveBPGVSzh3KkU0jptZp34Q4t4kztF69BKK0tmvQBy28aUViYfH8DO6P+K3fplZ5uEYlg59wYNzU4bfum8Audqvc3Hmhf7G/d2I72EdVukMki6+6ODpx/AlE9969KvOPjsrVyDOyJzv0fgd0rhYpHuBffv3QTw22YA4yBHY830qz9X7PWxLZ89jW2vTHHDHFhwni/Tyhn7Vbf8+JcIleuHasydPQHzw4I0Qj47hi18uXsYXL8wtYj6rjWAfNjMbq41BPDKKc9lRnmOs
+ ...<remainder of base64-encoded PNG output omitted>AAAAElFTkSuQmCC",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAA...<base64-encoded PNG data for a Matplotlib 3.10.1 figure output, omitted for readability>"
T0J7NMtbhcg59czOTmGe+8g3FGtQO/ZstKax5uRKOF9/FfotF0SdkqbzTftiWmDlvuqpvW5SbKmQqURaElgD232tPxnF+2mr0z+RLmKd1PVGKSN3B/nVKysmp6t1QDX+jpDw5hSJ+P6Q8rLMqR0REokPYv+Uq/qafQR/Q6NgBiLcp7+ixrei02TOpaqxnHkA3ii6l5JKTIT57eBDimb3oE9ry1JMQT4zheE1Y6IsS5S8SEam42C5LrVuCRyjxYmUcI/sd9Dd1qXHdWs5CHJzA4yUi4uSxP1Ydi17oxSvRPzvzDPZnr3ZuKy9SSOV6rGD2d1DwO/E41o6tO3ZB3FHEbS4dxLq+L4y1aHw77ncsb871lhp/lsqBivIB1tS5Qq2I78+4an0Wxzk0XzPdTMUqtmFmFNcLwcVY5xeSYxehI9uNoyPLVRNZr6oVUbWGEBGxPKzvk5NYP2ZUnwai6EWvVDIQl+uY/9EYrjNrNXxfRKRcxLV3sYj56bquirFNLcpxG0ti7o6qmlcJmPPcAeXlTk5jHgRacZv13C6I4zbW7dbYIMTBMPazU8XPi4gkInjuvKgHx31IlFNtgegZxmsRThLXDSedgue5y07EMeL6av0rInUX86Cm1i76QQThJM6di4/Hvin86F6Ig3W1ViqaYz2sLvycdAw6tQeHMJ4r4n4UJ3B+HyupmlfCOSsQMK9lBIJYo5I9WPNe96YzcZs/+TXE++v7If7TP/8TiB+451GIH7t/t9GGUeW5q1dxLWU1eKbDocC/sCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJOGSHnae8OeUq3kscTqCrLRjEe/8DNt6jLSKyrAc9ONEYXj8cXIKenRPPOg/i3pUnQLz+0dsgXjyA2+9ZfbzRhnAnOtCCcfRnlCp4j3U5h/dHj+9HL9fsODrq3Dq6BGIpdOiIiHR0YF/t3f80xN29eJ+9U8I2+cp5ZBXR3+H6eF+49puJiMQi2IZwD8a5yJFxicUS6E1LRtExkYirFA7iveGeuatiaYeddrn5mNteXcXKB6f9jY4y59kNus638DvJDPpKHBe34WovjvJk+ILj09Y/6pqNcNUY9ZVzRRwcs5aHvxFRbQq5uE+JCr7vj2MeiohM7kSfyaKV6HaYsk1Xw0JRU57BkvICbR9Bv9yPbv8BxA/df7+xTcvH4zCew/2b3I31JKQkjHV1DMI9WK8efuBBiKs5dN6JiGzcthXi4jj6S7KT+BuZdqxZk2P4+dwc9ktrBt0kNRd/T0TkvvuegjjWgq6R1g70RE3V0UFXqmIbRpXjzlf1Kj5nOoYCyoGWace+DAQOeXp8RdFOzWoFx2HE8PVh/Y9EzX+Hs1UNc2uY2/nZLMSlArrAhhbjPBlT/ZuMo/Mmrfw0IiJ1B30krov7FQhguzs6cJsTym9yQHl0nnzuWYiXKS+oiMjEJO7X/gPoGnEE+zLTgm0IqdoeieDYcNT8U62YfiFVuiXeloE4VzgyNa8wg+uGfSPPQbyoHz1g/b3opgyqHBAR8ZRbNTeF9Sibxd9sb8M6UFTu41IZc6ao3GH5Ao5hEZGVw+jN0T6ninK1dsZwjRGqYhvWnI4OnBnl2dk1ho4pEZGajXnillVetKIjsu8E7OvOE14PsTOL8+bMpschHnnuCaMNUzuwDtth7Ac72Mj4e/iZK2L/3TeHtd3BlJDXeZgDsYkxY5tRte5+zZrzIe4bQG/YT369AdtUxePjBrGNdeUgivnm+qqyD9sVaMM13tJW9KFVXMybYALr/AlnnQbxjFJIzTyJ85mISFUtgL0g5nZZtTuRUJ0dUy7lsFpft+M5VkVMN9OYqtNzWawBs5u3QXyxsYXDxwknnguxncYaZidx/zNRdLUFItifIiIBwfn7+S3oKJ/eg2N3ZAxzNRRULtYk9mlYeZ79uuluK85hTXN8TJZwGNtYKuA2d+5Ch2Yyir/heljXC3Xz/H4yj2u24fogxDOjOKb27NoEcaiG+51JYr/1DWKtn3NMh6OXwePVFlJevYg5Zy0EmV6sBVf99yshDqtrIXUbj48tprvNVpdzYjHcN9/H7zge5kTfEvTkrViFTrt9G7DvfNd02AVCau0fxHlv/Q70vU1kseaNTarzoTnMq5yqu3bAPKdMRjGvTj/vbIhPu+h0iB99ZgTi0nY8B0tkMPff8rZzIN76/I+MNqz/Da6dzn0L9mXPINbNQ4V/YUcIIYQQQgghhBBCSBPBC3aEEEIIIYQQQgghhDQRvGBHCCGEEEIIIYQQQkgTcciSnpDy+czm8b57t4IuhFgc72UO2KZMrKsd7y/feyAL8fDJb4R40fEYi+B9wPU8OjnSKbzHvXPFSUYbikG8l/z5p9H9US3jNnM5bOPU6B6IA8rLE41iv/UPoY9OROSEFejTcALoTQgFMhiH8R7toPLklHaPQqz9g06Dy7SFAN4bHm/HNnT3KbfFApFK4/Hxld+pVMX+9qt4X321aroVtPempvwLVeWscRx0u9Trymeivl8q4dgoFdE/ICLieLjNVBvmaiqdgTiTQt9JNKydEmo/LfTA2IKxiEhK+RSnJ3AblTL6BDwPx5sl2AZPOQ1aUuj3WLIYnUciIuUSHgvfw3amU5iHC0laHZO6Gjc55fnauH49xOMj6EYQMT0TceURDNvYp34Nj4ktWGcXKb9lWwqP0WzJdDwsHVwJ8W4XPVLZGXSPuJEMxONFVW9KWF+yM+gasQKm16Ziqd8soTPFDuP84QVUvyiXTkm5xVw1ZhNh06mWTGNfaYea55uekIWgpwNdIpEQtisewb6IxTEnHNeseSHlMmqJ4jgb7sexmVHzd19XBuJkBPu/JYG1pGKb/R32sN055aqKJvA7oTiODe032TuDdXbLdsy7sQnTH5ebw23U6xgfu6oX4mQU2+CWlLdFeTx95TeNKleQiIir5mNLra0c16zVC0FG+ary0+jgOqDmrI4ezLt0A+djIpXBF5QjKmDhXJpSaZNOKg+Pqo+Omns3bdxstKGzE/1w8Ti6DUtqPXDiINbUtaecDHHZwWNcUodr+YBZN8ansQ7vH0Pf0tgIenP2uPgbFeUHjGXQ9Zo5DtfGJ618rdGG/hF0PD77yM8hnhwz56uFoJbbD/H2aRzHZeXoyizCtdCJIcwhEZFUEA/K0AD6sFuSuK6sqppZLWEcDuExrfjqfdv0iIVr2IbyDB5zO4jjxQvgMR9X429200aI41GsPfkoesRFRPIxPMeqqvGkfY7xDuyXmRrW0LyqXXZdeUXHTP+mHcW6klNjNpEznY8LxbITToXYDynXpHIXBgPYXwHXdJJbMbU2eQ77bHQvrq9mKhinkngcnTFsQzyC73e1oe9XRKS9BdeuBbXOrqnjWlee3EIW17YVtS631flGoYL1S0SkoL6T8/A8yFLXBEIWrkE2bsc1YboDvz8bVH7ZhFkHCsr3Nz2L+TnUfQrEa7r/m7GNw0Gxiu1KtGEeeYL7ov1zVsA8iXfUcwV8X38G+7tWxxzIdGN/vuXtF0H8/4
39GOJStpHzFHN/Wj27oKNL5aWDY79ax+8HE1i/YgHMqa5O85zy9NceC/EZf7IGYiuD/dI3hDXP83DNtn07zotveTO6RFeuxDWjiMiTT22BeN+uAxAvWdZnfOdQ4F/YEUIIIYQQQgghhBDSRPCCHSGEEEIIIYQQQgghTQQv2BFCCCGEEEIIIYQQ0kQcssOuWsb7neMR/KqlfAohG+819ht4WWJJ/M4lf3YJxGdedAHELR14v/L4zk0QB9RvZvN4f/TkLryvWERkfx7vDb/v9tshTsbwfuZKFe+B7+nGe7JblHNrZB/e21+zzX5o6xuEeMXxeM+1uOgCm8nug7ik/IGzZfwNy8djVSmb954XlHvHL+DxXpUxvrIg3P7jX0Dshh6EeHYWfSeFuSmIG6gTDa/d+Dhuw1W+p7ZOdES0dqDPL6LcPcWZLMRbt2GeiojkCphHA0NLIA6EMO9aUvibQ0Po4Vk0gN6roaXKbRbBHBERSSk/k5duwQ8o91hdjeFAEK/3B9RvdA8q714L5rGISF25GZSqTNraVJsWkKRy2AXV2K5NoxdkaiuO9YEkfl9ExFKum7yqqxVVH6wYui0iFh6TyXH04jz5+DMQd6fQSyEiMj2bhXiujB6agioP5Sn0mYjy6AXVQYuFlH+pZjrVJrPYBtdWDs0gyqwsG3PNjmovnmq0jw6QYtF0+eVy+Fpre0Zt0hwzC4Gv9jWqPEQhNe5CEYwreeVZE5F6HcdZOoXj6qSTcKzqYxgK4TEOBrVDU/W/bfrjImGsk8mk8jeq+uF7+PmQ6peNm3E+L5aUw8bF8Sli+knDyolq21ijfAvb5NnYjzk1dvIl3G89NkREaspt5VTxO7WqefwWgl5V76wa9tXM+ATEzzy7HeKnnzPXV9396A47e+05EPd34m9WZtFLGFB1QGydh5gji/vQSykiElPzXCSMedQSxvElKfyNuovbzJexX8ou5simbbuMNsxWJyE+eSl69QpduB8jB9Bftmk3uvme2Yl9n1ee0Y4WtU8icmw3rglOOef1ED/96F3GdxaCNyzBeXVyBh1dT4xgTty1C9f1saWm5zaexHGcCmB/1PM45lwLx3VRjcmoWuO52iFlmX/74Kl6NVPENZ9fwToQVm7Yelb5mXegLzuu/t6iFjfXShscrCW7pnAMR1XZDntYz0LKwW3VMdcrWVx/FH1zvRFUdd4N4TaWtGaM7ywU8TTWH8fDPnX1EiCEx8zzS6KJqvPaehHH/vg2dBH6Sczfzp7VEG/fgo7HsqXWRkVzvgj24/xtKX/ZgT27IC6WcI1XKmGuBlzlUPPV3BrNGm3w1TnM3jFcH7emcb8HFqOXs1rF/SzXsE01dS6eajN9sRXldaspX2JE0JMnxxmbOCw4Do5tz9DNYX8HldvN8c0TW19dzvHVeX/dwfri29g3TgjzaOCEQYhjPVhf5jahK19ExFJO7oHThyC+ZN0bID4wjm63iYksxPmi8skrN3t/L65bRUQWL8bz9ZryUM6W0Rm5aAk67II25uXOrbificux3045GZ9BICLy9FPbIC4X8Xi79Ub+v5eGf2FHCCGEEEIIIYQQQkgTwQt2hBBCCCGEEEIIIYQ0EbxgRwghhBBCCCGEEEJIE3HIDjvPVx4iT93T7qj7oZVDyLLMe66jEeXRWYPutoi6B37j+qchnt2P959XlXciP4t+hb3b0R0gIlLw8T75kIvbSAbx3vGWqPINtKID4cA4ukecOvZDKY/33YuI7B3Zo155HttYyEMcDWJfOhG8Z3vawX6NKQ9WPKW8MCISC6LzI6+cBo5nuvcWgrvufQTizKKVEPsu9ufTj9wL8ZJF6EUQEeloRx/c6D51zFRux9syENfUvf/jylN4wWmvhfikE9BJISJSUrlqh5TDZs9uiLduw1zf8ByOhUwavS9vv+ytEL9u9QqjDWEfr9cv6kXfUE057Cxb+ZyUR6Eu2G92EONIBvNQRCSmPC9eAOuMaaVYODzlOvKV0CSsPDYh5Qlb3IJuBBERR7na8sqBFWjB42iHsc/K4+jgqGbRoZKfxloxZcgxRLJV/M7gySdAPDaJjofsLP5mUjlXKiX0mdRD2OZK1awdZeVwsFVuRdV++xbWUVc56wLKZWU7mJuedqyJyMRkFmIHD58Ew0fGYVerY3/li3i87BT6mMpZPOZ1R7ncRCQeQ7dQQLnAstMqr5TDbq6Aeaq9Xr46xqGg2XchlfslV7l3VP/Xyvi+9uaOjaH/pOpjzlQDZj+ElXsvoFyIpRI2wlH+xUgYvz9XwX4Zm56F2BftWhQRH/vGUv6sWOSQl2WvKM8+/QTE/jTOQel29K49+Tx61TY3cLe97jz0EH/7O9+C+C0XnAVxaxTzLqryNhhSuV/BsdHZjmshEREvgvVq9iUcgZaq63X179qWqm/bd6NT+OZ/vNnY5tQErkVPPwP3++LL3w1xVw/2dcLBPOtzMIeez2J98xq4kifUmmL5YnRCL115rPGdhWBFH+b7e+Po5x2IoEPoni245vvVLnOcn7SkD+LCjhGIs+qYBtT8kK2pvIpjHrq+8pR5ZhsmfdzmVBzn9koQj1HKwn5IpPE3PeW+lGlco0cipstvn6pP0y6Orx51jhVPYBtTCdymr5y7UzXcfjBgOt0CM/jacT7W0GTe7LuFQk1Jhmu9Xsf676jzQy9s1hJP7Y9VwPWUU0Bndmsner6qk/h+cQLPLxzl1q0XtGNYZFptIxDBHS2X8yrGbeRL2OaAreakAPbDoiFzzurqxfPQuFJY++r8oVjH87ChQawDQRcdnKUanifbQazDIiI1F891E0k8H2wwbBcES3mg9XWCoLruoJevpZKZd9pZp73OrloXhpTbtaZOFWIZbEOyLwPxWBFzSEQkrTzoXcO4TkwPYn2J9qG7fZmFcb2M469Qwf32Gjwbwba1bxH7IRLAROzoxOsBqRac38MhrIHxFF7zOfG05UYbWn90P7ZT5dnvu8bjX9gRQgghhBBCCCGEENJE8IIdIYQQQgghhBBCCCFNBC/YEUIIIYQQQgghhBDSRPCCHSGEEEIIIYQQQgghTcTLMN8pqa2DMkAtA3aVwbsmphywO41Cwjt//FOI27pRKtmlpfglFGWHQigTTCZQgBjUhlERSSjpak8XCgjLeZRIx5SwcHpyCuJ6Dfc7FUXpZa1gPnRi29O/gfjA5q0QV5VwWEK4H67ar8QiJZ9N4LGyIygMFRGJqodKtAq2e9VqFKMuFJe/8y8gjnSh4LGUR1Hptg3PQNzbgzkjImKrBx3EopgnNQ/7e8Vx+JutvSi2LnVgHl980Z9A3OghH0X10AnlkRVHiTIrDn5+Qkmsd4/sx9+M4z6N7UOJrIjIrue3QWxX8Dd2jk1AfNobToF4ySCKnetKAGpHUS4sIWWVFxFLP8xECdjDlvmwgIUiq2T+1RKOo0QNx11nD/bH9G7sPxGR7btQ/D1Zxz5va8MHVdiqfhQ9rEduXclrlYy2UjX73FEPAJocwxpWLKAk2q/j5+MRrPU1JaK2IlgjnYopyA1rmbWr8r2Kfe3Z2
Iaamn8iIcy1cFTNBUr4LSISU6/V1X7qOrFQTM1mIe5Tc5J+CIXjqRxqNx92ks+p7zgYV9XDFTz1jKjN21HabqtxqR/AsljVBhERO4nHpFLE3HRVGxwlNI+o39APQ9k6imNrqLPXaEObEgYH27BOFotoB5518DeCYVwy5VXuz6rY880cstSyK2RhDSw2EEovBJPqATabQ5MQByZwDtlzAB/6cc4F5xrb/OT/71MQ3/LPX4b4Zz/5McTH9GOuh8JqbZPC4+W6mENtaTP3O9vw4QpB9YCasHqQiK3k/wU1r9WCeEz/5Su3Qbxx8wajDbo+/ejH34N40crjIT5+OT4kKhZBEXaLj23qU+XNCZp5V1QPTfJrmGdL+lHyvlBU1QMe2qLYzteu6IB4qoi158lRHKMiIpvGcZ5crh6+UFPj2FcPZ8qrOcuv4vELRfX3zYfq6SKqj2Hex1qRUw8BaV99DMQBtRTacCeKzQcazLOLWvHhJaLm1WgQNzpXx34qTuOx6VFzZl8HjtewfjiBiIRm8PgsUQ/eG8hkjO8sFOUaHoNaGetJRc1Bro+x4+A6XETEEezj0hyuI+0I5ncwgX2WncIHQEwdwIcp1FTeOK75oI9kBuc+p6IeYqDGXKmMtb7i4trVCuN5clA9lKpjkTnXLluB54xj0/ggjDCWcrFsfL9WxL7tacUaKTauMfyk+fCNLZuxDvR24hhLqLXsQlGuYf8F1NomrOYoR/DzJTWORUTKFZVnxvoVt5EI4Fh2Lfy8bWOeZXrxPNcJmI8EtNU1mLY2/I4+R6wJrrdsB2uYpd4X9UCJWt3sB0s91MtX+x0OqIfetGANa+3A/ertxzxzbTx3aV9s1v7Fw7hN/cDCoPX7PdCOf2FHCCGEEEIIIYQQQkgTwQt2hBBCCCGEEEIIIYQ0EbxgRwghhBBCCCGEEEJIE3HIDjtPSbbCQbwnXrsQxFb3EQeUV01EvBrenzw1hT6ywiTGsTreo+4JtqGtFe8bzvShv8FxTcfD6H78DX2/s62cDDUH78EOWHi/cyKK98Q7qlsC+gUREeWUcmvofLBV3+dKeF9+LYJehVQf7mcxloU475n3fVeKeO22vWUpxB3Ko7RQRMLYrq2bn4M4N6eOn499Wa+Z+1ooFCG21P3k0Qge03oJ3QBzk/gb43v2QvyLO38B8Wwevy8iMlfAY5xqQaFDuhVdPIkWdAPs24fOuq6OfoijLejZe/Bn2CYRkZltz0LsqvG4fQydEvuKuB/LV6HbL92CuZ9uRV9ULI4OFxGRdAL7OhTFMR2P434vKGXlaFDlw7HQhVBUiswDlunMPKDGf6Gm6sE05kUgpFwjHn7eV7WhrOqT75sOu7DyKY0qD6ejfHKW4G9MzmL9ETV+fOWVCsVMh2OL8kZp56kexwHlZIoJHhtbOUBCah8t9XsiIr7qS0ttQ7usFoq9+3Fsh5SzVLvdBgZ6IG7kQMsVtMNO9a/yoJaUI3DT9p0Qax/s/r3oM+tQ7hIRkXQ6A/G2bdsh1nPvJW9+LcQRH2tkayYFcSyH9Ws6mzXa4Knxpvs2V8AaVqziXFFSfW+HlZevrnPKzCFP5d2smgs6GjhPF4L+wWUQu4L1vq58m+EEOnB6B3AOEhHx1dpmoG8RxHf/1w8gzo9h3sRj2L8Ro5Zg7YkETa+O9lfGY3iMdT2MhvE3fOXDnCxjvzy/aSPEf/InFxhtOPGkEyH+6tfQe/foAzg/L+3JYBvjmKdTY7jueWYbeo9DCTOHultwm67ydcXCR+bf7/UYsZTHqDeD64Yzh3BdkauZTuZdysdYCmCedA2g2zgQxpyoqPpYUWu4YF25dkNmf6dV7IyjJ6xF+ZyqyjM6o2pJphXHRkY5p0IV02XWr1yxYfU3GlYCc9sK4eftAs4D3UHsJ6UbFLuBM7ek+i4dwHYOLzbXhQuFq9ZPWkUYDeMcU1fzQS2L856IyEw9C3G8PQPx2jecDfF+dT63d2YU4s5hPEaeOu5u3TzuNUFPYKIFPVwTar6u1DA3l5+kXKAx7JjpOfSZZroazFnq3LhcwL5u68Rcc3zsh45uHEGdndqxhm7LbNn00XVm8DuRAH5mYr/ywy8QFa1mU2uCuvIg1uvK7WaZ3rRw5MXX1J5Kbu2Jrqi1UV0tXVJpnEcDYfP8JqSc25EQHqNqCX/DsXG/vCrmctBT7kVVXnwxXXBOHetqqax8zTb208wMjumy8jvG1Vw6pZycTt2seQnlSi4qX3OppBLgEOFf2BFCCCGEEEIIIYQQ0kTwgh0hhBBCCCGEEEIIIU0EL9gRQgghhBBCCCGEENJEHLKkx7bwPvpoRHk+BO8bTihPSCKF9zKLiJSUE6U9hfcWB9U2a3Po1PLUvcilEN4f3d09hJ9v4DNbeQI6VR6591f4mz7ezxxSvqaycgO1pNCzEw5iFwcs02FXqGA/jBzAe/mzWeW6sPCe684VeN21P4PHpuZjP81Omc6DcEW5+PrRWVcumfdpLwT5aXS13PNfP4N479g+iO06OgmefRa9hyJiOLcc5f0SdYzu+uk9EIdDOBZOes3JENeU9yJXNft7554JiKenN+E2KtiG/WO7IB7ZhZ8/5TVrIP7IB/8a4l8/9qjRBkd5KHJV9AmUlVNq52/Q1ffgk+jBSATxvvyQchwEIqaPLqUcdouWDEL8p29/B8S4l4eXoHJw1JVXrVDG/prJYa7N1EyXmBPCeuA72EeVMtYCS3km6j7mha1cYok01p9AwPRMBFRN8tU/2xj+OLUNHdvKV2qr7Xn6BRGx9TaVA9VVsgpf/4bRBuXmUWNcLLMNnvoNXQaMurBAOL72xaAzo0W5ILWfTh9fEdP3WlReD32IfE95UWP4/YkZ/P76DbshTsTQiSMiUtXiFsFjHlb+yk3bcJvdcVxD6NrR04PvT+/GuUNExApiXkxMYjsXLcJ5TzuOqsptVVJeT0d93vVMR06qBV0wNeWWKWqv5QLhCI4HV7UrHFFrOiw1Rh6KiIxPYP9OzeDaZt8YzkG+gzmi15l15YvRJp9IyMz9hHLSBpR/ORbF8RRVHmJP+c/2TOI6VHx8/9K3vtVow5lnngnx3r24bvnRj38C8dPPLIHYreA8MDuONaE2jd6roItrEBGRkoNeq52zOJ/HI6bncyHwVf/5qi6HPZxHj23DYzzZi+NJRKSo1jKOmlc72tFvHU2icyircr+u/L6OiqsB06NnK4dti6qx2txWy+ExFXVe4I/hmnGR8jeFAuZ8lSrjNrsCOJ5mlesvkkJPnlfHRjulLMR6bdtAYSee8r71Hot+5aHFeCwWklpN+3qVT9FTB83F90NRc00bVW7VZBHj/E4cd6esxv0fXq3WbHY3trmMbXriAdyeiMjUFNa8WArbUCpjLUi34edPOBXrz8jEFvyBFOZe32L06IqItLb2QpxMoEev7GAdzSv3rudjm/ZNob+8LaP9aNoaKZKOYT7XlbezWjHX6AtBsYZj1aljfQ+G8Bjn81mIUwnT+9jZjmsXP4Q1TK/ry2pOKZdwreIG9Joc22yHTX9ctoDnQLtHcL5v
7cU8DMQwD30X66pXx7GQr2AbKw3OsQyPfV3VbtUve5TPcS6P+2CrY5ErYJtt35w3yxX8jW3bcX6ey9FhRwghhBBCCCGEEELIHz28YEcIIYQQQgghhBBCSBPBC3aEEEIIIYQQQgghhDQRh+ywCwfx2l5JOSIC0QTEXgDv7S/VTZdLQN1LHAmjXyEUwm2G43iPeroF3x9TbpFSP/rpugaWGW0YnZiCePWpr4O4MLkf4p1bn4e4WMhCHAzgfqaVU8oS009zYBR/Y89u9E7YEdzPlm50rHS2qd9Q7gtrBr/fOmse9v6uNogXZbDvtm9EH9B5pqblsNDbjR6E5YPoJfRVfwZtjAPaZSUidgBz2deuHpXLEkJfQF9fP8TnXnghxKk4Hp90FD0KIiIbn3sG4q3bd0Dc0z8IcUWJxgLKEfnc1s24/a1bIY4PrjLasH8/tqs1g3FXGO/NjydxfM6MoWNqenQ7xJNTOB4rrjYOidSV8+lAFnPzzAvM47dQFPLoKsjl0MVSLOBYLxbVuGvQ9JYMjtVIzHSgwDaUXCwWxGMSCuP3tV8u1MDppB1nrodjRjsgtClKvx3QAjQLP+C6pthG++EM74R631Vt0B6qoPbyqe1Fo6bzQ/uutDsp0sC5uBC0tqObpUXNc1HV7pkcetRiqjaIiNRruG81B2PtTAkrn1VNuUUmZvA3Kw5+vy2VMdqwaCnuV72OxzinPC279qH/LNyJThvbx+8n49hmq8usuy0xHH+FLPpKdu3eBfHwisUQ15Rvq+Yqd5Wa3rXjTkRksZqvY1Fsd7VsunYXgqks+uTqDu5bUI1zX+XQ08+iY0hE5PgT16jPbMDfUP9mXAsq/65y2Bw4gOu1ShXbqJ3BIiIhpYTSZTkUxrzSNdNV3tCC8ui0daBjqkO5hERE8spv2tOLzqeZWcz1X/7y5xBXCjj3TE/j3FRUjs5gg3kloHK3tRvdWV3dpodqIfBU213l2xTlNUwrD+VrBkw/9nR+BuLaOHqK6kXsz3AC866i2lRX6y/bwza5dXOOs1zlSlbbrIV0JmI9s9T4cgPKlaS8rm4D56qvzgWiLua6r9xZY9EsxHU1D3gqrULKI1oqmbUrrMZPp/KdRYNHxp0oIuKqedFV/RUM4jrCCiq3awvmjYiIW85CPLoHfdPbnsN1cip6DMSVNjzXKqtj1B7DOcn2TH9iZ+sKiCMxXENU63hM0h0ZiOsO/mY+j3W3fxHWDkvPgyJy/z2PQxyK4292LVauSnXNYGw/1sSai/PTTAGdeG1RPC8TEUknca511HUMxzsyvti88qCFQzgGIkEcV2G1zrctc56z1Gu1Gh6TUgl9k9oHq4Ww+iygrtZbgaj5917ZLDrrfvbzuyFuaX8TxINL0T/qivLNufibJeUN1/0oYp5b6Pnd9jA+MI55ZayNI8EXfd9t5CpXebV/D17j0fP3ocK/sCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJOGSHXXencjpM432/ZRfv2VWKCPFt0/GgvUMtLej+CIfwXuNyET0gMe1nqmH8m0cegXjpSnRqiYjs24e+AFt5IeIRbENA3WcfU24A7bUqlzF2HNPxkFS+kTNfg/6BaErdhx9Qfqc63pte3ov3rtt59Dd1xVNGG16zYjV+JoNelicPjBjfWQhmJtFFcsbpZ0J85tq1EEciym0VMK9J28rF4ynHRkA5VLT/qVzD/p7eh30zU8H78GemcB9ERHYqZ93+CczDZBf6GSSCx9AKo6eq5uB99Hfd/xDES4aPN9ow0IbOh6iN4ycewrysVtDHtDOHPsekylNXOQ/GZs379js6BiEuKbfGPff/GuKr3v8XxjYOF1Oqxuk8qFRwLNdqGIeiWDteeA1dFbo+aL+ibSufj4p95SXSzgc7aOZ/LI7HVXvytKROO+40lpL1WYYlykT7NLTnLqj9cqou6zbrNpgevgZtUh+JRtFHc6QcdnnVN57yJfV1d0EcVs66UtWcYxJx5TkNYn9bAeyMUBiPuaUcdaWy8s/EsD4l29FNIiJStzE3nSDG0Qzuh6c8LvkC9svypUtwe2NYX5yi6c2dK2AtXr5sOcT79m7DNitfiaWWTIWcOlbq30CTcdMnqF17xSJuI9Bgfl4IXEsdc+XMKqi8LCt/zNgk1ksRkS/d8s8Q796O3tOCqqnbR9FbpP2yuk7U1brTck2fTEAdE12fLJXLvqVcYnqDqrbEEvib09NmP0SUDzY3h2vZahV/c9eufdgGlYdqmhQ/inlm2mJNT1IigmO0VDTX6AtBWK2hA2pfalnMM+2L68uYY+z4OVwDb8ri2n9s/x6Ic2U8HgU151XUfBNSeen4Zt/ZPtaKopqjSmruDqo89aqeipUjV82J4plHvaLqvKf8TkX1nUpEjR913hZVa0LPxbkm4Znjb1k31rPWMP5maToL8UJWv1AI59a6mmOCYVxvVVx0ue0ff9bY5ubfoKczFcBxlqjjXLnpvvUQRwbxuE4rr158OAPx4CIz//eN43Fwa3jcg6oedSufnOfjmPNKymdtYx6MbMF5U0Tkkcexhi06FseDl1JjysHzfyeHv9nWid/fNYLnUJvnzPOsN5x3NsQ9i3CNV3TMWr0QxJQbMqrOC8LKKRxtRX9/pIH3sVzGPJnLzqn3MbeTyu+nHc56ja7/vCuRNvPuNaeeDPEutZ766v/5FsRrzzkN4mNOGIA43Y155vv6/N50U1uC++Go3J+cy0K8fccu3IDaT30e6yrverlmrrdjSZXbeTUX/J6eYv6FHSGEEEIIIYQQQgghTQQv2BFCCCGEEEIIIYQQ0kTwgh0hhBBCCCGEEEIIIU3EITvsFg/gPdNpC+8d3r4X73cen0RPQc01fUDJpLqvt4T3XLse3kevXSQzypmSL+C9xpU6bi/gYywikkq2Qjw+hvfB7yvifeGe8k50d+J995byDc1mZyGOJMx+yKTR2hBWHquq8ryIcvsUq/j5WgHfT3j4/rKBHqMNfT24H3v3ofNjelLdz75AJJRvazqHx+PpZ5+EuKsLj2d3V4exzXpdHaPZLH5AOSOC6pj2D6FfbqAVj9/o1gMQFwum16OrG49BvD0DcSCKfoGS8hP09i6GeGw/+iKmpjHXe/uUVFJELOXiKVRxPyWIfV9XjoOIcs9ElKOlNo0+IrFNp1t3/yB+R/m3DBXZAlKvK8+Aj+MoqMahVp5FYujLEBFDiGSpChwIoKNBa2lcVX+00ymgHHcB5WAREbGVHyOs9kP73/RvmH44RKWJ4YwUEclkMhDrMVlVXgjXwt98KWedo1w9jqNyW0TE1a+9+H4vFPEEukFc5T2tqr4KhvAYh0Km30Tnlf63Oj00g6EX9xZWVU20grj9eNpsQz6PDsyYGh+TylcaDCr3UQzbHM9gjUxG0VnX3YneFxGRKR/n43gcd7yrC+fBfA7dVnoq1hqplnQG4lSLWQNyyqEyNYVeJN82/X8LQVt7m3oFj2m5gHNINYHttC1znGfV3Nreif7FdFsnxI4qeJ6Pue/UlZt
JjfN63RyzXv3Fx3VVzTmerm/KcWursZNVOfLwIw8bbTjvvPMgfn7jJtUm/HxN9YP26nqqr7XLz9VzuYhIDbe5d/de/I3IkXEnai+rZeGYDKohVLFx30Jhcz5a3Is1dGSf8s1WMZddD9/Pqpo7pSbqlKqnei0lYs5Rc6qkjqliosdPwH9xF6webSEx5/pxVafnlN+poNrUrwpaRo2nwAzW8O4gngeuaXBuMTyABzBexvO6qvLgLWQWztZxDNSqOIcovaiMZ9FPt3/2fmObU2NZiHtC6Advt/A45cr4+dAYzmvhMta4fe5WiFeejy5XEZFpD7c5ux/zt7MXj+sJpypnWgKP69QUnm/ouTqRNI/aqlWLIG5ZhJ3pu9jXbh3bODaKY7Q4g+/XlNMxWzDP70dX4flfIoXzz4Ep00G4EITUOLTVGIgGcMz4am3qN/BKey5+JqK852HlLdT+/XxeuUJdPF7ROG7PEcxLEZHhlZiLK45HF/7P/hPHy4/+X5wr31BEB94pF+D2POVZdxrM95aqo9rzPTGhrxthHg0sWazex5o3NoHntUHbvIyWbsfX7BDmXUE/5OEQ4V/YEUIIIYQQQgghhBDSRPCCHSGEEEIIIYQQQgghTQQv2BFCCCGEEEIIIYQQ0kQcssOupRW9EmXlNGvtUv4E5eGZGjc9XhXlKQqG8d599bZ46n7luovbnCujnyYRQ6lUpYT3KouIlCvoj6mp33Dr2t+E+1nIYT+0KGdNSwt6dMpl0wU3NY3tTibx3nJLOaAsB+9VDyvJh7p1XcLKYzW4bNBoQ7mE23zggY0QP7t1wvjOQhBRLqVqJQvxI4/8CmK/jse4JW46hOp15Toso0shqK5jLxkcgPi4M46FeHgxOu2ye9EnNzaLOSYiEla5OdyO7o/JSfQJHL/yOIhXH78S4v/v29+EOCjoK6gXzdyv1fA131E+gCj2U0BJ2gaHlkI8sXcLfl+5aWIN/I2rVq2AuFLC/R7oxXv/F5L2dvRZ2YI10FXOiLqjPEKW6bWpVDDXrAD6FbR/wVOuippyFQU801sD7xvuMhHPV3VUtdvSoj2FUvOI52l/HG5fuzVERALKeaadc3Udexjb2iH0Ek67Rv1gv4SzTvf9QhGN4di1LYzLNZz3IioHYhHTH2cp30hYee9E5WFLGn1mlRz6YWpBNXdHsK/KNbPeBAKqJqklQa2Mx+OAmpvb+vvx+wdwToqp8RZNmce8M431ZGp6D/5GGtcgWu5XcLDRK3ux9ntqfVAqmS6xUhFfa1Peu7qphlkQXMFjqPM/qPIqEsE1XjBoLidbW5VDVtcGVTv0uHZquF7ylOvHdV+8zSKmB9VRHVwoKqdWFY+xdpm6jnbg4ed/+rOfGW14biOup37z5FMQWyrPXFWDHe0VVV49X9VwzzWTSL9iq/k56jfw3i0EyrFcVWtk7XKzlGfNr5ntTiZwDd3RgsdwZhJrR34M4znlkX5EueBaVU61WKafN6HmpLqNX8qpdXxFea30LBxQ5wFhNVbiDedt/EzQwjyJqzZ5amzUXNxmTLUxnVRZVUefo4hIYRZ/M9eCfWUpv6xpnT58zBaUbzo3BrFbRtdUtrADYk+t50RE0nHs09LcdogTbXhM7CTOOaEoukFb6ngOaXdj3W3tVCd8ItKSxuO2Z0sWYkvlxcy4GoMOzr3dPeij2zuKY3R6ynRy+SEcc12qmZGIXvtiXK1i3hzYirmVCOEGV5w0ZLShoLx2U7N4bEKRI+MpdtT6yFF+UbU8lrg6j23oKVYutbD6jF4TVys4b3naqeniOHWq6rxBL+BEZGYW/XCvPWcVxKefdQrEj93/PMQju/HcuWcvnjNGkjg20mnt3RWpqfk6l8PczBcwd5cfOwxxJoPn4i2teDCyc5iH2hsuIrJ4Oa5VKyUcX6UaHXaEEEIIIYQQQgghhPzRwwt2hBBCCCGEEEIIIYQ0EbxgRwghhBBCCCGEEEJIE3HIDrtgFD8abcH7o9uSeO0vWMb7m0Mx0y2Sm1U/7+I2YlH0zbjKZ+ZWsxCH47i9UBDbGAjgvf8iIlXlAtH3P/van6HcFb66F91V6p5QULktwqbHKzuLDruycnKkM+g4CCqXha32s6RsJeNTeYhnC6bfJF/Ee/3vvm8zbsNU7y0IJe38U/t+4UUXQ+ype8MDDYRAnvLe+MoFElD9GVU+xrEseivy2a0Qz5TxN62o6ZjYsn4nxNOPTkK8dAgddacuWw5xrYyJFlN55dcxh0pl0yllB3C8eEqBUtYOI+XFWbIIHXaVAvoLjm1Bj8yvn3zaaMP+3ei9Kxfx+PklHBsLSUsLjjtP+VzEV94PNW5zyscnIhJU7rCAirVHTSljJKTy3/G0u0g5ovwGjg7lybN8LaUznXP4tvLe6PGk/h3I883aXytjna2rfPWUX060t0i3ydNtwE/EG4zBsBKF2Mqh0sjJtRCElT8pHsf6o3MkoJIkEDBdRq6L/es4ap5Tv5nPY9+Uc8rboX4zqtYHtQZ1t67qYmkO1wjaxZpqy+AGVI2rl7AOB8LK7drA5eeHsJ0p5ZyNqJzItHXi93MzEFs29kMlj/WrXDLHX1QdT+3uMaRrC4Rl4b6HQqpO6LxS9TAUMj1eeqD6al8j2i2p3g+rIWgJjmPto3MbeSf9F/fktXegB0c7bn1Vv0xvHh7jYtFcLI2Nj0M8OIi+pXxRz9fajaU8oS/ltGvQD3q/bb2OtF/cXXq4cNV846vYUrUprNZnfrmBe0/lXVcCv/PUhucgnt6P6y/HwsSbVH64nKqfcdfs77jqzojaDz+sXKXaVW3MR8pzqI55zjX7QftkdS6H9Z9sqNz3AvpcQ839gr+ZLWSNNgR83GbETkFseUdmnhURKefRWWcFMA9CKVw3p9VBre40zylTndgn9Q41Z4Sw3vS1oZ963yi2aW4bnpsd248O7WTSnC8GFmF+Tu/HNuzciN8p59S6NI41LBzDetTdh/swts/0dFc95elSNctSztSWDM7vQ8OtEE9u3wuxU8e5IDdjOtXGDuC6pepmIW7vyBjfWQiKym1bd3SM465Ww7yLx8xjbpw7qHV9QJ3vucpZV1d1tKSuE4yP4vldd6dpm2xVPt6S8twtOR7XU7MVjMNB3O+CUmLWbeVijpnrTFe5QYPKtdvdjz7GwaWYd7WaOn9XNbJWx7Eyp/zOIiKJJK4rY1HVpniDtdIhwL+wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpqIQ7Z9FgpKkhdIQphMoJwzpKSIiYgp/U6nUTpZyJVVjKLeghI41ysYp8LtEEeVBNmpmlLKoJIcaglrKIKCQUsZCONJ7EJb9ajjakmi2eUtGZQizszgQyLyShTb0ob7WVIC3G27UA65eQPKOrvbUKYvItK9SMlTbfzNjjSKYheKRBLFvGnl2kx1roC4qo5xtME16bClpMUxJR6P4/teBR8ekM8rAXsc+7NrOAPxcNwUsm4b2YEvaOF3HEWYowf2QNze0fqica
2Mwtdq1RRjFos4ZqvqIQn1Kopng1El7+xDYejuAzhex/fgPlYKZht2PL8e4vZ2JXlvRbntQmKJlkBj8tWUULVSxfpVVw+wETHF3/oBMr6SV9ccrB9VJZG2lCjc0iJxLbQXU27tKUmr1tnqLWi9thbJa/G7bzUQoAeVsD7w4hJW/VwMX0vXXSXD1jvR4MEXtrbJqs849QYP7FgAEurhCkF1BHRFi6oHahQK5sNOAirvwhH8jZh6sI7xvvrR8lwW4u6uxRBX9NNSRCSTwHaGOlUdVoeoLji+9FwaS+JDbUKqbhuJKyJ1lasdnbiOCSsBekCJ3iNqHeP72MZ4HLcX020SEVHHoqweMKDjhcL3sV2+egqRpTpUlxb94BeRBg+iCOr1lMptvVH1+YCqXSE10PXDa0QayLh1LVHbCFhq3ajyTj8nQz8IKJbKGG3oX6zWFOo3y1oAruX/qm/1gxh0PWx0LHQN0P2i104Lha1yJKRqt37QmxXQD6sza41bxBrYm8L61h7C74QqOOZaVO5XLD2vqoc/Bc3+LqpjUNZzknpIRMDRUn41NvSDgtQxbzTP6hIY0utM1ZcxtV/qWYKSsFS/GV1vHouqWouqQyNx23xww0JRnsGH6wUiOAaqqk/DKaz/vav7jG3W1brBiaj11hyeL+QmcJ1dyGJcPoC5ueEJfMhde4t5TmmHcB4641zs48GhbojbOnG/W7rU/N+O+23bPRBPjeJDdEREJma2Q+xF8BxG6mpu8LBGhtXcaannNaaS+uE/eN4sIlJQD05w1EMLolE891sosnMvPse7Lo71Ulmt+z3zYQtVVcP0QyYiap0YVuvMQgnPB+uqHqXa8BrAa9euMdqweLAXYjuE7Uy14ZrtpFPxASrxMOapfvBfVdQ+6gsuImKpazoRW03Yqg5X1IND9Roiqq4PpFLYD3qtLCISUE/Lqqm5tdF3DgX+hR0hhBBCCCGEEEIIIU0EL9gRQgghhBBCCCGEENJE8IIdIYQQQgghhBBCCCFNxCE77PbtxriaxfuhU53q3vAY3gecxlvqRUSkrQ1/vlDEe/ez6l7+2emwinF7AQ/vVfYMz1EDJ5GHr+krmNoRFQhim8uucomoW8tDHvaDU5oxmuCWcT9d5c3JFvB9pTuRGeX+27UdOyY7jQ6JWtHsh540OglWLemHOHdktDpSyqOvQTzlsLEwscbH0ZO2beMuY5vRIN6THk5nIO7oQh9cX0caYu0da0+jU1BpyKRSnjXa0NWF9+b396Gr7cDYGMRbt26CeLCGzgjtn8nnsR9KJfTLiYjk5tDFpx12bk35AiLoH3j+uQ6Ia1X0LnR1oSej/4TjjDZ0deJnOjoxD6PqNxcS7QGqqv3TjrqaciHo/hARqWk3kRJ3aW+N9g5FlfvAVo4nVznvtOdGpIEPSTkeDHeOyvewljgpKhXsB8cxfRvaRaX3U7db53ephLmpXVja66Z/T0TEqeE2tZcoGv39PBN/KCG177b2oCo3yUsdLxHzmIe139XRziw1n6ttplNYd9U0KdGw6SXy1MQVT+Jn6mq8VNS8qP2NceUJCSknS7GE3xcRiaaw7pZruJ9l1YaQj/0UUGPFDmCeqeWAlMqmVyqbxflA93043MB7twDUlBNYjymlTTPcbQ29aWq9ZKl65SuhjKdi7Qy2lV8uFMPYD5gOu4huuAHup649+vjUa5gjuoY3qnelGn5Gr0UrDrZb970EVBvV9309vhvkUDD44sv9ePzIuMRs1a6A/+JuUTEcdqb/NKgKUtLCY3aOco/NlfD9p/egd3iqise0ohyE1QbCTE+101NnF67ahm3p3Mft2bY5l/82Ae1kFZGg+kpMOZ/iNvZdSrllU8pl3a66Pq4aGRIz98Oq3b6azyqVI3RyISI9yileiuD+BEU5S7XzvNVse20WHVelCXx/dhOen4ULOJe2VPF8wgnhb1Z9VX9cc9zOjuMaLK/WqkuHcO1eVevSmb3YRruAOxFVcsOhoRONNnT343nWbAXn58lJdM55NbVmC+OxOPH0QXzfxXnUE3O+LztqnaiOp/USY+pw4QnW55A65xc1LgtF3A+3ZvpGiwU8zw+oXG3NKB9sUD1XQJ1bROPYhh613kp0mK7kWErXOIyDnvIzt+JvJNT5XkjNDfWyWrO7Zt3V7umcOheuqr7Tzrug2k89/UTUeUFQe3pFpFhS7bSVLzCP4/NQ4V/YEUIIIYQQQgghhBDSRPCCHSGEEEIIIYQQQgghTQQv2BFCCCGEEEIIIYQQ0kQcssPODeE97/XwKRBXPXXProMOiGjavNc404n3ULfaeB99WwlvHs7O4D3x2Sm8J7tcxN1xHeXx0G4MEfEc/I1KGe8t1i6QgHKw5Cv4/XIBvx9SvoGUjX4DERHPRpdYvY77EUngffbREN4PnQkrP4FkID7+RLwvfOUJpm9gcNkyiE87A30A+/ab96svBJ7ygtnqGnOwjsejJYTH48nH7je2OTaOuWmp/jzttDUQn/VazPW5Obwn/tmnHoe4qBxeW/fsNdqwc9cuiMvKt+T7ysnV0glxLof+h/ws7lMxh34Hc/SJBJUXJ51CF0bfEHryWtt7Ie7qQ99c32uOh7itBfOukftMu8vEUnGDMbtQ1Ot1FeM4M1xFyn3U0Blk+OIQ3R/aR+YrN09dtUH/ZiNvp6U8UYEAOhhs3UbrxR1PL+VPauRUeynPXUh5IV6qX/R+Gl6wBj66eATzXR8LwyO1QMTCuO9633zlXdXHr6UFPW0iDbyFat+0V81XDrt0DOfepOH5UHNxtUHeKWeTV8ealUqgy0frF/UWi8pFEqpjP5TLpufFsdEFMzWHdbQwjXNxJoPrnuki9lM0psanj/0yO2N6dfKq1sdU3+p4odBzjh4RrnIIioVxJGKOMV1DXRfjkMp1nadBUWNBuZYclSMNnZ2q3tmqhuqxYKnaEooo908I65v+fqOaq/errpx1thpvnq5nKg6oY+Udgru00WvQhgZ1ekEIK5eS8qBZut1qjnMc01voqVMb7U3rVdqvi09EZ3O3WkduH8e6MF7E35x1zLmiompiVe2GY6ljpn2Nas7Tc6D+xZBnHt+g8i8llFcvon4zYuEXWgKYd63KcZdQfshoyFzzqFMmoyaUrAZu8QWiw0FfdbUX586JfVkVowvaiZtzTLCGzmt7FPcvOqPWjcpvJQ62IbEMk7V9WK3f1O+90NAshGM7sd3uLM5BXUOqzSp3Y1Vc+8/MoS8t5O4xmtDejX7qnrZjsQ2VUYj3jmIbY8px29qJ/eRUsG4EQw3Wa1PKNTmHx6JeMZ2LC0GtrjypakyUyxgXld8/EjIdpYFgQsX4vq/OrbQTuKrk6/UaHmNfrcAiLeZYdyzl8VZeXLeqvOBFHD+1gHIIK7ff1Ay6FNtaM0Yb9LMLpg5MQlxRDtqOXjyPddV8PqPOpUWvJ3RHi8iB/cqvqGqz6/1+NY9/YUcIIYQQQgghhBBCSBPBC3aEEEIIIYQQQgghhDQRvGBHCCGEEEIIIYQQQkgTYfkvJbYghBBCCCGEEEIIIYQsGPwLO
0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesCOEEEIIIYQQQgghpIngBTtCCCGEEEIIIYQQQpoIXrAjhBBCCCGEEEIIIaSJ4AU7QgghhBBCCCGEEEKaCF6wI4QQQgghhBBCCCGkieAFO0IIIYQQQgghhBBCmghesGvAueeeK8cdd9xLfm7Xrl1iWZZ8/etfP/yNIoSQV4AbbrhBLMs60s0grxIO5tPU1NSRbgo5imDekWbkUPNycHBQrrzyyj/ot84991w599xz/6BtEPJyYe0lR4KjPe94wY6QP3L2798vN9xwg6xfv/5IN4UQQgghhBByGHjkkUfkhhtukGw2e6SbQo4imHdHFl6w+wNYsmSJlMtlefe7332km0KOYvbv3y833ngjL9gRQgghhDQ5W7Zska9+9atHuhnkj5BHHnlEbrzxRl44IQsK8+7Iwgt2fwCWZUk0GpVAIHCkm0IIIU1LsVg80k0gf6T4vi/lcvlIN4McZTDvyOEkEolIKBR60c9w3iR/CJ7nSaVSOdLNIEcZzLvDw1F5wS6fz8t1110ng4ODEolEpKurS17/+tfLU089BZ/buHGjnHfeeRKPx6W/v1/+/u//Ht5v5LC78sorJZlMys6dO+XCCy+URCIhfX198pnPfEZ831+I3SN/RIyOjsr73vc+6evrk0gkIkNDQ/KXf/mXUqvVZGZmRj72sY/J8ccfL8lkUlpaWuSiiy6SZ555Zv779913n5x66qkiIvKe97xHLMuiV5HM89BDD8mpp54q0WhUhoeH5V//9V8bfu7b3/62rFmzRmKxmLS1tck73vEO2bt3r/G5xx9/XN74xjdKOp2WeDwua9eulYcffhg+c9AzsXHjRnnXu94lra2tctZZZx2W/SPNQzablSuvvFIymYyk02l5z3veI6VSaf59x3Hks5/9rAwPD0skEpHBwUH55Cc/KdVqFbYzODgoF198sdx5551yyimnSCwWm8/bu+66S8466yzJZDKSTCZl5cqV8slPfhK+X61W5frrr5dly5ZJJBKRgYEB+Zu/+Rvjd8irA+YdaUampqZk3bp10tLSIu3t7fJXf/VXcBKrHXZf//rXxbIsuf/+++Xaa6+Vrq4uWbRo0fz7t956qwwPD0ssFpPTTjtNHnzwwYXcHdIk3HDDDfLxj39cRESGhobm1/wHz0c/9KEPyXe+8x1ZvXq1RCIRueOOO+S+++4Ty7Lkvvvug239Lg/75s2bZd26ddLZ2SmxWExWrlwpn/rUp160Xbt375Zly5bJcccdJ+Pj46/kLpMmgHl35Ake6QYcCT7wgQ/I97//ffnQhz4kxx57rExPT8tDDz0kmzZtkpNPPllERGZnZ+WNb3yjvO1tb5N169bJ97//ffl//p//R44//ni56KKLXnT7ruvKG9/4RjnjjDPk7//+7+WOO+6Q66+/XhzHkc985jMLsYvkj4D9+/fLaaedJtlsVq6++mo55phjZHR0VL7//e9LqVSSnTt3yu233y6XX365DA0Nyfj4uPzrv/6rrF27VjZu3Ch9fX2yatUq+cxnPiOf/vSn5eqrr5azzz5bRETOPPPMI7x35EizYcMGecMb3iCdnZ1yww03iOM4cv3110t3dzd87vOf/7z87d/+raxbt06uuuoqmZyclFtuuUXOOeccefrppyWTyYiIyD333CMXXXSRrFmzRq6//nqxbVtuu+02Of/88+XBBx+U0047DbZ7+eWXy/Lly+ULX/gC/7HiKGDdunUyNDQkN910kzz11FPyta99Tbq6uuTv/u7vRETkqquukm984xty2WWXyUc/+lF5/PHH5aabbpJNmzbJj370I9jWli1b5J3vfKdcc8018v73v19Wrlwpzz//vFx88cVywgknyGc+8xmJRCKyfft2uGDseZ5ccskl8tBDD8nVV18tq1atkg0bNsjNN98sW7duldtvv30hu4QsAMw70oysW7dOBgcH5aabbpLHHntM/umf/klmZ2flm9/85ot+79prr5XOzk759Kc/Pf8Xdv/2b/8m11xzjZx55ply3XXXyc6dO+WSSy6RtrY2GRgYWIjdIU3C2972Ntm6dav8x3/8h9x8883S0dEhIiKdnZ0i8sI67bvf/a586EMfko6ODhkcHHxZtzA+++yzcvbZZ0soFJKrr75aBgcHZceOHfKTn/xEPv/5zzf8zo4dO+T888+XtrY2ueuuu+bbRF49MO+aAP8oJJ1O+x/84Ad/5/tr1671RcT/5je/Of9atVr1e3p6/Le//e3zr42MjPgi4t92223zr11xxRW+iPgf/vCH51/zPM9/85vf7IfDYX9ycvKV3RnyR8tf/MVf+LZt+0888YTxnud5fqVS8V3XhddHRkb8SCTif+Yzn5l/7YknnjDykJBLL73Uj0aj/u7du+df27hxox8IBPyDpX/Xrl1+IBDwP//5z8N3N2zY4AeDwfnXPc/zly9f7l944YW+53nznyuVSv7Q0JD/+te/fv6166+/3hcR/53vfOfh3D3SJBw83u9973vh9be+9a1+e3u77/u+v379el9E/Kuuugo+87GPfcwXEf+ee+6Z
f23JkiW+iPh33HEHfPbmm2/2ReRF59Bvfetbvm3b/oMPPgivf+UrX/FFxH/44Yd/r30kzQfzjjQjB/PykksugdevvfZaX0T8Z555xvf9F/LtiiuumH//tttu80XEP+uss3zHceZfr9VqfldXl3/SSSf51Wp1/vVbb73VFxF/7dq1h3V/SPPxxS9+0RcRf2RkBF4XEd+2bf/555+H1++9915fRPx7770XXm90DnvOOef4qVQK1o2+78O672COT05O+ps2bfL7+vr8U0891Z+ZmXlF9o80J8y7I8tReUtsJpORxx9/XPbv3/87P5NMJuW//bf/Nh+Hw2E57bTTZOfOnYf0Gx/60Ifm//vgn4vWajW5++67f/+Gk1cNnufJ7bffLm95y1vklFNOMd63LEsikYjY9gtD1HVdmZ6enr8dR9++Tchv47qu3HnnnXLppZfK4sWL519ftWqVXHjhhfPxD3/4Q/E8T9atWydTU1Pz/+vp6ZHly5fLvffeKyIi69evl23btsm73vUumZ6env9csViUCy64QB544AHxPA/a8IEPfGBhdpY0Bfp4n3322TI9PS25XE5+/vOfi4jIX//1X8NnPvrRj4qIyM9+9jN4fWhoCPJUROb/0vO//uu/jFw7yPe+9z1ZtWqVHHPMMZDP559/vojIfD6TVw/MO9KMfPCDH4T4wx/+sIjIfE7+Lt7//veDF/s3///2/j3Ksqs870bfddv3XbXr1tX3e+vWEq0bEECyANnGNr5gTDAhcWQ88oE/yDgj5yQ54zge/hJsh284JE5GcBgcx4ntEzPOZ0yML8GADJYRIASSkBC6dbe6u/reXVVdtatq176uy/lDBw2eZy56l2R1VQHPbwyNoXfvtdeaa6453znX6pq/9eijNjs7a7/8y79shULhxc9/8Rd/0UZHR1/BEovvB+655x676aabXtZv5+bm7MEHH7Rf+qVfgnmj2Qv3JMxTTz1l99xzj+3du9c+//nP29jY2Ms6rvjeR+3u2vMD+cDu3/27f2dPPfWU7dq1y17zmtfYv/k3/8Z5ELdz506noYyNjdni4uLQ/fu+b/v374fPrrvuOjN7Ye22EHNzc7a8vGw333zzd90mTVP7j//xP9qhQ4esWCza5OSkTU1N2ZNPPmlLS0vrWFrxvcbc3Jx1Oh07dOiQ893111//4v8fP37csiyzQ4cO2dTUFPz37LPP2uzs7IvbmZndd999zna/93u/Z71ez2mT+/btu4ZnKDYbPNH69iRqcXHRTp8+bb7v28GDB2GbrVu3WqPRsNOnT8PneW3n53/+5+0Nb3iD/ZN/8k9senra3vWud9knPvEJeIhy/Phxe/rpp502+u3x99vtWXz/oHYnNiM89h44cMB83x96D8Bt8NttlPcXRZFznyHE32Xe9e374Kvdl3wnP/VTP2X1et0+97nP2cjIyMs+rvjeR+3u2vMD6bB75zvfaXfffbd96lOfsvvvv98+/OEP22/91m/Zn/7pn77op/tub37N5GIS68SHPvQh+7Vf+zX7pV/6JfuN3/gNGx8fN9/37Z/9s3/2Xf+lX4iXQpqm5nmefeYzn8nNebVa7cXtzMw+/OEP26233pq7r29v+23K5fIrW1ixqVnLmJn3r6V55LWdcrlsDz74oD3wwAP26U9/2j772c/aH//xH9ub3/xmu//++y0IAkvT1G655Rb77d/+7dz9yvf0/Yfanfhe4O/SBoVYK3nt57u1vSRJ/k7H+rmf+zn7wz/8Q/v4xz9u73vf+/5O+xLf26jdXXt+IB/YmZlt27bN3v/+99v73/9+m52dtdtvv93+7b/9t0NfKLEW0jS1kydPvvivq2Zmx44dM7MX3gwlxNTUlI2MjNhTTz31Xbf55Cc/aW9605vsv/23/wafN5tNkGuudSIofnD49luWvv2Xcd/J0aNHX/z/AwcOWJZltm/fPshXzIEDB8zMbGRkxH74h3/4lS+w+L5mz549lqapHT9+3G688cYXP798+bI1m03bs2fPmvbj+77de++9du+999pv//Zv24c+9CH71V/9VXvggQfsh3/4h+3AgQP2zW9+0+69917lRaF2JzaM48ePw1+dPP/885am6Uu+B/h2Gz1+/PiLS6zNzAaDgZ06dcqOHDnyipRXfO/wUnPMt//qmF8CwH9h/O2/2Lzafcl38uEPf9jCMLT3v//9Vq/X7d3vfvdLKpf43kLtbmP5gVsSmySJs3Rry5Yttn37duv1eq/YcX7nd37nxf/Pssx+53d+x6IosnvvvfcVO4b43sX3fXvb295mf/mXf2mPPvqo832WZRYEgfMXnX/yJ39i58+fh8+q1aqZuUlR/OASBIG95S1vsT/7sz+zM2fOvPj5s88+a5/73OdejN/+9rdbEAT2wQ9+0GlrWZbZlStXzMzsjjvusAMHDti///f/3lqtlnO8ubm5a3Qm4vuBn/iJnzAzs//0n/4TfP7tv0h661vfOnQfCwsLzmff/mvPb4/d73znO+38+fP2X//rf3W27XQ6L751UfxgoHYnNor/8l/+C8Qf+chHzMxe8h8F3HnnnTY1NWUf+9jHrN/vv/j5H/zBH2jO9wPKS53z79mzx4IgsAcffBA+/+hHPwrx1NSU/dAP/ZD99//+32HeaJa/uszzPPvd3/1de8c73mH33Xef/cVf/MVLOAvxvYba3cbyA/cXdisrK7Zz5057xzveYUeOHLFarWaf//zn7ZFHHrH/8B/+wytyjFKpZJ/97Gftvvvus9e+9rX2mc98xj796U/bv/pX/+rFVyAL8aEPfcjuv/9+u+eee+y9732v3XjjjXbx4kX7kz/5E/vyl79sP/mTP2m//uu/bu95z3vs9a9/vX3rW9+yj3/844635MCBA9ZoNOxjH/uY1et1q1ar9trXvlYOsR9wPvjBD9pnP/tZu/vuu+3973+/xXFsH/nIR+zw4cP25JNPmtkLbec3f/M37Vd+5VdsZmbG3va2t1m9XrdTp07Zpz71KXvve99r/+Jf/Avzfd9+7/d+z378x3/cDh8+bO95z3tsx44ddv78eXvggQdsZGTE/vIv/3KDz1hsVo4cOWL33Xef/e7v/q41m02755577Otf/7r94R/+ob3tbW+zN73pTUP38eu//uv24IMP2lvf+lbbs2ePzc7O2kc/+lHbuXOn3XXXXWZm9gu/8Av2iU98wn75l3/ZHnjgAXvDG95gSZLYc889Z5/4xCfsc5/7XO5LfsT3J2p3YqM4deqU/fRP/7T92I/9mH31q1+1P/qjP7J3v/vdL/kv4qIost/8zd+0973vffbmN7/Zfv7nf95OnTplv//7vy+H3Q8od9xxh5mZ/eqv/qq9613vsiiK7Kd+6qe+6/ajo6P29//+37ePfOQj5nmeHThwwP7X//pfuW7N//yf/7Pddddddvvtt9t73/te27dvn83MzNinP/1pe+KJJ5ztfd+3P/qjP7K3ve1t9s53vtP+6q/+Cv4SVHz/oHa3wWzMy2k3jl6vl/3Lf/kvsyNHjmT1ej2rVqvZkSNHso9+9KMvbnP
PPfdkhw8fdn573333ZXv27Hkxzns18X333ZdVq9XsxIkT2Y/+6I9mlUolm56ezv71v/7XWZIk1/LUxPcgp0+fzv7xP/7H2dTUVFYsFrP9+/dnH/jAB7Jer5d1u93sn//zf55t27YtK5fL2Rve8Ibsq1/9anbPPfdk99xzD+znz//8z7ObbropC8PQaZPiB5cvfvGL2R133JEVCoVs//792cc+9rEXX43+nfzP//k/s7vuuiurVqtZtVrNbrjhhuwDH/hAdvToUdju8ccfz97+9rdnExMTWbFYzPbs2ZO9853vzL7whS+8uM13vnpdfP/z3a737//+72dmlp06dSrLsiwbDAbZBz/4wWzfvn1ZFEXZrl27sl/5lV/Jut0u/G7Pnj3ZW9/6Vuc4X/jCF7Kf+ZmfybZv354VCoVs+/bt2T/4B/8gO3bsGGzX7/ez3/qt38oOHz6cFYvFbGxsLLvjjjuyD37wg9nS0tIre/Jiw1C7E5uRb7fLZ555JnvHO96R1ev1bGxsLPun//SfZp1O58Xt9uzZk913330vxt9ut4888kjufj/60Y9m+/bty4rFYnbnnXdmDz74YO5cUPxg8Bu/8RvZjh07Mt/3X8x3ZpZ94AMfyN1+bm4u+7mf+7msUqlkY2Nj2fve977sqaeeyr1feOqpp7Kf/dmfzRqNRlYqlbLrr78++7Vf+7UXv8/Lve12O7vnnnuyWq2WPfzww9fknMXGo3a3cXhZprcovJL84i/+on3yk5/MXTYmhBBCCCGEEEIIIcQwfuAcdkIIIYQQQgghhBBCbGb0wE4IIYQQQgghhBBCiE2EHtgJIYQQQgghhBBCCLGJkMNOCCGEEEIIIYQQQohNhP7CTgghhBBCCCGEEEKITYQe2AkhhBBCCCGEEEIIsYnQAzshhBBCCCGEEEIIITYR4Vo3/D//egbiJE0oTiGO6PcF33026AUFiPupB/FKvwNxwLvotiEcqRQxrpUgjmOnCLYyCCD2PSzDwPA80wy/9yi+FrBmMLOUN4AwdbSEayjjEJOhR/Xyr3987/B9vgL8H//Hv4R46dJFiLurXYjDYhV3kNPuDhw8APH+AxhzfZ4/dxbiZx55BOKZkychTuiQfuR2s2K5AnGjPgLxyOjoVeOx8TGIR0fHIa7U8Pt6HX9vZlauYRlKFYrLWJdBoQxxSu2KWqVla/nngITaLuURnzr9q4/cuIadvjK85s2vgdhLsax+gmWlr61cpbZoZqN0Hfl8V1ZW8Bge7rRUwMzaXcUcWC5gzisU3ItQrGJ7LEb4m243priPcQ/zsudjO6hVa7j/Eu7fzCyOBxD3+3iMYhHb2pX5JsSXL89BHISY+70A6ykIMM+bmQ0GVy/D4uIixBfPnnP2cS34jx/9f0O8exrbTBhjGykHeB57dmx39lmuTkF8fhmv2ee//ATErYUliOsjmE8+Mz8BcXDTPRAvP/L/dcpwb/g4xL/4j34B4k4Fj5GmLYhDmq4szOL1+d2P/T7ES4tNpwz/z//X/wPiffv2Qvzoo49CfPC6QxCXS9guazVs6wsLCxC3WngOZmZbtmy56m+KJWzLt7/6tc4+rgUff24V4jTJmTB9BzwnyJtCeDRGeEOmIt73qVH5mquiafdpxqOx2YBG6JjGHhvgTt57J+aMa8V46OZmgOuOGpGfM8djuK0O+82w68X743gtDNsHx3R7NGzK/sI2dB4v9ZivBDzH4TIlCd5jzbdxTn8t+d8+8kWIU7qvdWrDo/tez21HGd39RnS+/irew1T75yG+7frdEC9dwbnOl7/yFYj7XZyPmZmNj+P9QKnE88ICfY9jzo033oTbRzjuBTS/yk/ceN2d8cLpY1RP/tXbIrcrbkd5nzn3F5QH3v3ud1/1mK8U/9sfX6JPqK6c5wy4dea7+Z33EaR4jVOq/5jaMj/KcA8xPOMM38K7aujk+qG89L85y6iUzvMU3t4pEo9XOe3UeWZDeYXa9h+8y52z56G/sBNCCCGEEEIIIYQQYhOhB3ZCCCGEEEIIIYQQQmwi9MBOCCGEEEIIIYQQQohNxJoddhmtWWd/FT/66/TIg5S463wLJH3idb2hj8XzUnaq4EHZL7faRRdC4OGa7heOiefFa9p9Pk9a7syOllcCXjLNT1UDqief1kcPBhRffYn2C8ccdhrXwG2xFsamcG331MQ0xLt37sHtxych7ntsUzTzQmwH7FLokhPi+q17IT5ww6sgPnnsGMRLi+gkapKjyMzszOlTEJ89g3FI1V0md1nSR3dZRB6YUgl9UGHR9YiV6uhYK9fRx9SYQIdNYxyvxWgDj1EbRQ9fneJyre6UISiiNy8Isc+HOe6x9aLXR/9UMcCysSuSvR7sLTAzW22jfyyKsC2WycPZY18cNYzaKF6zAuVMS9HL9sI2mBDY9dlpoTPFz/A8ymUsI6eXfkzHdItglQo6UTyfJR2411od28n8PLlGSVAaUNbMcxKxw479JmG45uHxFeW2G2+AOKLrNXsBx7XG9A6I09xhHetjooF98yffci/El89dgPjcBXSuHKR80oowx03vwf2bmSUXsb6//HV08ZQnd0J83YFdENfGGhB/5dmvQ/zFL6KPyGNHl5n99f33Q/z2n3s7xLfcfBjibof6HzmOCuTYrFO7rlFfMTOr0jaFAMeswSCnw6wDYcBO0iH/nruGOYHrgPKu+n2umueqO9wAhmh22JHzwodXL3jub64CzzsdL0+OQNbn8YrKlG7QP98P9aa9jLknV7cTDznEsKvBJVrbLIWuGTu6hjkhh/Sdl4NT9yTL8l5iTWQ57XzY9c3zy64XXobzBj/H/Yjfc/24pDTvY/d6oYRzvijE8eAz938G4hNPfxPi1VV0jXqeO957OW6974QdwinN8bY//FWI7733RyC++fARiPu5YxY5M2NyPtP4HFBF8fYxzfHW4gXluStfsH68hpvja0DA811+FsJ+OX7W4bwlwNW1Jz7uw6e5S5Wn3NTXY7oeCWW5OHeMwWvk5o8hfn3HhThk3HxZbljHtn7Vrd30tYY2M8zN9zKfp+gv7IQQQgghhBBCCCGE2ETogZ0QQgghhBBCCCGEEJsIPbATQgghhBBCCCGEEGITsWZJz4DWemfJ1dfy+z6ud+bfm5mlKa6j93ktMS/+T3AfhQL6YeIA4/YA11OXo5w11yGdl+Oso++dNdN85i9RhmHmrGfmtf3sgPC9q/uZXJ/J8CIMWwv+8taK/9257vobIT5+9DjE80voBKvURyEult21/t0uuskKBXRKpH30Fq320Bc3tWUbxK/bsRfi82dmIG4vNZ0yvO4Nd0F88fJ5LFOEbblB/rennnwE4i9+4a8gTmZPQuyzL8XMMmpXQRHrgeslSHH7iL4Pi1jmShXdHKPkHzQzq4+jt2psbBziiYkJiO+4Gf1e1xJ20FH6sbjXg7hUIh9f6ra9chmdcyMj6PpqkZ+kH6OvrFhBd1iZ2klA3bTXcT16PnkilproH0sTzMtRhOcxoKbE7hF20oSh66jp9fG8+JhpwjkRf1+kthp3MNdzDs2DnS
jD8u56sWsLOs2SBMsZd7DuPB/bXZKTqj1yeVap/jxqE6P7sV/u3r4F4oMR+iufu4J9YWw3uuDMzGpzuM3Fi+hKbC/MQ5zt3ApxsYj5ZNeefRDv2b0b4t4q5nkzs1tuQf9ot4u5vVzEKVGd+lsc4zmcPfU8xFXK05xDzcwGXezjAc0ZYsfVuz6ENEawE3g9+sMQ9dKmcNi9DHPf8Gmg4/56afMtZ96alwOoWXk0p0tz5gibgVei3bnVcXWn0DAHmKN+y/nbh2GlToc565z9cRmH/OBl4O7zJc77h8qw8465ce2Oc557tlTnfIua47PyPfS5Lc+ehvjEyW9BfOUcOrDj5VmI6+S8q1VxjOkP3Dke36/FMXn12BtIfrOTNK61/hzH0m4Xx0EeV81cBzDf7/N1TxK+1yaP25B7UC/HP9ilOcA8+cVnae67XvCc2Ck6PxPwcPsop58VY/Ka+5jwJ+oYj0dLEF++hPOx45dw+9Ik+uKLdZwTmpkZvRMgW8M8/O9CrjNzSOZNqc9mHs37X+IkI79Z8od4/TwWDq4R/YWdEEIIIYQQQgghhBCbCD2wE0IIIYQQQgghhBBiE6EHdkIIIYQQQgghhBBCbCLW7LBzPGkv0Wnmee5aZmcftK6ev+c174MeusYKhu6AQoj+Gdco5TLg9c30/VDdwkv+wXDYrTTgeuHtM34OO3wd+TCPxMYY7MzG6uhr2H/wEMTnzqIfYmHhMsQj5LQzMyuW0IVUIPFXtYD11+liu8rIr0UqLBsdRb9Tn9qpmVmc4D53HTgAcbnUgLhWwXhyF/qb2tQm7v/UH0McxO4VLATYI6IUy5R2MPbJM9Zl5xG1oTnuS8+jf/CFgpH3jdyXRfLiveef/u/uPq4RZXLSDcjb4fuYPt0+5Pa7IMS2lZKng11i5SrmsH5MOS5iTwj+vt5w238YYIO9cP4SxMUinrdP7cSjMltALkRyhQ6ozGZmqy3ySNJ1j9jNR/U2MoouwH6M++v1se7Zw2fmOlZ65CSsU+5ZL2JyaLbbGJdL2CZC/+rjppmZR26Rfgc9aksLixBPb0E/SamCx5goYd3tIFdoKWc4SevXQbxzcheWgdx8aQ/bTUzX9PDN6M25++67IZ4cQz+kmdlbfuwtEJ88ia6eyxcuQlyvYDvsrKIzdWER6220gbmf/YNmZmGIdcUuxTZ5dw4dudPZx7WAlcHsa1qPWcAP7L8gv8IarzQnB6TUzvyExx7XNboepHzyQ+rCsUbnzF35M2cLcnr6TuOnMibcMtlJ5NZ3SA5cxzPIniqaL/geXx92fA1vNK67eJiXkt1+V++RQ5XelnMLRL/J0o26uzAL+TpzfVEds3oq9HBObGZ24qmvQXzyyS9D3KZ7lIzuL6bGcG4zTc7skO5fosjttysryxDH1NcDOpH+AOcYKW2/2MT9/eWn/wLicxfQwW1mduSWIxCPjjYgLtCczKl6EruxP3axiQ62ucvoYDMzm710AX9D9dJN3Ou3Hvg09zS6pwwzuteg+W0wcN17Yx5+Vuph/Vy/dQd+H+K5t0/OQFyYw7lNdwXbrT+G+zMzK205iPuoNiBOPfLFc7px7pmG5IacHJg5Exfqw8Oejwx7ZuO8MyBvkyFlkMNOCCGEEEIIIYQQQojvffTATgghhBBCCCGEEEKITYQe2AkhhBBCCCGEEEIIsYlYs8NuQCt1vSEeNY79nHXBgwE6NYKAvRD4PDExXoeP+6tEeIwqLvW3uN12ytDz0dfUs6t7PBwdQ8Zrrq+9B8TxCQ75/pXhFZasrJFnv/VNiEcm0K1UJrfV4pVZiDsd15+1hdbym4/takBr3Pvkf/No4b1PcUResbEcl9JXvvIAxPUyupJuOvwaiHvkeuuTRmxkaivEgxAb/yK5lszMKiG23Qq5yorkWfBCLCO3MvYRsF7A7Stm1l+hbXAnK+2N85tEVIcp5ZvqCH7fIS9Yp4tOLjPXLeJRLabs7UgxR1YpqWWUE8sV9JsFkZviE/p3mvrkFtoCf7OyjP6MzCc/HCXiQYZlTth5Z2aT05MQFyhvpgkeI6XKH/TpGOQKS1P2TOa5xPA8+33MFZUK9rn14olvfQPizio5MGOsm3IRvSAj9Yazz/EG+mQ6y+g7OXviGMRejG23SnVRjlr0PbbLIHT7etiYgDiq4TXtnjkF8YWL5yCukDNlsYXX6/rrr4f4x37kjU4ZRsijMzGB7fDyuTMQN+fQgTNC/c+ntt1ebkJcpnoxM+t30E/jsVcqddvqeuCTrypjh5Yr6B0SD8edJ2LseL+GOm3W/OF3lAGPkb1kV9/ffW700t3I/DXNCVM35yZ9bHeDHrYzL8Q8sm7wRJ5w/XPDPGxmPvv4yFnnRTROku86qGNe2Hrk9RBXt6CP8/xCzlg/j/nLv/wsxOEi5juvj86pgePRIwehx9c4z+XHHnBnk6H7uPrmfC+S13fYJ8guv+GO7WsFO5N5/sXzkiDBsfjoEw86+3zq4c9D3G2hW42nIiHZ1T0f2+LE1DTE9XF06xZCt/8sLVchbq/ivW+1ht/z/KlMDmG+pWyu4ly33cF5vJnZ449/HfdJTuiQ7u8nJ8Zxe5rLzpL778LFeYgXl9x7vS7NdQO6hymVXcfzelAJaX6VYn3GK5gbSgO8fyuleB9hZrZjK9ZfbxWvaaNM93NU/4UyzlW2bcfxIPMxXlpFf7yZ2copdAB3a9shLm/FvFmoT+Ex6HqlrJfjXJJzb5H6NK7R/TzHmePp5BtZCl/WcM9j1st7TqS/sBNCCCGEEEIIIYQQYhOhB3ZCCCGEEEIIIYQQQmwi9MBOCCGEEEIIIYQQQohNhB7YCSGEEEIIIYQQQgixiVjzSydYtMdy4IDFvc727rNBR0pPdr+QhOk+HSMIcPsBiTO7LRRhti6gENHMbPK6m3Ef9AyT/N6Wklmfz8Ej2TnLOvN8hS/VN+y8dGLYSyZelrefTYsbI/9faKKw9aknvgZxRBdo6749EPf5AppZhYSrlco2iFl8ybtgwSp5a21AcufnvvmYU4Zv/O39EFerWKZtU1im6V0oBC1Q37jlJpTKh7/wfojPn3UFoUtNlLaukIi+RQL1VRLNdjoo4B0MBhBzf/ZyckAh5PNCAe9Gif/NzBFV12okrA3x+4jKPkjdl9xEJAjuD7CtsBU6JalqqYz1Meji71fpmqx23exSqdXwGD62pdUW7qM8gmLe9iq2E37bSH0Epci9visD5hc8ZJRICwWs6x69wKNEL2lJSbIe0AtUWKqcd4xiEWNuz+vFmXMoHA4pH1VICN1bxXP38xK+x2MryZgph/ELVIxebpJFeP1GSzwuYv2bmWVFejFFAdvdrj2Yuysj9LKeErWrORzPb7/9DojrIw2nDAm9rGT7NhR6d5f3QhyShLxI9cQ5rh9jPUWhKxdOEhIl89ia93Ke9YAGsoxyT2jUhxwZ80v/919qlhZQHkg4H76MyYxnKcX8Pb90AuFj8ksOnDiv+w0tJUH74JebONkswCO4LyQwG/TwRTF9ek9CsbRBL53gF9gNeTPCmuqS2jK/UCOgX
F8h+fn1974T4tHb3gTxlUs4dypFNI6bWad+EOLeJM7RevQSitLZr0ActvGlFYmHx/Azuj/it36ZWebhGJYOfcGDc1OG37pvALnar3Nx5oX+xv3diO9hHVbpDJIuvujg6cfwJRPfevSrzj47K1cgzsic79H4HdK4WKR7gX3790E8NtmAOMgR2PN9Ks/V+z1sS2fPY1tr0xxwxxYcJ4v08oZ+1W3/PiXCJXrh2rMnT0B88OCNEI+O4YtfLl7GFy/MLWI+q41gHzYzG6uNQTwyinPZUZ5jrBPXF/BlVpUE80mrgC+g8bG6LRu48yvnJYElbEe1Op7rIMZrXCjiSz48H69psVSi2H18NNrDQaVJOWz1FL4UMhndCXFlcj/EEb38J/ao7/Bcyswy6g9edvUcldBL9NxHHVd/5rMmuAwvM+fpL+yEEEIIIYQQQgghhNhE6IGdEEIIIYQQQgghhBCbCD2wE0IIIYQQQgghhBBiE7Fmh935U2cgDsiFwK4Wr4BrrL3AfTZYjNAr4ZOHKOrhb1Jao10ib4eRPybOcP/FrXudMiy2yQFFHpYwwH1ktC4/zdiPgr/3ea1yjmfCXTTNTpWrO1aGWV08Fg7mmSYyLCd7JlJvY3xO7Bw41UZvwfwldEx0UixnfXKLs092pJRpbf7E1HaIQ3KV9TroJiuXsY0cP4Zukq9++UtOGXxae9+cR4fBhXNnIS7WJyAuVNBD1hhFV8Pdb3wzHs9pA2adLnqq2m10862uoEfh8jn04M2cQtfW8eefh5i9fDt37nLKMDGBboxyGX0e4+Pjzm/Wi0GCfZtUb9aN0dfgZ+SLGZAkyMx6lC8ix+uFbalGvjnPMM8mCRWKvVM5Dq2lJl5nL8Ec2G1hH6vXsQzjNeyTXoo+s4A8nrGrmbB2G+tmNUYrU2MUy+2Tc21AxyyTX7Ddwmvh5kCzmI7JqTl9Oa6KV4A7XnUYYsfz5Tg6cINiEfudmZlHno7RcbyGB6+/DuKQfIwRSe5K1BnYrZhF7tTCo99ENOZ4NXTUeVXs+1dWcPvD1zUgnprAdtrpu97CXgfroTaC53XgIHqnkja5EWkczMgVlrCPK8dHl8bsgqM5ROaWez3gdsRzGS+7+r/vvhzLLU/h+uQd9miDAo0PXN95Hr1siBON5zqMfy3+XfvqUz5nhubO+dg1RvO3zPXRdVZxPO+20WHE8/H1IiXHlzNnHkKe8477kJ/iGBd6eK4jZdzHZBcdU4Vn0V3WWcL9XVd0XVgrAebhszQ3vRhjfmtNvRHiUh/nkYWF5yCOethX0hx5Ypxyn+bcMqSl0T6H9fG8a8fX12GDxlkzsyIVt7+Ift4HH/gkxCtzOOetFN3Cl0tbIa7SuFYmh7ZPjs06jZ3T5I8rV3GuUy64432JfGSjdB81iLEtTpw8CfGZExiPjWH7LvRwfrC07Pqa+zT/XVzG/LNA89CD190E8a7deyH+6tcehbgxuRvinbswNjMbb6DXrkbXImR57zqxLcB2FpfwegQezqd4jt3JcQTzewI8mpsE1Dczujdw3hngue5p+H1Oxy2VcHyeIvderY/nubKK95TNFjruChN4z1idQs9xVMZ2bWYW0zyTy8lzsoifrzhJ7urvZ8jDfa+AHHZCCCGEEEIIIYQQQnzfoQd2QgghhBBCCCGEEEJsIvTATgghhBBCCCGEEEKITcSaHXbfOHMRPyBXErsLInbB5ax3ZjdYRL6EiJaXd2kXW0ZxXf3ecYy3lvD0ahV3rX+ni+vsvRQPyuvuO33cPiEfTUAekEIBF3Hn+VICcvP1uujH8KjufHJ29Pq41pzLxD6iMq0zf2GfvO4biTfq0W6I9dcYQ+/H5ZMzEJfIL7d8Dt2LZmaXL6P37rFvfAPim246AnGliu2q3yN3GbXLJ7/xdYiXlptOGWISe6UJuxARXhM/IBdAK0MfXQUVCFaM3GtepvMaHUPfX4k8lAUf4+UlrOs3v/kAxNPT6N6o1V3PS1jCgrLvpER+wfWE67zXx35ZKWJ9VCvkV4rcvu4H+Bs+/0tz6DJs9/C6VitYh6UI6ycedOj7nBSfYn5gp0M5wtaXkIukRr6yfgfzT5/yV5Dj0Suxi4p9crR9pYrH7FIfHBlBf9lqC+uxXHJzf5ZiUkvI95PnBFoPbtyHHjWuP26XPPamua4x3EeFrqE/SeM1jRkFGqN8yldcV3lF8GlUCdjVFuLYOfCxjPEVHIurVfTRFLmdRW7uuLJAvsZljBtVbJeph/3Jy8hLyYrahMdqtyJ8H8uZJuzF3RipU2TYj9MUr7nncT+mOWCOr497ELfVpTkci7/wKXRG1cnhed0N10NcHkOHTXUKnUVmZpUazhmSjJ005FCj37vuvuyqYR5OKxhyidm7lzjOG8oB7PLL3Ly/eAXn8DMnnoL49a/78asX6hrhGJbZ/0N14V6fPC80XtMswbY96CxDfOUy5qIrz+Hv33jrLRDvHEGX0soAxxszswvzT0PcPoX1H8SYnzo33g1xc8u9EPdPPglx5finIS6snHDK4A+4rbIPm+uahLM8BjqXhv1P7rXIcwwO+8160V5AJ91X/+YvIF5awOsahDg+bN2Fvmszs0IFfdK1It8DYlvs9tAZPD2Cc7w4wbZJ01BbuIzeLzOz2267DeLGKN0Q0DUplTGv7tiKObM9QN/cycvoHut62J/MzJaX0NOWVnAf23bh+H3dIfSV3Xbk1RBnCc1RyPccRm47C3ysex5/kiRHsLwOVOgefCUmVyvdBw34ftHcOXU6wHaS0T64l0U0x+O6cZ9VDBGvmjv3TKlvF+iajVI7rFOZl6h/NhfOQ1yd3uuUob4d70O9EvYnZ5rCns+hztvhOHXHeTPHqb0W9Bd2QgghhBBCCCGEEEJsIvTATgghhBBCCCGEEEKITYQe2AkhhBBCCCGEEEIIsYlYs8POqzbwA3Zm0PY9+qBvLomzzhfXYFdobfEgQW9XtY0+maxGvrNxPL1t9Zw17g10pMwvoTPqxCx6up6/gt97Aa8lx+09ckAUyWFlZhaR04Ydabykms+CHXaDAdYTr00v5Trs2JOEC70LzpL5w84+rgXdmMpBzi/2/8UDrIssx5916QI6H06cOgvxV7/6MMSOdyzAY06NN/AA5PwKcx6Lryyjz2Giju2wUMS1/h67F1JyGvQxjsilONpAr4aZ683rks/x2NFnIf7K3/4NxDMzJyHevn0HxPOLVyDO8jyW5BZjd1ZMbfnet/yws49rRZk9a+SvDKjvc1wm/5KZWUhOy0GKdcJeiYwcGyuLTdxfxp5B3L46gsczMwvIV9npYR1vmUQvVJfycEx5mK8Z++XKRdclFpKljr2ccYzHWFoi7wu1VW7vAXe6HB9dGOE2Afk3BunG+E2ef/ooxIUynlttFN0vk1OTEPu+e81LRcybIQ/9pJPjEZ2dHOwc9ALq26lb3xl5bzkzB9ROQhoXR6v4fSEgRwuV6dwcjtVmZkfPoZNu1w7s4yM1jIOQZi6Oa5TaEJXZy9w2xFWTsTsx3hink5+ihyj0MA84rjFjR07eNSdvoYe5
ojl/CeInH/5b/H0X6//Uk+g5GtmBntS9t7zKKcPr7n4LxJ6H7Sghhx070dgP50IOnJxxbtgnHvc3doORCzHpY9u+fOECxNNbsJ5e+A06IGeefxziEXY8//Adzj6uBVy/PBbwvQbXpbO95aZ7/A2Nq2kHPWIzZ2YgXtmG24c99B6vLuB4ZWaW0hh1A/nPRrZtg3huEq/PQ5Rrzkf7IfYat2PcXXTKEMTYLjJnns/lJsHTkHp8JfxzG+mw+9pD9+MHPo4Phw6ju3BAc8CUPazmzllbJJ2L+9jW0gS/H9mB8+gqOe3mZzFnHnv2GacMMxfQ/VWr4FyUHZeXL6FLtNvH+9i0iO3/+OXnId6+F/OwmdmeXQ2IyxWaB3ZwXrPaQ9945qGHb2oS99fps3M1x6Ga4HkkdnVX+3oxMYW+8PQKXtPlFRyLk5j7pVvyiPxwmU9zNNo+dO5rr/4MwHGJ5lWe41Il1zFdo4XnsZ2G5PKrkrO+VsO+sDyL96BmZgtNvL+vbkHfaHUbxl6Zxj32GtI5pcMeyFjO+OM47NznEmtBf2EnhBBCCCGEEEIIIcQmQg/shBBCCCGEEEIIIYTYROiBnRBCCCGEEEIIIYQQm4g1O+yyHq6zZ++KR+t6U2d1eN5CX/4M1zfH5BYpkQ/GT3GN9qUl9A+k9P1ME9ezm5n1UlxL3Fwld1Ib99FO8LyWB/i9T89AuZ7C3IXf5JyjfXi0DtxZvp7h2vU0pXX6VGaLXa8O+4Ucb0jO5VsPGpO41v/ycfSq8br7boeuccFt4lGIJ1Mu4jatNjknyEmRhljfy815iJMu+mVGGw2nDH1qF13qX60Wei7Ym9fqkveijmv70wG2mXlyVJiZra6iR+/oMazbRx/5GsQnT6Jba5XKeOr0CYijCMucZm4j8gNyj9H1jMlL9cFf/zfOPq4VlQp6v5p0XeOYfQ14vnwuL2yDcbuNOYt/UyLnndF1Tcg14kX4/fTodqcMp8h3NNnAtjM2hr7D5Q7mhnYH+8OAfHNhAd0YrlnELEmv7rbodLBeikWsB3b9pQnmzJAcdmmOjy7wyX8ZkxMlb8xaB/74E5+E+PobDkF82x1HIK6SG6ZacdtdTD4ldnsWKU7I4+FTuxxWM1mOo6MY4TVcvIyukZVLTYjr2/dBvLyA23/mgb+GeKmDnetKttUpQ7mBbq/tW2+GOKAOGpN/KE3IkUrbJwPynSSu4yijz9hz57hj1okLZ9GFtG3XqyFOM/b3sVts+L//8rknMdbvaJE8OTR3WZ09B/GV5YsQzzXnnGOWQ8xvr7r9DXiMIvsa8Xp4a58mv7C/vKkuxY7vj90/5DZlJ+e50zgWP/zFz0H8mtfc5ZThzImnIZ67cBriR2jeY/Z/d/ZxTRgyueR7C45zfzM0Jncsa/NSHNPGJtA/d+MIjj8PPo5jqplZpYztzqdrPmijt6r4zT+F+ObyE7g/w3x41nD/7TqOC2Zm5QHO8YIBevKcOzBnjsb3Hle/78u7NvybjXTWMQvzOC/evRO9gotL6AWsUUNpk3vMzGxAc1aem0830A0aeeRKJgf22Qvn8QCUh0Oav5mZXfHIvX7iGMSnZrDczct4D1Oi+VXEc4oIy7htF3p0zcwml9Bh3elgW+ys4v3DmWe+CHEQ4zFXlrBe2cvd7+K9hJlZeQTrNixRXWUb8zdLnsf9CttVj+7RE7oHZWWwmVmxjPcrCd0L85kO9XwO2SCv5vi5j/MUiD4o0n1vsY/1stTDcyhtRb/j6FbXnRh30f+3eh7va1srOI8c374X4so45gArsuOO52duPTlVxx/4L6/d6S/shBBCCCGEEEIIIYTYROiBnRBCCCGEEEIIIYQQmwg9sBNCCCGEEEIIIYQQYhOxZjlHErOLhVwGJO5gJ5ErXjPzaB0veyViWvdb93Fdd4keN863cL1zd4Dr8P2m+3yy3cdjlGhxeEpOhiqVoT+gteYJuZbYaWeuSynlY7Kzjte7O0uo8fdc1elanBHOomt2qgzfxbVg1669EB975CGIryyhk6OziGvid+7d7ezTp2vqcztknwmdfJphX4j7eE2rZXRKLa+gu8HMbGUVy1mmMjz2jW9APDOL51kfRX9DtYLr7Asetv1jx55zyrBIvp+ZmeP0PTooEnYtkYePhSjswcprQ+zqYb8JX5v1hP157GcZ9NFntbyMcTCCTgkzM8/n/o/nWy6jc2PQxpw2OY7XPQixjFGC2/eX3bbXWUE/XNUwZ81dwHbRbJNjs4jtOyqRQ5MudEKOOzOzTg8dKAVyntVqNSxjFdv3Mp1XIcJ6a6/i/peW0JdiZhZTuaICnkfcd/1j68E3nkXXVHW8DvGt2asgbi2jZ8dit6MFHtZHpYL1FZAjk69ZnGHskceD1G12eanplGF2HsvZpnZYIwfLFh/L+PH/8f+B+KGv4FiQ1PZA3Djgerxuq6Brp7OAfqDB6DiW8Qr2hf4APZZpink8oTaTUI4wc/1Y7HXjHLjlVnSuXStOHPsWxNt3oBPLpzGFB8o8s1gaYP6Oyb167JuP4TEGmL+2UB6YmUVnnXmYF9Il9NeYmf3NX/wZxNUIf3PTbbdgGelEeG7Lmq+ExkEeJ83MQmeOgbFPcUDjQtzD8zr6xFchfubxL0HcWiLvlZldOHMG4ib5uQY5ns/1wJmP8QRsiCct15vG14zrn9olOz071JqPXcJ2+yPkEb3dazhlODeP+e3MZZzDXengNe3HTYjHPPQU/r0yupemalMQnwxdj5jvY9vO5h+HOI0X8AdOVbIL9uo3Anl+Ov6M97GRTrsC9dWFGfQwc9OqjGI+2j6OsZnZyAg66qam8DqVyzjf6vawncxewXn3t76Fnr1yDfPX2eUcPzX54lYv4Th2cQGddWGAc7rWAv7en8drVAix3r7cfMopQ7WI17kxiscoF2nOdwm9ek8//icQt1dxLN2xA/1lVxbc+Vovwuvz2je8HuJt21zH83qQZvwcgTzp1EcKEc5NHQequfNXn+bYLlfPu66j9urPCNbyG04vPt0r+ORqrY3g9evRPRnfY5qZFcm/GNJ5dVcxjzaPYt9YmUDP8fju6yAeoTlino8uYW8nff9yb2v1F3ZCCCGEEEIIIYQQQmwi9MBOCCGEEEIIIYQQQohNhB7YCSGEEEIIIYQQQgixiVizw873h3gjhngl8jwF7j44xOeJSYZx0cf1z60QnTfLA/y+WnZdF2EBy1WMsEqWOri2vBrRuvsCbj+ziOvs23QOUeCWgc/T48eoXHe8C65a+t7dXY5nYoP8JcOokFthGzntBuSDiHt4vXp991yby7i2f0BSmogcdOxrSsjDE/u4Zj4LsExhkdw/Zhb2sL571LafOo4+uSuPPQFxpYxr+wshtsOMzqnTQTeQmetRYFdfEHC5se2bT+v02T9HXizLafvctt22mWdG2hjYo9ZroxctjrFd9Qeuv4pUbeYoYcitM0o+lAG1vRLtMOvidb505qxThkZjG8TdVhPiJfJAtUhQNjKN1zWmPNwnz0RYRLeGmVmBPusuoxtsZGQE4ja5/CLK0wH
VW5H6XJq6fhMa0qxQwN/weLNedMgnOqBijI43IB4fIX9l4NY3jwKBh32/tYx+pW4X2zL3wiDF/Q08vB5/9YW/dUrwhQfRuxUVMIfddsMhiAvFhyF+8kl0rG3Zic660p7XQZyN4v7MzObPPw/xw19Ah1r4qgMQr8w1Ia42sF2O1LGuA/LTpUmOBzG5+jbDPFHXiqV5dLskXcwDYXkLxFxMz8vx9dHYuEDHOPHkIxDXaT41WsSx9Mo8+mZiciWOt926G5vE1nv00S9DfPLZb0Jca6An9Mgdt0PM84OUhTQ5Qxb74XodrKvOCo4lLfLHnj2NXstnHkVnXUrOqtnzM04ZVugYpSo6I/1wY9pdSHOXYY46dl87zjszVxJEyT4IcNwMyGHn073E42cwJz8T3QTxa97zHqcIuy7gNSx+gzxfp2cgjPs4xsV0TdMV7Du3Fs9BvKeKcwMzs8cN5w+tLubcoIV5f5BgPaSZ26f/rqzl3nC9GCtiOxmrkKNu21aIq+TUnJwkn5XlOMMpDgs0Z6Oxs0/usaNHccwymlefXzztlOG6Hdi3b9uOfsOdU/j98QvY1uYukNuQdGgh3SvMzuXcX/B9rWFb843cyOR3Dun+IyI3Wfm5U7Q9zoPMzFK6Dzp79hjEo6M4nv/iL/wjZx/XhCH3OR7fF/E9fY4ELRjiYucsyY7uAd+v0DX22M2aO1xw2+d3GVCZaBLR7WMOK5RxLO7RfcLyZdffOD2Ffdajey72AQbU/wbL6FJceBZ9jytTOyHesgvnoWZmlUYDYm6HfH3Xiv7CTgghhBBCCCGEEEKITYQe2AkhhBBCCCGEEEIIsYnQAzshhBBCCCGEEEIIITYRa3bY8Qpo9l0xa/ESONukGCe0CLtLLrG4hWuLMw99DVERfQPTI67bp0zuoz2TuNZ/3xZc618tkQuIquFLz+P65789jmVc6Lu+jYAWdrPjIY7Z84W/d1yAjhdsuAcsHXK58jQh60F3Bd0IO7bvgrjWQIdE53IH4oVF9CaYma22yUFHa/nZd5IOcQ716fotLqP7h91YZmYeHaPTQ39AizwWvQGXGX0CAT1750vO3hcz1/3C7iRuE7539UaSJMMcOC89J2xUuzMzS8h7FlIVBuSz9MldOBi451um35TI2RSQmy0jD+fKKnqI0oCdT5iv2h2Sj5jZ4tkLEIfk3SqRF7JSwrgxOQXx5Svokcj4Og9cPyZf15Dqpd1epe/xPMsl9Ei1VrCfh+y0K7i5v9/Huu1RHywW0GO0XhTreG6T2yYgZg9q6FObcSSorjMjNbwmq22sv94q5t1uC+Pzs4sQD0LMcY98Hf1zZmZnTqCXc76NufqZo+gSi0g+Mr0DXSHbpjG+3MVzHJ1w+99zRx+FeMlHd8++MfS0fePRb0C80G1imcbrEB8+uB/iW1+FriszsyzpUYztLomxP64XC1fQiXXq5JMQX3/4bog9H/tHlNPuAmp3Z2dmIG42mxDv3obzL1vFunD0UDQWd1bd8X5snDw4S+gCe+qRr0NcKOB5LD6P7bJELtNyjfJEzmSqOYc+s84K5rdzZ85A3FrBdmnkWk5i7I++R3Nj33Un1orYVjsJ5YQU++N6cejWN2A56CIPYnII8zwlZ97BcxmeV+QYtSHyyQPaSvH7//FpdF/a2D5nj7fffAvErx/HbfYtoiesvYLxyjyO0635ixBnS+hzLFSwnZuZ1bs7IP7rr+D33bM4DkRdvF+JMxJADbkPfDk+ujwf13rxmluvg3jXdvRfpeRRW1nGflmpuN60hHyV3D59mkh6A8xx7TaOBzOnzkM8OjENcRC6c5s7bsF74Zu3YNv4/GM4ftfJGVyfJM93i/zU1OWKmeuwY0ewxfRBSs5nw5zVS3EuWxlpQHzoZrwXvPE67G9mZpfPnoB4lXx/1drG+BO5zbOfjx3N3Iace/6cfTrfk7ezuYxt4OIFzDdpQvetzuDr5l2nNofkB894DOJ7UPo9jffNRRxXzcx6NK8v13DcK1fwfqZAntyI59NUpvgyzpMuLpPv0cwaW7dDPL4DvXfFesP5zVrQX9gJIYQQQgghhBBCCLGJ0AM7IYQQQgghhBBCCCE2EXpgJ4QQQgghhBBCCCHEJkIP7IQQQgghhBBCCCGE2ESs+aUTAxLv85M+n4TDa5KPDntZAh0kodJGhgL2OxsoDzxyx50QbxlxTzelgxR8FDPumkLxok9C0TjG7cPrUQi63MHtP3ei6ZQhy3AbjwSTIcm3M59fMMD1SDJIEjWyFNXMvZ6OOH4NL664FvS6KEIOSbI/NoIy1Zi2z3vPQbuD2xRCrN9OF2WoKUlhQ5K+c/X7Ph60280TslKN0076fRTPMty/UmpDTptIXUGo2wqGHIMq0/e5Hl56G3HyxFA59PoR97EdZAFfaAxTEjTnyf/55SJToygtrtUxPn8eX+iQRFiGhF6mEJdR1Fsoo3zYzGzhWZT/+yS5n66gRL02ji/v4TxcqOAxB3SOluRdRWyPVRK3r5B0PYzwPAcxivsTeimLl+C1CHKuxaCP5x2T/D8KN+alExNjeM2mpjDHZX16aQF37dAd53yShRu5xPllJwVq64Uy1sWDZ1DE/9hzRyE+PXPKKUNE9ZvGKN6/vIT9bazcgPjKYhPi7AxKkos78GUoBd+V6D9HL7YId6KUveOhfHts5wGI7//U/8AdDrDMzz2Hkutde/H3ZmbTW/AYgx62dT/YmH9H7XdQQn3h/DMQH7r+VohXW1i/Mb3MwcwVrLfmMZ/1qC33aG6yOI/7XGrjnI+l72HojkFeRi/1oBdTTFVxjhekWKbFE9/CMnZwPI8p9+RNfctVzKHjdcyZ6ZWTuE8Szx+64TDEpQK+HKVFZTo954qwmwOsO6+K88JSfWPa3X/53d+BOKWXdgxo/OgP6KVfOXOlhF4kliT0IgB+KUXK8nPcX0q/X1jA+k1z5sfzTRSiZ3QPVSngb2ZX8PqcvUQvBqhgG0rHMYlz3zAz21rDfRy5EfPRY9RuenOYx4Me9j/fw3pInPPeyBnbS2eSXho0Mor9stPD8+3TC+SK9EIuM7Met0d6Q8OA2yLFfL/mGc59siKO1Z2++5Ki227Bl2e86fBBiH//Lz8F8bKHc4xKDV/m104w13v0orLEsB7NzBKS97svnaD7XLqHCelFL/Uatv+DNLbu2YWifzOzxSa+ICDO8AVeoxX3hR3rQX+AbYTfFxHRNXbm1Jb3woerv/TPM375CR6jWsWXnbUpz2bG95g5x3PeS8F5Fr8fFOhFfR08T28Vx9aA5qVB7JZhmV5A12rRSyDpBXfT27HdFMs4p+B3GpXpxX48/puZLZ3GeWBAdb/riPuCoLWgv7ATQgghhBBCCCGEEGIToQd2QgghhBBCCCGEEEJsIvTATgghhBBCCCGEEEKITcSaHXYZLeRlR1bmX91dkOe04/XNHnl2eE12EOIa66C+F39fweePPXKVLIS4NtnMrF7BfR6fw/XOjzzXhHj1CnpzKlv3QewneA6DNq71r/numusurdXPPLwsjmssI38ACzfYPUaOKv
Z3mOV42fiQ2ZqbyitKu40OjtMz6N8ql9BB0BhBl0Jv4Pod/CbGUxPoa2AnSqeNno8+7bNPDomQnHhBjpNoQH6AOCY3yJBryo5B55J61N9y/HLcJ9lB5/RP/6U76obBZXCyxFpcmNeIhH2IAV7XKLq6/yKvn6XksVltcdtCH0LM+6AyxHSdV8mNMTmGXi8zs1IR82BGri8n70Z4jF4PXTmDPv2eHCwheUFf2Ii8RB10gZXIzReyI5VaSsyOjxSP6TjczPVhGpWz23EdaOtBpYhj0oDqhodaHlJyy019NyGZSLOFHjWP2v7WcXRmbdm6DeIn//TPIC566PoxM9u+dRfECzPo7eL8UytjPWTUzrY00CVSncB2/ciXPu+UYaU5D/GFKl7zT3z2kxC/8bXowT2wDc975hS6Ss5cOA/x088965Rh69bXQezTeQc5DsL1oN/BNnDm1NMQnzyO51IMMLc8//W/dfZZL5MD2Bn3MP997cnHIZ6qoeulQ56jpIW5aHKLm+8SyomrrSbEEw08RtKnXNGnDtbBc6hQhwxL7riwbS86pQLyN54v4biw3KM5G81J6jVs6zsn0c00Xm84Zfi/PvvXEG85hH26scP1na4HC+fQK5lRrioU0J85MTEJcVB3+4tH+SeKsL6CgMckPGZK4xPPz9IE54yOFNTMZi+Rf7HJbiVs+0kP89toDduRT867x7+J+fObT6Br0cwsoLG4QP2xnGKOTSuY33pF8qd2LuL+O+Tpyxlnr27WWqPv/BpRJIcW+8QDcvaxw5zvrczMUnaG01jLjkZzjonfF2ku1Esw52W+69AqRVSulD1edJ3IBRYGWC+BR841j73o7j2O40xj53PCPkS6FyAfaSfF/rHSJ/9iijnVzMwLsQxXlvE3Rw5tjMOuQ3O6AY1RBXIl9nt4jfPuLRiPcpbPLnaa75bofsYzvMaOJ53bcQ58D5nSs45oEvNoRs9jejw3KmK73FXCscDMrIvFtnYb20WnjXUf0/174OP3PfKhhjQ/y1O3R1TXpQT3WQ34qU7OPVIO+gs7IYQQQgghhBBCCCE2EXpgJ4QQQgghhBBCCCHEJkIP7IQQQgghhBBCCCGE2ESsWZYSsF+OPWm0VtlxU+U57IY4tNhz5KW4rvtsG+PnlnDd8DNXzkI8Oo5+MzOzNMFjNJdwnfzg3DMQh4szEL/tH6LDbu48Ou4OjKI7wy+5ZXjoNHraAqqq0QJepnoR1zuz48AjP0eP1mh32q7jaKmLa6rnehvj0WG+/sgXIT5/5hTEUUgOL/LThCX0n5iZ1Wo1iHeSl2hpAfexSK6FchnX0S82cXt2NcSJ6xvodHBdfWDkUniJXg9nHT1/sAaHnfP9SypBjgNvDTlgGBvpN/HITxKTV4izZ6GIH0Rl10sQhOT28tgdib9pNNDxMDe/AHGljh6vAu2vWnfb/zjtc7WJrp14gL6F1jJ6ahrT6IlqktOuSG64KMd9mJIzZXUVj7lj+w7nN9/J/NwcxIUQ+08xwnrpdtEfZGbmsQuUyuRHa/NKvNK0VtAlNjeL58rODd7+q0884ewzKGK768XoTGm3sH5uu+FGPCZ5eMbH0W1k5BhcyRljpmrYlwvkySlVsK2O1XGs7LaxnfWp3TY734B44eyMUwYvxbpbaF6G+OIcncfyHoiL7A4lv0mLPHvnL6PzycwsofHEp7qzbLif5lrgU65tLlyC+BL5+e6+4yaIb3zjG5x9nngG/WSt8+gQDH2siyZ5c0ZprrPtAF6Ps8+iQ7DHAhszi8axnUXkiGRXUj/GY3oFzCU9Qx9UQF7SUuB6kWrkygoMnURTDfTHza1gzp1v4hzRS/D3WQ/LtG3CdfmNlrAMvTbuo0zfrxdPfP0hiCsjPEfG6zNJ51ap4PUxcx3B1SrO+cplzDWus5k8rjSnDmkcLxbdcXashr8pB9juznVwLN+yswFxIcLzZL9alOE5Hn0O+4KZ2eUL2IezBcrL5EuLQjwPv4BuRKvgtYljHDdicnHlsnFTOgefJusDck+Rss7xsrEf84XfXN0Z7tHfyfD37MxKaKz2OlSonFu1XoDXaaWP7bVLfT8o40WpjmAuSGlO6Cc0x0tz7i8ot5ci/E21SI7GCI/ZXEXfXMG516N5KvlMzcwiH48xWsY8e9ddbp5cD/iaxzQnKJInjfMPt9u8z3z23JEj0/dwnwm/p8DpqHSvkuNuG3a/xr9Z7pLDm/rO6BjmH/bmeX23/5UpN/s0PlQqOBawky4hByXfviQDrMe05zokA8ojBcrdlfDl3VvoL+yEEEIIIYQQQgghhNhE6IGdEEIIIYQQQgghhBCbCD2wE0IIIYQQQgghhBBiE7F2hx0764x9NLgrdt7wunyzvPXOFGf4PNEzXAfco3XzV7p4zEKA39e76A0zM6PlylbromOlm6EbZEDnFS+io+bS2aP4PXkmXvemH3PKMFlGt8WWGq7B3jWBPoJyhPVUIheAsyab18vnrLk+dakJ8e99eQbii+S4Wy9OHH0K4oV5vD7795NjiOqy23fbXb+P7oMovHo7C2jhPfuZMp+cguTNi1fRMWVmlpGzoJ9iOVNHBZAjDPjO/fHW7EbLEQ7kfXYteTk+ujxXw3pRiLBfpf7VnXwpXcOoQL66HOKYPB9F8vGQ32pyahJin5xPhRJ5KVLX8RDSeUyMNSBeXCU32CJ6JmqjI1gGasu1GuarhJxrZo46x6oR9pnVJvaZYpH8SjHuoBhgXa8sNSHud916GPTws4TGmyDYGI9nl7w4Sy2si5UOXo9z53AM+uZT33L2GVUwL7bJHeKRY+PQ3r0QD2igrJWxvrdvxXb5+BNPO2U4l2HbjKn/jFfR9zo1hp68xRjz7vLsGYgvttBv2lvBPG9mFlJ/qVC7KQywXk4+jQ62hTl0QsXkNGr18JjtnLE2oTlEyO5PN/mvC0mf3LfkuAnIQRSneD0KJdfdNlLB32wjj+G+Kcx3JXKLRfXdEB+5FX2zaRf7bL+bc81pDMmof82TC/Eie0LJeVMk9yW7gUoDtx6WFtBD6VE7K1L+61PObPdp7hpiX1lcxHlRi/yDZmYFD/fpl3EfIxOuh209qJSwHAkPF9QfuuSuKoeY28zMygX8rEc+rDI5TytVGneH+LV9urfIYrfdxQOaX9GY5WX4fZk8eDt2bId4ZQm9hjVyWxdzhis/wLzuZexs61419jpN3CGlJp7zmJfj32QnJw3+6zsLRVbI/9ruUFujecOgi40zCpruTgOs4x65kFO67nyv3CVXckz3K1M0r7y07I4X/9cnj0P8tQb63L0C9rnpPegKm9yyH+JTR5+DeHEO22K84l5F9jV7Nexz2667DuIbDt0A8UMPfAniucvoh5+ZOQdxa8X1FPc9HH9Dav+LyxtzX+uTGM0f4v928s8a7ouSAZ67Ry7ExMPrs9rBdpbSvJ7nKXk43nLjeybcB99jzV3GcWx5Cce9MvlnR/L82DQn69PY2iVPeEDPS1haF7Jrkc4xXnGfKxXomUJrEecYKbmTbWra2Uce+gs7IYQQQgghhBBCCCE2E
[... base64-encoded PNG data for this cell's matplotlib image output omitted ...]", "text/plain": [ "
" ] @@ -11566,13 +11650,20 @@ " plt.axis(\"off\")\n", "plt.show()\n" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "MindSpore", + "display_name": "Python 3 (ipykernel)", "language": "python", - "name": "mindspore" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -11584,7 +11675,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.18" + "version": "3.10.8" }, "vscode": { "interpreter": {