update for new version of torch

commit bf4cf1e700
parent 906cf71b6f
Author: morvanzhou
Date:   2018-11-08 19:40:27 +08:00
2 changed files with 11 additions and 3 deletions


@@ -59,10 +59,18 @@
    "metadata": {
     "collapsed": true
    },
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "C:\\Users\\morvanzhou\\AppData\\Local\\Programs\\Python\\Python36\\lib\\site-packages\\torch\\nn\\functional.py:1006: UserWarning: nn.functional.sigmoid is deprecated. Use torch.sigmoid instead.\n warnings.warn(\"nn.functional.sigmoid is deprecated. Use torch.sigmoid instead.\")\nC:\\Users\\morvanzhou\\AppData\\Local\\Programs\\Python\\Python36\\lib\\site-packages\\torch\\nn\\functional.py:995: UserWarning: nn.functional.tanh is deprecated. Use torch.tanh instead.\n warnings.warn(\"nn.functional.tanh is deprecated. Use torch.tanh instead.\")\n"
+     ]
+    }
+   ],
    "source": [
     "y_relu = F.relu(x).data.numpy()\n",
-    "y_sigmoid = F.sigmoid(x).data.numpy()\n",
+    "y_sigmoid = torch.sigmoid(x).data.numpy()\n",
     "y_tanh = F.tanh(x).data.numpy()\n",
     "y_softplus = F.softplus(x).data.numpy()\n",
     "\n",

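For readers updating their own notebooks, a minimal sketch of the activation calls under PyTorch >= 0.4.1, where nn.functional.sigmoid and nn.functional.tanh are deprecated in favor of torch.sigmoid and torch.tanh, while relu and softplus remain in nn.functional. The input x here is a placeholder, not the notebook's; note the hunk above still calls F.tanh, which is why the captured stderr also shows the tanh warning.

    import torch
    import torch.nn.functional as F

    x = torch.linspace(-5, 5, 200)             # placeholder input, assumed range

    y_relu = F.relu(x).data.numpy()            # unchanged: relu stays in nn.functional
    y_sigmoid = torch.sigmoid(x).data.numpy()  # replaces deprecated F.sigmoid
    y_tanh = torch.tanh(x).data.numpy()        # replaces deprecated F.tanh
    y_softplus = F.softplus(x).data.numpy()    # unchanged: softplus stays in nn.functional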

@@ -249,7 +249,7 @@
     " opt.zero_grad() # clear gradients for next train\n",
     " loss.backward() # backpropagation, compute gradients\n",
     " opt.step() # apply gradients\n",
-    " l_his.append(loss.data[0]) # loss recorder\n",
+    " l_his.append(loss.item()) # loss recorder\n",
     "\n",
     "labels = ['SGD', 'Momentum', 'RMSprop', 'Adam']\n",
     "for i, l_his in enumerate(losses_his):\n",