Skip to content

Commit bf4cf1e

Browse files
committed
update for new version of torch
1 parent 906cf71 commit bf4cf1e

File tree

2 files changed

+11
-3
lines changed

2 files changed

+11
-3
lines changed

tutorial-contents-notebooks/203_activation.ipynb

+10-2
Original file line numberDiff line numberDiff line change
@@ -59,10 +59,18 @@
5959
"metadata": {
6060
"collapsed": true
6161
},
62-
"outputs": [],
62+
"outputs": [
63+
{
64+
"name": "stderr",
65+
"output_type": "stream",
66+
"text": [
67+
"C:\\Users\\morvanzhou\\AppData\\Local\\Programs\\Python\\Python36\\lib\\site-packages\\torch\\nn\\functional.py:1006: UserWarning: nn.functional.sigmoid is deprecated. Use torch.sigmoid instead.\n warnings.warn(\"nn.functional.sigmoid is deprecated. Use torch.sigmoid instead.\")\nC:\\Users\\morvanzhou\\AppData\\Local\\Programs\\Python\\Python36\\lib\\site-packages\\torch\\nn\\functional.py:995: UserWarning: nn.functional.tanh is deprecated. Use torch.tanh instead.\n warnings.warn(\"nn.functional.tanh is deprecated. Use torch.tanh instead.\")\n"
68+
]
69+
}
70+
],
6371
"source": [
6472
"y_relu = F.relu(x).data.numpy()\n",
65-
"y_sigmoid = F.sigmoid(x).data.numpy()\n",
73+
"y_sigmoid = torch.sigmoid(x).data.numpy()\n",
6674
"y_tanh = F.tanh(x).data.numpy()\n",
6775
"y_softplus = F.softplus(x).data.numpy()\n",
6876
"\n",

tutorial-contents-notebooks/306_optimizer.ipynb

+1-1
Original file line numberDiff line numberDiff line change
@@ -249,7 +249,7 @@
249249
" opt.zero_grad() # clear gradients for next train\n",
250250
" loss.backward() # backpropagation, compute gradients\n",
251251
" opt.step() # apply gradients\n",
252-
" l_his.append(loss.data[0]) # loss recorder\n",
252+
" l_his.append(loss.item()) # loss recorder\n",
253253
"\n",
254254
"labels = ['SGD', 'Momentum', 'RMSprop', 'Adam']\n",
255255
"for i, l_his in enumerate(losses_his):\n",

0 commit comments

Comments (0)