{"id":208,"date":"2020-06-24T15:55:43","date_gmt":"2020-06-24T06:55:43","guid":{"rendered":"http:\/\/cedartrees.co.kr\/?p=208"},"modified":"2021-04-03T19:20:01","modified_gmt":"2021-04-03T10:20:01","slug":"pytorch-dataloader-example","status":"publish","type":"post","link":"http:\/\/blog.cedartrees.co.kr\/index.php\/2020\/06\/24\/pytorch-dataloader-example\/","title":{"rendered":"PyTorch DataLoader Example"},"content":{"rendered":"\n<p>sklearn\uc758 \ubd93\uaf43 \ub370\uc774\ud130\ub97c \ud65c\uc6a9\ud558\uc5ec pytorch\uc640 dataloader\ub97c \ud65c\uc6a9\ud558\uc5ec \ubd84\ub958 \ubb38\uc81c\ub97c \ud480\uc5b4 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>iris \ub370\uc774\ud130\uc14b\uc744 \ubc1b\uc544\uc11c pandas\ub85c \ub370\uc774\ud130\ub97c \ubcc0\ud658\ud569\ub2c8\ub2e4. \ubcc0\ud658 \uacfc\uc815\uc774 \ubc18\ub4dc\uc2dc \ud544\uc694\ud55c \uac83\uc740 \uc544\ub2c8\uc9c0\ub9cc \ub370\uc774\ud130\uc14b\uc744 \ubcc0\uacbd\ud558\uac70\ub098 \ud559\uc2b5\uc6a9 \uceec\ub7fc \uc815\ubcf4\ub97c \uc218\uc815\ud560 \ub54c\uc5d0 \ub3c4\uc6c0\uc774 \ub429\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\n\nfrom sklearn.datasets import load_iris\niris = load_iris()\n\ndf = pd.DataFrame(iris.data)\ndf.columns = iris.feature_names\ndf['class'] = iris.target<\/pre>\n\n\n\n<p>\ub2e4\uc74c\uc73c\ub85c PyTorch\ub85c \ub370\uc774\ud130\ub97c import\ud558\uc5ec \ud559\uc2b5\uc6a9 \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4. 
\ud559\uc2b5\uc6a9 \ub370\uc774\ud130\ub294 train_data\uc640 valid_data\ub85c \ubd84\ub9ac\ud558\ub418 8:2 \ube44\uc728\ub85c \ubd84\ub9ac\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">import torch\nimport torch.nn as nn\nimport torch.optim as optim\nimport torch.nn.functional as F\n\n## Prepare Dataset\ndata = torch.from_numpy(df.values).float()\n#data.shape = torch.Size([150, 5])\n\n# \ub370\uc774\ud130\uc14b\uc5d0\uc11c feature \uc815\ubcf4\uc640 label \ub370\uc774\ud130\ub97c \ubd84\ub9ac\ud558\uc5ec x,y \ub370\uc774\ud130\ub97c \uc0dd\uc131\nx = data[:,:4]\ny = data[:,[-1]]\n\n# train, valid \ub370\uc774\ud130\uc14b \ubd84\ub9ac, \ub370\uc774\ud130\ub294 8:2 or 7:3 \uc0dd\uc131\nratio = [.8, .2]\n\ntrain_cnt = int(data.size(0) * ratio[0])\nvalid_cnt = data.size(0) - train_cnt\nprint(train_cnt, valid_cnt) #120, 30\n\n# torch.randperm\uc744 \uc0ac\uc6a9\ud574\uc11c \ub79c\ub364\ud55c int \uc21c\uc5f4\uc744 \uc0dd\uc131, train\/valid \ub370\uc774\ud130\ub85c \ubd84\ub9ac\nindices = torch.randperm(data.size(0))\nx = torch.index_select(x, dim=0, index=indices).split([train_cnt, valid_cnt], dim=0)\ny = torch.index_select(y, dim=0, index=indices).split([train_cnt, valid_cnt], dim=0)<\/pre>\n\n\n\n<p>pytorch\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 Dataset\uacfc DataLoader\ub97c import\ud569\ub2c8\ub2e4. <\/p>\n\n\n\n<p>Dataset \ud074\ub798\uc2a4\ub97c \uc0c1\uc18d\ud558\uc5ec IrisDataset \ud074\ub798\uc2a4\ub97c \uc0dd\uc131\ud558\uace0 data, label\uc744 \uc785\ub825\ud569\ub2c8\ub2e4.<br>IrisDataset\uc744 DataLoader\uc5d0 \uc785\ub825\ud558\uc5ec \ub370\uc774\ud130\ub97c batch_size \ub9cc\ud07c \ub370\uc774\ud130\ub97c \ubd84\ub9ac\ud558\uc5ec train_loader\uc5d0 \ub123\uc5b4\uc90d\ub2c8\ub2e4. 
<\/p>\n\n\n\n<p>iris \ub370\uc774\ud130\uc14b\uc740 \ucd1d 150\uac1c \ub370\uc774\ud130\uc785\ub2c8\ub2e4. \uc774\uac83\uc744 train\/valid \ud615\ud0dc\ub85c 8:2\ub85c \ubd84\ub9ac\ud588\uae30 \ub54c\ubb38\uc5d0 train 120, valid 30\uac1c\uc758 \ub370\uc774\ud130\ub85c \uac01\uac01 \uc0dd\uc131\ub410\uc2b5\ub2c8\ub2e4. \uc774\ub807\uac8c \uc0dd\uc131\ub41c \ub370\uc774\ud130\ub97c \ud55c\ubc88\uc5d0 \ud6c8\ub828\ud558\uc9c0 \uc54a\uace0 \uc77c\uc815 \uac2f\uc218\ub85c \ub370\uc774\ud130\ub97c \ubb36\uc5b4 \uc90d\ub2c8\ub2e4. \uc0ac\uc2e4 \uc18c\uaddc\ubaa8\uc758 \ub370\uc774\ud130 \uc14b\uc5d0\uc11c\ub294 \uc774\ub7ec\ud55c batch \uc791\uc5c5\uc774 \ubd88\ud544\uc694\ud569\ub2c8\ub2e4. \uadf8\ub7ec\ub098 \ub9ce\uc740 \uc218\uc758 \ub370\uc774\ud130\ub97c \ud6c8\ub828\ud558\uae30 \uc704\ud574\uc11c\ub294 \uc774\ub7ec\ud55c \uc791\uc5c5\uc774 \ud544\uc218\uc785\ub2c8\ub2e4. \uc774\ubc88 \uc608\uc81c\uc5d0\uc11c\ub294 30\uac1c \ub2e8\uc704\ub85c \ubb36\uc74c\uc744 \ub9cc\ub4e4\uc5b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\ud30c\uc774\ud1a0\uce58\uc5d0\uc11c\ub294 \uc774\ub7ec\ud55c \ubb36\uc74c \uc791\uc5c5\uc744 \ud560 \uc218 \uc788\ub294 DataLoader\ub77c\ub294 \ud3b8\ub9ac\ud55c \ud328\ud0a4\uc9c0\ub97c \uc81c\uacf5\ud569\ub2c8\ub2e4. 
\uc774\ub7ec\ud55c \uacfc\uc815\uc744 \ud1b5\ud574\uc11c 120\uac1c\uc758 \ub370\uc774\ud130\uac00 30\uac1c\uc2dd 4\ubb36\uc74c\uc73c\ub85c train_loader\uc5d0 \uc800\uc7a5\ub418\uac8c \ub429\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">from torch.utils.data import Dataset, DataLoader\n\n# Dataset \uc0c1\uc18d\nclass IrisDataset(Dataset):\n    \n    def __init__(self, data, labels):\n        super().__init__()\n        self.data = data\n        self.labels = labels\n\n    def __len__(self):\n        return len(self.data)\n    \n    def __getitem__(self, idx):\n        return self.data[idx], self.labels[idx]\n\n# DataLoader\ntrain_loader = DataLoader(dataset=IrisDataset(x[0],y[0]), batch_size=config['batch_size'], shuffle=True)\nvalid_loader = DataLoader(dataset=IrisDataset(x[1],y[1]), batch_size=config['batch_size'], shuffle=False)<\/pre>\n\n\n\n<p><span class=\"has-inline-color has-vivid-cyan-blue-color\">\ucc38\uace0\ub85c data, train, validate, batch_size, epoch\uc744 \uc774\ud574\ud558\uae30 \uc704\ud574 \uc608\ub97c \ub4e4\uc5b4\ubcf4\uba74&#8230;<br>\uc120\uc0dd\ub2d8\uc774 \ud559\uc0dd\ub4e4\uc758 \ud559\ub825 \uc218\uc900\uc744 \uc54c\uc544\ubcf4\uae30 \uc704\ud574\uc11c 100\ubb38\uc81c\ub97c \ub9cc\ub4e4\uc5c8\uc2b5\ub2c8\ub2e4. \uc120\uc0dd\ub2d8\uc740 \ud559\uc0dd\ub4e4\uc5d0\uac8c 100\ubb38\uc81c \uc911\uc5d0\uc11c 80 \ubb38\uc81c\ub97c \ud480\uc5b4\ubcf4\uba74\uc11c \uc218\ud559\uc801 \uc6d0\ub9ac\ub97c \uc124\uba85\ud569\ub2c8\ub2e4. \uadf8\ub7ec\ub098 \ud55c\ubc88\uc5d0 80\ubb38\uc81c\ub97c \ud480\uae30 \uc5b4\ub824\uc6b0\ub2c8 20\ubb38\uc81c\uc529 1~4\uad50\uc2dc \ub3d9\uc548 \ud480\uc5b4\ubcf4\uac8c \ud569\ub2c8\ub2e4. 
\ud55c\ubc88\ub9cc \ubb38\uc81c\ub97c \ud480\uc5b4\ubcf4\ub294 \uac83\ubcf4\ub2e4\ub294 \uac19\uc740 \ubb38\uc81c\ub97c \ubc18\ubcf5\ud574\uc11c \ud480\uc5b4\ubcf4\ub294 \uac83\uc774 \ud6a8\uacfc\uc801\uc774\uae30 \ub54c\ubb38\uc5d0 5~8\uad50\uc2dc \ub2e4\uc2dc \ubb38\uc81c\ub97c \ud480\uc5b4\ubd05\ub2c8\ub2e4.<\/span><\/p>\n\n\n\n<p>\uc774\uc81c \ud559\uc0dd\ub4e4\uc740 80\ubb38\uc81c\ub97c 20\ubb38\uc81c\uc529 \ub098\ub220\uc11c 2\ubc88\uc5d0 \uac78\uccd0 \ud480\uc5b4\ubcf8\uac83\uc774 \ub429\ub2c8\ub2e4. \ub9cc\uc57d \uc2dc\uac04\uc801 \uc5ec\uc720\uac00 \uc788\ub2e4\uba74 2\ubc88\uc774 \uc544\ub2c8\ub77c 3\ubc88, 4\ubc88 \ud480\uc5b4\ubcf8\ub2e4\uba74 \uc544\ub9c8\ub3c4 \ub354 \ud559\uc2b5\uc774 \uc798\ub418\uaca0\uc8e0.<\/p>\n\n\n\n<p>\uc774\uc81c \ud559\uc0dd\ub4e4\uc774 \uc218\ud559\uc6d0\ub9ac\ub97c \uc798 \uc774\ud574\ud588\ub294\uc9c0 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uae30 \uc704\ud574\uc11c \ub0a8\uaca8\ub454 20\ubb38\uc81c\ub97c \ud480\uc5b4\ubcf4\uac8c \ud569\ub2c8\ub2e4. \uadf8\ub9ac\uace0 20\uac1c\uc758 \ubb38\uc81c\ub97c \uc5bc\ub9c8\ub098 \ub9ce\uc740 \ud559\uc0dd\uc774 \ub9de\ucdc4\ub294\uc9c0\ub97c \uacc4\uc0b0\ud574\ubd05\ub2c8\ub2e4.<\/p>\n\n\n\n<p><span class=\"has-inline-color has-luminous-vivid-orange-color\">\uc774\ub7ec\ud55c \uacfc\uc815\uc740 \ubcf4\ud1b5\uc758 \ud559\uc2b5\uc5d0\uc11c \ub9e4\uc6b0 \uc77c\ubc18\uc801\uc778 \ubc29\ubc95\uc785\ub2c8\ub2e4. 
\uc774\uc81c \uc0dd\uac01\ud574\ubcf4\uba74 100\ubb38\uc81c\uac00 data, 80\ubb38\uc81c\uac00 train_data, 20\ubb38\uc81c\uac00 valid_data, 80\ubb38\uc81c\ub97c 20\ubb38\uc81c\uc529 \ub098\ub220\uc11c 4\ubb36\uc74c\uc744 \ub9cc\ub4dc\ub294 \uacfc\uc815 batch, \uac19\uc740 \ubb38\uc81c\ub97c \ucd1d 2\ud68c \ud480\uc5b4\ubd04 epoch \uc774\uac83\uc774 \uc9c0\uae08\uae4c\uc9c0\uc758 \uacfc\uc815\uc5d0\uc11c \uc0ac\uc6a9\ud588\ub358 \uc6a9\uc5b4\ub97c \uc815\uc758\ud55c \uac83\uc785\ub2c8\ub2e4.<\/span><\/p>\n\n\n\n<p>\uc989, train_loader\ub294 120\uac1c\uc758 \ub370\uc774\ud130\uac00 30\uac1c\uc529 4\ubb36\uc74c\uc73c\ub85c \ub418\uc5b4 \uc788\ub294 \uac83\uc774 \ub429\ub2c8\ub2e4. valid_loader\ub294 30\uac1c\uc758 \ub370\uc774\ud130\uac00 30\uac1c\uc529 1\ubb36\uc74c\uc774 \ub418\uaca0\ub124\uc694.<\/p>\n\n\n\n<p>\uc790, \uc774\uc81c \ubaa8\ub378\uc744 \uac04\ub2e8\ud788 \uad6c\uc131\ud569\ub2c8\ub2e4. \ud559\uc2b5\uc744 \uc704\ud55c \ubaa8\ub378\uc774\ub77c\uae30 \ubcf4\ub2e4\ub294 \uac04\ub2e8\ud788 \ud14c\uc2a4\ud2b8\ud558\uae30 \uc704\ud55c \uac83\uc784\uc73c\ub85c \uac04\ub2e8\ud55c \ubaa8\ub378\uc744 \ub9cc\ub4e4\uc5b4\ubcf4\uaca0\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uc608\uce21 \ub370\uc774\ud130\ub294 \ubd93\uaf43\uc758 \uaf43\ubc1b\uce68\uc758 \uae38\uc774\uc640 \ub108\ube44, \uaf43\uc78e\uc758 \uae38\uc774\uc640 \ub108\ube44\uc5d0 \ub530\ub77c 3\uc885\ub958 \uc911 \ud558\ub098\ub85c \uc608\uce21\ud558\ub294 \uac83\uc784\uc73c\ub85c \ucd5c\uc885 \uc544\uc6c3\ud48b\uc758 \ud615\ud0dc\ub294  3\uc785\ub2c8\ub2e4. 
\uadf8\ub9ac\uace0 \ud574\ub2f9 \ub370\uc774\ud130\ub97c \ud655\ub960 \uac12\uc73c\ub85c \ub098\ud0c0\ub0b4\uae30 \uc704\ud558\uc5ec softmax_classification\uc744 \ud65c\uc6a9\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># model \uc0dd\uc131\nmodel = nn.Sequential(\n    nn.Linear(4,3)\n)\n\noptimizer = optim.Adam(model.parameters())\n\nfrom copy import deepcopy\nlowest_loss = np.inf\nbest_model = None\nlowest_epoch = np.inf<\/pre>\n\n\n\n<p>copy \ud328\ud0a4\uc9c0\ub85c\ubd80\ud130 deepcopy\ub97c import\ud569\ub2c8\ub2e4. \uc774\uac83\uc740 \uc774\ubc88\uc5d0 \ub370\uc774\ud130\ub97c \ub9cc\ub4dc\ub294 \uacfc\uc815\uacfc \uc9c1\uc811\uc801\uc778 \uad00\ub828\uc774 \uc5c6\uae30 \ub54c\ubb38\uc5d0 \uac04\ub2e8\ud788\ub9cc \uc124\uba85\ud558\uba74 \uac1d\uccb4\uc758 \ubaa8\ub4e0 \ub0b4\uc6a9\uc744 \ubcf5\uc0ac\ud574\uc11c \uc0c8\ub85c\uc6b4 \ud558\ub098\uc758 \uac1d\uccb4\ub97c \ub9cc\ub4dc\ub294 \uac83\uc744 deep copy\ub77c\uace0 \ud569\ub2c8\ub2e4. \ubc18\ub300\uc758 \uac1c\ub150\uc740 shallow copy \uc785\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uc774\uc81c \ud559\uc2b5\uc744 \uc2dc\uc791\ud569\ub2c8\ub2e4. \uc774 \ubaa8\ub378\uc740 2\uac1c\uc758 for loop\uc73c\ub85c \ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. \uac00\uc7a5 \uba3c\uc800 \ub098\uc624\ub294 for loop\uc740 epoch\uc5d0 \ub300\ud55c \uc815\uc758\ub85c train data\ub97c \ucd1d \uba87\ubc88 \ud559\uc2b5\ud558\ub294\uac00\uc5d0 \ub300\ud55c \uc815\uc758\uc785\ub2c8\ub2e4.  
\ub2e4\uc74c\uc5d0 \ub098\uc624\ub294 \ub610 \ud558\ub098\uc758 for loop\uc740 \ud559\uc2b5 \ub370\uc774\ud130\ub97c \uba87\uac1c\ub85c \ub098\ub220\uc11c \ud559\uc2b5\ud560 \uac83\uc778\uac00 \uc989, batch\uc5d0 \ub300\ud55c \ubb38\uc81c\uc785\ub2c8\ub2e4.<\/p>\n\n\n\n<p>1\ubc88 \ud559\uc2b5\uc774 \ub05d\ub098\uba74 \ud559\uc2b5\uc758 loss\ub97c \uacc4\uc0b0\ud574\ubd05\ub2c8\ub2e4. loss\ub294 \uc815\ub2f5\uacfc\uc758 \ucc28\uc774\ub97c \uc758\ubbf8\ud558\ub294 \uac83\uc73c\ub85c \uc791\uc73c\uba74 \uc791\uc744 \uc218\ub85d \ud559\uc2b5\uc774 \uc798\ub410\ub2e4\ub294 \uc758\ubbf8\uc785\ub2c8\ub2e4. \ud55c\ubc88 \ud559\uc2b5\uc774 \ub05d\ub098\uba74 valid data\ub97c \uc2e4\ud589\ud574\ubd05\ub2c8\ub2e4. \uadf8\ub9ac\uace0 valid\uc5d0\uc11c \ub098\uc628 loss\uc640 train\uc5d0\uc11c \ub098\uc628 loss\ub97c \ube44\uad50\ud574\ubcf4\uace0 valid\uc758 loss\uac00 \ub354 \uc88b\uc744 \ub54c\uc5d0 \ud574\ub2f9 \ud559\uc2b5\uc5d0 \uc0ac\uc6a9\ud55c \ubaa8\ub378\uc744 deepcopy\ud574\uc11c \uc800\uc7a5\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uadf8 \uc774\uc720\ub294 \ubb34\uc870\uac74 \ud559\uc2b5\uc744 \uc624\ub798 \ud55c\ub2e4\uace0 \ud574\uc11c \uc88b\uc740 \uacb0\uacfc\uac00 \ub098\uc624\ub294 \uac83\uc774 \uc544\ub2c8\uace0 \uc5b4\ub290 \uc21c\uac04\uc5d0 \ud559\uc2b5\uc774 \uc815\uccb4\ub418\uac70\ub098 \uacfc\uc801\ud569 \ub418\ub294 \uc77c\uc774 \uc788\uae30 \ub54c\ubb38\uc5d0 \uac00\uc7a5 \uc88b\uc740 \ubaa8\ub378\uc744 \uc800\uc7a5\ud558\ub294 \uac83\uc785\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">train_history, valid_history = [], []\n\nfor i in range(config['n_epochs']+1):\n    model.train()\n    \n    train_loss, valid_loss = 0, 0\n    y_hat = []\n    \n    # train_batch start\n    for x_i, y_i in train_loader:\n        y_hat_i = model(x_i)\n        
loss = F.cross_entropy(y_hat_i, y_i.long().squeeze())\n        \n        optimizer.zero_grad()\n        loss.backward()\n\n        optimizer.step()        \n        train_loss += float(loss) # This is very important to prevent memory leak.\n\n    train_loss = train_loss \/ len(train_loader)\n    \n    model.eval()\n    with torch.no_grad():\n        valid_loss = 0\n        \n        for x_i, y_i in valid_loader:\n            y_hat_i = model(x_i)\n            loss = F.cross_entropy(y_hat_i, y_i.long().squeeze())\n            \n            valid_loss += float(loss)\n            \n            y_hat += [y_hat_i]\n            \n    valid_loss = valid_loss \/ len(valid_loader)\n    \n    train_history.append(train_loss)\n    valid_history.append(valid_loss)\n    \n    if i % config['print_interval'] == 0:\n        print('Epoch %d: train loss=%.4e  valid_loss=%.4e  lowest_loss=%.4e' % (i, train_loss, valid_loss, lowest_loss))\n        \n    if valid_loss &lt;= lowest_loss:\n        lowest_loss = valid_loss\n        lowest_epoch = i\n        best_model = deepcopy(model.state_dict())\n\nmodel.load_state_dict(best_model)<\/pre>\n\n\n\n<p>\uc774\uc81c \ud559\uc2b5\uc774 \uc798\ub410\ub294\uc9c0 \uc544\ub798\uc640 \uac19\uc740 \ubc29\ubc95\uc73c\ub85c train_loss\uc640 valid_loss\ub97c \ud45c\uc2dc\ud574\ubd05\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">import matplotlib.pyplot as plt\n\nfig, loss_ax = plt.subplots()\n\nloss_ax.plot(train_history, 'y', label='train loss')\nloss_ax.plot(valid_history, 'r', label='val loss')\n\nloss_ax.set_xlabel('epoch')\nloss_ax.set_ylabel('loss')\n\nloss_ax.legend(loc='upper left')\n\nplt.show()<\/pre>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" width=\"837\" height=\"543\" 
src=\"http:\/\/cedartrees.co.kr\/wp-content\/uploads\/2020\/06\/train_valid_loss.png\" alt=\"\" class=\"wp-image-216\" srcset=\"http:\/\/blog.cedartrees.co.kr\/wp-content\/uploads\/2020\/06\/train_valid_loss.png 837w, http:\/\/blog.cedartrees.co.kr\/wp-content\/uploads\/2020\/06\/train_valid_loss-300x195.png 300w, http:\/\/blog.cedartrees.co.kr\/wp-content\/uploads\/2020\/06\/train_valid_loss-768x498.png 768w\" sizes=\"(max-width: 706px) 89vw, (max-width: 767px) 82vw, 740px\" \/><\/figure>\n\n\n\n<p>\uc0ac\uc2e4 \uc774 \uc608\uc81c\ub294 torch\uc758 Dataset\uacfc DataLoader\ub97c \uc0ac\uc6a9\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud55c \uc608\uc81c\uc600\ub294\ub370 \uc774\uac83\uc800\uac83 \uc124\uba85\ud558\ub2e4 \ubcf4\ub2c8 \uae00\uc774 \uae38\uc5b4\uc84c\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uc5ec\uae30\uc11c \uc911\uc694\ud55c \uac83\uc740 Dataset\uc744 \ub9cc\ub4e4\uace0 DataLoader\ub97c \ud1b5\ud574\uc11c \ud559\uc2b5\uc5d0 \uc0ac\uc6a9\ud558\ub294 \ubc29\ubc95\uc5d0 \ub300\ud55c \ub0b4\uc6a9\uc774 \uc911\uc694\ud558\ub2c8 \uc608\uc81c \ucf54\ub4dc\ub97c \ud65c\uc6a9\ud574\uc11c \uc9c1\uc811 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.<\/p>\n","protected":false},"excerpt":{"rendered":"<p>sklearn\uc758 \ubd93\uaf43 \ub370\uc774\ud130\ub97c \ud65c\uc6a9\ud558\uc5ec pytorch\uc640 dataloader\ub97c \ud65c\uc6a9\ud558\uc5ec \ubd84\ub958 \ubb38\uc81c\ub97c \ud480\uc5b4 \ubcf4\uaca0\uc2b5\ub2c8\ub2e4. iris \ub370\uc774\ud130\uc14b\uc744 \ubc1b\uc544\uc11c pandas\ub85c \ub370\uc774\ud130\ub97c \ubcc0\ud658\ud569\ub2c8\ub2e4. \ubcc0\ud658 \uacfc\uc815\uc774 \ubc18\ub4dc\uc2dc \ud544\uc694\ud55c \uac83\uc740 \uc544\ub2c8\uc9c0\ub9cc \ub370\uc774\ud130\uc14b\uc744 \ubcc0\uacbd\ud558\uac70\ub098 \ud559\uc2b5\uc6a9 \uceec\ub7fc \uc815\ubcf4\ub97c \uc218\uc815\ud560 \ub54c\uc5d0 \ub3c4\uc6c0\uc774 \ub429\ub2c8\ub2e4. 
\ub2e4\uc74c\uc73c\ub85c PyTorch\ub85c \ub370\uc774\ud130\ub97c import\ud558\uc5ec \ud559\uc2b5\uc6a9 \ub370\uc774\ud130\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4. \ud559\uc2b5\uc6a9 \ub370\uc774\ud130\ub294 train_data\uc640 valid_data\ub85c \ubd84\ub9ac\ud558\ub418 8:2 \ube44\uc728\ub85c \ubd84\ub9ac\ud569\ub2c8\ub2e4. pytorch\uc5d0\uc11c \uc81c\uacf5\ud558\ub294 Dataset\uacfc DataLoader\ub97c import\ud569\ub2c8\ub2e4. Dataset \ud074\ub798\uc2a4\ub97c \uc0c1\uc18d\ud558\uc5ec &hellip; <\/p>\n<p class=\"link-more\"><a href=\"http:\/\/blog.cedartrees.co.kr\/index.php\/2020\/06\/24\/pytorch-dataloader-example\/\" class=\"more-link\">\ub354 \ubcf4\uae30<span class=\"screen-reader-text\"> &#8220;PyTorch DataLoader Example&#8221;<\/span><\/a><\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":[],"categories":[24,21,27],"tags":[128,6],"_links":{"self":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/208"}],"collection":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/comments?post=208"}],"version-history":[{"count":12,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/208\/revisions"}],"predecessor-version":[{"id":741,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/208\/revisions\/741"}],"wp:attachment":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/media?parent=208"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/categories?post=208"},{"taxonomy":"post_tag","embeddable":true
,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/tags?post=208"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}