{"id":252,"date":"2020-08-04T16:07:54","date_gmt":"2020-08-04T07:07:54","guid":{"rendered":"http:\/\/cedartrees.co.kr\/?p=252"},"modified":"2021-04-03T19:17:54","modified_gmt":"2021-04-03T10:17:54","slug":"sequence2sequence-with-pytorch","status":"publish","type":"post","link":"http:\/\/blog.cedartrees.co.kr\/index.php\/2020\/08\/04\/sequence2sequence-with-pytorch\/","title":{"rendered":"Seq2Seq \ubb38\uc7a5\ubc88\uc5ed"},"content":{"rendered":"\n<p>\ud30c\uc774\ud1a0\uce58 Seq2Seq \uc608\uc81c<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">import random\nimport torch\nimport torch.nn as nn\nimport torch.optim as optim<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">torch.manual_seed(0)\ndevice = torch.device('cuda' if torch.cuda.is_available() else 'cpu')<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">raw = ['I called Tom for help.\t\ub098\ub294 \ud1b0\uc5d0\uac8c \ub3c4\uc6c0\uc744 \uc694\uccad\ud588\ub2e4.',\n'I do not like science.\t\ub098\ub294 \uacfc\ud559\uc774 \uc2eb\uc5b4.',\n'I hate myself as well.\t\ub098\ub3c4 \ub0b4 \uc790\uc2e0\uc744 \uc2eb\uc5b4\ud574.',\n'I knew Tom would lose.\t\ud1b0\uc774 \uc9c8 \uac70\ub77c\ub294 \uac83\uc744 \ub09c \uc54c\uace0 \uc788\uc5c8\uc5b4.',\n'I know Tom personally.\t\ub09c \ud1b0\uc744 \uac1c\uc778\uc801\uc73c\ub85c \uc54c\uace0 \uc788\uc5b4.',\n'I like Korean cuisine.\t\uc804 \ud55c\uad6d \uc694\ub9ac\uac00 
\uc88b\uc544\uc694.',\n'I like Korean cuisine.\t\uc804 \ud55c\uad6d \uc694\ub9ac\ub97c \uc88b\uc544\ud574\uc694.',\n'I like helping others.\t\ub098\ub294 \ub0a8\uc744 \ub3d5\ub294 \uac83\uc744 \uc88b\uc544\ud55c\ub2e4.',\n'I really like puppies.\t\uc800\ub294 \uac15\uc544\uc9c0\uac00 \uc815\ub9d0 \uc88b\uc544\uc694.',\n'I run faster than Tom.\t\ub098\ub294 \ud1b0\ubcf4\ub2e4 \ube60\ub974\uac8c \ub2ec\ub9b4 \uc218 \uc788\uc5b4.',\n'I think Tom is lonely.\t\ud1b0\uc774 \uc678\ub85c\uc6cc\ud558\ub294 \uac83 \uac19\uc544.',\n'I think they like you.\t\uadf8\ub4e4\uc774 \ub110 \uc88b\uc544\ud558\ub294 \uac83 \uac19\uc544.',\n'I want to go to sleep.\t\ub098 \uc790\ub7ec \uac00\uace0 \uc2f6\uc5b4.',\n'I want to go to sleep.\t\ub098 \uc790\uace0 \uc2f6\uc5b4.',\n'I want to visit Korea.\t\ub098\ub294 \ud55c\uad6d\uc5d0 \ub4e4\ub974\uace0 \uc2f6\ub2e4.']<\/pre>\n\n\n\n<p>\uc0ac\uc6a9\ud55c \ub370\uc774\ud130\ub294 <a href=\"http:\/\/www.manythings.org\/anki\/\">http:\/\/www.manythings.org\/anki\/<\/a> \uc5d0\uc11c kor-eng.zip \ud30c\uc77c\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc544 \uc77c\ubd80 \ub370\uc774\ud130\ub9cc \uc0ac\uc6a9\ud588\uc2b5\ub2c8\ub2e4. 
\ud574\ub2f9 \uc0ac\uc774\ud2b8\uc5d0 \ub4e4\uc5b4\uac00\uba74 \ud55c\uad6d\uc5b4 \uc678\uc5d0\ub3c4 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ud30c\uc77c\uc744 \ub2e4\uc6b4 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" width=\"782\" height=\"282\" src=\"http:\/\/cedartrees.co.kr\/wp-content\/uploads\/2020\/08\/\u1109\u1173\u110f\u1173\u1105\u1175\u11ab\u1109\u1163\u11ba-2020-08-04-\u110b\u1169\u1112\u116e-4.13.33.png\" alt=\"\" class=\"wp-image-256\" srcset=\"http:\/\/blog.cedartrees.co.kr\/wp-content\/uploads\/2020\/08\/\u1109\u1173\u110f\u1173\u1105\u1175\u11ab\u1109\u1163\u11ba-2020-08-04-\u110b\u1169\u1112\u116e-4.13.33.png 782w, http:\/\/blog.cedartrees.co.kr\/wp-content\/uploads\/2020\/08\/\u1109\u1173\u110f\u1173\u1105\u1175\u11ab\u1109\u1163\u11ba-2020-08-04-\u110b\u1169\u1112\u116e-4.13.33-300x108.png 300w, http:\/\/blog.cedartrees.co.kr\/wp-content\/uploads\/2020\/08\/\u1109\u1173\u110f\u1173\u1105\u1175\u11ab\u1109\u1163\u11ba-2020-08-04-\u110b\u1169\u1112\u116e-4.13.33-768x277.png 768w\" sizes=\"(max-width: 706px) 89vw, (max-width: 767px) 82vw, 740px\" \/><\/figure>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">SOS_token = 0 # \ubb38\uc7a5\uc758 \uc2dc\uc791 Start of Sentence\nEOS_token = 1 #  \ubb38\uc7a5\uc758 \ub05d End of Sentence<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">class Vocab:\n    def __init__(self):\n        self.vocab2index = {\"&lt;SOS>\":SOS_token, \"&lt;EOS>\":EOS_token}\n        self.index2vocab = {SOS_token:\"&lt;SOS>\", EOS_token:\"&lt;EOS>\"}\n        
self.vocab_count = {}\n        self.n_vocab = len(self.vocab2index)\n    \n    def add_vocab(self, sentence):\n        for word in sentence.split(' '):\n            if word not in self.vocab2index:\n                self.vocab2index[word] = self.n_vocab\n                self.vocab_count[word] = 1\n                self.index2vocab[self.n_vocab] = word\n                self.n_vocab += 1\n            else:\n                self.vocab_count[word] += 1<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># declare simple encoder\nclass Encoder(nn.Module):\n    def __init__(self, input_size, hidden_size):\n        super(Encoder, self).__init__()\n        self.hidden_size = hidden_size\n        self.embedding = nn.Embedding(input_size, hidden_size) # Embedding(17, 16)\n        self.gru = nn.GRU(hidden_size, hidden_size)\n\n    def forward(self, x, hidden):\n        x = self.embedding(x).view(1, 1, -1)\n        x, hidden = self.gru(x, hidden)\n        return x, hidden\n\n    \n# declare simple decoder\nclass Decoder(nn.Module):\n    def __init__(self, hidden_size, output_size):\n        super(Decoder, self).__init__()\n        self.hidden_size = hidden_size\n        self.embedding = nn.Embedding(output_size, hidden_size)\n        self.gru = nn.GRU(hidden_size, hidden_size, num_layers=1, batch_first=True)\n        self.out = nn.Linear(hidden_size, output_size)\n        self.softmax = nn.Softmax(dim=1)\n\n    def forward(self, x, hidden):\n        x = self.embedding(x).view(1, 1, -1)\n        x, hidden = self.gru(x, hidden) # lstm\uc744 \uc0ac\uc6a9\ud560 \uacbd\uc6b0 \ud574\ub2f9 \uc704\uce58 \uc218\uc815\n        x = self.softmax(self.out(x[0]))\n        return x, hidden<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" 
data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># read and preprocess the corpus data\ndef preprocess(corpus):\n    print(\"reading corpus...\")\n    pairs = []\n    for line in corpus:\n        pairs.append([s for s in line.strip().lower().split(\"\\t\")])\n    print(\"Read {} sentence pairs\".format(len(pairs)))\n\n    pairs = [pair for pair in pairs]\n    print(\"Trimmed to {} sentence pairs\".format(len(pairs)))\n\n    source_vocab = Vocab()\n    target_vocab = Vocab()\n\n    print(\"Counting words...\")\n    for pair in pairs:\n        source_vocab.add_vocab(pair[0])\n        target_vocab.add_vocab(pair[1])\n    print(\"source vocab size =\", source_vocab.n_vocab)\n    print(\"target vocab size =\", target_vocab.n_vocab)\n\n    return pairs, source_vocab, target_vocab\n\n# \ub370\uc774\ud130\uc14b, \uc785\ub825\ub2e8\uc5b4\uc815\ubcf4, \ucd9c\ub825\ub2e8\uc5b4\uc815\ubcf4\npairs, source_vocab, target_vocab = preprocess(raw)<\/pre>\n\n\n\n<p>\ud6c8\ub828\uc6a9 \uc785\ucd9c\ub825 \ub370\uc774\ud130\uc14b\uc744 \uc704\uc640 \uac19\uc774 \ub9cc\ub4e0\ud6c4 \uc774\uc81c \uc778\ucf54\ub354, \ub514\ucf54\ub354 \ubaa8\ub378\uc744 \ub9cc\ub4e4\uc5b4\uc57c \ud569\ub2c8\ub2e4. \uba3c\uc800 \ub9cc\ub4e4\uae30 \uc804\uc5d0 \uc778\ucf54\ub354-\ub514\ucf54\ub354\uc758 \uc785\ucd9c\ub825 \uc815\ubcf4\uc5d0 \ub300\ud558\uc5ec \uc9c1\uc811 \uadf8\ub9bc\uc73c\ub85c \uadf8\ub824\ubcf4\uc2dc\uae30\ub97c \ucd94\ucc9c\ud569\ub2c8\ub2e4. 
\uac00\uc7a5 \uc88b\uc740 \uac83\uc740 \ub178\ud2b8\uc5d0 \ud39c\uc73c\ub85c \uadf8\ub824\ubcf4\uc2dc\ub294 \uac83\uc774 \uc88b\uaca0\uc9c0\ub9cc \uadf8\ub807\uc9c0 \uc54a\ub2e4\uba74 \uba38\ub9ac\uc18d\uc73c\ub85c \uc5b4\ub5a4 \uc785\ub825\uc774 \ub4e4\uc5b4\uc624\uace0 \uc5b4\ub5a4 \ucd9c\ub825\uc774 \ub098\uac00\ub294\uc9c0\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uc124\uacc4\ud558\ub294 \uacfc\uc815\uc774 \ud544\uc694\ud569\ub2c8\ub2e4.<br><br>\uc774\ub7f0 \uacfc\uc815\uc774 \uc5c6\uc73c\uba74 \ub098\uc911\uc5d0 \uc778\ucf54\ub354\uc640 \ub514\ucf54\ub354\ub97c \uc124\uacc4\ud560 \ub54c\uc5d0 \ud63c\ub3d9\ud558\uae30 \uc27d\uae30 \ub54c\ubb38\uc5d0 \ubc18\ub4dc\uc2dc \ubaa8\ub378\uc758 \uc785\ucd9c\ub825 \ud750\ub984\uc744 \uad6c\uc0c1\ud574\ubcf4\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\ubcf8 \uc608\uc81c\uc758 \uc778\ucf54\ub354-\ub514\ucf54\ub354 \uc815\ubcf4\ub294 \ub2e4\uc74c\uacfc \uac19\uc2b5\ub2c8\ub2e4.<br>\uc778\ucf54\ub354 : input_vector(41) -&gt; Embedding(41,30) -&gt; LSTM(30,30) <br>\ub514\ucf54\ub354 : Embedding(52,30) -&gt; LSTM(30, 52) &#8211; hidden_vector(52)<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">enc_hidden_size = 30\ndec_hidden_size = enc_hidden_size\nenc = Encoder(source_vocab.n_vocab, enc_hidden_size).to(device)\ndec = Decoder(dec_hidden_size, target_vocab.n_vocab).to(device)<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">def tensorize(vocab, sentence):\n    idx = [vocab.vocab2index[word] for word in sentence.lower().split(' ')]\n    idx.append(vocab.vocab2index['&lt;EOS>'])\n    return 
torch.Tensor(idx).long().to(device).view(-1,1)<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">tensorize(source_vocab, 'I called Tom for help.')\noutput : tensor([[2], [3], [4], [5], [6], [1]])<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">training_source = [tensorize(source_vocab, pair[0]) for pair in pairs]\ntraining_target = [tensorize(target_vocab, pair[1]) for pair in pairs]<\/pre>\n\n\n\n<p>Train<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">loss_total = 0\nnumber_epoch = 5001\n\nencoder_optimizer = optim.SGD(enc.parameters(), lr=0.01)\ndecoder_optimizer = optim.SGD(dec.parameters(), lr=0.01)\n\ncriterion = nn.NLLLoss()\n\nfor epoch in range(number_epoch):\n    epoch_loss = 0\n    \n    for i in range(len(training_source)):\n        encoder_optimizer.zero_grad()\n        decoder_optimizer.zero_grad()\n        \n        source_tensor = training_source[i]\n        target_tensor = training_target[i]\n\n        encoder_hidden = torch.zeros([1, 1, enc.hidden_size]).to(device)\n\n        source_length = source_tensor.size(0)\n        target_length = target_tensor.size(0)\n        \n        loss = 0\n\n        for enc_input in range(source_length):\n            _, encoder_hidden = enc(source_tensor[enc_input], encoder_hidden)\n\n        decoder_input = torch.Tensor([[SOS_token]]).long().to(device)\n        decoder_hidden = encoder_hidden # connect encoder output to 
decoder input\n\n        for di in range(target_length):\n            decoder_output, decoder_hidden = dec(decoder_input, decoder_hidden)\n            #print(decoder_output, target_tensor[di], criterion(decoder_output, target_tensor[di]))\n            loss += criterion(decoder_output, target_tensor[di])\n            decoder_input = target_tensor[di]  # teacher forcing\n        \n        loss.backward()\n\n        encoder_optimizer.step()\n        decoder_optimizer.step()\n        \n        #print(loss.item(),target_length)\n        epoch_loss += loss.item()\/target_length\n        #loss_total += loss_epoch\n    if epoch % 100 == 0:\n        print('--- epoch {}, total loss {} '.format(epoch,float(epoch_loss\/15)))<\/pre>\n\n\n\n<p>Evaluate<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">for pair in pairs:\n    print(\">\", pair[0])\n    print(\"=\", pair[1])\n    source_tensor = tensorize(source_vocab, pair[0])\n    source_length = source_tensor.size()[0]\n    encoder_hidden = torch.zeros([1, 1, enc.hidden_size]).to(device)\n\n    for ei in range(source_length):\n        _, encoder_hidden = enc(source_tensor[ei], encoder_hidden)\n        #print(encoder_hidden.size()) # 1,1,16\n\n    decoder_input = torch.Tensor([[SOS_token]], device=device).long()\n    decoder_hidden = encoder_hidden\n    decoded_words = []\n\n    for di in range(20):\n        decoder_output, decoder_hidden = dec(decoder_input, decoder_hidden)\n        #print('decoder_iput',decoder_input, 'decoder_output',decoder_output)\n        _, top_index = decoder_output.data.topk(1)\n        if top_index.item() == EOS_token:\n            decoded_words.append(\"&lt;EOS>\")\n            break\n        else:\n            decoded_words.append(target_vocab.index2vocab[top_index.item()])\n\n        decoder_input = 
top_index.squeeze().detach()\n\n    predict_words = decoded_words\n    predict_sentence = \" \".join(predict_words)\n    print(\"&lt;\", predict_sentence)\n    print(\"\")<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">> i called tom for help.\n= \ub098\ub294 \ud1b0\uc5d0\uac8c \ub3c4\uc6c0\uc744 \uc694\uccad\ud588\ub2e4.\n&lt; \ub098\ub294 \ud1b0\uc5d0\uac8c \ub3c4\uc6c0\uc744 \uc694\uccad\ud588\ub2e4. &lt;EOS>\n\n> i do not like science.\n= \ub098\ub294 \uacfc\ud559\uc774 \uc2eb\uc5b4.\n&lt; \ub098\ub294 \uacfc\ud559\uc774 \uc2eb\uc5b4. &lt;EOS>\n\n> i hate myself as well.\n= \ub098\ub3c4 \ub0b4 \uc790\uc2e0\uc744 \uc2eb\uc5b4\ud574.\n&lt; \ub098\ub3c4 \ub0b4 \uc790\uc2e0\uc744 \uc2eb\uc5b4\ud574. &lt;EOS>\n\n> i knew tom would lose.\n= \ud1b0\uc774 \uc9c8 \uac70\ub77c\ub294 \uac83\uc744 \ub09c \uc54c\uace0 \uc788\uc5c8\uc5b4.\n&lt; \ud1b0\uc774 \uc9c8 \uac70\ub77c\ub294 \uac83\uc744 \ub09c \uc54c\uace0 \uc788\uc5c8\uc5b4. &lt;EOS>\n\n> i know tom personally.\n= \ub09c \ud1b0\uc744 \uac1c\uc778\uc801\uc73c\ub85c \uc54c\uace0 \uc788\uc5b4.\n&lt; \ub09c \ud1b0\uc744 \uac1c\uc778\uc801\uc73c\ub85c \uc54c\uace0 \uc788\uc5b4. &lt;EOS>\n\n> i like korean cuisine.\n= \uc804 \ud55c\uad6d \uc694\ub9ac\uac00 \uc88b\uc544\uc694.\n&lt; \uc804 \ud55c\uad6d \uc694\ub9ac\ub97c \uc88b\uc544\ud574\uc694. &lt;EOS>\n\n> i like korean cuisine.\n= \uc804 \ud55c\uad6d \uc694\ub9ac\ub97c \uc88b\uc544\ud574\uc694.\n&lt; \uc804 \ud55c\uad6d \uc694\ub9ac\ub97c \uc88b\uc544\ud574\uc694. &lt;EOS>\n\n> i like helping others.\n= \ub098\ub294 \ub0a8\uc744 \ub3d5\ub294 \uac83\uc744 \uc88b\uc544\ud55c\ub2e4.\n&lt; \ub098\ub294 \ub0a8\uc744 \ub3d5\ub294 \uac83\uc744 \uc88b\uc544\ud55c\ub2e4. 
&lt;EOS>\n\n> i really like puppies.\n= \uc800\ub294 \uac15\uc544\uc9c0\uac00 \uc815\ub9d0 \uc88b\uc544\uc694.\n&lt; \uc800\ub294 \uac15\uc544\uc9c0\uac00 \uc815\ub9d0 \uc88b\uc544\uc694. &lt;EOS>\n\n> i run faster than tom.\n= \ub098\ub294 \ud1b0\ubcf4\ub2e4 \ube60\ub974\uac8c \ub2ec\ub9b4 \uc218 \uc788\uc5b4.\n&lt; \ub098\ub294 \ud1b0\ubcf4\ub2e4 \ube60\ub974\uac8c \ub2ec\ub9b4 \uc218 \uc788\uc5b4. &lt;EOS>\n\n> i think tom is lonely.\n= \ud1b0\uc774 \uc678\ub85c\uc6cc\ud558\ub294 \uac83 \uac19\uc544.\n&lt; \ud1b0\uc774 \uc678\ub85c\uc6cc\ud558\ub294 \uac83 \uac19\uc544. &lt;EOS>\n\n> i think they like you.\n= \uadf8\ub4e4\uc774 \ub110 \uc88b\uc544\ud558\ub294 \uac83 \uac19\uc544.\n&lt; \uadf8\ub4e4\uc774 \ub110 \uc88b\uc544\ud558\ub294 \uac83 \uac19\uc544. &lt;EOS>\n\n> i want to go to sleep.\n= \ub098 \uc790\ub7ec \uac00\uace0 \uc2f6\uc5b4.\n&lt; \ub098 \uc790\uace0 \uc2f6\uc5b4. &lt;EOS>\n\n> i want to go to sleep.\n= \ub098 \uc790\uace0 \uc2f6\uc5b4.\n&lt; \ub098 \uc790\uace0 \uc2f6\uc5b4. &lt;EOS>\n\n> i want to visit korea.\n= \ub098\ub294 \ud55c\uad6d\uc5d0 \ub4e4\ub974\uace0 \uc2f6\ub2e4.\n&lt; \ub098\ub294 \ud55c\uad6d\uc5d0 \ub4e4\ub974\uace0 \uc2f6\ub2e4. &lt;EOS><\/pre>\n","protected":false},"excerpt":{"rendered":"<p>\ud30c\uc774\ud1a0\uce58 Seq2Seq \uc608\uc81c \uc0ac\uc6a9\ud55c \ub370\uc774\ud130\ub294 http:\/\/www.manythings.org\/anki\/ \uc5d0\uc11c kor-eng.zip \ud30c\uc77c\uc744 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc544 \uc77c\ubd80 \ub370\uc774\ud130\ub9cc \uc0ac\uc6a9\ud588\uc2b5\ub2c8\ub2e4. \ud574\ub2f9 \uc0ac\uc774\ud2b8\uc5d0 \ub4e4\uc5b4\uac00\uba74 \ud55c\uad6d\uc5b4 \uc678\uc5d0\ub3c4 \ub2e4\uc591\ud55c \ud615\ud0dc\uc758 \ud30c\uc77c\uc744 \ub2e4\uc6b4 \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud6c8\ub828\uc6a9 \uc785\ucd9c\ub825 \ub370\uc774\ud130\uc14b\uc744 \uc704\uc640 \uac19\uc774 \ub9cc\ub4e0\ud6c4 \uc774\uc81c \uc778\ucf54\ub354, \ub514\ucf54\ub354 \ubaa8\ub378\uc744 \ub9cc\ub4e4\uc5b4\uc57c \ud569\ub2c8\ub2e4. 
\uba3c\uc800 \ub9cc\ub4e4\uae30 \uc804\uc5d0 \uc778\ucf54\ub354-\ub514\ucf54\ub354\uc758 \uc785\ucd9c\ub825 \uc815\ubcf4\uc5d0 \ub300\ud558\uc5ec \uc9c1\uc811 \uadf8\ub9bc\uc73c\ub85c \uadf8\ub824\ubcf4\uc2dc\uae30\ub97c \ucd94\ucc9c\ud569\ub2c8\ub2e4. \uac00\uc7a5 \uc88b\uc740 \uac83\uc740 \ub178\ud2b8\uc5d0 \ud39c\uc73c\ub85c \uadf8\ub824\ubcf4\uc2dc\ub294 &hellip; <\/p>\n<p class=\"link-more\"><a href=\"http:\/\/blog.cedartrees.co.kr\/index.php\/2020\/08\/04\/sequence2sequence-with-pytorch\/\" class=\"more-link\">\ub354 \ubcf4\uae30<span class=\"screen-reader-text\"> &#8220;Seq2Seq \ubb38\uc7a5\ubc88\uc5ed&#8221;<\/span><\/a><\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":[],"categories":[24,40,73,76,21,14],"tags":[97,96,6,61,74,55],"_links":{"self":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/252"}],"collection":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/comments?post=252"}],"version-history":[{"count":9,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/252\/revisions"}],"predecessor-version":[{"id":865,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/252\/revisions\/865"}],"wp:attachment":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/media?parent=252"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/categories?post=252"},{"taxonomy":"post_tag","embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/tags?post=252"}],"
curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}