{"id":828,"date":"2021-02-02T19:24:27","date_gmt":"2021-02-02T10:24:27","guid":{"rendered":"http:\/\/cedartrees.co.kr\/?p=828"},"modified":"2021-04-03T19:07:01","modified_gmt":"2021-04-03T10:07:01","slug":"sequence2sequence-attention","status":"publish","type":"post","link":"http:\/\/blog.cedartrees.co.kr\/index.php\/2021\/02\/02\/sequence2sequence-attention\/","title":{"rendered":"Seq2Seq \uc5b4\ud150\uc158 \ubb38\uc7a5\uc0dd\uc131"},"content":{"rendered":"\n<p>\ud574\ub2f9 \ubaa8\ub378\uc740 \uc774\uc804\uc5d0 \ud14c\uc2a4\ud2b8\ud588\ub358 Sequence2Sequence \ubaa8\ub378\uc5d0 Attention\uc744 \uc801\uc6a9\ud574\ubcf8 \uac83\uc785\ub2c8\ub2e4. \uc774\uc804 \ub0b4\uc6a9\uc774 \uad81\uae08\ud558\uc2e0 \ubd84\uc740 \uc544\ub798\uc758 \uac8c\uc2dc\ubb3c\uc744 \ud655\uc778\ud574\ubcf4\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.<\/p>\n\n\n\n<figure class=\"wp-block-embed-wordpress wp-block-embed is-type-wp-embed is-provider-\uc138\ub2e4\ud2b8\ub9ac\uc2a4-\uc778\uacf5\uc9c0\ub2a5\uc5f0\uad6c\uc18c\"><div class=\"wp-block-embed__wrapper\">\n<blockquote class=\"wp-embedded-content\" data-secret=\"UCpeQGojKZ\"><a href=\"http:\/\/cedartrees.co.kr\/index.php\/2021\/01\/23\/sequence2sequence-nlg\/\">Seq2Seq \ubb38\uc7a5\uc0dd\uc131<\/a><\/blockquote><iframe class=\"wp-embedded-content\" sandbox=\"allow-scripts\" security=\"restricted\" style=\"position: absolute; clip: rect(1px, 1px, 1px, 1px);\" title=\"&#8220;Seq2Seq \ubb38\uc7a5\uc0dd\uc131&#8221; &#8212; \uc138\ub2e4\ud2b8\ub9ac\uc2a4 \uc778\uacf5\uc9c0\ub2a5\uc5f0\uad6c\uc18c\" src=\"http:\/\/cedartrees.co.kr\/index.php\/2021\/01\/23\/sequence2sequence-nlg\/embed\/#?secret=UCpeQGojKZ\" data-secret=\"UCpeQGojKZ\" width=\"525\" height=\"296\" frameborder=\"0\" marginwidth=\"0\" marginheight=\"0\" scrolling=\"no\"><\/iframe>\n<\/div><\/figure>\n\n\n\n<p>\uc774\uc804 \ubaa8\ub378\uc5d0\uc11c\ub294 Sequence2Sequence\ub9cc \uc0ac\uc6a9\ud588\uace0 \uc601\uc5b4\ubb38\uc7a5\uc744 \ud65c\uc6a9\ud588\uc2b5\ub2c8\ub2e4. 
\uc774\ubc88\uc5d0\ub294 \uc5b4\ud150\uc158(Attention)\uc744 \uc801\uc6a9\ud558\uace0 \ud55c\uae00\ubb38\uc11c\ub97c \ud1b5\ud574\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \uc774\ubc88\uc5d0\ub3c4 \uad6c\uae00 Colab\uc758 GPU\ub97c \ud1b5\ud574\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. <\/p>\n\n\n\n<p>\uba3c\uc800 \ud14d\uc2a4\ud2b8 \ub370\uc774\ud130\ub97c \uc900\ube44\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \ud14d\uc2a4\ud2b8 \ub370\uc774\ud130\ub294 \uc694\ud55c\ubcf5\uc74c 1-2\uc7a5\uc758 \ud55c\uae00 \ud14d\uc2a4\ud2b8\ub97c \ud65c\uc6a9\ud588\uc2b5\ub2c8\ub2e4. \ub3d9\uc77c\ud55c \ub370\uc774\ud130\ub85c \ud14c\uc2a4\ud2b8\ub97c \ud574\ubcf4\uc2dc\uae30 \uc6d0\ud558\uc2dc\uba74 \uc544\ub798\uc758 \ub9c1\ud06c\uc5d0\uc11c \ud14d\uc2a4\ud2b8 \ub370\uc774\ud130\ub97c \ub2e4\uc6b4\ubc1b\uc73c\uc2e0 \ud6c4\uc5d0 *.txt \ud30c\uc77c\ub85c \uc800\uc7a5\ud558\uc2dc\uace0 \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4.<\/p>\n\n\n\n<p><a href=\"http:\/\/www.holybible.or.kr\/B_RHV\/cgi\/bibleftxt.php?VR=RHV&amp;VL=43&amp;CN=1&amp;CV=99\">http:\/\/www.holybible.or.kr\/B_RHV\/cgi\/bibleftxt.php?VR=RHV&amp;VL=43&amp;CN=1&amp;CV=99<\/a><\/p>\n\n\n\n<p><em>\ud0dc\ucd08\uc5d0 \ub9d0\uc500\uc774 \uacc4\uc2dc\ub2c8\ub77c \uc774 \ub9d0\uc500\uc774 \ud558\ub098\ub2d8\uacfc \ud568\uaed8 \uacc4\uc168\uc73c\ub2c8 \uc774 \ub9d0\uc500\uc740 \uace7 \ud558\ub098\ub2d8\uc774\uc2dc\ub2c8\ub77c<br>\uadf8\uac00 \ud0dc\ucd08\uc5d0 \ud558\ub098\ub2d8\uacfc \ud568\uaed8 \uacc4\uc168\uace0<br>\ub9cc\ubb3c\uc774 \uadf8\ub85c \ub9d0\ubbf8\uc554\uc544 \uc9c0\uc740\ubc14 \ub418\uc5c8\uc73c\ub2c8 \uc9c0\uc740 \uac83\uc774 \ud558\ub098\ub3c4 \uadf8\uac00 \uc5c6\uc774\ub294 \ub41c \uac83\uc774 \uc5c6\ub290\ub2c8\ub77c<br>\uadf8 \uc548\uc5d0 \uc0dd\uba85\uc774 \uc788\uc5c8\uc73c\ub2c8 \uc774 \uc0dd\uba85\uc740 \uc0ac\ub78c\ub4e4\uc758 \ube5b\uc774\ub77c<br>\ube5b\uc774 \uc5b4\ub450\uc6c0\uc5d0 \ube44\ucde8\ub418 \uc5b4\ub450\uc6c0\uc774 
\uae68\ub2eb\uc9c0 \ubabb\ud558\ub354\ub77c&#8230;<\/em> [\ud14c\uc2a4\ud2b8 \ub370\uc774\ud130 \uc77c\ubd80]<\/p>\n\n\n\n<p>\ud559\uc2b5\uc744 \uc704\ud55c \uae30\ubcf8 \uc124\uc815\uc740 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4. \uad6c\uae00 Colab\uc5d0\uc11c \ud30c\uc77c\uc744 \ub85c\ub529\ud558\ub294 \ubd80\ubd84\uc740 \uc774\uc804 \uac8c\uc2dc\ubb3c\uc744 \ucc38\uc870\ud558\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4. \uc544\ub798\uc758 config\uc5d0 \ud30c\uc77c\uc758 \uc704\uce58, \ud06c\uae30, \uc784\ubca0\ub529 \uc0ac\uc774\uc988 \ub4f1\uc744 \uc815\uc758\ud588\uc2b5\ub2c8\ub2e4. \ud559\uc2b5\uc740 \ubc30\uce58 \uc0ac\uc774\uc988\ub97c 100\uc73c\ub85c \ud574\uc11c epochs 1,000\ubc88 \uc218\ud589\ud588\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">from argparse import Namespace\nconfig = Namespace(\n    train_file='gdrive\/MyDrive\/*\/gospel_john.txt', \n    seq_size=14, batch_size=100, sample=30, dropout=0.1, max_length=14,\n    enc_hidden_size=10, number_of_epochs=1000\n)<\/pre>\n\n\n\n<p>\uc0dd\uc131\ud55c \ud14d\uc2a4\ud2b8 \ud30c\uc77c\uc744 \uc77d\uc5b4\uc11c  train_data\uc5d0 \uc800\uc7a5\ud569\ub2c8\ub2e4. \uc800\uc7a5\ub41c \ub370\uc774\ud130\ub294 john_note\uc5d0 \ubc30\uc5f4 \ud615\ud0dc\ub85c \uc800\uc7a5\ub418\uac8c \ub418\uace0 \uc0dd\uc131\ub41c \ub370\uc774\ud130\ub294 note\ub77c\ub294 \ubc30\uc5f4\uc5d0 \uc5b4\uc808 \ub2e8\uc704\ub85c \ubd84\ub9ac\ub418\uc5b4 \uc800\uc7a5\ub429\ub2c8\ub2e4. \ud615\ud0dc\uc18c \ubd84\uc11d\uacfc\uc815\uc740 \uc0dd\ub7b5\ud558\uc600\uace0 \uc74c\uc808 \ubd84\ub9ac\ub9cc \uc218\ud589\ud588\uc2b5\ub2c8\ub2e4. 
\ud574\ub2f9 \ubaa8\ub378\uc744 \ud1b5\ud574\uc11c \ub354 \ub9ce\uc740 \ud14c\uc2a4\ud2b8\ub97c \ud574\ubcf4\uace0\uc790 \ud558\uc2dc\ub294 \ubd84\uc740 \uc74c\uc808\ubd84\ub9ac \uc678\uc5d0\ub3c4 \ud615\ud0dc\uc18c \uc791\uc5c5\uae4c\uc9c0 \uac19\uc774 \ud574\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcf4\uc2dc\uae38 \ucd94\ucc9c\ud569\ub2c8\ub2e4. \ucd5c\uc885 \uc0dd\uc131\ub41c note \ub370\uc774\ud130\ub294  [&#8216;\ud0dc\ucd08\uc5d0&#8217;, &#8216;\ub9d0\uc500\uc774&#8217;, &#8216;\uacc4\uc2dc\ub2c8\ub77c&#8217;, &#8216;\uc774&#8217;, &#8216;\ub9d0\uc500\uc774&#8217;, &#8216;\ud558\ub098\ub2d8\uacfc&#8217;, &#8216;\ud568\uaed8&#8217;, &#8216;\uacc4\uc168\uc73c\ub2c8&#8217;, &#8216;\uc774&#8217;, &#8216;\ub9d0\uc500\uc740&#8217;,&#8217;\ud558\ub098\ub2d8\uc774\ub2c8\ub77c&#8217;,&#8230;] \uc758 \ud615\ud0dc\uac00 \ub429\ub2c8\ub2e4.<\/p>\n\n\n\n<p><\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">def read_data(filename):\n    with io.open(filename, 'r',encoding='utf-8') as f:\n        data = [line for line in f.read().splitlines()]\n    return data \n\ntrain_data = read_data(config.train_file)\n\njohn_note = np.array(df['john'])\nnote = [n for note in john_note for n in note.split()]<\/pre>\n\n\n\n<p>note\uc5d0 \uc800\uc7a5\ub41c \ud615\ud0dc\ub294 \uc790\uc5f0\uc5b4\ub85c \uc774\ub97c \uc22b\uc790\ub85c \ubcc0\ud658\ud560 \ud544\uc694\uac00 \uc788\uc2b5\ub2c8\ub2e4. \uc774\ub294 \uc790\uc5f0\uc5b4 \uc790\uccb4\ub97c \ucef4\ud4e8\ud130\uac00 \uc778\uc2dd\ud560 \uc218 \uc5c6\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4. \uadf8\ub807\uae30 \ub54c\ubb38\uc5d0 \uac01 \ub2e8\uc5b4\ub4e4\uc744 \uc22b\uc790\ud654 \ud560 \ud544\uc694\uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc77c\uc608\ub85c &#8216;\ud0dc\ucd08\uc5d0&#8217; -&gt; 0, &#8216;\ub9d0\uc500\uc774&#8217;-&gt;1 \uc774\ub7f0 \ubc29\ubc95\uc73c\ub85c \ub9cc\ub4dc\ub294 \uacfc\uc815\uc774 \ud544\uc694\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uadf8\ub9ac\uace0 \uadf8\uc5d0 \uc55e\uc11c\uc11c \uc911\ubcf5\ub41c \ub2e8\uc5b4\ub4e4\uc740 \uc0ad\uc81c\ud560 \ud544\uc694\uac00 \uc788\uc2b5\ub2c8\ub2e4. &#8216;\uc774&#8217;\ub77c\ub294 \ub2e8\uc5b4\uac00 \uc5ec\ub7ec\ubc88 \ub098\uc624\uc9c0\ub9cc \ub098\uc62c \ub54c\ub9c8\ub2e4 \ubca1\ud130\ud654 \ud55c\ub2e4\uba74 \ubca1\ud130\uc758 \uc0ac\uc774\uc988\uac00 \uc99d\uac00\ud558\uac8c \ub418\uace0 \uc774\ub85c \uc778\ud55c \uacc4\uc0b0\ub7c9\uc774 \uc99d\uac00\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4. \ub2e8, \ud615\ud0dc\uc18c \ubd84\uc11d\uc744 \ud1b5\ud574 \ubcf4\uba74 &#8216;\uc774&#8217;\ub77c\ub294 \ub2e8\uc5b4\uac00 \uac01\uae30 \ub2e4\ub978 \uc758\ubbf8\ub97c \uac00\uc9c8 \uc218\ub294 \uc788\uc9c0\ub9cc \uc774\ubc88 \ud14c\uc2a4\ud2b8\uc5d0\uc11c\ub294 \ub3d9\uc77c\ud55c \ub370\uc774\ud130\ub85c \uc778\uc2dd\ud574\uc11c \ucd08\uae30\ud654 \uacb9\uce58\uc9c0 \uc54a\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\ucd5c\uc885 \uc0dd\uc131\ud560 \ub370\uc774\ud130\ub294 \ub2e8\uc5b4-\uc22b\uc790, \uc22b\uc790-\ub2e8\uc5b4 \ud615\ud0dc\ub97c \uac00\uc9c0\ub294 python dict \uc785\ub2c8\ub2e4. 
\ud574\ub2f9 dict\ub97c \uc0dd\uc131\ud558\ub294 \ubc29\ubc95\uc740 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">word_count = Counter(note)\nsorted_vocab = sorted(word_count, key=word_count.get, reverse=True)\nint_to_vocab = {k:w for k,w in enumerate(sorted_vocab)}\nvocab_to_int = {w:k for k,w in int_to_vocab.items()}\nn_vocab = len(int_to_vocab)<\/pre>\n\n\n\n<p>\ucd5c\uc885\uc801\uc73c\ub85c \uc0dd\uc131\ub418\ub294 \ub2e8\uc5b4\ub294 \uc14b\uc740 Vocabulary size = 598 \uc785\ub2c8\ub2e4. \uc0dd\uc131\ub418\ub294 \ub370\uc774\ud130 \uc0d8\ud50c(\ub2e8\uc5b4-\uc22b\uc790)\uc740 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"wp-block-preformatted\">{0: '\uc774', 1: '\uace7', 2: '\uadf8', 3: '\uac00\ub85c\ub418', 4: '\ub098\ub294', 5: '\uadf8\uac00', 6: '\ub9d0\ubbf8\uc554\uc544', 7: '\uac83\uc774', 8: '\uc0ac\ub78c\uc774', 9: '\ub300\ud558\uc5ec', 10: '\uc694\ud55c\uc774' ... }<\/pre>\n\n\n\n<p>\ud559\uc2b5\uc5d0 \uc0ac\uc6a9\ub418\ub294 \ubb38\uc7a5\uc740 \uac01\uac01 \ub2e8\uc5b4\uc758 \uc778\ub371\uc2a4 \uac12\uc73c\ub85c \uce58\ud658\ub41c \ub370\uc774\ud130(int_text)\ub97c \uc0ac\uc6a9\ud558\uac8c \ub429\ub2c8\ub2e4. \uc774\ub97c \uc0dd\uc131\ud558\ub294 \uacfc\uc815\uc740 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">int_text = [vocab_to_int[w] for w in note]<\/pre>\n\n\n\n<p>\uc0dd\uc131\ub41c \uc804\uccb4 \ubb38\uc7a5\uc5d0\uc11c \uc785\ub825 \ub370\uc774\ud130\uc640 \uc815\ub2f5 \ub370\uc774\ud130\ub97c \ub098\ub215\ub2c8\ub2e4. 
\uc774 \uacfc\uc815\uc740 \uc774\uc804\uc5d0 \uc5c5\ub85c\ub4dc \ud588\ub358 \uac8c\uc2dc\ubb3c\uc5d0 \uc124\uba85\ud588\uc73c\ub2c8 \ub118\uc5b4\uac00\ub3c4\ub85d \ud558\uaca0\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">source_words = []\ntarget_words = []\nfor i in range(len(int_text)):\n    ss_idx, se_idx, ts_idx, te_idx = i, (config.seq_size+i), i+1, (config.seq_size+i)+1\n    #print('{}:{}-{}:{}'.format(ss_idx,se_idx,ts_idx,te_idx))\n    if len(int_text[ts_idx:te_idx]) >= config.seq_size:\n        source_words.append(int_text[ss_idx:se_idx])\n        target_words.append(int_text[ts_idx:te_idx])<\/pre>\n\n\n\n<p>\uc0dd\uc131\ub41c \uc785\ub825 \ub370\uc774\ud130\uc640 \uc815\ub2f5 \ub370\uc774\ud130\ub97c 10\uac1c \ucd9c\ub825\ud574\ubcf4\uba74 \uc544\ub798\uc640 \uac19\uc740 \ud589\ud0dc\uac00 \ub429\ub2c8\ub2e4. \uc785\ub825\/\uc815\ub2f5 \ub370\uc774\ud130\uc758 \uae38\uc774\ub97c \ub298\ub824\uc8fc\uba74 \uc774\uc804\uc758 Sequence2Sequence  \ubaa8\ub378\uc5d0\uc11c\ub294 \ud559\uc2b5\uc774 \uc81c\ub300\ub85c \uc77c\uc5b4\ub098\uc9c0 \uc54a\uc558\uc2b5\ub2c8\ub2e4. \uadf8 \uc774\uc720\ub294 Encoding \ubaa8\ub378\uc5d0\uc11c \ucd5c\uc885 \uc0dd\uc131\ub418\ub294 Context Vector\uac00 \uc9e7\uc740 \ubb38\uc7a5\uc758 \uacbd\uc6b0\uc5d0\ub294 \uc9c0\uc7a5\uc774 \uc5c6\uaca0\uc9c0\ub9cc \uae34 \ubb38\uc7a5\uc758 \uc815\ubcf4\ub97c \ucd95\uc57d\ud574\uc11c \ub2f4\uae30\uc5d0\ub294 \ub2e4\uc18c \ubb34\ub9ac\uac00 \uc788\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4. 
\uc774\ub7ec\ud55c \ubb38\uc81c\ub97c \ud574\uacb0\ud558\uae30 \uc704\ud574\uc11c \ub098\uc628 \ubaa8\ub378\uc774 \ubc14\ub85c Attention \ubaa8\ub378\uc785\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">for s,t in zip(source_words[0:10], target_words[0:10]):\n    print('source {} -> target {}'.format(s,t))<\/pre>\n\n\n\n<p>source [21, 14, 57, 0, 14, 22, 23, 58, 0, 59, 1, 60, 5, 21] -&gt; target [14, 57, 0, 14, 22, 23, 58, 0, 59, 1, 60, 5, 21, 22] source [14, 57, 0, 14, 22, 23, 58, 0, 59, 1, 60, 5, 21, 22] -&gt; target [57, 0, 14, 22, 23, 58, 0, 59, 1, 60, 5, 21, 22, 23] source [57, 0, 14, 22, 23, 58, 0, 59, 1, 60, 5, 21, 22, 23] -&gt; target [0, 14, 22, 23, 58, 0, 59, 1, 60, 5, 21, 22, 23, 61] source [0, 14, 22, 23, 58, 0, 59, 1, 60, 5, 21, 22, 23, 61] -&gt; target [14, 22, 23, 58, 0, 59, 1, 60, 5, 21, 22, 23, 61, 62] source [14, 22, 23, 58, 0, 59, 1, 60, 5, 21, 22, 23, 61, 62] -&gt; target [22, 23, 58, 0, 59, 1, 60, 5, 21, 22, 23, 61, 62, 24] source [22, 23, 58, 0, 59, 1, 60, 5, 21, 22, 23, 61, 62, 24] -&gt; target [23, 58, 0, 59, 1, 60, 5, 21, 22, 23, 61, 62, 24, 6] source [23, 58, 0, 59, 1, 60, 5, 21, 22, 23, 61, 62, 24, 6] -&gt; target [58, 0, 59, 1, 60, 5, 21, 22, 23, 61, 62, 24, 6, 25] source [58, 0, 59, 1, 60, 5, 21, 22, 23, 61, 62, 24, 6, 25] -&gt; target [0, 59, 1, 60, 5, 21, 22, 23, 61, 62, 24, 6, 25, 63] source [0, 59, 1, 60, 5, 21, 22, 23, 61, 62, 24, 6, 25, 63] -&gt; target [59, 1, 60, 5, 21, 22, 23, 61, 62, 24, 6, 25, 63, 64] source [59, 1, 60, 5, 21, 22, 23, 61, 62, 24, 6, 25, 63, 64] -&gt; target [1, 60, 5, 21, 22, 23, 61, 62, 24, 6, 25, 63, 64, 7]<\/p>\n\n\n\n<p>\ud30c\uc774\ud1a0\uce58 \ub77c\uc774\ube0c\ub7ec\ub9ac\ub97c \uc544\ub798\uc640 \uac19\uc774 \uc784\ud3ec\ud2b8\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" 
data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">import torch\nimport torch.nn as nn\nimport torch.optim as optim\nimport torch.nn.functional as F<\/pre>\n\n\n\n<p>\ud559\uc2b5 \ubaa8\ub378\uc744 \uc544\ub798\uc640 \uac19\uc774 \uc0dd\uc131\ud569\ub2c8\ub2e4. Encoder \ubd80\ubd84\uc740 \uc774\uc804\uc5d0 \uc0dd\uc131\ud588\ub358 \ubaa8\ub378\uacfc \ub2e4\ub974\uc9c0 \uc54a\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">class Encoder(nn.Module):\n    def __init__(self, input_size, hidden_size):\n        super().__init__()\n        self.hidden_size = hidden_size\n        self.embedding = nn.Embedding(input_size, hidden_size)\n        self.gru = nn.GRU(hidden_size, hidden_size)\n        \n    def forward(self, x, hidden):\n        x = self.embedding(x).view(1,1,-1)\n        x, hidden = self.gru(x, hidden)\n        return x, hidden\n    \n    def initHidden(self):\n        return torch.zeros(1,1,self.hidden_size, device=device )<\/pre>\n\n\n\n<p>\uac00\uc7a5 \uc911\uc694\ud55c AttndDecoder  \ubaa8\ub378 \ubd80\ubd84\uc785\ub2c8\ub2e4. \ud575\uc2ec\uc740 \uc774\uc804 \ub2e8\uacc4\uc758 Hidden \uac12\uc744 \uc774\uc6a9\ud558\ub294 \uac83\uc5d0 \ucd94\uac00\ub85c Encoder\uc5d0\uc11c \uc0dd\uc131\ub41c \ubaa8\ub4e0 Output \ub370\uc774\ud130\ub97c Decoder\uc758 \uc785\ub825 \ub370\uc774\ud130\ub85c \ud65c\uc6a9\ud55c\ub2e4\ub294 \uac83\uc785\ub2c8\ub2e4. 
\uc778\ucf54\ub354\uc5d0\uc11c \uc140\uc774 10\uac1c\ub77c\uba74 10\uac1c\uc758 \ud788\ub4e0 \ub370\uc774\ud130\uac00 \ub098\uc628\ub2e4\ub294 \uc758\ubbf8\uc774\uace0 \uc774 \ud788\ub4e0 \uac12 \ubaa8\ub450\ub97c \uc5b4\ud150\uc158 \ubaa8\ub378\uc5d0\uc11c \ud65c\uc6a9\ud55c\ub2e4\ub294 \uac83\uc785\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uc544\ub798 \uadf8\ub9bc\uc740 \ud30c\uc774\ud1a0\uce58 \uacf5\uc2dd \ud648\ud398\uc774\uc9c0\uc5d0 \uc788\ub294 Attention Decoder\uc5d0 \ub300\ud55c Diagram\uc785\ub2c8\ub2e4. \uc774 \uadf8\ub9bc\uc5d0\uc11c\uc640 \uac19\uc774 AttentionDecoder\uc5d0 \ub4e4\uc5b4\uac00\ub294 \uc785\ub825\uc740 prev_hidden, input, encoder_outputs 3\uac00\uc9c0\uc785\ub2c8\ub2e4. <\/p>\n\n\n\n<div class=\"wp-block-image\"><figure class=\"aligncenter size-large\"><img src=\"https:\/\/tutorials.pytorch.kr\/_images\/attention-decoder-network.png\" alt=\"\"\/><figcaption>https:\/\/tutorials.pytorch.kr\/intermediate\/seq2seq_translation_tutorial.html<\/figcaption><\/figure><\/div>\n\n\n\n<p>\uc774 \ubaa8\ub378\uc740 \ubcf5\uc7a1\ud574 \ubcf4\uc774\uc9c0\ub9cc \ud06c\uac8c 3\uac00\uc9c0 \ubd80\ubd84\uc73c\ub85c \ub098\ub220\ubcfc \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uccab\ubc88\uc9f8\ub294 \uc774\uc804 \ub2e8\uacc4\uc758 \ud788\ub4e0 \uac12\uacfc \ud604\uc7ac \ub2e8\uacc4\uc758 \uc785\ub825 \uac12\uc744 \ud1b5\ud574\uc11c attention_weight\ub97c \uad6c\ud558\ub294 \ubd80\ubd84\uc785\ub2c8\ub2e4. \uc774 \ubd80\ubd84\uc774 \uac00\uc7a5 \uc911\uc694\ud569\ub2c8\ub2e4.  \ub450\ubc88\uc9f8\ub294 \uc778\ucf54\ub354\uc758 \uac01 \uc140\uc5d0\uc11c \ub098\uc628 \ucd9c\ub825\uac12\uacfc attention_weight\ub97c \uacf1\ud574\uc90d\ub2c8\ub2e4. \uc138\ubc88\uc9f8\ub294 \uc774\ub807\uac8c \ub098\uc628 \uac12\uacfc \uc2e0\uaddc \uc785\ub825\uac12\uc744 \uacf1\ud574\uc90d\ub2c8\ub2e4. 
\uc774\ub54c \ub098\uc628 \uac12\uc774 \uc774\uc804 \ub2e8\uacc4\uc758 \ud788\ub4e0 \uac12\uacfc \ud568\uaed8 \uc785\ub825\ub418\uae30  GRU(RNN\uc758 \ud55c \uc885\ub958)\uc5d0 \uc785\ub825\ub418\uae30 \ub54c\ubb38\uc5d0 \ucd5c\uc885 Shape\uc740 [[[&#8230;]]] \ud615\ud0dc\uc758 \uac12\uc774 \ub429\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">class AttnDecoder(nn.Module):\n    def __init__(self, hidden_size, output_size, dropout=config.dropout, max_length=config.max_length):\n        super().__init__()\n        self.hidden_size = hidden_size\n        self.output_size = output_size\n        self.dropout = dropout\n        self.max_length = max_length\n        \n        self.embedding = nn.Embedding(self.output_size, self.hidden_size)\n        self.attn = nn.Linear(self.hidden_size*2, self.max_length)\n        self.attn_combine = nn.Linear(self.hidden_size*2, self.hidden_size)\n        self.dropout = nn.Dropout(self.dropout)\n        self.gru = nn.GRU(self.hidden_size, self.hidden_size)\n        self.out = nn.Linear(self.hidden_size, self.output_size)\n        \n    def forward(self, input, hidden, encoder_outputs):\n        embedded = self.embedding(input).view(1,1,-1)\n        embedded = self.dropout(embedded)\n        # Step1  Attention Weight \uc0dd\uc131\n        attn_weights = F.softmax(self.attn(torch.cat((embedded[0], hidden[0]), 1)), dim=1)\n       # Step2 \uc0dd\uc131\ub41c Attention Weight\uc640 \uc778\ucf54\ub354\uc5d0\uc11c \uc0dd\uc131\ud55c \ubaa8\ub4e0 Output \ub370\uc774\ud130\ub97c \ud569\uce5c \ud6c4 RNN\uc5d0 \ub9de\ub3c4\ub85d [[[...]]] \ud615\ud0dc\ub85c shape \ubcc0\uacbd\n        attn_applied = torch.bmm(attn_weights.unsqueeze(0), encoder_outputs.unsqueeze(0))\n        #Step3 \uc785\ub825\uac12\uacfc attn_applied\ub97c dim=1\ub85c 
\ud569\uce68\n        output = torch.cat((embedded[0], attn_applied[0]),1)\n        output = self.attn_combine(output).unsqueeze(0)\n        #Step4 output => [[[...]]] \ud615\ud0dc\uc758 \uac12\uc73c\ub85c reshape\ub41c output\uacfc \uc774\uc804\ub2e8\uacc4 \uc785\ub825\uac12\uc744 gru cell\uc5d0 \uc785\ub825\n        output = F.relu(output)\n        output, hidden = self.gru(output, hidden)\n        \n        output = F.log_softmax(self.out(output[0]), dim=1)\n        return output, hidden, attn_weights\n    \n    def initHidden(self):\n        return torch.zeros(1,1,self.hidden_size, device=device)        \n        <\/pre>\n\n\n\n<p>Colab\uc758 GPU\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud574\uc11c device \uc815\ubcf4\ub97c \uc124\uc815\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\nprint(device)<\/pre>\n\n\n\n<p>\ud559\uc2b5\uc744 \uc704\ud574 \uc778\ucf54\ub354, \ub514\ucf54\ub354\ub97c \uc815\uc758\ud574\uc90d\ub2c8\ub2e4. \ucd5c\uc801\ud654\ub97c \uc704\ud574\uc11c Adam Gradient Descent \uc54c\uace0\ub9ac\uc998\uc744 \uc0ac\uc6a9\ud569\ub2c8\ub2e4. Gradient Descent \uc54c\uace0\ub9ac\uc998\uc740 \uc5ec\ub7ec \uc885\ub958\uac00 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774\uc5d0 \ub300\ud55c \uc815\ubcf4\ub97c \uc54c\uace0 \uc2f6\uc73c\uc2e0 \ubd84\uc744 \uc704\ud574\uc11c \uc798 \uc815\ub9ac\ub41c \ub9c1\ud06c\ub97c \ucca8\ubd80\ud558\uaca0\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p><a href=\"http:\/\/shuuki4.github.io\/deep%20learning\/2016\/05\/20\/Gradient-Descent-Algorithm-Overview.html\">http:\/\/shuuki4.github.io\/deep%20learning\/2016\/05\/20\/Gradient-Descent-Algorithm-Overview.html<\/a><\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">enc_hidden_size = config.enc_hidden_size\ndec_hidden_size = enc_hidden_size\n\nencoder = Encoder(n_vocab, enc_hidden_size).to(device)\ndecoder = AttnDecoder(dec_hidden_size,n_vocab).to(device)\n\nencoder_optimizer = optim.Adam(encoder.parameters(), lr=0.001)\ndecoder_optimizer = optim.Adam(decoder.parameters(), lr=0.001)\n\ncriterion = nn.NLLLoss()\n\nprint(encoder)\nprint(decoder)<\/pre>\n\n\n\n<p>\uc778\ucf54\ub354\uc640 \ub514\ucf54\ub354 \uc815\ubcf4\ub97c \ucd9c\ub825\ud574\ubd05\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">Encoder(\n  (embedding): Embedding(258, 10)\n  (gru): GRU(10, 10)\n)\nAttnDecoder(\n  (embedding): Embedding(258, 10)\n  (attn): Linear(in_features=20, out_features=14, bias=True)\n  (attn_combine): Linear(in_features=20, out_features=10, bias=True)\n  (dropout): Dropout(p=0.1, inplace=False)\n  (gru): GRU(10, 10)\n  (out): Linear(in_features=10, out_features=258, bias=True)\n)<\/pre>\n\n\n\n<p>\uc785\ub825 \ub370\uc774\ud130\ub294 100\uac1c\uc529 batch \ud615\ud0dc\ub85c \ud559\uc2b5\ud569\ub2c8\ub2e4. 
\ud559\uc2b5\uc5d0 Batch\ub97c \uc801\uc6a9\ud558\ub294 \uc774\uc720\ub294 \uc774\uc804 \ube14\ub85c\uadf8\uc5d0\uc11c \uc124\uba85\ud55c \ubc14\uac00 \uc788\uc9c0\ub9cc \ub2e4\uc2dc \uac04\ub7b5\ud788 \uc124\uba85\ud558\uaca0\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\ud559\uc2b5 \ub370\uc774\ud130 \uc804\uccb4\ub97c \ud55c\ubc88\uc5d0 \ud559\uc2b5\ud558\uc9c0 \uc54a\uace0 \uc77c\uc815 \uac2f\uc218\uc758 \ubb36\uc74c\uc73c\ub85c \uc218\ud589\ud558\ub294 \uc774\uc720\ub294 \uccab\ubc88\uc9f8\ub294 \uc801\uc740 \uc591\uc758 \uba54\ubaa8\ub9ac\ub97c \uc0ac\uc6a9\ud558\uae30 \uc704\ud568\uc774\uba70 \ub610 \ud558\ub098\ub294 \ubaa8\ub378\uc758 \ud559\uc2b5\ud6a8\uacfc\ub97c \ub192\uc774\uae30 \uc704\ud568\uc785\ub2c8\ub2e4. \uccab\ubc88\uc9f8 \uc774\uc720\ub294 \uc27d\uac8c \uc774\ud574\ud560 \uc218 \uc788\uc9c0\ub9cc \ub450\ubc88\uc9f8 \uc774\uc720\ub294 \uc774\uc640 \uac19\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\uc608\ub97c \ub4e4\uc5b4\uc11c \ud55c \ud559\uc0dd\uc774 \uc2dc\ud5d8\ubb38\uc81c\ub97c 100\uac1c\ub97c \ud480\uc5b4 \ubcf4\ub294\ub370&#8230; 100\uac1c\uc758 \ubb38\uc81c\ub97c \ud55c\ubc88\uc5d0 \ubaa8\ub450 \ud480\uace0 \ud55c\ubc88\uc5d0 \ucc44\uc810\ud558\ub294 \uac83\ubcf4\ub2e4\ub294 100\uac1c\uc758 \ubb38\uc81c\ub97c 20\uac1c\ub97c \uba3c\uc800 \ud480\uc5b4\ubcf4\uace0 \ucc44\uc810\ud558\uace0 \ud2c0\ub9b0 \ubb38\uc81c\ub97c \ud655\uc778\ud55c \ud6c4\uc5d0 20\uac1c\ub97c \ud480\uba74 \ucc98\uc74c\uc5d0 \ud2c0\ub838\ub358 \ubb38\uc81c\ub97c \ub2e4\uc2dc \ud2c0\ub9ac\uc9c0 \uc54a\uc744 \uc218 \uc788\uc744 \uac81\ub2c8\ub2e4. \uc774\ub7f0 \ubc29\ubc95\uc73c\ub85c \ub0a8\uc740 \ubb38\uc81c\ub97c \ud480\uc5b4 \ubcf8\ub2e4\uba74 \ucc98\uc74c \ubcf4\ub2e4\ub294 \ud2c0\ub9b4 \ud655\ub960\uc774 \uc904\uc5b4\ub4e0\ub2e4\uace0 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4. 
\uc774\uc640 \uac19\uc740 \uc774\uc720\ub85c \ubc30\uce58 \uc791\uc5c5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<p>\ube44\uc2b7\ud55c \uac1c\ub150\uc774\uc9c0\ub9cc Epoch\uc758 \uacbd\uc6b0\ub294 20\uac1c\uc529 100\ubb38\uc81c\ub97c \ud480\uc5b4 \ubcf8 \ud6c4\uc5d0 \ub2e4\uc2dc 100\ubb38\uc81c\ub97c \ud480\uc5b4\ubcf4\ub294 \ud69f\uc218\uc785\ub2c8\ub2e4. 100\ubb38\uc81c\ub97c 1\ubc88 \ud478\ub294 \uac83\ubcf4\ub2e4\ub294 2,3\ubc88 \ud480\uc5b4\ubcf4\uba74 \uc880 \ub354 \ud559\uc2b5 \ud6a8\uacfc\uac00 \ub192\uc544\uc9c0\uaca0\uc8e0~<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">pairs = list(zip(source_words, target_words))\ndef getBatch(pairs, batch_size):\n    pairs_length = len(pairs)\n    for ndx in range(0, pairs_length, batch_size):\n        #print(ndx, min(ndx+batch_size, pairs_length))\n        yield pairs[ndx:min(ndx+batch_size, pairs_length)]<\/pre>\n\n\n\n<p>\uc774\uc81c \ud574\ub2f9 \ud559\uc2b5\uc744 \uc704\uc5d0 \uc124\uba85\ud55c\ub300\ub85c Batch\uc640 Epoch\uc744 \uc0ac\uc6a9\ud574\uc11c \ud559\uc2b5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4. 
\ubcf8 \uc608\uc81c\uc5d0\uc11c\ub294 100\uac1c\uc529 \ubb36\uc74c\uc73c\ub85c 1,000\ubc88 \ud559\uc2b5\uc744 \uc218\ud589\ud569\ub2c8\ub2e4.<br>(\uc88b\uc740 \uac1c\ubc1c\ud658\uacbd\uc744 \uac00\uc9c0\uc2e0 \ubd84\uc740 \ub354 \ub9ce\uc740 \ud559\uc2b5\uc744 \ud574\ubcf4\uc2dc\uae38 \ucd94\ucc9c\ud569\ub2c8\ub2e4.)<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">epochs = config.number_of_epochs\nprint(epochs)\n\nencoder.train()\ndecoder.train()\n\nfor epoch in range(epochs):\n    total_loss = 0\n    \n    for pair in getBatch(pairs,config.batch_size):\n        batch_loss = 0\n        \n        for si, ti in pair:\n            x = torch.tensor(si, dtype=torch.long).to(device)\n            y = torch.tensor(ti, dtype=torch.long).to(device)\n            #print(x.size(), y.size())\n            encoder_hidden = encoder.initHidden()\n            encoder_outputs = torch.zeros(config.max_length, encoder.hidden_size, device=device)\n            \n            for ei in range(config.seq_size):\n                #print(x[ei].size())\n                encoder_output, encoder_hidden = encoder(x[ei], encoder_hidden)\n                encoder_outputs[ei] = encoder_output[0,0] # \ub9c8\uc9c0\ub9c9 input_length\n                \n            decoder_input = torch.tensor([0], device=device)\n            decoder_hidden = encoder_hidden\n            loss = 0\n            \n            for di in range(config.seq_size):\n                #print(y[di])\n                decoder_output, decoder_hidden, decoder_attention = decoder(decoder_input, decoder_hidden, encoder_outputs)\n                loss += criterion(decoder_output, y[di].view(1))\n                #print(decoder_output.size(), y[di].view(1).size())\n                decoder_input = y[di] # Force Teaching\n            \n            
batch_loss += loss.item()\/config.seq_size\n            encoder_optimizer.zero_grad()\n            decoder_optimizer.zero_grad()\n            loss.backward()\n            encoder_optimizer.step()\n            decoder_optimizer.step()\n            \n        total_loss += batch_loss\/config.batch_size\n        #print('batch_loss {:.5f}'.format(batch_loss\/config.batch_size))\n    print('epoch {}, loss {:.10f}'.format(epoch, total_loss\/(len(pairs)\/\/config.batch_size)))\n    <\/pre>\n\n\n\n<p>\ud559\uc2b5\uc774 \uc885\ub8cc\ub418\uace0 \uc544\ub798\uc640 \uac19\uc774 2\uac1c\uc758 \ub2e8\uc5b4\ub97c \uc8fc\uace0 14\uac1c\uc758 \ub2e8\uc5b4\ub85c \uad6c\uc131\ub41c \ubb38\uc7a5\uc744 \uc0dd\uc131\ud574\ubd05\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">decode_word = []\nwords = [vocab_to_int['\ud0dc\ucd08\uc5d0'], vocab_to_int['\ub9d0\uc500\uc774']]\nx = torch.tensor(words, dtype=torch.long).view(-1,1).to(device)\n\nencoder_hidden = encoder.initHidden()\nencoder_outputs = torch.zeros(config.max_length, encoder.hidden_size, device=device)\n\nfor ei in range(x.size(0)):\n  encoder_output, encoder_hidden = encoder(x[ei], encoder_hidden)\n  encoder_outputs[ei] = encoder_output[0,0]\n\ndecoder_input = torch.tensor([0], device=device)\ndecoder_hidden = encoder_hidden\n\nfor di in range(config.seq_size):\n    decoder_output, decoder_hidden, decoder_attention = decoder(decoder_input, decoder_hidden, encoder_outputs)\n    _, ndx = decoder_output.data.topk(1)\n    decode_word.append(int_to_vocab[ndx.item()])\n\nprint(decode_word)<\/pre>\n\n\n\n<p>\ud559\uc2b5\uc774 \uc644\ub8cc\ub418\uba74 \uc544\ub798\uc640 \uac19\uc774 \ubaa8\ub378, \ud658\uacbd\uc124\uc815 \uc815\ubcf4, \ud14d\uc2a4\ud2b8 \ub370\uc774\ud130 \ub4f1\uc744 \uc800\uc7a5\ud574\uc11c \ub2e4\uc74c 
\uc608\uce21 \ubaa8\ub378\uc5d0\uc11c \ud65c\uc6a9\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">torch.save({\n  'encoder': encoder.state_dict(), 'decoder':decoder.state_dict(), 'config':config\n}, 'gdrive\/***\/model.john.210202')<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">import pickle\n\ndef save_obj(obj, name):\n  with open('gdrive\/***\/'+ name + '.pkl', 'wb') as f:\n    pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)\n\nsave_obj(int_text,'int_text')\nsave_obj(int_to_vocab,'int_to_vocab')\nsave_obj(vocab_to_int,'vocab_to_int')<\/pre>\n","protected":false},"excerpt":{"rendered":"<p>\ud574\ub2f9 \ubaa8\ub378\uc740 \uc774\uc804\uc5d0 \ud14c\uc2a4\ud2b8\ud588\ub358 Sequence2Sequence \ubaa8\ub378\uc5d0 Attention\uc744 \uc801\uc6a9\ud574\ubcf8 \uac83\uc785\ub2c8\ub2e4. \uc774\uc804 \ub0b4\uc6a9\uc774 \uad81\uae08\ud558\uc2e0 \ubd84\uc740 \uc544\ub798\uc758 \uac8c\uc2dc\ubb3c\uc744 \ud655\uc778\ud574\ubcf4\uc2dc\uae30 \ubc14\ub78d\ub2c8\ub2e4. \uc774\uc804 \ubaa8\ub378\uc5d0\uc11c\ub294 Sequence2Sequence\ub9cc \uc0ac\uc6a9\ud588\uace0 \uc601\uc5b4\ubb38\uc7a5\uc744 \ud65c\uc6a9\ud588\uc2b5\ub2c8\ub2e4. \uc774\ubc88\uc5d0\ub294 \uc5d0\ud134\uc158(Attention)\uc744 \uc801\uc6a9\ud558\uace0 \ud55c\uae00\ubb38\uc11c\ub97c \ud1b5\ud574\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \uc774\ubc88\uc5d0\ub3c4 \uad6c\uae00 Colab\uc758 GPU\ub97c \ud1b5\ud574\uc11c \ud14c\uc2a4\ud2b8\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. \uba3c\uc800 \ud14d\uc2a4\ud2b8 \ub370\uc774\ud130\ub97c \uc900\ube44\ud574\ubcf4\uaca0\uc2b5\ub2c8\ub2e4. 
\ud14d\uc2a4\ud2b8 \ub370\uc774\ud130\ub294 \uc694\ud55c\ubcf5\uc74c 1-2\uc7a5\uc758 \ud55c\uae00 \ud14d\uc2a4\ud2b8\ub97c \ud65c\uc6a9\ud588\uc2b5\ub2c8\ub2e4. \ub3d9\uc77c\ud55c \ub370\uc774\ud130\ub85c \ud14c\uc2a4\ud2b8\ub97c \ud574\ubcf4\uc2dc\uae30 \uc6d0\ud558\uc2dc\uba74 \uc544\ub798\uc758 \ub9c1\ud06c\uc5d0\uc11c \ud14d\uc2a4\ud2b8 \ub370\uc774\ud130\ub97c &hellip; <\/p>\n<p class=\"link-more\"><a href=\"http:\/\/blog.cedartrees.co.kr\/index.php\/2021\/02\/02\/sequence2sequence-attention\/\" class=\"more-link\">\ub354 \ubcf4\uae30<span class=\"screen-reader-text\"> &#8220;Seq2Seq \uc5b4\ud150\uc158 \ubb38\uc7a5\uc0dd\uc131&#8221;<\/span><\/a><\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":[],"categories":[40,76,14],"tags":[97,96,83,86,61,74,72,55],"_links":{"self":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/828"}],"collection":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/comments?post=828"}],"version-history":[{"count":14,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/828\/revisions"}],"predecessor-version":[{"id":866,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/828\/revisions\/866"}],"wp:attachment":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/media?parent=828"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/categories?post=828"},{"taxonomy":"post_tag","embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/
v2\/tags?post=828"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}