{"id":153,"date":"2020-06-06T23:17:34","date_gmt":"2020-06-06T14:17:34","guid":{"rendered":"http:\/\/cedartrees.co.kr\/?p=153"},"modified":"2021-04-03T19:21:25","modified_gmt":"2021-04-03T10:21:25","slug":"judegement-prediction","status":"publish","type":"post","link":"http:\/\/blog.cedartrees.co.kr\/index.php\/2020\/06\/06\/judegement-prediction\/","title":{"rendered":"\uc2ec\ud310 \ud310\uacb0 \uacb0\uacfc \uc608\uce21 \ud574\ubcf4\uae30"},"content":{"rendered":"\n<p class=\"has-medium-font-size\">Step. 1 \ub370\uc774\ud130 \uc900\ube44<\/p>\n\n\n\n<p>\uc2ec\ud310\ubb38 \ub370\uc774\ud130\ub294 2018\ub144\ub3c4 \uc2ec\ud310\ubb38 \uc911 \uacb0\uc815\uc694\uc9c0 \ub370\uc774\ud130\ub97c \ud65c\uc6a9\ud588\uc2b5\ub2c8\ub2e4. \ud574\ub2f9 \ub370\uc774\ud130\ub294 \uc870\uc138\uc2ec\ud310\uc6d0 \ud648\ud398\uc774\uc9c0\uc5d0 \uacf5\uac1c \ub418\uc5b4 \uc788\uc5b4 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">def read_data(filename):\n    with io.open(filename, 'r',encoding='utf-8') as f:\n        data = [line for line in f.read().splitlines()]\n        data = data[1:]\n    return data \n\ntrain_data = read_data('.\/data\/2018_simpan_newgroup.csv')<\/pre>\n\n\n\n<p>\ud574\ub2f9 \ud30c\uc77c\uc740 &#8220;\ubc88\ud638|\uc2ec\ud310\uacb0\uc815\uc694\uc9c0|\uc720\ud615&#8221;\uc73c\ub85c \uad6c\ubd84\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. 
train_data\ub97c \uc77d\uc5b4\ubcf4\uba74 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">train_data[0:3]\n['0|\uc774 \uac74 \uc2ec\ud310\uccad\uad6c\ub294 \ucc98\ubd84\uccad\uc758 \uc9c1\uad8c\uacbd\uc815\uc73c\ub85c \uc778\ud558\uc5ec \uc2ec\ub9ac\uc77c \ud604\uc7ac \uccad\uad6c\uc758 \ub300\uc0c1\uc774 \ub418\ub294 \ucc98\ubd84\uc774 \uc874\uc7ac\ud558\uc9c0 \uc544\ub2c8\ud558\ubbc0\ub85c \ubd80\uc801\ubc95\ud55c \uccad\uad6c\ub85c \ud310\ub2e8\ub428|0',\n '1|\ucc98\ubd84\uccad\uc758 2016\ub144 \uc81c2\uae30 \ubd80\uac00\uac00\uce58\uc138 \uacbd\uc815\uacb0\uc815 \ud6c4 \uccad\uad6c\uc778\uc774 \uc2ec\ud310\uccad\uad6c\ub97c \uc81c\uae30\ud558\uc5ec 2017.10.24. \uc774\ubbf8 \uae30\uac01\uacb0\uc815\uc744 \ubc1b\uc558\uc73c\ubbc0\ub85c \uc774 \uac74 \uc2ec\ud310\uccad\uad6c\ub294 \ub3d9\uc77c\ud55c \ucc98\ubd84\uc5d0 \ub300\ud558\uc5ec \uc911\ubcf5\ud558\uc5ec \uc81c\uae30\ub41c \uc810, \uccad\uad6c\uc778\uc740 \ub2f9\ucd08 \uc2ec\ud310\uccad\uad6c\uc640 \ub3d9\uc77c\ud55c \ub0b4\uc6a9\uc758 \uacbd\uc815\uccad\uad6c\ub97c \ud558\uc600\uace0, \uadf8\uc5d0 \ub300\ud55c \ucc98\ubd84\uccad\uc758 \uac70\ubd80\ud1b5\uc9c0\ub294 \ubbfc\uc6d0\ud68c\uc2e0\uc5d0 \ubd88\uacfc\ud55c \uac83\uc774\uc5b4\uc11c \uc2ec\ud310\uccad\uad6c\uc758 \ub300\uc0c1\uc774 \ub418\ub294 \ucc98\ubd84\uc73c\ub85c \ubcfc \uc218 \uc5c6\ub294 \uc810 \ub4f1\uc5d0 \ube44\ucd94\uc5b4 \uc774 \uac74 \uc2ec\ud310\uccad\uad6c\ub294 \ubd80\uc801\ubc95\ud55c \uccad\uad6c\ub85c \ud310\ub2e8\ub428|0',\n '2|\ucc98\ubd84\uccad\uc774 \uccad\uad6c\uc8fc\uc7a5\uc744 \ubc1b\uc544\ub4e4\uc5ec \uc774 \uac74 \uacfc\uc138\ucc98\ubd84\uc744 \uc9c1\uad8c\uc73c\ub85c \uac10\uc561\uacbd\uc815\ud558\uc600\uc73c\ubbc0\ub85c \uc774 \uac74 \uc2ec\ud310\uccad\uad6c\ub294 
\uc2ec\ub9ac\uc77c \ud604\uc7ac \ubd88\ubcf5 \ub300\uc0c1\uc774 \ub418\ub294 \ucc98\ubd84\uc774 \uc874\uc7ac\ud558\uc9c0 \uc544\ub2c8\ud558\uc5ec \ubd80\uc801\ubc95\ud55c \uccad\uad6c\uc5d0 \ud574\ub2f9\ud558\ub294 \uac83\uc73c\ub85c \ud310\ub2e8\ub428|0']<\/pre>\n\n\n\n<p>\ub2e4\uc74c\uc73c\ub85c \ubc1b\uc740 \ub370\uc774\ud130\ub97c konlpy.Okt()\ub97c \uc0ac\uc6a9\ud558\uc5ec \ud615\ud0dc\uc18c\ub97c \ubd84\ub9ac\ud569\ub2c8\ub2e4. \ubd84\ub9ac\ud55c \ub370\uc774\ud130\ub97c \ud1a0\ud070\ud654 \ud558\uc5ec \ub370\uc774\ud130 \uc14b\uc744 \ub9cc\ub4ed\ub2c8\ub2e4. \ub370\uc774\ud130 \uc14b\uc744 \ub9cc\ub4dc\ub294 \uacfc\uc815\uc5d0\uc11c \uc2ec\ud310\ubb38\uc758 \uac1c\uc778\uc815\ubcf4\ub97c \uc775\uba85\ucc98\ub9ac\ud558\uae30 \uc704\ud574\uc11c \uc0ac\uc6a9\ud588\ub358 \ud2b9\uc218\uae30\ud638\ub4e4\uc744 \uc81c\uac70\ud569\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">train_tokens[0:3]\narray([[list(['\uc774', '\uac74', '\uc2ec\ud310', '\uccad\uad6c', '\ub294', '\ucc98\ubd84', '\uccad', '\uc758', '\uc9c1\uad8c', '\uacbd\uc815', '\uc73c\ub85c', '\uc778\ud558\ub2e4', '\uc2ec\ub9ac', '\uc77c', '\ud604\uc7ac', '\uccad\uad6c', '\uc758', '\ub300\uc0c1', '\uc774', '\ub418\ub2e4', '\ucc98\ubd84', '\uc774', '\uc874\uc7ac', '\ud558\ub2e4', '\uc544\ub2c8\ub2e4', '\ubd80', '\uc801\ubc95\ud558\ub2e4', '\uccad\uad6c', '\ub85c', '\ud310\ub2e8', '\ub418\ub2e4']),\n        0],\n       [list(['\ucc98\ubd84', '\uccad', '\uc758', '2016\ub144', '\uc81c', '2', '\uae30', '\ubd80\uac00\uac00\uce58\uc138', '\uacbd\uc815', '\uacb0\uc815', '\ud6c4', '\uccad\uad6c\uc778', '\uc774', '\uc2ec\ud310', '\uccad\uad6c', '\ub97c', '\uc81c\uae30', '\ud558\ub2e4', '2017', '10', '24', '\uc774\ubbf8', '\uae30', '\uac01', '\uacb0\uc815', '\uc744', '\ubc1b\ub2e4', '\uc774', '\uac74', 
'\uc2ec\ud310', '\uccad\uad6c', '\ub294', '\ub3d9\uc77c\ud558\ub2e4', '\ucc98\ubd84', '\uc5d0', '\ub300\ud558', '\uc5ec', '\uc911\ubcf5', '\ud558\ub2e4', '\uc81c\uae30', '\ub418\ub2e4', '\uc810', ',', '\uccad\uad6c\uc778', '\uc740', '\ub2f9\ucd08', '\uc2ec\ud310', '\uccad\uad6c', '\uc640', '\ub3d9\uc77c\ud558\ub2e4', '\ub0b4\uc6a9', '\uc758', '\uacbd\uc815', '\uccad\uad6c', '\ub97c', '\ud558\ub2e4', ',', '\uadf8', '\uc5d0', '\ub300\ud55c', '\ucc98\ubd84', '\uccad', '\uc758', '\uac70\ubd80', '\ud1b5\uc9c0', '\ub294', '\ubbfc\uc6d0', '\ud68c\uc2e0', '\uc5d0', '\ubd88\uacfc\ud558\ub2e4', '\uac83', '\uc774\uc5b4\uc11c', '\uc2ec\ud310', '\uccad\uad6c', '\uc758', '\ub300\uc0c1', '\uc774', '\ub418\ub2e4', '\ucc98\ubd84', '\uc73c\ub85c', '\ubcfc', '\uc218', '\uc5c6\ub2e4', '\uc810', '\ub4f1', '\uc5d0', '\ube44\ucd94\ub2e4', '\uc774', '\uac74', '\uc2ec\ud310', '\uccad\uad6c', '\ub294', '\ubd80', '\uc801\ubc95\ud558\ub2e4', '\uccad\uad6c', '\ub85c', '\ud310\ub2e8', '\ub418\ub2e4']),\n        0],\n       [list(['\ucc98\ubd84', '\uccad', '\uc774', '\uccad\uad6c', '\uc8fc\uc7a5', '\uc744', '\ubc1b\uc544\ub4e4\uc774\ub2e4', '\uc774', '\uac74', '\uacfc\uc138', '\ucc98\ubd84', '\uc744', '\uc9c1\uad8c', '\uc73c\ub85c', '\uac10\uc561', '\uacbd\uc815', '\ud558\ub2e4', '\uc774', '\uac74', '\uc2ec\ud310', '\uccad\uad6c', '\ub294', '\uc2ec\ub9ac', '\uc77c', '\ud604\uc7ac', '\ubd88\ubcf5', '\ub300\uc0c1', '\uc774', '\ub418\ub2e4', '\ucc98\ubd84', '\uc774', '\uc874\uc7ac', '\ud558\ub2e4', '\uc544\ub2c8\ub2e4', '\ubd80', '\uc801\ubc95\ud558\ub2e4', '\uccad\uad6c', '\uc5d0', '\ud574\ub2f9', '\ud558\ub2e4', '\uac83', '\uc73c\ub85c', '\ud310\ub2e8', '\ub418\ub2e4']),\n        0]], dtype=object)<\/pre>\n\n\n\n<p>\ud574\ub2f9 \uc791\uc5c5\uc744 \uac70\uce58\uba74 \uc704\uc640 \uac19\uc740 \ud615\ud0dc\ub85c \ubcc0\uacbd\ub429\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" 
data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># supervised learning\uc744 \uc704\ud55c text, label \uc0dd\uc131\ntrain_X = train_tokens[:,0]\ntrain_Y = train_tokens[:,1]<\/pre>\n\n\n\n<p>\ud310\uacb0\uc740 \uac01\ud558, \uae30\uac01, \ucde8\uc18c,\uacbd\uc815,\uc7ac\uc870\uc0ac\ub85c \ub098\ub20c \uc218 \uc788\uc2b5\ub2c8\ub2e4. \uc774\uc911 \ucde8\uc18c, \uacbd\uc815, \uc7ac\uc870\uc0ac\ub294 \uc778\uc6a9\uc73c\ub85c \ub2e4\uc2dc \ubd84\ub958\ud560 \uc218 \uc788\uc5b4 \ucd5c\uc885 \ub370\uc774\ud130\ub294 \uac01\ud558(0), \uae30\uac01(1), \uc778\uc6a9(2)\uc758 \ud615\ud0dc\ub85c label\uc744 \ub9cc\ub4e4 \uc218 \uc788\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">train_Y[0:10]\narray([0, 0, 0, 1, 1, 0, 1, 1, 0, 1], dtype=object)<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">W2V = Word2Vec.Word2Vec()\ntrain_Y_ = W2V.One_hot(train_Y)  ## Convert to One-hot\ntrain_X_ = W2V.Convert2Vec(\".\/model\/FastText.model\",train_X)<\/pre>\n\n\n\n<p>train_x \ub370\uc774\ud130\ub294 word-embedding \ud615\ud0dc\ub85c \ub9cc\ub4e4\uc5b4\uc90d\ub2c8\ub2e4.  
\uc774\ub294 one-hot-encoding \ud615\ud0dc\uc758 \ub370\uc774\ud130\ubcf4\ub2e4 word- embedding\uc774 \ud559\uc2b5\uc5d0 \ub354 \uc720\ub9ac\ud558\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4.<br>\uc790\uc138\ud55c \uc124\uba85\ub294  word-embedding \uc790\ub8cc\ub97c \ubcf4\uc2dc\uae38 \uad8c\ud574\ub4dc\ub9bd\ub2c8\ub2e4.<\/p>\n\n\n\n<p>W2V.Convert2Vec\uc740 train_x\uc758 \uac12\uc744 \uc0ac\uc804\uc5d0 \ud6c8\ub828\ud55c FastText \ubaa8\ub378\uc758 \ubca1\ud130\ub85c \ubcc0\ud658\ud574\uc8fc\ub294 \ud568\uc218\uc785\ub2c8\ub2e4. FastText\ub294 \ubcf8 \ube14\ub85c\uadf8\uc5d0 \uac04\ub2e8\ud788 \uae30\uc220\ub178\ud2b8\uc5d0 \uac04\ub2e8\ud55c Test Code\ub97c \uc62c\ub824\ub1a8\uc2b5\ub2c8\ub2e4. <\/p>\n\n\n\n<p class=\"has-medium-font-size\">Step. 2 \ubaa8\ub378 \uc900\ube44<\/p>\n\n\n\n<p>\ub2e4\uc74c\uacfc \uac19\uc740 \ubaa8\ub378\uc744 \uc900\ube44\ud569\ub2c8\ub2e4.<br>\uc608\uce21\uc744 \uc704\ud574 BiLSTM \ubaa8\ub378\uc744 \uc0ac\uc6a9\ud588\uc2b5\ub2c8\ub2e4. BiLSTM\uc740 RNN \ubaa8\ub378\uc758 \ud558\ub098\ub85c \ub192\uc740 \uc131\ub2a5\uc744 \ubc1c\ud718\ud558\ub294 \ubaa8\ub378\ub85c \uc54c\ub824\uc838\uc788\uc2b5\ub2c8\ub2e4.<br><br>RNN \ubaa8\ub378\uc758 \ud2b9\uc131\uc0c1 [batch_size, sequence_length, output_size]\uc758 \ud615\ud0dc\uc758 \uc785\ub825\uc774 \ud544\uc694\ud569\ub2c8\ub2e4. batch_size\ub294 \ubb38\uc7a5\uc758 \ud06c\uae30, sequence_length\ub294 \ubb38\uc7a5\uc758 \uae38\uc774, output_size\ub294 \ubb38\uc7a5\uc758 vector size\uc785\ub2c8\ub2e4. 
<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">Batch_size = 32\nTotal_size = len(train_X)\nVector_size = 300\ntrain_seq_length = [len(x) for x in train_X] # flexible input length\nMaxseq_length = max(train_seq_length) ## 95\nlearning_rate = 0.001\nlstm_units = 128\nnum_class = 3\ntraining_epochs = 100<\/pre>\n\n\n\n<p>\uc544\ub798\uc640 \uac19\uc774 \ubaa8\ub378\uc744 \uc120\uc5b8\ud569\ub2c8\ub2e4. BiLSTM\uc740 LSTM \ubaa8\ub378\uc744 \ub450\uac1c\ub97c \uc0ac\uc6a9\ud558\uc5ec forward, backward \ubc29\ud5a5\uc73c\ub85c \ud559\uc2b5\ud568\uc73c\ub85c \uad6c\ud604\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<p>keras\ub098 pytorch\ub4f1\uc744 \uc0ac\uc6a9\ud558\uba74 \ub354 \uac04\ub2e8\ud788 \uad6c\ud604\ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\"># ?, cell count, input dimension(one-hot)\nX = tf.placeholder(tf.float32, shape = [None, Maxseq_length, Vector_size], name = 'X')\n\n# ?, output(true,false)\nY = tf.placeholder(tf.float32, shape = [None, num_class], name = 'Y')\n\nseq_len = tf.placeholder(tf.int32, shape = [None])\nkeep_prob = tf.placeholder(tf.float32, shape = None)\n\nwith tf.compat.v1.variable_scope('forward', reuse = tf.compat.v1.AUTO_REUSE):\n    # hidden_size : 128\n    lstm_fw_cell = tf.nn.rnn_cell.LSTMCell(lstm_units, forget_bias=1.0, state_is_tuple=True)\n    lstm_fw_cell = tf.contrib.rnn.DropoutWrapper(lstm_fw_cell, output_keep_prob = keep_prob)\n\nwith tf.compat.v1.variable_scope('backward', reuse = tf.compat.v1.AUTO_REUSE):\n    lstm_bw_cell = tf.nn.rnn_cell.LSTMCell(lstm_units, forget_bias=1.0, 
state_is_tuple=True)\n    lstm_bw_cell = tf.contrib.rnn.DropoutWrapper(lstm_bw_cell, output_keep_prob = keep_prob)\n\nwith tf.compat.v1.variable_scope('Weights', reuse = tf.compat.v1.AUTO_REUSE):\n    W = tf.get_variable(name=\"W\", shape=[2 * lstm_units, num_class], dtype=tf.float32, initializer = tf.contrib.layers.xavier_initializer())\n    b = tf.get_variable(name=\"b\", shape=[num_class], dtype=tf.float32, initializer=tf.zeros_initializer())\n<\/pre>\n\n\n\n<p>\ub450 \ubaa8\ub378\uc744 \ud569\ud558\uc5ec \ud558\ub098\uc758 \ud559\uc2b5 \ubaa8\ub378\uc744 \uc644\uc131\ud569\ub2c8\ub2e4. <\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">with tf.variable_scope(\"loss\", reuse = tf.AUTO_REUSE):\n    (output_fw, output_bw), states = tf.nn.bidirectional_dynamic_rnn(lstm_fw_cell, lstm_bw_cell, dtype=tf.float32, inputs = X, sequence_length = seq_len)\n    ## concat fw, bw final states\n    outputs = tf.concat([states[0][1], states[1][1]], axis=1) #bi-lstm fully connected layer\n    logits = tf.matmul(outputs, W) + b # hypothesis\n    \n    with tf.compat.v1.variable_scope(\"loss\"):\n        loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits = logits , labels = Y)) # Softmax loss\n        optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(loss) # Adam Optimizer<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">prediction = tf.nn.softmax(logits)\ncorrect_pred = tf.equal(tf.argmax(prediction, 1), tf.argmax(Y, 1))\naccuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n\ntotal_batch = int(len(train_X) \/ Batch_size)\n\ntrain_acc = 
[]\ntrain_loss = []\nhistory_loss = []\n\nprint(\"Start training!\")<\/pre>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">with tf.Session(config = config) as sess:\n    start_time = time.time()\n    sess.run(tf.global_variables_initializer())\n    \n    for epoch in range(training_epochs):\n\n        avg_acc, avg_loss = 0. , 0.\n        mask = np.random.permutation(len(train_X_)) #shuffle all row\n        train_X_ = train_X_[mask]\n        train_Y_ = train_Y_[mask]\n        \n        for step in range(total_batch):\n            train_batch_X = train_X_[step*Batch_size : step*Batch_size+Batch_size] # 32 batch size\n            train_batch_Y = train_Y_[step*Batch_size : step*Batch_size+Batch_size]\n            batch_seq_length = train_seq_length[step*Batch_size : step*Batch_size+Batch_size]\n            \n            train_batch_X = W2V.Zero_padding(train_batch_X, Batch_size, Maxseq_length, Vector_size) # 32, 255, 300 -&amp;gt; fill zero with empty rows words. 
max row words 255\n            \n            sess.run(optimizer, feed_dict={X: train_batch_X, Y: train_batch_Y, seq_len: batch_seq_length})\n            \n            # Compute average loss\n            loss_ = sess.run(loss, feed_dict={X: train_batch_X, Y: train_batch_Y, seq_len: batch_seq_length, keep_prob : 0.75})\n            avg_loss += loss_ \/ total_batch\n            \n            acc_ = sess.run(accuracy , feed_dict={X: train_batch_X, Y: train_batch_Y, seq_len: batch_seq_length, keep_prob : 0.75})\n            avg_acc += acc_ \/ total_batch\n            \n            history_loss.append(loss_)\n            print(\"epoch :{}-{}  {:02d} step : {:04d} loss = {:.6f} accuracy= {:.6f}\".format(step*Batch_size, step*Batch_size+Batch_size, epoch+1, step+1, loss_, acc_))\n   \n        print(\"&amp;lt;Train&amp;gt; Loss = {:.6f} Accuracy = {:.6f}\".format(avg_loss, avg_acc))\n      \n        train_loss.append(avg_loss)\n        train_acc.append(avg_acc)\n\n\n    save_path = saver.save(sess, modelName)\n    \n    print ('save_path',save_path)<\/pre>\n\n\n\n<p>\ud559\uc2b5\uc774 \uc885\ub8cc\uac00 \ub418\uace0 \uc544\ub798\uc640 \uac19\uc740 \ucf54\ub4dc\ub97c \uc785\ub825\ud558\uc5ec \ud559\uc2b5\uc774 \uc798 \ub410\ub294\uc9c0 \ud655\uc778\ud574\ubd05\ub2c8\ub2e4.<\/p>\n\n\n\n<pre class=\"EnlighterJSRAW\" data-enlighter-language=\"generic\" data-enlighter-theme=\"\" data-enlighter-highlight=\"\" data-enlighter-linenumbers=\"\" data-enlighter-lineoffset=\"\" data-enlighter-title=\"\" data-enlighter-group=\"\">import matplotlib.pyplot as plt\nplt.figure(figsize=(10,5))\nplt.plot(history_loss)<\/pre>\n","protected":false},"excerpt":{"rendered":"<p>Step. 1 \ub370\uc774\ud130 \uc900\ube44 \uc2ec\ud310\ubb38 \ub370\uc774\ud130\ub294 2018\ub144\ub3c4 \uc2ec\ud310\ubb38 \uc911 \uacb0\uc815\uc694\uc9c0 \ub370\uc774\ud130\ub97c \ud65c\uc6a9\ud588\uc2b5\ub2c8\ub2e4. 
\ud574\ub2f9 \ub370\uc774\ud130\ub294 \uc870\uc138\uc2ec\ud310\uc6d0 \ud648\ud398\uc774\uc9c0\uc5d0 \uacf5\uac1c \ub418\uc5b4 \uc788\uc5b4 \ub2e4\uc6b4\ub85c\ub4dc \ubc1b\uc744 \uc218 \uc788\uc2b5\ub2c8\ub2e4. \ud574\ub2f9 \ud30c\uc77c\uc740 &#8220;\ubc88\ud638|\uc2ec\ud310\uacb0\uc815\uc694\uc9c0|\uc720\ud615&#8221;\uc73c\ub85c \uad6c\ubd84\ub418\uc5b4 \uc788\uc2b5\ub2c8\ub2e4. train_data\ub97c \uc77d\uc5b4\ubcf4\uba74 \uc544\ub798\uc640 \uac19\uc2b5\ub2c8\ub2e4. \ub2e4\uc74c\uc73c\ub85c \ubc1b\uc740 \ub370\uc774\ud130\ub97c konlpy.Okt()\ub97c \uc0ac\uc6a9\ud558\uc5ec \ud615\ud0dc\uc18c\ub97c \ubd84\ub9ac\ud569\ub2c8\ub2e4. \ubd84\ub9ac\ud55c \ub370\uc774\ud130\ub97c \ud1a0\ud070\ud654 \ud558\uc5ec \ub370\uc774\ud130 \uc14b\uc744 \ub9cc\ub4ed\ub2c8\ub2e4. \ub370\uc774\ud130 \uc14b\uc744 \ub9cc\ub4dc\ub294 \uacfc\uc815\uc5d0\uc11c \uc2ec\ud310\ubb38\uc758 \uac1c\uc778\uc815\ubcf4\ub97c \uc775\uba85\ucc98\ub9ac\ud558\uae30 \uc704\ud574\uc11c \uc0ac\uc6a9\ud588\ub358 &hellip; <\/p>\n<p class=\"link-more\"><a href=\"http:\/\/blog.cedartrees.co.kr\/index.php\/2020\/06\/06\/judegement-prediction\/\" class=\"more-link\">\ub354 \ubcf4\uae30<span class=\"screen-reader-text\"> &#8220;\uc2ec\ud310 \ud310\uacb0 \uacb0\uacfc \uc608\uce21 
\ud574\ubcf4\uae30&#8221;<\/span><\/a><\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":[],"categories":[16],"tags":[61,56,130],"_links":{"self":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/153"}],"collection":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/comments?post=153"}],"version-history":[{"count":3,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/153\/revisions"}],"predecessor-version":[{"id":940,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/posts\/153\/revisions\/940"}],"wp:attachment":[{"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/media?parent=153"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/categories?post=153"},{"taxonomy":"post_tag","embeddable":true,"href":"http:\/\/blog.cedartrees.co.kr\/index.php\/wp-json\/wp\/v2\/tags?post=153"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}