Skip to content

Commit

Permalink
remove lr schedulers
Browse files Browse the repository at this point in the history
  • Loading branch information
hyunwoongko committed Dec 18, 2019
1 parent ef96e13 commit fae3e2f
Show file tree
Hide file tree
Showing 8 changed files with 52 additions and 19 deletions.
32 changes: 17 additions & 15 deletions .idea/workspace.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Binary file modified __pycache__/conf.cpython-36.pyc
Binary file not shown.
Binary file modified __pycache__/train.cpython-36.pyc
Binary file not shown.
4 changes: 1 addition & 3 deletions conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,8 @@
drop_prob = 0.1

# optimizer parameter setting
warmup = 5
factor = 0.8
init_lr = 1e-4
weight_decay = 5e-4
epoch = 100
epoch = 300
clip = 1
inf = float('inf')
32 changes: 32 additions & 0 deletions graph.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
"""
@author : Hyunwoong
@when : 2019-12-18
@homepage : https://github.com/gusdnd852
"""

import matplotlib.pyplot as plt
import re


def read(name):
    """Read a list of loss values from a result file.

    The file is expected to contain a Python-list-style string such as
    ``[0.5, 0.25]`` (as written by the training loop — TODO confirm writer
    format). Brackets are stripped and each comma-separated value is
    parsed as a float and scaled by 100.

    :param name: path to the result file
    :return: list of floats, each original value multiplied by 100.0
    :raises FileNotFoundError: if *name* does not exist
    :raises ValueError: if any comma-separated token is not a float
    """
    # Use a context manager so the handle is closed even if parsing raises;
    # the original open()/close() pair leaked the descriptor on error.
    with open(name, 'r') as f:
        text = f.read()

    # Strip the literal list brackets; plain str.replace suffices here,
    # no regex needed for fixed single characters.
    text = text.replace('[', '').replace(']', '')

    return [float(v) * 100.0 for v in text.split(',')]


# Load the recorded per-epoch losses (values are scaled by 100 in read())
# and render the train vs. validation curves on a single figure.
train = read('./result/train.txt')
test = read('./result/test.txt')

# Plot both series with the same call shape: (values, color code, legend label).
for series, color, label in ((train, 'r', 'train'), (test, 'b', 'validation')):
    plt.plot(series, color, label=label)

# Axis labels, title, grid on both axes, legend placement, then display.
plt.xlabel('epoch')
plt.ylabel('loss')
plt.title('training result')
plt.grid(True, which='both', axis='both')
plt.legend(loc='lower right')
plt.show()
1 change: 1 addition & 0 deletions result/test.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[5.107369244098663]
1 change: 1 addition & 0 deletions result/train.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[5.809054460819597]
1 change: 0 additions & 1 deletion train.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@ def initialize_weights(m):
print(f'The model has {count_parameters(model):,} trainable parameters')
model.apply(initialize_weights)
optimizer = Adam(model.parameters(), lr=init_lr, weight_decay=weight_decay)
optimizer = LRScheduler(d_model=d_model, factor=factor, warmup=warmup, optimizer=optimizer)
criterion = nn.CrossEntropyLoss(ignore_index=src_pad_idx)


Expand Down

0 comments on commit fae3e2f

Please sign in to comment.