-
Notifications
You must be signed in to change notification settings - Fork 51
/
Copy pathenigma.py
executable file
·63 lines (53 loc) · 2.15 KB
/
enigma.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
# Learning the Enigma with Recurrent Neural Networks
# Sam Greydanus. January 2017. MIT License.
import copy
from crypto_enigma import *
import numpy as np
# enigma data class
class Enigma():
    """Batch generator for learning the Enigma cipher with RNNs.

    Produces (key + ciphertext, key + plaintext) pairs, one-hot encoded over
    the alphabet ``A`` plus a pad character ``'-'``. The machine itself is a
    fixed historical configuration (rotors I-II-III, reflector A, no plugs).

    NOTE(review): the ``.decode('unicode-escape')`` calls below are Python-2
    idioms (str -> unicode for crypto_enigma); under Python 3 they would
    raise AttributeError — confirm the intended interpreter version.
    """

    def __init__(self, A, tsteps, key_len):
        # A: alphabet string to draw keys/plaintext from
        # tsteps: total sequence length (key prefix + plaintext)
        # key_len: characters reserved at the front for the padded key
        self.A = A
        self.tsteps = tsteps
        self.key_len = key_len
        self.wordlen = tsteps - key_len  # plaintext length per example
        # start a fresh log of collisions with the held-out test key 'KEY'
        with open('collisions.txt', 'w') as f:
            f.write('Start logging collisions:\n')

    def change_tsteps(self, tsteps):
        """Resize the total sequence length and the derived word length."""
        # bugfix: previously only wordlen was updated, leaving self.tsteps
        # stale and inconsistent with the batches being generated
        self.tsteps = tsteps
        self.wordlen = tsteps - self.key_len

    def encode(self, key, text):
        """Encrypt ``text`` on an Enigma whose rotor windows show ``key``.

        key:  3-letter rotor window setting (Python-2 str, decoded below)
        text: plaintext string; returns the Enigma encoding of it
        """
        key = key.decode('unicode-escape')
        # fixed machine: reflector A, rotors I-II-III, no plugboard,
        # ring settings 02.14.08
        enigma = EnigmaConfig.config_enigma(rotor_names=u"A-I-II-III", window_letters=key, \
                                            plugs=u"", rings=u"02.14.08")
        return enigma.enigma_encoding(text)

    def rands(self, size):
        """Return a uniformly random string of length ``size`` over A."""
        ix = np.random.randint(len(self.A), size=size)
        return ''.join([self.A[i] for i in ix])

    def one_hot(self, s):
        """One-hot encode ``s`` over the alphabet A extended with pad '-'.

        Returns an array of shape (len(s), len(A) + 1). Characters absent
        from the alphabet hit str.find's -1 and so land in the last ('-')
        column — behavior preserved from the original.
        """
        _A = self.A + '-'  # str is immutable; no deepcopy needed
        ix = [_A.find(l) for l in s]
        z = np.zeros((len(s), len(_A)))
        z[range(len(s)), ix] = 1
        return z

    def next_batch(self, batch_size, verbose=False):
        """Build a batch of (X, y) one-hot pairs.

        X is one-hot(key + ciphertext), y is one-hot(key + plaintext), with
        the key right-padded to key_len with '-'. When ``verbose`` is True,
        also returns the raw ciphertext/key/plaintext strings.
        """
        batch_X = [] ; batch_y = [] ; batch_Xs = [] ; batch_ks = [] ; batch_ys = []
        for _ in range(batch_size):
            ys = self.rands(self.wordlen).decode('unicode-escape')
            ks = self.rands(3)
            # 'KEY' is reserved so we can check for overfitting later;
            # resample on collision and log the event
            while ks == 'KEY':
                ks_ = ks
                ks = self.rands(3)
                with open('collisions.txt', 'a') as f:
                    f.write('\twarning! key was "{}" but now is "{}"\n'.format(ks_, ks))
            Xs = self.encode(ks, ys)
            ks += '-'*(self.key_len - len(ks))  # pad key to fixed width
            if verbose: print( Xs, ks, ys )
            X = self.one_hot(ks + Xs)
            y = self.one_hot(ks + ys)
            batch_X.append(X)
            batch_y.append(y)
            batch_Xs.append(Xs) ; batch_ys.append(ys) ; batch_ks.append(ks)
        if not verbose:
            return (batch_X,batch_y)
        else:
            return (batch_X, batch_y, batch_Xs, batch_ks, batch_ys)