# Learner.py
import numpy as np
import tensorflow as tf
from tensorflow import keras
def Decider(trainingDataInputs, trainingLabelsOutputs, testDataInputs, testLabelsOutputs):
    # trainingDataInputs / testDataInputs: arrays of 5 classifier ratings per article
    # trainingLabelsOutputs / testLabelsOutputs: binary fake/real labels for each article
    inputs = keras.Input(shape=(5,))  # Returns a placeholder tensor
    # A layer instance is callable on a tensor, and returns a tensor.
    x = keras.layers.Dense(64, activation='sigmoid')(inputs)
    y = keras.layers.Dense(64, activation='relu')(x)
    z = keras.layers.Dense(64, activation='relu')(y)
    a = keras.layers.Dense(64, activation='relu')(z)
    # A single sigmoid unit gives a fake/real probability (softmax over one unit always outputs 1).
    predictions = keras.layers.Dense(1, activation='sigmoid')(a)
    # Instantiate the model given inputs and outputs.
    model = keras.Model(inputs=inputs, outputs=predictions)
    # The compile step specifies the training configuration.
    model.compile(optimizer=keras.optimizers.RMSprop(learning_rate=0.001),
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    # Train for 500 epochs with a batch size of 5.
    model.fit(trainingDataInputs, trainingLabelsOutputs, batch_size=5, epochs=500,
              validation_data=(testDataInputs, testLabelsOutputs))
    model.save('./FIND-model.h5')
    # Given ratings from the classifier, use Machine Learning Magic to determine if it's fake news.
    # An earlier Sequential version of the same model, kept for reference:
    # model = keras.models.Sequential()
    # # Adds a densely-connected layer with 64 units to the model:
    # # model.add(keras.layers.Dense(64, kernel_regularizer=keras.regularizers.l1(0.01)))
    # # Add another:
    # model.add(keras.layers.Dense(64, input_shape=(5,), activation='sigmoid'))
    # model.add(keras.layers.Dense(64, activation='sigmoid'))
    # model.add(keras.layers.Dense(64, activation='sigmoid'))
    # model.add(keras.layers.Dense(64, activation='relu'))
    # model.add(keras.layers.Dense(64, activation='relu'))
    # # Add a sigmoid layer with 1 output unit:
    # model.add(keras.layers.Dense(1, activation='sigmoid'))
    # model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001),
    #               loss='binary_crossentropy', metrics=['accuracy'])
    # data = np.random.random((1000, 32))
    # labels = np.random.random((1000, 1))
    # val_data = np.random.random((100, 32))
    # val_labels = np.random.random((100, 1))
    # callbacks = [
    #     # Interrupt training if `val_loss` stops improving for over 5 epochs
    #     keras.callbacks.EarlyStopping(patience=5, monitor='val_loss'),
    #     # Write TensorBoard logs to the `./logs` directory
    #     keras.callbacks.TensorBoard(log_dir='./logs')
    # ]
    # model.fit(data, labels, epochs=10, callbacks=callbacks, batch_size=32,
    #           validation_data=(val_data, val_labels))
    # model.save('./FIND-model.h5')
    # model = keras.models.load_model('FIND-model.h5')
    # ^^^ to load a saved model (see the sketch just below this function)
    return 0
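
# A minimal sketch of using the saved model, as hinted at by the load_model comment
# above. The helper name `classify_ratings`, the 0.5 decision threshold, and the toy
# ratings vector are assumptions for illustration, not part of the original script.
def classify_ratings(ratings, model_path='./FIND-model.h5'):
    """Load the trained model and return (probability, is_fake) for one set of 5 ratings."""
    model = keras.models.load_model(model_path)
    x = np.asarray(ratings, dtype='float32').reshape(1, 5)  # single sample, 5 features
    probability = float(model.predict(x)[0][0])
    return probability, probability >= 0.5

# Example (uncomment after FIND-model.h5 has been trained and saved):
# print(classify_ratings([0.2, 0.8, 0.5, 0.1, 0.9]))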
print("We're running")
print(Decider())
print("done")