# Untitled

Aug 30th, 2021
19
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
1. #Workaround for row-wise normalization of tensors (no .sum() method on EagerTensor):
2.
3. import numpy as np
4. import tensorflow as tf
5. aiOutPossible = np.array([[.2,.2,.1], [.35, .2, .1,]])
6. aiOutPossible = tf.convert_to_tensor(aiOutPossible)
7.
8. aiOutPossible = aiOutPossible / aiOutPossible.sum()
9. >>> AttributeError: 'tensorflow.python.framework.ops.EagerTensor' object has no attribute 'sum'
10.
11. c = tf.reduce_sum(aiOutPossible, axis=1)
12. aiOutPossibleNorm = aiOutPossible / tf.reshape(c, (-1, 1))
13. print(aiOutPossibleNorm)
14. >>> <tf.Tensor: shape=(2, 3), dtype=float64, numpy=
15.     array([[0.4       , 0.4       , 0.2       ],
16.            [0.53846154, 0.30769231, 0.15384615]])>
17.
18.
19. #Assuming this is correct, here is the new loss function:
def customLoss(dataOut, aiOut):
    """Categorical cross-entropy restricted to legal moves.

    dataOut packs two vectors side by side per sample:
      dataOut[:, 0:4096]    -- the move actually played (target distribution)
      dataOut[:, 4096:8192] -- mask of possible moves (nonzero = playable)
    aiOut is the network's per-move output of shape (batch, 4096).

    The network output is masked down to the possible moves, renormalised
    to a probability distribution per row, and scored with categorical
    cross-entropy against the actual move.
    """
    actualOut     = dataOut[:, 0:4096]
    possibleMoves = dataOut[:, 4096:8192]

    # This is the ai output, only including possible moves.
    aiOutPossible = possibleMoves * aiOut

    # Renormalise each row to sum to 1. keepdims=True keeps the (batch, 1)
    # shape so no manual reshape is needed, and the epsilon guards against
    # 0/0 when the network assigns no mass to any possible move (which
    # would otherwise silently produce NaN losses).
    rowSum = tf.reduce_sum(aiOutPossible, axis=1, keepdims=True)
    aiOutPossibleNorm = aiOutPossible / (rowSum + tf.keras.backend.epsilon())

    loss = tf.keras.backend.categorical_crossentropy(actualOut, aiOutPossibleNorm)
    return loss
32.
33. #And here is the output:
34. 225/225 - 28s - loss: 3.1917 - accuracy: 7.2618e-04 - val_loss: 2.9393 - val_accuracy: 0.0013
35. 222/222 - 22s - loss: 2.8795 - accuracy: 0.0018 - val_loss: 2.8688 - val_accuracy: 0.0033
36. 201/201 - 20s - loss: 2.8157 - accuracy: 0.0037 - val_loss: 2.8088 - val_accuracy: 0.0047
37. 221/221 - 21s - loss: 2.7629 - accuracy: 0.0047 - val_loss: 2.7512 - val_accuracy: 0.0082
38. 222/222 - 24s - loss: 2.7161 - accuracy: 0.0088 - val_loss: 2.7203 - val_accuracy: 0.0124
39. 221/221 - 23s - loss: 2.6670 - accuracy: 0.0119 - val_loss: 2.6841 - val_accuracy: 0.0127
40. 214/214 - 21s - loss: 2.6615 - accuracy: 0.0130 - val_loss: 2.6551 - val_accuracy: 0.0135
41. 193/193 - 19s - loss: 2.6199 - accuracy: 0.0123 - val_loss: 2.6307 - val_accuracy: 0.0134
42. 217/217 - 21s - loss: 2.5873 - accuracy: 0.0142 - val_loss: 2.6186 - val_accuracy: 0.0151
43. 227/227 - 23s - loss: 2.5883 - accuracy: 0.0145 - val_loss: 2.5926 - val_accuracy: 0.0137
44. 202/202 - 21s - loss: 2.5732 - accuracy: 0.0136 - val_loss: 2.5800 - val_accuracy: 0.0159
45. 217/217 - 21s - loss: 2.5440 - accuracy: 0.0165 - val_loss: 2.5638 - val_accuracy: 0.0165
46. 224/224 - 21s - loss: 2.5426 - accuracy: 0.0161 - val_loss: 2.5498 - val_accuracy: 0.0170
47. 221/221 - 28s - loss: 2.5150 - accuracy: 0.0164 - val_loss: 2.5286 - val_accuracy: 0.0166
48. 211/211 - 26s - loss: 2.4972 - accuracy: 0.0163 - val_loss: 2.5367 - val_accuracy: 0.0169
49. 221/221 - 24s - loss: 2.5312 - accuracy: 0.0153 - val_loss: 2.5261 - val_accuracy: 0.0170
50. 218/218 - 22s - loss: 2.5103 - accuracy: 0.0161 - val_loss: 2.5146 - val_accuracy: 0.0163
51.
52. #Unfortunately, it seems the accuracy is still incredibly low, and very far from the 70% target
53.
54.
55.
56. #-Ruler
RAW Paste Data