Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
"""Train a fully-connected MNIST digit classifier with Keras and save it.

Reads ``train.csv`` (Kaggle MNIST format: label in column 0, followed by
784 pixel intensities per row), trains a 2-hidden-layer MLP with dropout,
and writes the model architecture to ``modell.json`` and its weights to
``modell.h5``.

NOTE(review): this uses the Keras 1.x API (``keras.layers.core``,
``np_utils``, ``nb_epoch``) — kept as-is to match the script's original
environment.
"""
import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD, Adam, RMSprop
from keras.utils import np_utils
from time import time
from pickle import dump

trainfile = "train.csv"

# Load the CSV. Passing the filename (rather than an open handle) lets
# np.loadtxt open and close the file itself, so no handle is leaked.
t = time()
data = np.loadtxt(trainfile, delimiter=',', skiprows=1)
# Column 0 is the digit label; the remaining columns are pixel values,
# rescaled from [0, 255] to [0, 1].
y, X = data[:, 0], data[:, 1:] / 255
print("Loading :", time() - t, " sec")

# Hyperparameters and output basename for the saved model files.
outname = "modell"
dropout_rate = 0.4
batch_size = 128
nb_classes = 10
nb_epoch = 20

# Train on the full file (no hold-out split); one-hot encode the labels.
X_train = X
y_train = np_utils.to_categorical(y.astype('int32'), nb_classes)

# Build the network: input -> 512 -> 512 -> nb_classes MLP with dropout.
# The input width is taken from the data instead of hard-coding 784, so
# the script also works for inputs of a different flattened size.
model = Sequential()
model.add(Dense(512, input_shape=(X_train.shape[1],)))
model.add(Activation('relu'))
model.add(Dropout(dropout_rate))
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(dropout_rate))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
              optimizer=RMSprop(),
              metrics=['accuracy'])

history = model.fit(X_train, y_train,
                    batch_size=batch_size, nb_epoch=nb_epoch,
                    verbose=1)

# Persist the architecture (JSON) and the weights (HDF5) separately.
with open(outname + ".json", "w") as json_file:
    json_file.write(model.to_json())
model.save_weights(outname + ".h5")
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement