import tensorflow as tf
from PIL import Image
import numpy as np
import os
from tensorflow.keras import backend as K

# Training set: image folder plus a label file; arrays are cached as .npy files.
train_path = '../data/malan/'
train_txt = '../data/code.txt'
x_train_savepath = '../data/1.npy'
y_train_savepath = '../data/2.npy'

# Test set: same image folder, separate label file.
test_path = '../data/malan/'
test_txt = '../data/test.txt'
x_test_savepath = '../data/test1.npy'
y_test_savepath = '../data/test2.npy'
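
# Assumed label-file format (illustrative; inferred from generateds() below and
# the 10-class output layer): one image filename per line, followed by an
# integer class label in 0..9, e.g.
#
#   img_0001.jpg 0
#   img_0002.jpg 7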

def generateds(path, txt):
    """Read '<filename> <label>' lines, then load and preprocess each image."""
    with open(txt, 'r') as f:
        contents = f.readlines()
    x, y_ = [], []
    for content in contents:
        value = content.split()
        img_path = path + value[0]
        img_raw = tf.io.read_file(img_path)
        img_tensor = tf.image.decode_image(img_raw)
        img = tf.image.resize(img_tensor, [192, 192])
        img = img / 255.  # scale pixel values to [0, 1]
        x.append(img)
        y_.append(value[1])
        print('loading : ' + content)

    x = np.array(x)
    y_ = np.array(y_).astype(np.int64)
    return x, y_


if os.path.exists(x_train_savepath) and os.path.exists(y_train_savepath) and os.path.exists(
        x_test_savepath) and os.path.exists(y_test_savepath):
    print('-------------Load Datasets-----------------')
    x_train_save = np.load(x_train_savepath, allow_pickle=True)
    y_train = np.load(y_train_savepath, allow_pickle=True)
    x_test_save = np.load(x_test_savepath, allow_pickle=True)
    y_test = np.load(y_test_savepath, allow_pickle=True)
    # The images were saved flattened from channels-last (N, 192, 192, 3)
    # arrays, so restore exactly that shape here.
    x_train = np.reshape(x_train_save, (len(x_train_save), 192, 192, 3))
    x_test = np.reshape(x_test_save, (len(x_test_save), 192, 192, 3))
else:
    print('-------------Generate Datasets-----------------')
    x_train, y_train = generateds(train_path, train_txt)
    x_test, y_test = generateds(test_path, test_txt)

    print('-------------Save Datasets-----------------')
    # Flatten each image to one row so the whole set saves as a 2-D array.
    x_train_save = np.reshape(x_train, (len(x_train), -1))
    x_test_save = np.reshape(x_test, (len(x_test), -1))
    np.save(x_train_savepath, x_train_save)
    np.save(y_train_savepath, y_train)
    np.save(x_test_savepath, x_test_save)
    np.save(y_test_savepath, y_test)
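
# Quick sanity check (an added aside): whichever branch ran above, both sets
# should now be float images of shape (N, 192, 192, 3) with integer labels of
# shape (N,).
print('x_train:', x_train.shape, ' y_train:', y_train.shape)
print('x_test :', x_test.shape, ' y_test :', y_test.shape)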

# A minimal classifier: flatten the image, one hidden layer, softmax over 10 classes.
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10, activation='softmax')
])

model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
              metrics=['sparse_categorical_accuracy'])
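
# Aside (an added sketch, not part of the original script): the same loss can be
# computed from raw logits. If the final Dense layer drops its softmax, compile
# with from_logits=True; Keras then applies the softmax inside the loss, which
# is numerically more stable:
#
#   model = tf.keras.models.Sequential([
#       tf.keras.layers.Flatten(),
#       tf.keras.layers.Dense(128, activation='relu'),
#       tf.keras.layers.Dense(10)  # raw logits, no activation
#   ])
#   model.compile(optimizer='adam',
#                 loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
#                 metrics=['sparse_categorical_accuracy'])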

# Cast inputs and labels to the Keras default float dtype (float32).
x_train = K.cast_to_floatx(x_train)
y_train = K.cast_to_floatx(y_train)

# Restore previously saved weights (if any) before training, so a rerun
# resumes from the checkpoint instead of discarding it.
checkpoint_save_path = "./checkpoint2/qf.ckpt"
if os.path.exists(checkpoint_save_path + '.index'):
    print('-------------load the model-----------------')
    model.load_weights(checkpoint_save_path)

# Save weights only, and only when validation performance improves.
cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=checkpoint_save_path,
                                                 save_weights_only=True,
                                                 save_best_only=True)

# Initial training run (no checkpointing).
model.fit(x_train, y_train, batch_size=3, epochs=50,
          validation_data=(x_test, y_test), validation_freq=1)
model.summary()

# Continue training, keeping the best weights on disk via the callback.
history = model.fit(x_train, y_train, batch_size=2, epochs=5,
                    validation_data=(x_test, y_test), validation_freq=1,
                    callbacks=[cp_callback])

# Dump every trainable variable (name, shape, values) to a text file.
print(model.trainable_variables)
with open('./weights.txt', 'w') as file:
    for v in model.trainable_variables:
        file.write(str(v.name) + '\n')
        file.write(str(v.shape) + '\n')
        file.write(str(v.numpy()) + '\n')
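
# Usage sketch (added; 'sample.jpg' is a hypothetical path, not a file from the
# original dataset): once ./checkpoint2/qf.ckpt exists, a separate script can
# rebuild the same architecture, restore the weights, and classify one image.
#
#   infer_model = tf.keras.models.Sequential([
#       tf.keras.layers.Flatten(),
#       tf.keras.layers.Dense(128, activation='relu'),
#       tf.keras.layers.Dense(10, activation='softmax')
#   ])
#   infer_model.load_weights(checkpoint_save_path)
#   img = tf.image.decode_image(tf.io.read_file('sample.jpg'))
#   img = tf.image.resize(img, [192, 192]) / 255.
#   probs = infer_model(img[tf.newaxis, ...])
#   print('predicted class:', int(tf.argmax(probs, axis=1)[0]))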