A simple way to tune hyperparameters is random search: sample candidate values, train the model with each sample, and keep the combination that performs best. First, write a helper that draws a random learning rate and dropout keep probability. The learning rate is sampled on a log scale (the exponent is drawn uniformly), while the keep probability is drawn uniformly from its range.
import numpy as np
import tensorflow as tf

def generate_random_hyperparams(lr_min, lr_max, kp_min, kp_max):
    '''Generate a random learning rate (log scale) and keep probability.'''
    random_learning_rate = 10**np.random.uniform(lr_min, lr_max)
    random_keep_prob = np.random.uniform(kp_min, kp_max)
    return random_learning_rate, random_keep_prob
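As a quick, purely illustrative sanity check (the printed values below are hypothetical), one draw with exponent bounds of -5 and -1 and keep-probability bounds of 0.2 and 0.8 might look like this:

lr, kp = generate_random_hyperparams(-5, -1, 0.2, 0.8)
print(lr)   # e.g. 0.00023 -- somewhere in [1e-5, 1e-1], uniform in the exponent
print(kp)   # e.g. 0.57    -- somewhere in [0.2, 0.8]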
Next, wrap the model construction, training, and evaluation in a single function, run_model, that takes the sampled hyperparameters and returns the measured performance.
def run_model(random_learning_rate, random_keep_prob):
    # Build a fresh graph for each trial so runs do not share state.
    graph = tf.Graph()
    with graph.as_default():
        # Hyperparameters enter the graph as placeholders so they can be fed at run time.
        learning_rate = tf.placeholder(tf.float32, name='learning_rate')
        keep_prob = tf.placeholder(tf.float32, name='keep_prob')
        # ... define the rest of the model (inputs, layers, loss, optimizer) here ...
    with tf.Session(graph=graph) as session:
        tf.global_variables_initializer().run()  # initialize_all_variables is deprecated
        # `data` and `batch_data` stand for the model's input placeholder and a training batch.
        feed_train = {data: batch_data,
                      learning_rate: random_learning_rate,
                      keep_prob: random_keep_prob}
        performance = session.run([...], feed_dict=feed_train)
    return performance
Because the hyperparameters enter the graph as tf.placeholder tensors, they can be supplied as ordinary Python numbers at run time through feed_dict.
Finally, run the random search loop and record the performance of each trial:
performance_records = {}
for i in range(10):
    # Sample the learning rate from [1e-5, 1e-1] (log scale) and keep_prob from [0.2, 0.8].
    random_learning_rate, random_keep_prob = generate_random_hyperparams(-5, -1, 0.2, 0.8)
    performance = run_model(random_learning_rate, random_keep_prob)
    performance_records[(random_learning_rate, random_keep_prob)] = performance
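Once the trials finish, the best setting is simply the key with the best recorded value. A minimal sketch, assuming performance is a single scalar where higher is better (use min instead of max for a loss):

best_lr, best_kp = max(performance_records, key=performance_records.get)
print(best_lr, best_kp, performance_records[(best_lr, best_kp)])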