Using TFLearn with TensorFlow
TFLearn is more than just a little convenient. The example below loads MNIST, builds a small CNN with TFLearn's layer wrappers inside a plain TensorFlow graph, and keeps the loss, optimizer, and training loop in raw TensorFlow.
import tensorflow as tf
import tflearn
import tflearn.datasets.mnist as mnist

# Load the MNIST dataset with one-hot encoded labels
mnist_data = mnist.read_data_sets(one_hot=True)

# User-defined placeholders
with tf.Graph().as_default():
    # Placeholders for data and labels
    X = tf.placeholder(shape=(None, 784), dtype=tf.float32)
    Y = tf.placeholder(shape=(None, 10), dtype=tf.float32)

    # Reshape flat 784-dim vectors into 28x28 single-channel images
    net = tf.reshape(X, [-1, 28, 28, 1])

    # Using TFLearn wrappers for network building
    net = tflearn.conv_2d(net, 32, 3, activation='relu')
    net = tflearn.max_pool_2d(net, 2)
    net = tflearn.local_response_normalization(net)
    net = tflearn.dropout(net, 0.8)
    net = tflearn.conv_2d(net, 64, 3, activation='relu')
    net = tflearn.max_pool_2d(net, 2)
    net = tflearn.local_response_normalization(net)
    net = tflearn.dropout(net, 0.8)
    net = tflearn.fully_connected(net, 128, activation='tanh')
    net = tflearn.dropout(net, 0.8)
    net = tflearn.fully_connected(net, 256, activation='tanh')
    net = tflearn.dropout(net, 0.8)
    net = tflearn.fully_connected(net, 10, activation='linear')

    # Defining other ops using plain TensorFlow
    loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=net, labels=Y))
    optimizer = tf.train.AdamOptimizer(learning_rate=0.01).minimize(loss)

    # Initializing the variables
    # (tf.initialize_all_variables is deprecated; use the replacement)
    init = tf.global_variables_initializer()

    # Launch the graph
    with tf.Session() as sess:
        sess.run(init)
        batch_size = 128
        for epoch in range(2):  # 2 epochs
            avg_cost = 0.
            total_batch = int(mnist_data.train.num_examples / batch_size)
            for i in range(total_batch):
                batch_xs, batch_ys = mnist_data.train.next_batch(batch_size)
                sess.run(optimizer, feed_dict={X: batch_xs, Y: batch_ys})
                cost = sess.run(loss, feed_dict={X: batch_xs, Y: batch_ys})
                avg_cost += cost / total_batch
                if i % 20 == 0:
                    print("Epoch:", '%03d' % (epoch + 1), "Step:", '%03d' % i,
                          "Loss:", str(cost))
Result: the loop prints the epoch, step, and current loss every 20 batches.
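If you also want test accuracy after training, here is a minimal sketch (my addition, not from the original post; it assumes it is placed right after the training loop, while the session is still open, and reuses the names net, X, Y, sess, and mnist_data from the code above):

        # Compare the predicted class (argmax of the logits) with the true label
        correct_pred = tf.equal(tf.argmax(net, 1), tf.argmax(Y, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
        # Evaluate on the held-out test split loaded by read_data_sets
        print("Test accuracy:",
              sess.run(accuracy, feed_dict={X: mnist_data.test.images,
                                            Y: mnist_data.test.labels}))

Mixing the two libraries this way keeps TFLearn for what it is best at, concise layer definitions, while the loss, optimizer, and training loop stay under your direct control in plain TensorFlow.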