Receiving JSON data with Python Flask
1. Main application file: app.py
import logging
from flask import Flask, request, make_response
from logConfig import *

app = Flask(__name__)

@app.route("/upload", methods=["POST"])
def save_file():
    res_status = request.remote_addr, request.method, request.path, request.url
    logger.info(res_status)
    if request.method == 'POST':
        print("start...")
        #print(request.remote_addr, request.method, request.path, request.url)
        # read the JSON payload
        print(type(request.json))
        #print(request.json['hello'])
        res = request.json['data']
        #print(res)
        print("end...")
        logger.info(res)
        #return "received and saved\n"
        return res
    else:
        logger.error("Not a POST request.")

if __name__ == '__main__':
    app.run(host="0.0.0.0", port=5000, debug=True)

# Test with:
# curl http://172.16.0.46:5000/upload -X POST -d '{"data":{"mem": "80","cpu":"20","disk":"50","server":{"name":"lipc","age":"18"}}}' --header "Content-Type: application/json"
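The handler above assumes the client always sends valid JSON with a top-level "data" key; if a client omits the header or the key, request.json raises and Flask returns a 400/500. As a minimal sketch (not part of the original code), the same endpoint could be hardened with request.get_json(silent=True) and jsonify, both standard Flask APIs:

# Sketch only: a more defensive variant of the /upload handler.
# The route and behaviour mirror app.py above; get_json(silent=True) returns None
# instead of raising when the body is missing or not valid JSON.
from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route("/upload", methods=["POST"])
def save_file():
    payload = request.get_json(silent=True)
    if payload is None or "data" not in payload:
        return jsonify({"error": "expected a JSON body with a top-level 'data' key"}), 400
    res = payload["data"]
    # ... store or process res here ...
    return jsonify(res), 200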
2. Logging configuration: logConfig.py

import logging
import logging.handlers
import time

LOG_PATH = r'./'

def getlogname():
    logfilename = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
    print(logfilename)
    return logfilename

def logConfig_1():
    '''
    Configure logging to a single file: fileName
    '''
    fileName = LOG_PATH + "%s.txt" % getlogname()
    logformat = '%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s'
    logging.basicConfig(level=logging.DEBUG,   # console log level
                        filename=fileName,
                        filemode='a',          # 'w' rewrites the log on every run, 'a' appends (the default)
                        format=logformat)      # log format
    print('logConfig_1 configured')

#logConfig_1()  # configure logging

def logConfig_2():
    '''
    Configure logging to both a file and the console,
    i.e. two handlers: 1 - file, 2 - console
    '''
    logfilename = LOG_PATH + "%s.txt" % getlogname()
    logformat_1 = '%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s'

    # get a Logger named 'mylogger'
    logger = logging.getLogger('mylogger')
    logger.setLevel(logging.DEBUG)  # level shared by all handlers unless they override it
    logger.handlers = []            # drop any handlers added earlier

    # create the handlers: fileHandler, streamHandler
    fileHandler = logging.FileHandler(logfilename)    # write to file
    fileHandler.setFormatter(logging.Formatter(logformat_1))

    streamHandler = logging.StreamHandler()           # write to console
    streamHandler.setLevel(logging.ERROR)             # a handler can set its own level
    logformat_2 = '%(name)s - %(asctime)s - %(module)s - %(funcName)s[line:%(lineno)d] - %(levelname)s: %(message)s'
    streamHandler.setFormatter(logging.Formatter(logformat_2))

    # attach the handlers to the logger
    logger.addHandler(fileHandler)
    logger.addHandler(streamHandler)
    print('logConfig_2 configured')
    return logger

# module-level logger: other modules import this logger directly instead of creating a new one
logger = logConfig_2()

if __name__ == '__main__':
    #logging.debug('debug message %s - %d ......', 'hello', 1)
    logger.debug('debug message')
    logger.info('info message')
    logger.warning('warning message')
    logger.error('error message')
    logger.critical('critical message')
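logging.handlers is imported above but never used. If log rotation is wanted instead of one timestamped file per start, a TimedRotatingFileHandler could replace the plain FileHandler; the following is a sketch only, assuming daily rotation at midnight with 7 backups kept (the file name './app.log' is a hypothetical placeholder):

# Sketch: a rotating file handler as an alternative to FileHandler in logConfig_2().
import logging
import logging.handlers

rotatingHandler = logging.handlers.TimedRotatingFileHandler(
    filename='./app.log',   # hypothetical path; the original builds the name from a timestamp
    when='midnight',        # rotate once a day at midnight
    backupCount=7,          # keep the last 7 rotated files
    encoding='utf-8')
rotatingHandler.setFormatter(logging.Formatter(
    '%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s'))
# inside logConfig_2(), this handler would be attached with logger.addHandler(rotatingHandler)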
3. Start the Python application: python3 app.py
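Besides the curl command shown in app.py, the endpoint can also be exercised from Python. A minimal test client sketch using the third-party requests library (an assumption; install it with pip first), with the URL and payload mirroring the curl example:

# Minimal test client (sketch). Assumes `pip install requests`; URL matches the curl example above.
import requests

payload = {"data": {"mem": "80", "cpu": "20", "disk": "50",
                    "server": {"name": "lipc", "age": "18"}}}
resp = requests.post("http://172.16.0.46:5000/upload", json=payload, timeout=15)
print(resp.status_code, resp.text)   # the handler echoes back the "data" object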
4. Client-side reporting script
# Add this shell script to crontab so it runs periodically and reports the device's metrics to the server
[root@cambricon ~]# cat lipc-test.sh
#!/bin/bash
function ema_info()
{
    sn_number=$(cat /proc/tztek_env | awk 'NR==2 {print $3}')
    disk_used=$(df -h | grep aibox | awk '{print $5}')
    Mem_used=$(free -m | sed -n '2p' | awk '{print $3/$2*100"%"}')
    ai_status=$(/home/nvidia/aibox/devops/devops_ai.sh status | awk 'NR==1 {print $3}')
    da_status=$(/home/nvidia/aibox/devops/devops_da.sh status | awk 'NR==1 {print $3}')
    up_status=$(/home/nvidia/aibox/devops/devops_up.sh status | awk 'NR==1 {print $3}')
    json="{\"data\":{\"SN\":\"$sn_number\",\"mem\":\"$Mem_used\",\"disk\":\"$disk_used\",\"server\":{\"ai\":\"$ai_status\",\"da\":\"$da_status\",\"up\":\"$up_status\"}}}"
    timeout -t 15 curl -H "Content-Type: application/json" -X POST -d "${json}" "http://172.16.0.46:5000/upload"
}
ema_info
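On the server side, the nested payload produced by this script arrives in save_file() as res = request.json['data']. A sketch of how the individual fields could be unpacked there (extract_metrics is a hypothetical helper, not part of the original app.py; the key names follow the JSON built above):

# Sketch: unpacking the fields reported by lipc-test.sh on the server side.
def extract_metrics(res):
    sn = res.get("SN")
    mem = res.get("mem")
    disk = res.get("disk")
    services = res.get("server", {})   # nested object holding the service statuses
    return {"SN": sn, "mem": mem, "disk": disk,
            "ai": services.get("ai"), "da": services.get("da"), "up": services.get("up")}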