E:\song\cpc_log_v6_fastapi\app.py
from fastapi import FastAPI, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.openapi.docs import get_swagger_ui_html
from fastapi.staticfiles import StaticFiles
from auth.jwt_bearer import JWTBearer
from config.config import initiate_database
from routes.admin import router as AdminRouter
from routes.student import router as StudentRouter
from routes.user import router as UserRouter
from routes.log import router as LogRouter
from routes.logtype import router as LogTypeRouter
from routes.feedback import router as FeedBackRouter
from routes.logsolution import router as LogsolutionRouter
app = FastAPI(docs_url=None)
app.mount('/static',StaticFiles(directory='static'),name='static')
origins = [
"http://localhost",
"http://localhost:3000/",
]
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
token_listener = JWTBearer()
@app.on_event("startup")
async def start_database():
await initiate_database()
@app.get("/", tags=["Root"])
async def read_root():
return {"message": "Welcome to this fantastic app."}
@app.get('/docs',include_in_schema=False)
async def custom_swagger_ui_html():
return get_swagger_ui_html(
openapi_url=app.openapi_url,
title=app.title + " - Swagger UI",
oauth2_redirect_url=app.swagger_ui_oauth2_redirect_url,
swagger_js_url="/static/swagger-ui-bundle.js",
swagger_css_url="/static/swagger-ui.css",
)
app.include_router(AdminRouter, tags=["Administrator"], prefix="/api/admin")
# app.include_router(StudentRouter, tags=["Students"], prefix="/student", dependencies=[Depends(token_listener)])
app.include_router(StudentRouter, tags=["Students"], prefix="/api/student")
app.include_router(UserRouter, tags=["User"], prefix="/api/user")
app.include_router(LogRouter, tags=["Log"], prefix="/api/log")
app.include_router(LogTypeRouter, tags=["LogType"], prefix="/api/logtype")
app.include_router(FeedBackRouter, tags=["FeedBack"], prefix="/api/feedback")
app.include_router(LogsolutionRouter, tags=["Log Solutions"], prefix="/api/logsolution")
E:\song\cpc_log_v6_fastapi\main.py
import uvicorn
if __name__ == '__main__':
uvicorn.run('app:app', host="127.0.0.1", port=9999, reload=True)
E:\song\cpc_log_v6_fastapi\__init__.py
E:\song\cpc_log_v6_fastapi\auth\admin.py
from fastapi import HTTPException, Depends, status
from fastapi.security import HTTPBasicCredentials, HTTPBasic
from passlib.context import CryptContext
from database.database import admin_collection
security = HTTPBasic()
hash_helper = CryptContext(schemes=["bcrypt"])
async def validate_login(credentials: HTTPBasicCredentials = Depends(security)):
    admin = await admin_collection.find_one({"email": credentials.username})
    if admin:
        password = hash_helper.verify(credentials.password, admin.password)
if not password:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect email or password"
)
return True
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect email or password"
)
E:\song\cpc_log_v6_fastapi\auth\jwt_bearer.py
from fastapi import Request, HTTPException
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from .jwt_handler import decode_jwt
def verify_jwt(jwtoken: str) -> bool:
isTokenValid: bool = False
payload = decode_jwt(jwtoken)
if payload:
isTokenValid = True
return isTokenValid
class JWTBearer(HTTPBearer):
def __init__(self, auto_error: bool = True):
super(JWTBearer, self).__init__(auto_error=auto_error)
async def __call__(self, request: Request):
credentials: HTTPAuthorizationCredentials = await super(JWTBearer, self).__call__(request)
print("Credentials :", credentials)
if credentials:
if not credentials.scheme == "Bearer":
raise HTTPException(status_code=403, detail="Invalid authentication token")
if not verify_jwt(credentials.credentials):
raise HTTPException(status_code=403, detail="Invalid token or expired token")
return credentials.credentials
else:
raise HTTPException(status_code=403, detail="Invalid authorization token")
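# Usage sketch (illustration only, mirroring the commented-out example in app.py):
# a router or a single route can be protected by declaring JWTBearer as a dependency, e.g.
#     token_listener = JWTBearer()
#     app.include_router(StudentRouter, tags=["Students"], prefix="/api/student",
#                        dependencies=[Depends(token_listener)])
# FastAPI then rejects requests that lack a valid "Authorization: Bearer <token>" header.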
E:\song\cpc_log_v6_fastapi\auth\jwt_handler.py
import time
from typing import Dict
import jwt
from config.config import Settings
def token_response(token: str):
return {
"access_token": token
}
secret_key = Settings().secret_key
def sign_jwt(user_id: str) -> Dict[str, str]:
# Set the expiry time.
payload = {
'user_id': user_id,
'expires': time.time() + 2400
}
return token_response(jwt.encode(payload, secret_key, algorithm="HS256"))
def decode_jwt(token: str) -> dict:
    try:
        decoded_token = jwt.decode(token, secret_key, algorithms=["HS256"])
    except jwt.PyJWTError:
        # Malformed or badly signed tokens are treated the same as expired ones.
        return {}
    return decoded_token if decoded_token.get('expires', 0) >= time.time() else {}
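# Minimal round-trip sketch (illustration only, not part of the original module);
# it assumes secret_key has been loaded from the .env.dev settings.
if __name__ == '__main__':
    demo_token = sign_jwt("user-123")["access_token"]
    print(decode_jwt(demo_token))  # -> {'user_id': 'user-123', 'expires': ...} while the token is still valid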
E:\song\cpc_log_v6_fastapi\auth\__init__.py
E:\song\cpc_log_v6_fastapi\config\config.py
from typing import Optional
from beanie import init_beanie
from motor.motor_asyncio import AsyncIOMotorClient
from pydantic import BaseSettings
from models.admin import Admin
from models.device import Device
from models.feedback import FeedBack
from models.logsolution import LogSolution
from models.logtype import LogType
from models.student import Student
from models.user import User
class Settings(BaseSettings):
# database configurations
DATABASE_URL: Optional[str] = None
# JWT
secret_key: str
algorithm: str = "HS256"
class Config:
env_file = ".env.dev"
orm_mode = True
async def initiate_database():
client = AsyncIOMotorClient(Settings().DATABASE_URL)
await init_beanie(database=client["cpc"],
document_models=[Admin, Student, User, LogSolution, LogType, FeedBack, Device])
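# Example .env.dev consumed by Settings above (illustrative values only, not from the repo):
#     DATABASE_URL=mongodb://localhost:27017
#     secret_key=replace-with-a-long-random-string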
E:\song\cpc_log_v6_fastapi\config\__init__.py
E:\song\cpc_log_v6_fastapi\database\database.py
from typing import List, Union
from beanie import PydanticObjectId
from models.admin import Admin
from models.student import Student
from models.user import User
admin_collection = Admin
student_collection = Student
user_collection = User
async def add_user(new_user: User) -> User:
user = await new_user.create()
return user
async def add_admin(new_admin: Admin) -> Admin:
admin = await new_admin.create()
return admin
async def retrieve_students() -> List[Student]:
students = await student_collection.all().to_list()
return students
async def add_student(new_student: Student) -> Student:
student = await new_student.create()
return student
async def retrieve_student(id: PydanticObjectId) -> Student:
student = await student_collection.get(id)
if student:
return student
async def delete_student(id: PydanticObjectId) -> bool:
student = await student_collection.get(id)
if student:
await student.delete()
return True
async def update_student_data(id: PydanticObjectId, data: dict) -> Union[bool, Student]:
des_body = {k: v for k, v in data.items() if v is not None}
update_query = {"$set": {
field: value for field, value in des_body.items()
}}
student = await student_collection.get(id)
if student:
await student.update(update_query)
return student
return False
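# For example (illustration): update_student_data(some_id, {"gpa": 3.9, "year": None})
# drops the None entry and runs the Mongo update {"$set": {"gpa": 3.9}} on the matching document.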
async def update_user_data(name: str, data: dict) -> bool | User:
print(data)
des_body = {k: v for k, v in data.items() if v is not None}
print(des_body)
print("----")
update_query = {"$set": {
field: value for field, value in des_body.items()
}}
print(update_query)
print(name)
user = await user_collection.find_one({"name": name})
print(user)
if user:
await user.update(update_query)
return user
return False
E:\song\cpc_log_v6_fastapi\database\device.py
from models.device import Device,DeviceShortView
async def find_all_devices() -> list[Device]:
devices = await Device.all().to_list()
return devices
async def find_device_by_name(name: str) -> Device:
device = await Device.find_one({"name": name}).project(DeviceShortView)
return device
E:\song\cpc_log_v6_fastapi\database\feedback.py
from models.feedback import FeedBack
async def add_feedback_data(new_feedback:FeedBack):
feedback = await new_feedback.create()
return feedback
E:\song\cpc_log_v6_fastapi\database\logsolution.py
from beanie import PydanticObjectId
from models.logsolution import LogSolution
logsolution_collection = LogSolution
async def find_log_solution(category: str) -> LogSolution:
log = await logsolution_collection.find_one({"category": category})
return log
async def find_all_logsolution_data() -> list[LogSolution]:
logs = await logsolution_collection.all().to_list()
return logs
async def insert_logsolution_data(data: dict) -> LogSolution:
logsolution = await logsolution_collection.insert(data)
return logsolution
async def update_logsolution_data(id: PydanticObjectId, data: dict) -> bool | LogSolution:
des_body = {k: v for k, v in data.items() if v is not None}
update_query = {"$set": {
field: value for field, value in des_body.items()
}}
logsolution = await logsolution_collection.get(id)
if logsolution:
await logsolution.update(update_query)
return logsolution
return False
async def delete_logsolution_data(id: PydanticObjectId) -> bool:
logsolution = await logsolution_collection.get(id)
if logsolution:
await logsolution.delete()
return True
return False
E:\song\cpc_log_v6_fastapi\database\logtype.py
from beanie import PydanticObjectId
from models.logtype import LogType
async def find_all_logtype() -> list[LogType]:
log = await LogType.all().to_list()
return log
async def insert_logtype_data(data: dict) -> LogType | bool:
logsolution = await LogType.insert(data)
if logsolution is None:
return False
return logsolution
async def update_logtype_data(id: PydanticObjectId, data: dict) -> bool | LogType:
des_body = {k: v for k, v in data.items() if v is not None}
update_query = {"$set": {
field: value for field, value in des_body.items()
}}
logtype = await LogType.get(id)
if logtype:
await logtype.update(update_query)
return logtype
return False
async def delete_logtype_date(id:PydanticObjectId)-> bool:
logtype = await LogType.get(id)
if logtype:
await logtype.delete()
return True
return False
E:\song\cpc_log_v6_fastapi\database\user.py
from models.user import User
user_collection = User
async def add_user(new_user: User) -> User:
user = await new_user.create()
return user
async def update_user_data(name: str, data: dict) -> bool | User:
des_body = {k: v for k, v in data.items() if v is not None}
update_query = {"$set": {
field: value for field, value in des_body.items()
}}
print(des_body)
print(name)
user = await user_collection.find_one({"name": name})
if user:
await user.update(update_query)
return user
return False
E:\song\cpc_log_v6_fastapi\database\__init__.py
E:\song\cpc_log_v6_fastapi\logdata\logclassify.py
class LogClassify:
def __init__(self, loglist, logtypelist):
self.loglist = loglist
self.logtypelist = logtypelist
    # Keyword detection
def __keyword_detect(self, errorLogItem, categoryItem):
problemDesp = ''.join(i for i in errorLogItem.get("problem"))
for keyword in categoryItem.keyword:
            if keyword in problemDesp:  # a keyword match means this category applies; return True and stop checking
return True
return False
    # Category detection
def __category_detect(self, errorLogItem, logtypeItem):
for categoryItem in logtypeItem.category:
            # If a keyword from this categoryItem is detected, set the log's 'category' to categoryItem.name and return without checking further
res = self.__keyword_detect(errorLogItem, categoryItem)
if res:
errorLogItem['category'] = categoryItem.name
return True
return False
def classify_log(self):
for errorLogItem in self.loglist:
errorLogItem['category'] = 'unclassified'
for logtypeItem in self.logtypelist:
res = self.__category_detect(errorLogItem, logtypeItem)
if res:
break
return self.loglist
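# Usage sketch (a runnable version lives in test/le05_logtype_detail.py):
#     loghandler = LogClassify(logs, logtypes)   # logs: dicts with a "problem" field
#     classified = loghandler.classify_log()     # logtypes: LogType documents from the database
# Each log dict gains a 'category' key, left as 'unclassified' when no keyword matches.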
E:\song\cpc_log_v6_fastapi\logdata\logdataframe.py
import re
import pandas as pd
from logdata.logpath import get_log_path_by_date
def __convert_log_to_df(logFullPathList):
    """Read a batch of log files and convert them into a single pandas DataFrame.
    logFullPathList -- a list of full log file paths
    Return: a DataFrame with 'time' and 'content' columns
    """
    df_list = []
    for file in logFullPathList:
        df_list.append(pd.read_csv(file, header=None, encoding='utf-8', engine='python', sep=r'\.[0-9]{3}\s{2}'))
    df = pd.concat(df_list)
    # Strip the seconds from the timestamp so it is only precise to the minute; that way
    # lines belonging to the same log event end up in the same group.
    # This is a provisional way of controlling the time precision.
    df[0] = df[0].str.rsplit(':', n=1, expand=True).drop(columns=[1])
    df.columns = ['time', 'content']
    return df
def __get_ports(contentArr):
    """
    Input: an array of problem strings
    Purpose: extract the port identifiers from the problem strings with a regular expression and de-duplicate them
    Output: an array of ports
    """
    # rule = r'([P|p]ort \'[1-6]{1}第\d{1,2}层)|([P|p]ort \[[1-6]{1})'
rule = r'(\[Port\d{1}\])|(卡匣\[\w{2}\d{4}\])|(第.层)'
pattern = re.compile(rule)
match = pattern.findall(''.join(i for i in contentArr))
if match:
new_match = []
for ele in match:
for i in ele:
if i != '':
new_match.append(i)
        # Remove duplicate elements while preserving order
new_match = [i for n, i in enumerate(new_match) if i not in new_match[:n]]
return new_match
return []
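# Example (illustrative, based on the sample logs in test/le06): a problem line such as
# "[Port3]卡匣[FA0013]第3层的片子发生Mapping异常" yields ['[Port3]', '卡匣[FA0013]', '第3层'].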
def __split_problem_solution(df):
    """Separate the '分析结果' (analysis result) and '解决方式' (solution) lines into two arrays
    and return them together as one object.
    df -- a Series of content lines that share the same timestamp
    Return: a dict with 'problem', 'ports' and 'solution' keys
    """
dfObj = {}
df_problem = df[df.str.contains('分析结果')].values.tolist()
dfObj['problem'] = df_problem
dfObj['ports'] = __get_ports(df_problem)
df_solution = df[df.str.contains('解决方式')].values.tolist()
dfObj['solution'] = df_solution
return dfObj
def __sorted_log_list_by_time(logDf):
    """Group the log DataFrame by time, split each group into analysis results and solutions,
    and return the groups as a list of objects.
    logDf -- the DataFrame produced by __convert_log_to_df
    Return: a list of log objects
    """
    # Group rows that share the same (minute-precision) timestamp
grouped = logDf.groupby('time')
logList = []
for name, group in grouped:
obj_temp = __split_problem_solution(group['content'])
obj_temp['time'] = name
logList.append(obj_temp)
return logList
async def get_log(dirPath, dateStr):
    # First collect the log file names that match the requested date
    logFullPathList = get_log_path_by_date(dirPath, dateStr)
    # An empty list means there is no error log file for that date
    if not logFullPathList:
        return []
    # Convert the log files into a DataFrame
    logDf = __convert_log_to_df(logFullPathList)
    # Group the rows of the DataFrame by time
    log_list = __sorted_log_list_by_time(logDf)
    return log_list
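# Shape of the returned list (illustration): one object per minute-level timestamp, e.g.
#     {'time': '2022/10/12 00:39', 'problem': [...], 'ports': [...], 'solution': [...]}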
if __name__ == '__main__':
    dirPath = r'G:\EMA_v2\TerminalMessage'
dateStr = '20220813'
logFullPathList = get_log_path_by_date(dirPath, dateStr)
logDf = __convert_log_to_df(logFullPathList)
log_list = __sorted_log_list_by_time(logDf)
print(log_list)
#
# for log in log_list:
# print(log)
# print('----------------')
E:\song\cpc_log_v6_fastapi\logdata\logpath.py
import os
import re
def get_log_path_by_date(dirPath, dateStr):
"""get log full path by date
Keyword arguments:
dirPath - dir path that contains the log
dateStr - date string
Return: return an array that contains the log path
"""
file_name_list = os.listdir(dirPath)
file_list = []
for file_name in file_name_list:
if dateStr in file_name:
file_list.append(os.path.join(dirPath, file_name))
        # ******************************************************************
        # Log file name matching: the file names are fixed now, so this is no longer needed.
        # It extracted the date information from the file name with a regular expression.
        # res = re.search(r'-(20[0-9][0-9][0-1][1-9][0-3][0-9])-', file_name)
        # if res:
        #     matchObj = res.group().replace('-', '')  # strip the dashes around the date
        #     print(matchObj)
        #     if matchObj == dateStr:  # if the date matches, add the full log path to the list
        #         file_list.append(os.path.join(dirPath, file_name))
        # **********************************************************************
return file_list
if __name__ == '__main__':
    dirPath = r'G:\EMA_v2\TerminalMessage'
dateStr = '20220813'
logFullPathList = get_log_path_by_date(dirPath, dateStr)
print(logFullPathList)
E:\song\cpc_log_v6_fastapi\logdata\__init__.py
E:\song\cpc_log_v6_fastapi\models\admin.py
from beanie import Document
from fastapi.security import HTTPBasicCredentials
from pydantic import BaseModel, EmailStr
class Admin(Document):
fullname: str
email: EmailStr
password: str
class Collection:
name = "admin"
class Config:
schema_extra = {
"example": {
"fullname": "Abdulazeez Abdulazeez Adeshina",
"email": "abdul@youngest.dev",
"password": "3xt3m#"
}
}
class AdminSignIn(HTTPBasicCredentials):
class Config:
schema_extra = {
"example": {
"username": "abdul@youngest.dev",
"password": "3xt3m#"
}
}
class AdminData(BaseModel):
fullname: str
email: EmailStr
class Config:
schema_extra = {
"example": {
"fullname": "Abdulazeez Abdulazeez Adeshina",
"email": "abdul@youngest.dev",
}
}
E:\song\cpc_log_v6_fastapi\models\device.py
from beanie import Document
from pydantic import BaseModel
class Device(Document):
name: str
ip: str
type: str
class Collection:
name = "Device"
class Config:
arbitrary_types_allowed = True
schema_extra = {
"example": {
"devices": [
{
"name": "MOR100",
"ip": "10.12.140.8",
"type": "TEST"
},
{
"name": "MOR100",
"ip": "10.12.140.8",
"type": "TEST"
}
]
}
}
class DeviceShortView(BaseModel):
name: str
ip: str
type:str
class Settings:
projection = {
"name": 1,
"ip": 1,
"type":1
}
E:\song\cpc_log_v6_fastapi\models\feedback.py
from typing import Optional, Any
from beanie import Document
from pydantic import BaseModel
class FeedBack(Document):
    userName: str        # user's name
    alias: str           # English name
    userId: str          # user id
    email: str           # user email
    organization: str    # user's division
    department: str      # user's department
    useDesc: str         # DL or IDL
    isInErrorList: int   # whether the issue appears in the error description list
    isHabit: int         # whether the precise query matches the user's habits
    isUnderstand: int    # whether the analysis result is understandable
    isHelp: int          # whether the solution was helpful
    message: str         # other comments
    # Name of the MongoDB collection
    class Collection:
        name = 'FeedBack'
class Config:
arbitrary_types_allowed = True
schema_extra = {
"example": {
"userName": "卓松" ,# 用户姓名
"alias": "xiaosongzhuo", # 英文姓名
"userId" : "K2207368" ,# 用户id
"email":"xiaosong.zhuo@auo.com" ,# 用户邮箱
"organization":"ML6kA0",# 用户的大部门
"department":"ML6KA3" ,# 用户部门
"useDesc":"IDL", # DL 还是IDL
"isInErrorList":1, # 您的问题是否在一场描述列表内
"isHabit":1, # 精准查询是否符合您的使用习惯
"isUnderstand":1 ,# 分析结果是否看得懂
"isHelp":1 ,# 解决方式否帮助到你
"message":"界面清晰明了,简单清楚", # 其他留言
}
}
E:\song\cpc_log_v6_fastapi\models\log.py
from typing import Optional, Any
from pydantic import BaseModel
class Response(BaseModel):
status_code: int
response_type: str
description: str
data: Optional[Any]
class Config:
schema_extra = {
"example": {
"status_code": 200,
"response_type": "success",
"description": "Operation successful",
"data": "Sample data"
}
}
E:\song\cpc_log_v6_fastapi\models\logsolution.py
from typing import Optional, Any
from beanie import Document
from pydantic import BaseModel
class Pic(BaseModel):
url: str
desp: str
class LogSolution(Document):
category: str
pic: list[Pic]
video: str
class Collection:
name = 'LogSolution'
class Config:
arbitrary_types_allowed = True
schema_extra = {
"example": {
"category": "Slot Mapping unmatch",
"pic": [
{
"url": "http://10.12.140.8:8008/data/pic/Slot_Mapping_unmatch/Slot_Mapping_unmatch_1.png",
"desp": "步骤一"
},
{
"url": "http://10.12.140.8:8008/data/pic/Slot_Mapping_unmatch/Slot_Mapping_unmatch_2.png",
"desp": "步骤二"
}
],
"video": "http://10.12.140.8:8008/data/video/Slot_Mapping_unmatch.mp4"
}
}
class UpdateLogSolutionModel(BaseModel):
category: str
pic: list[Pic]
video: str
class Collection:
name = "LogSolution"
class Config:
schema_extra = {
"example": {
"category": "Slot Mapping unmatch",
"pic": [
{
"url": "http://10.12.140.8:8008/data/pic/Slot_Mapping_unmatch/Slot_Mapping_unmatch_1.png",
"desp": "步骤一"
},
{
"url": "http://10.12.140.8:8008/data/pic/Slot_Mapping_unmatch/Slot_Mapping_unmatch_2.png",
"desp": "步骤二"
}
],
"video": "http://10.12.140.8:8008/data/video/Slot_Mapping_unmatch.mp4"
}
}
class Response(BaseModel):
status_code: int
response_type: str
description: str
data: Optional[Any]
class Config:
schema_extra = {
"example": {
"status_code": 200,
"response_type": "success",
"description": "Operation successful",
"data": "Sample data"
}
}
E:\song\cpc_log_v6_fastapi\models\logtype.py
from beanie import Document
from pydantic import BaseModel
class Category(BaseModel):
name: str
keyword: list[str]
class LogType(Document):
description: str
category: list[Category]
    # Name of the MongoDB collection
class Collection:
name = 'LogType'
class Config:
arbitrary_types_allowed = True
schema_extra = {
"example": {
"description": "卡匣异常描述",
"category": [
{
"name": "PPID页签栏缺失",
"keyword": ["PPID页签栏并无"]
},
{
"name": "Mapping异常",
"keyword": ["Mapping异常"]
},
{
"name": "Recipe列表为空",
"keyword": ["Mapping异常"]
},
{
"name":"制成能力不匹配",
"keyword":["IT的TCS系统直接退卡"]
}
]
}
}
E:\song\cpc_log_v6_fastapi\models\student.py
from typing import Optional, Any
from beanie import Document
from pydantic import BaseModel, EmailStr
class Student(Document):
fullname: str
email: EmailStr
course_of_study: str
year: int
gpa: float
class Config:
schema_extra = {
"example": {
"fullname": "Abdulazeez Abdulazeez Adeshina",
"email": "abdul@school.com",
"course_of_study": "Water resources engineering",
"year": 4,
"gpa": "3.76"
}
}
class UpdateStudentModel(BaseModel):
fullname: Optional[str]
email: Optional[EmailStr]
course_of_study: Optional[str]
year: Optional[int]
gpa: Optional[float]
class Collection:
name = "student"
class Config:
schema_extra = {
"example": {
"fullname": "Abdulazeez Abdulazeez",
"email": "abdul@school.com",
"course_of_study": "Water resources and environmental engineering",
"year": 4,
"gpa": "5.0"
}
}
class Response(BaseModel):
status_code: int
response_type: str
description: str
data: Optional[Any]
class Config:
schema_extra = {
"example": {
"status_code": 200,
"response_type": "success",
"description": "Operation successful",
"data": "Sample data"
}
}
E:\song\cpc_log_v6_fastapi\models\user.py
from typing import Optional, Any
from beanie import Document
from pydantic import BaseModel
class User(Document):
name: str
age: int
sex: str
class Config:
schema_extra = {
"example": {
"name": "Alice",
"age": 10,
"sex": "female",
}
}
class UpdateUserModel(BaseModel):
name: str
age: int
sex: str
class Collection:
name = "user"
class Config:
schema_extra = {
"example": {
"name": "Alice",
"age": 10,
"sex": "female",
}
}
class Response(BaseModel):
status_code: int
response_type: str
description: str
data: Optional[Any]
class Config:
schema_extra = {
"example": {
"status_code": 200,
"response_type": "success",
"description": "Operation successful",
"data": "Sample data"
}
}
E:\song\cpc_log_v6_fastapi\models\__init__.py
E:\song\cpc_log_v6_fastapi\routes\admin.py
from fastapi import Body, APIRouter, HTTPException
from passlib.context import CryptContext
from auth.jwt_handler import sign_jwt
from database.database import add_admin
from models.admin import Admin, AdminData, AdminSignIn
router = APIRouter()
hash_helper = CryptContext(schemes=["bcrypt"])
@router.post("/login")
async def admin_login(admin_credentials: AdminSignIn = Body(...)):
admin_exists = await Admin.find_one(Admin.email == admin_credentials.username)
if admin_exists:
password = hash_helper.verify(
admin_credentials.password, admin_exists.password)
if password:
return sign_jwt(admin_credentials.username)
raise HTTPException(
status_code=403,
detail="Incorrect email or password"
)
raise HTTPException(
status_code=403,
detail="Incorrect email or password"
)
@router.post("/new", response_model=AdminData)
async def admin_signup(admin: Admin = Body(...)):
admin_exists = await Admin.find_one(Admin.email == admin.email)
if admin_exists:
raise HTTPException(
status_code=409,
detail="Admin with email supplied already exists"
)
    admin.password = hash_helper.hash(admin.password)
new_admin = await add_admin(admin)
return new_admin
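# Request/response sketch (illustrative values taken from the AdminSignIn example schema):
#     POST /api/admin/login  {"username": "abdul@youngest.dev", "password": "3xt3m#"}
#     -> {"access_token": "<signed JWT>"}   (built by sign_jwt / token_response)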
E:\song\cpc_log_v6_fastapi\routes\feedback.py
from fastapi import APIRouter, Body
from models.feedback import FeedBack
from database.feedback import add_feedback_data
router = APIRouter()
@router.post("/", response_description="Log solution")
async def add_feedback(feedback:FeedBack):
res = await add_feedback_data(feedback)
return {'data':res}
E:\song\cpc_log_v6_fastapi\routes\log.py
from fastapi import APIRouter
from pydantic import BaseModel
from database.logsolution import find_log_solution
from database.logtype import find_all_logtype
from database.device import find_device_by_name
from logdata.logclassify import LogClassify
from logdata.logdataframe import get_log
router = APIRouter()
class LogParam(BaseModel):
"""
request model,查询log的一些请求参数
"""
curDev: str
category: str = None
cstid: str = None
portid: str = None
logDates: list[str]
class Config:
schema_extra = {
"example": {
"curDev": "MOR100",
"category": "Slot Mapping Unmatched",
"cstid": "AA1000",
"portid": "1",
"logDates": [
"2022-09-23", "2022-09-24", "2022-09-25"
]
}
}
async def assemble_logsolution(logs):
for log in logs:
temp = await find_log_solution(log['category'])
if temp:
log.update(temp)
if log.get('id'):
del (log["id"])
if log.get('revision_id'):
del (log["revision_id"])
else:
log.update({
"pic": [
],
"video": ""
})
return logs
# Fetch the log data for a given date
async def get_logs_by_date(dirPath, dateStr):
    # Get a list of objects containing the log information
logs = await get_log(dirPath, dateStr)
if not logs:
return []
    # Fetch all of the error types
logTypes = await find_all_logtype()
loghandler = LogClassify(logs, logTypes)
logs = loghandler.classify_log()
    logs = await assemble_logsolution(logs)
return logs
@router.post("/", response_description="Log solution")
async def get_logs(logparams: LogParam):
    # Pull the request parameters out of the request body
curDev = logparams.curDev
category = logparams.category
cstid = logparams.cstid
portid = logparams.portid
logDates = logparams.logDates
# ---------------------------------
res = await find_device_by_name(curDev)
print(res)
# -----------------------------------
    dirPath = r'G:\EMA_v2\TerminalMessage'
    # Normalize and sort the dates sent by the front end
    newLogDates = [i.replace('-', '') for i in logDates]
    newLogDates.sort()
    logArr = []
    for date in newLogDates:
        # Fetch the logs for this directory and date
        log = await get_logs_by_date(dirPath, date)
logArr.append({
'date': date,
'logs': log
})
return logArr
E:\song\cpc_log_v6_fastapi\routes\logsolution.py
from fastapi import APIRouter
from database.logsolution import find_all_logsolution_data,insert_logsolution_data,update_logsolution_data,delete_logsolution_data
from models.logsolution import LogSolution,UpdateLogSolutionModel
router = APIRouter()
@router.get('/',response_description='get all log solution')
async def find_all_logsolutions():
logs = await find_all_logsolution_data()
return logs
@router.post('/',response_description='change log solution')
async def insert_logsolutions(logsolution:LogSolution):
solution = await insert_logsolution_data(logsolution)
return solution
@router.put('/{id}',response_description='update log solution')
async def update_logsolution(id:str,logsolution:UpdateLogSolutionModel):
res = await update_logsolution_data(id,logsolution.dict())
return res
@router.delete('/{id}',response_description='delete log solution')
async def delete_logsolution(id:str):
res = await delete_logsolution_data(id)
return res
E:\song\cpc_log_v6_fastapi\routes\logtype.py
from fastapi import APIRouter
from database.logtype import find_all_logtype,insert_logtype_data,update_logtype_data,delete_logtype_date
from models.logtype import LogType
router = APIRouter()
@router.get("/", response_description="Log solution")
async def get_logs():
logTypes = await find_all_logtype()
return {
"status_code": 200,
"response_type": "success",
"description": "Users data retrieved successfully",
"data": logTypes
}
@router.post('/',response_description='change log solution')
async def insert_logtype(logtype:LogType):
res = await insert_logtype_data(logtype)
return res
@router.put('/{id}',response_description='update logtype')
async def update_logtype(id:str,logtype:LogType):
res = await update_logtype_data(id,logtype.dict())
return res
@router.delete('/{id}',response_description='delete logtype')
async def delete_logtype(id:str):
res = await delete_logtype_date(id)
return res
E:\song\cpc_log_v6_fastapi\routes\student.py
from fastapi import APIRouter, Body
from database.database import *
from models.student import *
router = APIRouter()
@router.get("/", response_description="Students retrieved", response_model=Response)
async def get_students():
students = await retrieve_students()
return {
"status_code": 200,
"response_type": "success",
"description": "Students data retrieved successfully",
"data": students
}
@router.get("/{id}", response_description="Student data retrieved", response_model=Response)
async def get_student_data(id: PydanticObjectId):
student = await retrieve_student(id)
if student:
return {
"status_code": 200,
"response_type": "success",
"description": "Student data retrieved successfully",
"data": student
}
return {
"status_code": 404,
"response_type": "error",
"description": "Student doesn't exist",
}
@router.post("/", response_description="Student data added into the database", response_model=Response)
async def add_student_data(student: Student = Body(...)):
new_student = await add_student(student)
return {
"status_code": 200,
"response_type": "success",
"description": "Student created successfully",
"data": new_student
}
@router.delete("/{id}", response_description="Student data deleted from the database")
async def delete_student_data(id: PydanticObjectId):
deleted_student = await delete_student(id)
if deleted_student:
return {
"status_code": 200,
"response_type": "success",
"description": "Student with ID: {} removed".format(id),
"data": deleted_student
}
return {
"status_code": 404,
"response_type": "error",
"description": "Student with id {0} doesn't exist".format(id),
"data": False
}
@router.put("{id}", response_model=Response)
async def update_student(id: PydanticObjectId, req: UpdateStudentModel = Body(...)):
updated_student = await update_student_data(id, req.dict())
if updated_student:
return {
"status_code": 200,
"response_type": "success",
"description": "Student with ID: {} updated".format(id),
"data": updated_student
}
return {
"status_code": 404,
"response_type": "error",
"description": "An error occurred. Student with ID: {} not found".format(id),
"data": False
}
E:\song\cpc_log_v6_fastapi\routes\user.py
from fastapi import APIRouter, Body
from database.user import add_user,update_user_data
from models.user import Response,User,UpdateUserModel
router = APIRouter()
@router.get("/", response_description="User retrieved", response_model=Response)
async def get_users(name:str):
user = {"name":"Bruce"}
return {
"status_code": 200,
"response_type": "success",
"description": "Users data retrieved successfully",
"data": user
}
@router.post("/", response_description="Student data added into the database", response_model=Response)
async def add_user_data(user: User = Body(...)):
new_user = await add_user(user)
return {
"status_code": 200,
"response_type": "success",
"description": "Student created successfully",
"data": new_user
}
@router.put("/{name}",response_description="修改用户信息",response_model=Response)
async def update_user(name:str,req:UpdateUserModel=Body(...)):
update_user = await update_user_data(name,req.dict())
if update_user:
return {
"status_code": 200,
"response_type": "success",
"description": "Student created successfully",
"data": update_user
}
return {
"status_code": 404,
"response_type": "error",
"description": "An error occurred. User with name: {} not found".format(name),
"data": False
}
E:\song\cpc_log_v6_fastapi\routes\__init__.py
E:\song\cpc_log_v6_fastapi\test\le00.py
from typing import Optional
from pydantic import BaseModel
from beanie import Document, Indexed, init_beanie
import asyncio, motor
class Category(BaseModel):
name: str
description: str
class Product(Document):
name: str # You can use normal types just like in pydantic
description: Optional[str] = None
price: Indexed(float) # type: ignore # You can also specify that a field should correspond to an index
category: Category # You can include pydantic models as well
# Beanie is fully asynchronous, so we will access it from an async function
async def example():
# Beanie uses Motor under the hood
client = motor.motor_asyncio.AsyncIOMotorClient("mongodb://mongodb:27017") # type: ignore
# Init beanie with the Product document class
await init_beanie(database=client.test, document_models=[Product])
chocolate = Category(name="Chocolate", description="A preparation of roasted and ground cacao seeds.")
# Beanie documents work just like pydantic models
tonybar = Product(name="Tony's", price=5.95, category=chocolate)
# And can be inserted into the database
await tonybar.insert()
# You can find documents with pythonic syntax
product = await Product.find_one(Product.price < 10)
# And update them
await product.set({Product.name:"Gold bar"}) # type: ignore
asyncio.run(example())
E:\song\cpc_log_v6_fastapi\test\le01.py
dataStr = "2022-09-27"
print(dataStr.replace('-',''))
E:\song\cpc_log_v6_fastapi\test\le02.py
li = ['20220924', '20220817', '20220710']
res = li.sort()
print(li)
print(res)
E:\song\cpc_log_v6_fastapi\test\le03_beanie.py
import asyncio
from typing import Optional
from motor.motor_asyncio import AsyncIOMotorClient
from pydantic import BaseModel
from beanie import Document, Indexed, init_beanie
class Category(BaseModel):
name: str
description: str
class Product(Document):
name: str # You can use normal types just like in pydantic
description: Optional[str] = None
price: Indexed(float) # You can also specify that a field should correspond to an index
category: Category # You can include pydantic models as well
# Beanie is fully asynchronous, so we will access it from an async function
async def example():
# Beanie uses Motor async client under the hood
client = AsyncIOMotorClient("mongodb://localhost:27017/?readPreference=primary&appname=MongoDB%20Compass&directConnection=true&ssl=false")
# Initialize beanie with the Product document class
await init_beanie(database=client['test'], document_models=[Product])
chocolate = Category(name="Chocolate", description="A preparation of roasted and ground cacao seeds.")
# Beanie documents work just like pydantic models
tonybar = Product(name="Tony's", price=5.95, category=chocolate)
# And can be inserted into the database
res = await tonybar.insert()
    # print(res)  # a successful insert returns the inserted object
# res1 = await tonybar.create()
# print(res1)
# You can find documents with pythonic syntax
# product = await Product.find_one(Product.price < 10)
# And update them
# await product.set({Product.name: "Gold bar"})
if __name__ == "__main__":
asyncio.run(example())
E:\song\cpc_log_v6_fastapi\test\le04_logtype.py
import asyncio
from beanie import Document, init_beanie
from motor.motor_asyncio import AsyncIOMotorClient
from pydantic import BaseModel
class LogType(Document):
category: str
    # Name of the MongoDB collection
class Collection:
name = 'LogType'
class Pic(BaseModel):
url: str
desp: str
class LogSolution(Document):
category: str
pic: list[Pic]
video: str
class Collection:
name = 'LogSolution'
# Beanie is fully asynchronous, so we will access it from an async function
async def example(logArr):
# Beanie uses Motor async client under the hood
client = AsyncIOMotorClient(
"mongodb://localhost:27017/?readPreference=primary&appname=MongoDB%20Compass&directConnection=true&ssl=false")
# Initialize beanie with the Product document class
await init_beanie(database=client['cpc'], document_models=[LogType, LogSolution])
async def assmble_log_logtype(logs):
logtypes = await LogType.all().to_list()
for log in logs:
log["category"] = "unclassified" # 首先都是设置成unclassified
for logtype in logtypes: # 然后用logtype去检测log,确定log类型,如果有其对应的类型就为其设置对应的类型
if logtype.category in log["problem"]:
log["category"] = logtype.category
break # 设置完其对于的类型,就跳出循环,不要在logtype去检测该log的类型了
return logs
async def assmble_log_logsolution(logs):
for log in logs:
logsolution = await LogSolution.find_one({"category": log["category"]})
if logsolution is None:
                # If there is no matching solution, set these fields to an empty list / empty string
log.update({
"pic": [
],
"video": ""
})
break
log.update(logsolution)
del(log["id"])
del(log["revision_id"])
return logs
print("======================================")
res = await assmble_log_logtype(logArr)
for log in res:
print(log)
print("***************************************")
res1 = await assmble_log_logsolution(res)
for log in res1:
print(log)
if __name__ == "__main__":
logs = [{
"problem": "分析结果:Port '1第1层Port '1第2层的片子 ' Slot Mapping unmatch "
},
{
"problem": "分析结果:Port '1第1层Port '1第2层的片子 ' Slot Mapping unmatch "
},
{
"problem": "分析结果:Port '1第1层Port '1第2层的片子 ' Slot Mapping "
}]
asyncio.run(example(logs))
E:\song\cpc_log_v6_fastapi\test\le05_logtype_detail.py
import asyncio
from beanie import Document, init_beanie
from motor.motor_asyncio import AsyncIOMotorClient
from pydantic import BaseModel
class Category(BaseModel):
name: str
keyword: list[str]
class LogType(Document):
description: str
category: list[Category]
    # Name of the MongoDB collection
class Collection:
name = 'LogType'
class Config:
arbitrary_types_allowed = True
schema_extra = {
"example": {
"description": "卡匣异常描述",
"category": [
{
"name": "PPID页签栏缺失",
"keyword": ["PPID页签栏并无"]
},
{
"name": "Mapping异常",
"keyword": ["Mapping异常"]
},
{
"name": "Recipe列表为空",
"keyword": ["Mapping异常"]
},
{
"name": "制成能力不匹配",
"keyword": ["IT的TCS系统直接退卡"]
}
]
}
}
class LogtypeShortView(BaseModel):
description: str
category: list[str]
class Settings:
projection = {
"description": 1,
"category": "$category.name"
}
class LogClassify:
def __init__(self, loglist, logtypelist):
self.loglist = loglist
self.logtypelist = logtypelist
    # Keyword detection
def __keyword_detect(self, errorLogItem, categoryItem):
problemDesp = ''.join(i for i in errorLogItem.get("problem"))
for keyword in categoryItem.keyword:
            if keyword in problemDesp:  # a keyword match means this category applies; return True and stop checking
return True
return False
    # Category detection
def __category_detect(self, errorLogItem, logtypeItem):
for categoryItem in logtypeItem.category:
            # If a keyword from this categoryItem is detected, set the log's 'category' to categoryItem.name and return without checking further
res = self.__keyword_detect(errorLogItem, categoryItem)
if res:
errorLogItem['category'] = categoryItem.name
return True
return False
def classify_log(self):
for errorLogItem in self.loglist:
errorLogItem['category'] = 'unclassified'
for logtypeItem in self.logtypelist:
res = self.__category_detect(errorLogItem, logtypeItem)
if res:
break
return self.loglist
# Beanie is fully asynchronous, so we will access it from an async function
async def example(errorLog):
# Beanie uses Motor async client under the hood
client = AsyncIOMotorClient(
"mongodb://localhost:27017/?readPreference=primary&appname=MongoDB%20Compass&directConnection=true&ssl=false")
# Initialize beanie with the Product document class
await init_beanie(database=client['cpc'], document_models=[LogType])
    async def find_all_logtype() -> list[LogType]:
log = await LogType.all().to_list()
return log
print("======================================")
allLogtypes = await find_all_logtype()
loghander = LogClassify(errorLog, allLogtypes)
newErrorLog = loghander.classify_log()
for ele in newErrorLog:
print(ele)
if __name__ == "__main__":
errorLog = [{
"problem": "分析结果:[Port2]卡匣[AA0147]所使用的PPID为 '0017',但CPC的PPID页签栏里并无'0017',故被CPC退卡"
}, {
"problem": "分析结果:故被CPC退卡"
}, {
"problem": '分析结果:[Port3]卡匣[FA0013]第3层的片子发生Mapping异常'
}, {
"problem": '分析结果:[Port1]卡匣[AA0348]被IT的TCS系统直接退卡,TCS系统提示机台的制程能力不匹配'
}]
asyncio.run(example(errorLog))
E:\song\cpc_log_v6_fastapi\test\le06_正则表达式提取port口.py
import re
def exact_port(logs):
for log in logs:
rule = r'(\[Port\d{1}\])|(卡匣\[\w{2}\d{4}\])|(第.层)'
pattern = re.compile(rule)
match = pattern.findall(log['problem'])
new_match = []
if match:
for ele in match:
for i in ele:
if i != '':
new_match.append(i)
            # Remove duplicate elements while preserving order
new_match = [i for n, i in enumerate(new_match) if i not in new_match[:n]]
log['ports'] = new_match
return logs
if __name__ == '__main__':
logs = [
{
"problem": """
2022/10/12 00:39:30.459 分析结果:[Port2]卡匣[AA0147]所使用的PPID为 '0017',但CPC的PPID页签栏里并无'0017',故被CPC退卡
2022/10/12 00:39:30.459 解决方式:请找机主丁相宇/孙阳(662256/662260)在CPC上新建PPID'0017'
"""
}, {
"problem": """
2022/10/12 02:15:42.444 分析结果:[Port3]卡匣[FA0013]第3层的片子发生Mapping异常
2022/10/12 02:15:42.444 解决方式:请找机主丁相宇/孙阳(662256/662260)现场确认[Port3]卡匣[FA0013]第3层的片子的实际状况
2022/10/12 02:15:42.444 分析结果:[Port3]卡匣[FA0013]第7层的片子发生Mapping异常
2022/10/12 02:15:42.444 解决方式:请找机主丁相宇/孙阳(662256/662260)现场确认[Port3]卡匣[FA0013]第7层的片子的实际状况
2022/10/12 02:15:42.444 分析结果:[Port3]卡匣[FA0013]第9层的片子发生Mapping异常
2022/10/12 02:15:42.444 解决方式:请找机主丁相宇/孙阳(662256/662260)现场确认[Port3]卡匣[FA0013]第9层的片子的实际状况
2022/10/12 02:15:42.444 分析结果:[Port3]卡匣[FA0013]第21层的片子发生Mapping异常
2022/10/12 02:15:42.444 解决方式:请找机主丁相宇/孙阳(662256/662260)现场确认[Port3]卡匣[FA0013]第21层的片子的实际状况
"""
}, {
"problem": """
2022/10/12 03:11:35.683 分析结果:[Port4]卡匣[AA0311]机台[ANMOR110]所使用的Recipe为 '0007',但机台[ANMOR110]的Recipe列表为空,故被CPC退卡
2022/10/12 03:11:35.683 解决方式:请找机主丁相宇/孙阳(662256/662260)重启机台[ANMOR110],与CPC断线重连,重新触发机台上报Recipe列表给CPC
"""
}, {
"problem": """ 2022/10/12 11:27:00.942 分析结果:[Port1]卡匣[AA0348]被IT的TCS系统直接退卡,TCS系统提示机台的制程能力不匹配
2022/10/12 11:27:00.942 解决方式:请找机主丁相宇/孙阳(662256/662260)查询OPI上[Port1]卡匣[AA0348]的站点讯息是否正确"""
}
]
res = exact_port(logs)
print("=================")
for i in res:
print(i["ports"])
E:\song\cpc_log_v6_fastapi\test\le07_mongodb_filter.py
import asyncio
from beanie import Document, init_beanie
from motor.motor_asyncio import AsyncIOMotorClient
from pydantic import BaseModel
class Category(BaseModel):
name: str
keyword: list[str]
class LogType(Document):
description: str
category: list[Category]
    # Name of the MongoDB collection
class Collection:
name = 'LogType'
class Config:
arbitrary_types_allowed = True
schema_extra = {
"example": {
"description": "卡匣异常描述",
"category": [
{
"name": "PPID页签栏缺失",
"keyword": ["PPID页签栏并无"]
},
{
"name": "Mapping异常",
"keyword": ["Mapping异常"]
},
{
"name": "Recipe列表为空",
"keyword": ["Mapping异常"]
},
{
"name": "制成能力不匹配",
"keyword": ["IT的TCS系统直接退卡"]
}
]
}
}
class LogtypeShortView(BaseModel):
description: str
category: list[str]
class Settings:
projection = {
"description": 1,
"category": "$category.name"
}
# Beanie is fully asynchronous, so we will access it from an async function
async def example():
# Beanie uses Motor async client under the hood
client = AsyncIOMotorClient(
"mongodb://localhost:27017/?readPreference=primary&appname=MongoDB%20Compass&directConnection=true&ssl=false")
# Initialize beanie with the Product document class
await init_beanie(database=client['cpc'], document_models=[LogType])
    async def find_all_logtype() -> list[LogType]:
log = await LogType.all().project(LogtypeShortView).to_list()
return log
print("======================================")
allLogtypes = await find_all_logtype()
print(allLogtypes)
if __name__ == "__main__":
asyncio.run(example())
E:\song\cpc_log_v6_fastapi\test\le08_对象的删除.py
per = {
'name': 'alice',
'age': 19
}
if per.get('age'):
del (per["age"])
print(per)
E:\song\cpc_log_v6_fastapi\test\le09_mongodb_device.py
import asyncio
from beanie import Document, init_beanie
from motor.motor_asyncio import AsyncIOMotorClient
from pydantic import BaseModel
class Category(BaseModel):
name: str
keyword: list[str]
class LogType(Document):
description: str
category: list[Category]
    # Name of the MongoDB collection
class Collection:
name = 'LogType'
class Config:
arbitrary_types_allowed = True
schema_extra = {
"example": {
"description": "卡匣异常描述",
"category": [
{
"name": "PPID页签栏缺失",
"keyword": ["PPID页签栏并无"]
},
{
"name": "Mapping异常",
"keyword": ["Mapping异常"]
},
{
"name": "Recipe列表为空",
"keyword": ["Mapping异常"]
},
{
"name": "制成能力不匹配",
"keyword": ["IT的TCS系统直接退卡"]
}
]
}
}
class Device(Document):
name: str
ip: str
type: str
class Collection:
name = "Device"
class Config:
arbitrary_types_allowed = True
schema_extra = {
"example": {
"devices": [
{
"name": "MOR100",
"ip": "10.12.140.8",
"type": "TEST"
},
{
"name": "MOR100",
"ip": "10.12.140.8",
"type": "TEST"
}
]
}
}
class DeviceShortView(BaseModel):
name: str
ip: str
type:str
class Settings:
projection = {
"name": 1,
"ip": 1,
"type":1
}
# Beanie is fully asynchronous, so we will access it from an async function
async def example():
# Beanie uses Motor async client under the hood
client = AsyncIOMotorClient(
"mongodb://localhost:27017/?readPreference=primary&appname=MongoDB%20Compass&directConnection=true&ssl=false")
# Initialize beanie with the Product document class
await init_beanie(database=client['cpc'], document_models=[Device])
    async def find_all_devices() -> list[Device]:
# log = await Device.all().project(LogtypeShortView).to_list()
log = await Device.all().to_list()
return log
async def find_device_by_name(name: str) -> Device:
device = await Device.find_one({"name": name}).project(DeviceShortView)
return device
print("======================================")
device = await find_device_by_name("MOR100")
print(device.dict())
if __name__ == "__main__":
asyncio.run(example())
E:\song\cpc_log_v6_fastapi\test\le10_readlog_pandas.py
import os
import re
import pandas as pd
# Show all columns
pd.set_option('display.max_columns', None)
# Show all rows
pd.set_option('display.max_rows', None)
# Set the displayed value width to 100 (default is 50)
pd.set_option('max_colwidth', 100)
def __convert_log_to_df(logFullPathList):
    """Read a batch of log files and convert them into a single pandas DataFrame.
    logFullPathList -- a list of full log file paths
    Return: a DataFrame
    """
    df_list = []
    for file in logFullPathList:
        df_list.append(pd.read_csv(file, header=None, encoding='utf-8', engine='python', sep=r'\.[0-9]{3}\s{2}'))
    df = pd.concat(df_list)
    # Strip the seconds from the timestamp so it is only precise to the minute; that way
    # lines belonging to the same log event end up in the same group.
    # df[0] = df[0].str.rsplit(':', n=1, expand=True).drop(columns=[1])
    # df.columns = ['time', 'content']
    print(df)
    return df
def __get_ports(contentArr):
    """
    Input: an array of problem strings
    Purpose: extract the port identifiers from the problem strings with a regular expression and de-duplicate them
    Output: an array of ports
    """
    # rule = r'([P|p]ort \'[1-6]{1}第\d{1,2}层)|([P|p]ort \[[1-6]{1})'
    rule = r'(\[Port\d{1}\])|(卡匣\[\w{2}\d{4}\])|(第.层)'
pattern = re.compile(rule)
match = pattern.findall(''.join(i for i in contentArr))
if match:
new_match = []
for ele in match:
for i in ele:
if i != '':
new_match.append(i)
        # Remove duplicate elements while preserving order
new_match = [i for n, i in enumerate(new_match) if i not in new_match[:n]]
return new_match
return []
def __split_problem_solution(df):
    """Separate the '分析结果' (analysis result) and '解决方式' (solution) lines into two arrays
    and return them together as one object.
    df -- a Series of content lines that share the same timestamp
    Return: a dict with 'problem', 'ports' and 'solution' keys
    """
dfObj = {}
df_problem = df[df.str.contains('分析结果')].values.tolist()
dfObj['problem'] = df_problem
dfObj['ports'] = __get_ports(df_problem)
df_solution = df[df.str.contains('解决方式')].values.tolist()
dfObj['solution'] = df_solution
return dfObj
def __sorted_log_list_by_time(logDf):
    """Group the log DataFrame by time, split each group into analysis results and solutions,
    and return the groups as a list of objects.
    logDf -- the DataFrame produced by __convert_log_to_df
    Return: a list of log objects
    """
    # Group rows that share the same (minute-precision) timestamp
grouped = logDf.groupby('time')
logList = []
for name, group in grouped:
obj_temp = __split_problem_solution(group['content'])
obj_temp['time'] = name
logList.append(obj_temp)
return logList
# async def get_log(dirPath, dateStr):
#     # First collect the log file names that match the requested date
#     logFullPathList = get_log_path_by_date(dirPath, dateStr)
#     # An empty list means there is no error log file for that date
#     if not logFullPathList:
#         return []
#     # Convert the log files into a DataFrame
#     logDf = __convert_log_to_df(logFullPathList)
#     # Group the rows of the DataFrame by time
#     log_list = __sorted_log_list_by_time(logDf)
#     return log_list
if __name__ == '__main__':
    dirpath = r'G:\EMA_v2\TerminalMessage'
dateStr = '20220813'
log_fullpath_list =[]
for path in os.listdir(dirpath):
path=os.path.join(dirpath, path)
log_fullpath_list.append(os.path.join(dirpath, path))
logDf = __convert_log_to_df(log_fullpath_list)
# log_list = __sorted_log_list_by_time(logDf)
# print(log_list)
#
# for log in log_list:
# print(log)
# print('----------------')
E:\song\cpc_log_v6_fastapi\test\le11_read_log.py
import re
import json
def read_log(file_path):
log = {}
time_list = []
content_list = []
last_time = None
time_reg = r'((([0-9]{3}[1-9]|[0-9]{2}[1-9][0-9]{1}|[0-9]{1}[1-9][0-9]{2}|[1-9][0-9]{3})\/(((0[13578]|1[02])\/(0[1-9]|[12][0-9]|3[01]))|((0[469]|11)\/(0[1-9]|[12][0-9]|30))|(02-(0[1-9]|[1][0-9]|2[0-8]))))|((([0-9]{2})(0[48]|[2468][048]|[13579][26])|((0[48]|[2468][048]|[3579][26])00))-02-29))\s+([0-1]?[0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])'
pattern = re.compile(time_reg)
with open(file_path, 'r', encoding='utf-8') as f:
lines = f.readlines()
for line in lines:
content_list.append(line)
res = re.search(pattern, line)
if res:
current_time = res.group()
                # last_time is None means this is the first entry, so just continue
                if last_time is None:
                    last_time = current_time
                    continue
                # last_time differs from current_time, which means a new log entry has started
                if last_time != current_time and last_time is not None:
                    # First pop the last line, because it is the opening line of the new log entry
                    content_list.pop()
                    # Then store the previous log entry
                    log[last_time] = content_list.copy()
                    # Clear the buffer
                    content_list.clear()
                    # Add the opening line of the new log entry
                    content_list.append(line)
                    # Point last_time at current_time
                    last_time = current_time
    # The loop has finished reading; store whatever content remains
log[last_time] = content_list.copy()
return log
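# Shape of the returned dict (illustration): each key is a second-level timestamp and each value
# is the list of raw lines that belong to that log entry, e.g.
#     {"2022/11/06 08:15:42": ["2022/11/06 08:15:42.123 ...first line...", "...continuation lines..."]}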
if __name__ == '__main__':
    filePath = r'G:\EMA_v2\TerminalMessage\TerminalMessage.log-20221106-1.log'
res = read_log(filePath)
data = json.dumps(res, indent=4,ensure_ascii=False, sort_keys=False,separators=(',', ':'))
print(data)