Custom web3.py ETH filter
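The listener below appears to target web3.py v5.x (it relies on contract.web3.codec and the web3._utils helpers). The imports follow directly from the calls the code makes. Everything else in this setup block (the RPC endpoint, the contract ABIs and addresses, and the shared state dictionaries) is not part of the original snippet, so the values here are placeholders to swap for your own project's configuration.

import json
import time
import logging
from typing import Union

from web3 import Web3
from web3.types import LogReceipt, EventData
from web3._utils.events import get_event_data
from web3._utils.filters import construct_event_topic_set

logger = logging.getLogger(__name__)

# --- assumed setup, placeholders only ---
network = 'mainnet'                                      # network tag stored with each event
w3 = Web3(Web3.HTTPProvider('http://127.0.0.1:8545'))    # your node's RPC endpoint

# contract addresses and ABIs come from your own deployment;
# HEROMINT_ABI / MARKET_ABI / INHERIT_ABI stand in for your ABI definitions
Contract_Heromint_Address = '0x...'
Contract_Market_Address = '0x...'
Contract_Inherit_Address = '0x...'
Contract_NFT = w3.eth.contract(address=Contract_Heromint_Address, abi=HEROMINT_ABI)
Contract_Market = w3.eth.contract(address=Contract_Market_Address, abi=MARKET_ABI)
Contract_Inherit = w3.eth.contract(address=Contract_Inherit_Address, abi=INHERIT_ABI)

# shared state used by the functions below
Event_Filters = {}                                       # contract address -> watched events
Topic_Map = {}                                           # contract address -> topic0 -> event name
Unit_Event_Filters = {'filter': None, 'polled': False}   # the single shared eth filter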


Event_Filters[Contract_Heromint_Address] = {
    'contract': Contract_NFT,
    'events': {
        'HeroCreated': {'filter': None, 'polled': False},
    }
}
Event_Filters[Contract_Market_Address] = {
    'contract': Contract_Market,
    'events': {
        'SaleCreated': {'filter': None, 'polled': False},
        'SaleSucceed': {'filter': None, 'polled': False},
        'SaleCancelled': {'filter': None, 'polled': False},
    }
}
Event_Filters[Contract_Inherit_Address] = {
    'contract': Contract_Inherit,
    'events': {
        'Transfer': {'filter': None, 'polled': False},
    }
}


# Compute the topic signatures for a contract event.
def format_contract_topics(contract, event_name):
    event_abi = contract.events[event_name]._get_event_abi()
    abi_codec = contract.web3.codec
    topic_set = construct_event_topic_set(event_abi, abi_codec)
    return topic_set
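
# For reference: the first entry of the topic set returned above is the keccak-256 hash
# of the event signature. For an ERC-721 Transfer event, for example:
#   Web3.keccak(text="Transfer(address,address,uint256)").hex()
#   == '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef'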


# Look up the last synced filter id and block number for this network.
# Stub for now: nothing has been persisted, so start from scratch.
def get_last_blocknumber(network):
    return None, 0


# Decode a raw log entry into structured event data.
def log_receipt_to_event_data(log: LogReceipt) -> Union[LogReceipt, EventData]:
    event = log
    try:
        address = log['address']
        topics = log['topics']
        contract_map = Topic_Map.get(address, None)
        if contract_map and len(topics) > 0:
            contract = contract_map['contract']
            # topic0 arrives as raw bytes; convert it to a hex string for the lookup
            topic = topics[0].hex()
            if topic in contract_map['topics']:
                ev = contract_map['topics'][topic]
                event_abi = contract.events[ev]._get_event_abi()
                abi_codec = contract.web3.codec
                # decode the raw log into detailed event data using the contract codec and the event ABI
                event = get_event_data(abi_codec, event_abi, log)
    except Exception as e:
        logger.exception(e)
    return event
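
# For reference: get_event_data() returns an AttributeDict with the decoded 'args' and
# 'event' fields plus the original log metadata ('logIndex', 'transactionIndex',
# 'transactionHash', 'address', 'blockHash', 'blockNumber'), which is exactly the shape
# block_event_to_db_event() below relies on.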


# Convert the decoded event data into a record the database can store.
def block_event_to_db_event(event, network):
    db_event = {
        'args': None,
        'event': None,
        'token_id': None,
        'from_': None,
        'to_': None,
        'price': None,
        'network': network,
        'logIndex': event['logIndex'],
        'transactionIndex': event['transactionIndex'],
        'transactionHash': event['transactionHash'].hex(),  # hex bytes -> string
        'address': event['address'],
        'blockHash': event['blockHash'].hex(),
        'blockNumber': event['blockNumber']
    }
    tokenId = None
    if 'event' in event:
        db_event['event'] = event['event']
    if 'args' in event:
        # store the raw args as JSON; tokenId is converted from a big integer to a
        # string so it matches the database schema
        db_event['args'] = json.dumps(dict(event['args']))
        args = event['args']
        if 'tokenId' in args:
            tokenId = f"{args['tokenId']}"
            db_event['token_id'] = tokenId
        if event['event'] == 'Transfer':
            db_event['from_'] = args.get('from', None)
            db_event['to_'] = args.get('to', None)
        elif event['event'] == 'SaleCreated':
            db_event['from_'] = args.get('seller', None)
            db_event['price'] = args.get('price', None)
        elif event['event'] == 'UnitCreated':
            db_event['to_'] = args.get('owner', None)
        elif event['event'] == 'SaleSucceed':
            db_event['from_'] = args.get('seller', None)
            db_event['to_'] = args.get('buyer', None)
            db_event['price'] = args.get('price', None)
        elif event['event'] == 'SaleCancelled':
            db_event['from_'] = args.get('seller', None)
    return db_event


# Handle a log entry received from the filter.
def handle_log_events(log_receipt: LogReceipt, filter_id: str):
    try:
        # decode the raw log into event data
        event = log_receipt_to_event_data(log_receipt)

        # normalise hex byte fields and event-specific arguments
        db_event = block_event_to_db_event(event, network)

        logger.info(f'save nft events: {json.dumps(db_event)}')
        # record the event so already-synced logs are not processed twice
        new_event = save_last_event(db_event, filter_id=filter_id, network=network)

        if new_event:
            # only dispatch events that have not been saved before
            dispatch_nft_event(event)
    except Exception as e:
        logger.exception(e)


# Dispatch the event to a handler according to its type.
def dispatch_nft_event(event):
    try:
        if 'event' in event:
            event_name = event['event']
            if event_name == 'UnitCreated':
                pass
            else:
                logger.info(f'unhandled event: {event["transactionHash"].hex()}, {event["logIndex"]}')
    except Exception as e:
        logger.exception(e)


# Initialise the contract event filter.
# A single filter listens for events from several contracts at once.
def setup_event_filters():
    filter_id, blocknumber = get_last_blocknumber(network)
    topics = []

    for c_addr in Event_Filters.keys():
        ev_filter = Event_Filters[c_addr]
        contract = ev_filter['contract']
        Topic_Map[c_addr] = {
            'contract': contract,
            'topics': {}
        }

        # map each event's topic signature back to its name, per contract address
        for ev in ev_filter['events'].keys():
            # compute the topic signature of each event
            ev_topics = format_contract_topics(contract, ev)
            if len(ev_topics) > 0:
                topic = ev_topics[0]
                topics.append(topic)
                Topic_Map[c_addr]['topics'][topic] = ev

    Unit_Event_Filters['filter'] = w3.eth.filter({
        'fromBlock': 1,
        'address': [Contract_Heromint_Address, Contract_Market_Address, Contract_Inherit_Address],  # the three contract addresses to watch
        'topics': [topics],  # the specific event topics of those contracts
    })
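
# Note on the filter parameters: eth_newFilter topics are position based, and a nested
# list at a position matches any value in it. Passing [topics] therefore means
# "topic0 is any of the collected event signatures", no matter which of the watched
# contracts emitted the log.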


# Run the contract event polling loop.
def loop_contract_event():
    logger.info('contract event loop started')
    while True:
        try:
            ev_filter = Unit_Event_Filters['filter']
            if ev_filter:
                ev_polled = Unit_Event_Filters['polled']
                if ev_polled:
                    # incrementally fetch entries that arrived since the last poll
                    log_new_loop(ev_filter)
                else:
                    # a freshly created filter first pulls all matching historical
                    # entries; only then is it marked as polled
                    log_all_loop(ev_filter)
                    Unit_Event_Filters['polled'] = True
            else:
                setup_event_filters()
        except ValueError as ve:
            logger.exception(ve)
            # if 'code' in ve and ve['code'] == -32000:
            #     setup_event_filters()
        except Exception as e:
            logger.exception(e)

        time.sleep(5)


def log_all_loop(event_filter):
    # pull every entry the filter matches (initial backfill)
    for event in event_filter.get_all_entries():
        handle_log_events(event, filter_id=event_filter.filter_id)


def log_new_loop(event_filter):
    # pull only the entries that arrived since the previous poll
    for event in event_filter.get_new_entries():
        handle_log_events(event, filter_id=event_filter.filter_id)


def save_last_event(db_event, filter_id, network):
    # Deduplicate against events already stored in MongoDB. The database calls are
    # currently disabled, so this always reports the event as already seen.
    new_event = False
    # last_event = db.blockevents.find_one({
    #     'transactionHash': db_event['transactionHash'],
    #     'logIndex': db_event['logIndex'],
    #     'network': network
    # })
    #
    # if not last_event:
    #     db_event['created_at'] = datetime.now()
    #     db_event['filter_id'] = filter_id
    #     # db.blockevents.insert_one(db_event)
    #     new_event = True
    # print("db_event", db_event, filter_id, network)

    return new_event
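
The original code stops at the function definitions. A minimal way to run the listener, assuming the placeholder setup sketched at the top of this post, would be:

if __name__ == '__main__':
    # loop_contract_event() builds the filter on its first pass (via setup_event_filters)
    # and then polls it every 5 seconds.
    loop_contract_event()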