常用脚本总结
OpenVPN Script
openvpn_op.sh
#!/bin/bash
# openvpn_op.sh — manage the OpenVPN 3 session for client.ovpn.
#
# Usage:
#   openvpn_op.sh            show session stats/list and clean up stale sessions
#   openvpn_op.sh start      log in via the expect helper and start the session
#   openvpn_op.sh stop       disconnect the session
#   openvpn_op.sh restart    restart the session
#
# Requires: ~/.ssh/.security exporting short_username / ad4_password,
# and login_expect.exp in the current directory.
source ~/.ssh/.security
source ~/.bash_aliases
source ~/.bashrc

op="${1:-}"

case "$op" in
  "")
    openvpn3 session-stats --config client.ovpn
    openvpn3 sessions-list
    openvpn3 session-manage --cleanup
    ;;
  stop)
    openvpn3 session-manage --config client.ovpn --disconnect
    ;;
  start)
    echo "start openvpn"
    # credentials quoted so they survive special characters / spaces
    ./login_expect.exp session-start "${short_username}" "${ad4_password}"
    sleep 3
    # remove stale lock files left behind by a previous session
    rm -rf /tmp/.acs_*_lock
    ;;
  restart)
    echo "restart"
    openvpn3 session-manage --config client.ovpn --restart
    rm -rf /tmp/.acs_*_lock
    ;;
  *)
    echo "unknown operation: $op" >&2
    exit 1
    ;;
esac
monitor_openvpn.sh
#!/bin/bash
# monitor_openvpn.sh — cron-style health check for the OpenVPN 3 session.
# Logs stats to logs/monitor_openvpn_YYYY_MM_DD.log; if the session is gone
# it starts it, and if the session exists but the tun0 route is missing it
# restarts it via openvpn_op.sh.
source ~/.bash_aliases
source ~/.bashrc

TZ='Asia/Shanghai'; export TZ

BASEDIR=$(cd "$(dirname "$0")" && pwd)
cd "$BASEDIR" >/dev/null || exit 1

# ensure the log directory exists so tee does not fail on first run
mkdir -p "$BASEDIR/logs"
log="$BASEDIR/logs/monitor_openvpn_$(date +'%Y_%m_%d').log"

echo "-----------------------------------" | tee -a "$log"
echo "[$(date +'%Y-%m-%d %H:%M:%S')]Begin Monitor Status" | tee -a "$log"
echo "-----------------------------------" | tee -a "$log"

openvpn3 session-stats --config client.ovpn | tee -a "$log"
openvpn3 sessions-list | tee -a "$log"

session=$(openvpn3 session-stats --config client.ovpn)
rc=$?
# exit status > 1 means the session itself is gone (1 can be a stats error)
if [ "$rc" -gt 1 ]; then
  echo "OpenVpn is disconnected" | tee -a "$log"
  "$BASEDIR/openvpn_op.sh" start | tee -a "$log"
elif ! ip route show | grep -q tun0; then
  echo "OpenVPN is running but tunnel is missing , need restart" | tee -a "$log"
  "$BASEDIR/openvpn_op.sh" stop | tee -a "$log"
  "$BASEDIR/openvpn_op.sh" start | tee -a "$log"
fi

echo "-----------------------------------" | tee -a "$log"
echo "END " | tee -a "$log"
echo "-----------------------------------" | tee -a "$log"
login_expect.exp
#!/usr/bin/expect
# login_expect.exp — drive an interactive "openvpn3 <action>" login.
#
# argv: 0 = openvpn3 sub-command (e.g. session-start)
#       1 = auth user name
#       2 = auth password
set action [lindex $argv 0 ]
set user [lindex $argv 1 ]
set password [lindex $argv 2 ]
#set prompt [lindex $argv 3 ]
# give the VPN endpoint up to 30s to show each prompt
set timeout 30
spawn openvpn3 $action --config client.ovpn
expect {
# answer the username prompt, then keep matching further prompts
"*Auth User name*" {send "${user}\r";exp_continue}
"*assword:" {send "$password\r";exp_continue}
# menu-style prompt ("Enter ..."): pick option 1 and stop matching
"Enter*" {send "1\r"}
}
# hand the spawned session back to the user
interact
API Call ArgoCD to do some operation
Refresh Modules (refresh_modules.sh)
#!/bin/bash
# refresh_modules — trigger an ArgoCD sync for one module and print its images.
#
# Requires: ~/.ssh/.security exporting argocd_{url,username,password}_<env>
# (plus BLUE/NC color codes), jq, and argocd_ms_refresh.json next to the script.
BASEDIR=$(cd "$(dirname "$0")" && pwd)
cd "$BASEDIR" >/dev/null || exit 1
source ~/.ssh/.security

env=stg
module="cloud-api-ms"

usage(){
cat <<EOF
USAGE: $0 OPTIONS
OPTIONS:
--env/-e string dev[default dev]
--module/-m string
--version/-v string version[Default latest]
EXAMPLES:
$0 -e dev -m cloud-api-ms -v 1.0.0
EOF
}

while [ $# -gt 0 ]; do
  case "$1" in
    -e|--env)
      env="$2"
      shift
      ;;
    -m|--module)
      module="$2"
      shift
      ;;
    -v|--version)
      # NOTE(review): version is accepted but not used by any call below.
      version="$2"
      shift
      ;;
    -h|--help)
      usage
      exit 1
      ;;
    *)
      echo "unknown option: $1" >&2
      usage
      exit 1
      ;;
  esac
  shift
done

if [ -z "$env" ]; then
  usage
  exit 1
fi
env=${env,,}   # normalize to lowercase

echo "-------------------------------"
echo -e "Begin Upgrade ${BLUE}$env${NC} Module ${BLUE}$module${NC}"
echo "-------------------------------"

# indirect lookup of per-environment credentials from ~/.ssh/.security
url_var="argocd_url_$env";           url=${!url_var}
username_var="argocd_username_$env"; username=${!username_var}
password_var="argocd_password_$env"; password=${!password_var}

# NOTE: credentials appear on the curl command line and are visible in `ps`;
# prefer feeding the payload via stdin if this runs on a shared host.
auth="{\"username\":\"${username}\",\"password\":\"${password}\"}"
token=$(curl -Ss -XPOST --header "Content-Type: application/json" --data "$auth" --url "$url/api/v1/session" | jq -r ".token")
echo "url --> $url"

# kick off the sync; payload comes from argocd_ms_refresh.json
result=$(curl -Ss -XPOST -H "Authorization: Bearer $token" --header "Content-Type: application/json" -d @argocd_ms_refresh.json --url "$url/api/v1/applications/$module/sync")
echo "$result" | jq '.status.summary.images'

sleep 15

# plain GET for status — the sync payload is not needed (and GET bodies are
# non-standard)
result=$(curl -Ss -XGET -H "Authorization: Bearer $token" --header "Content-Type: application/json" --url "$url/api/v1/applications/$module")
echo "$result" | jq '.status.summary.images'
argocd_ms_refresh.json
{"appNamespace":"argocd","revision":"stage","prune":false,"dryRun":false,"strategy":{"hook":{"force":false}},"resources":null,"syncOptions":{"items":[]}}
Docker Run Squid
run_squid.sh
#!/bin/bash
# run_squid.sh — (re)create the squid proxy container on port 3128.
BASEDIR=$(cd "$(dirname "$0")" && pwd)

# remove any previous container; ignore errors when it does not exist
docker stop squid 2>/dev/null || true
docker rm squid 2>/dev/null || true

# bind mounts need an absolute host path ("./squid.conf" only works on
# recent Docker versions), so resolve it against the script directory
docker run --name squid -d --restart=always \
  --publish 3128:3128 \
  -v "$BASEDIR/squid.conf:/etc/squid/squid.conf" \
  sameersbn/squid:latest
Script to Batch-Create Simulated Subscriber & Device Records via API and DB
simulator_sub_dev.py
查看代码
# -*- coding: utf-8 -*-
"""
------------------------------------------------
simulator_sub_dev
------------------------------------------------
Author: Ben (email: nanjinghhu@vip.qq.com)
Create: 2/1/2023
------------------------------------------------
ChangeLog
------------------------------------------------
Author Date Version Describe
------------------------------------------------
tben 2/1/2023 v1.0.0 Init
------------------------------------------------
"""
import logging,logging.handlers
import os
import argparse
import csv
import urllib3
import json
import datetime
import time
import random
import psycopg2
from psycopg2 import pool
from concurrent.futures import ThreadPoolExecutor
from boto3.dynamodb.conditions import Attr
# Root logger shared by all helpers in this script.
logger = logging.getLogger()

# Per-environment PostgreSQL connection settings used by main().
# NOTE(review): these look like local placeholders — confirm the real
# values are supplied before running against a live environment.
rds_map = {
    "dev": {
        "host": "192.168.1.2",
        "db": "postgres",
        "username": "postgres",
        "password": "postgres"
    },
    "stg": {
        "host": "192.168.1.2",
        "db": "postgres",
        "username": "postgres",
        "password": "postgres"
    }
}

# Manufacturer OUI prefixes used when generating device ids.
# NOTE(review): update_device_info indexes this with random.randint(0, 13),
# which overruns this 2-element list — see that function.
manufacturerOUIList = ["aa", "bb"]
def init_log():
    """
    Configure root logging: INFO+ to the console, DEBUG+ to a rotating file.

    The file is named ``_<script>_<YYYY_MM_DD>.log`` in the current
    directory, rotated at 100 MiB with 10 backups kept.

    :return: None
    """
    # use the root logger directly instead of relying on the module-level
    # ``logger`` global (identical object, fewer hidden dependencies)
    root = logging.getLogger()
    log_name = "_%s_%s.log" % (os.path.splitext(os.path.basename(__file__))[0],
                               datetime.datetime.now().strftime('%Y_%m_%d'))
    handler = logging.handlers.RotatingFileHandler(os.path.expanduser(log_name),
                                                   maxBytes=100 * 1024 * 1024,
                                                   backupCount=10,
                                                   )
    fmt = '%(asctime)s-[%(filename)s:%(lineno)s]-[%(threadName)s]-[%(levelname)s]- %(message)s'
    root.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(fmt))
    root.addHandler(handler)

    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    console.setFormatter(logging.Formatter('[%(asctime)s][%(levelname)-2s][%(threadName)s] - %(message)s'))
    root.addHandler(console)
def create_billing(rest_api, url, r_body, Id):
    """
    POST billing data to the given endpoint.

    :param rest_api: urllib3 PoolManager (or compatible) issuing the request
    :param url: base endpoint URL
    :param r_body: JSON-serializable request payload
    :param Id: identifier appended as the ``Id`` query parameter
    :return: the ``id`` field of the JSON response on HTTP 200, else None
    """
    target = "%s?Id=%s" % (str(urllib3.util.parse_url(url)), Id)
    response = rest_api.request('POST', target,
                                headers={'Content-Type': 'application/json'},
                                body=json.dumps(r_body))
    logging.info("[%s]-%s", response.status, response.data)
    if response.status != 200:
        return None
    return json.loads(response.data).get("id")
def process_device_info(db_pool, org, snList, deviceInfoMap, timeInfoMap):
    """
    Batch-upsert device rows: update SNs already in the DB, insert the rest.

    :param db_pool: psycopg2-style connection pool (getconn/putconn)
    :param org: organization id the serial numbers belong to
    :param snList: serial numbers in this batch
    :param deviceInfoMap: sn -> device info dict (stored as json text)
    :param timeInfoMap: sn -> device_time info dict (stored as json text)
    :return: None
    """
    logging.info("Begin Batch Update [%s] SN: %s", org, ','.join(snList))
    ps_connection = db_pool.getconn()
    if ps_connection:
        ps_cursor = None
        try:
            # WARNING: SQL is built by string interpolation; acceptable only
            # for trusted simulator data — parameterize for untrusted input.
            sql = """select sn from device
            where org='%s' and sn in ('%s') ;""" % (org, "','".join(snList))
            logging.debug(sql)
            ps_cursor = ps_connection.cursor()
            ps_cursor.execute(sql)
            db_sn_map = {}
            for result in ps_cursor.fetchall():
                db_sn_map[result[0]] = result[0]
            for sn in deviceInfoMap.keys():
                # json.dumps produces valid JSON (quotes, true/false) — the
                # old str().replace() chain broke on values containing quotes
                device_doc = json.dumps(deviceInfoMap.get(sn))
                time_doc = json.dumps(timeInfoMap.get(sn))
                if db_sn_map.get(sn):
                    # fix: original referenced undefined `orgId` here
                    # (NameError); the parameter is `org`
                    sql = """update device set info='%s'
                    where info->>'org'='%s' and info->>'sn'='%s'""" % (device_doc, org, sn)
                    logging.debug(sql)
                    ps_cursor.execute(sql)
                    sql = """update device_time set info='%s'
                    where info->>'org'='%s' and info->>'sn'='%s'""" % (time_doc, org, sn)
                    logging.debug(sql)
                    ps_cursor.execute(sql)
                else:
                    sql = """insert into device values('%s');""" % device_doc
                    logging.debug(sql)
                    ps_cursor.execute(sql)
                    sql = """insert into device_time values('%s')""" % time_doc
                    logging.debug(sql)
                    ps_cursor.execute(sql)
            ps_connection.commit()
        except Exception as e:
            # fix: original also referenced undefined `orgId` here, so any
            # SQL error raised a secondary NameError
            logging.error("Execute SQL [%s - %s] failed , exception:%s", org, ",".join(snList), e)
        finally:
            if ps_cursor:
                ps_cursor.close()
            db_pool.putconn(ps_connection)
def update_device_info(deviceList, orgId, postgreSQL_pool):
    """
    Build device / device_time documents and write them in batches of ~100
    via a thread pool.

    :param deviceList: dicts with at least "SN", "IPAddress", "macAddress"
    :param orgId: organization id
    :param postgreSQL_pool: psycopg2 connection pool handed to workers
    :return: None
    """
    thread_pool = ThreadPoolExecutor(max_workers=10)
    batch_size = 100
    serialNumberList = []
    deviceInfoMap = {}
    timeInfoMap = {}
    for device in deviceList:
        serialNumber = device["SN"].upper()
        # fix: original did manufacturerOUIList[random.randint(0, 13)], which
        # overruns the 2-element list and raises IndexError
        manufacturerOUI = random.choice(manufacturerOUIList)
        now_ms = int(time.mktime(time.localtime()) * 1000)
        device_json = {
            "_id": "{0}-{1}-{2}".format(orgId, manufacturerOUI, serialNumber),
            "org": "xx",
            "mode": "xx",
            "role": "xx",
            "ip": device["IPAddress"],
            "createTime": {"$date": now_ms},
            "mac": device["macAddress"].upper(),
            "subnetMask": "255.255.255.0",
            "lastDiscoverTime": {"$date": now_ms},
            "connectionRequestPassword": "",
            "connectionRequestUsername": "",
            "customer": orgId,
            "sn": serialNumber,
            "oui": manufacturerOUI,
        }
        device_time_json = {
            "_id": "{0}-{1}-{2}".format(orgId, manufacturerOUI, serialNumber),
            "orgId": orgId,
            "paramValues": {"Gateway": {"MS": {"stun_addr": "xx"}}},
            "sn": "xx",
            "cc": 9,
            "lastInformTime": {"$date": now_ms},
            "paramAttributes": {"gateway": {"DeviceInfo": {"xx": 1}}},
            "event": "Connect REQUEST",
            "serialNumber": serialNumber,
        }
        serialNumberList.append(serialNumber)
        deviceInfoMap[serialNumber] = device_json
        timeInfoMap[serialNumber] = device_time_json
        if len(serialNumberList) >= batch_size:
            # hand the full batch to a worker and start a fresh one
            thread_pool.submit(process_device_info, postgreSQL_pool, orgId,
                               serialNumberList, deviceInfoMap, timeInfoMap)
            serialNumberList = []
            deviceInfoMap = {}
            timeInfoMap = {}
    if serialNumberList:
        # flush the final partial batch
        thread_pool.submit(process_device_info, postgreSQL_pool, orgId,
                           serialNumberList, deviceInfoMap, timeInfoMap)
    thread_pool.shutdown(wait=True)
    return
def process(args, rest_api, postgreSQL_pool):
    # Parse each subscriber CSV, build device and subscriber records, write
    # devices to the DB, then POST subscribers to the billing API.
    # NOTE(review): indentation below is reconstructed from the original's
    # control flow; confirm against the source repository.
    orgId = args.orgId
    for fileName in args.csv.split(","):
        deviceInfoList = []
        subscriberInfoMap = {}
        with open(fileName, 'r') as sf:
            reader = csv.DictReader(sf)
            logging.info("[%s]Titles:%s ", fileName, reader.fieldnames)
            for line in reader:
                deviceInfo = {}
                deviceInfo["mac"] = line["sub.deviceId"]
                deviceInfo["ip"] = line["sub.opt"]
                deviceInfo["op"] = "xx"
                deviceInfo["role"] = "xx"
                row = {}
                for column in line:
                    # "name" is treated as the subscriber name column
                    rename_flg = False
                    if column == "name":
                        column = "sub.name"
                        rename_flg = True
                    column_value = line["name"] if rename_flg else line[column]
                    if column in ["Model", "SN"]:
                        # mask specific Model/SN values, keep on deviceInfo only
                        column_value = "xx" if column_value in ["xx", "bb"] else column_value
                        deviceInfo[column] = column_value.strip()
                        continue
                    column_array = column.split(".")
                    if len(column_array) == 1:
                        # un-namespaced column: plain subscriber field
                        row[column] = column_value
                        continue
                    first_column = column_array[0]
                    second_column = column_array[1]
                    if second_column == "orgId":
                        continue
                    if first_column == "sub":
                        # "sub.x" columns collect into one nested dict
                        sub_info_map = {}
                        if row.get(first_column):
                            sub_info_map = row[first_column]
                        sub_info_map[second_column] = column_value
                        row[first_column] = sub_info_map
                    else:
                        # other namespaced columns collect into a 1-element list
                        second_column_map = {}
                        tmp_list = []
                        if row.get(first_column):
                            second_column_map = row[first_column][0]
                        second_column_map[second_column] = column_value
                        tmp_list.append(second_column_map)
                        row[first_column] = tmp_list
                # NOTE(review): `csc_key`, `key` and `di` are not defined
                # anywhere in this function — as written this raises
                # NameError on the first row. They presumably should be a
                # subscriber key built from `row` and the current
                # `deviceInfo`; restore from the original source.
                if subscriberInfoMap.get(csc_key):
                    deviceInfo["mode"] = "xx"
                    deviceInfo["role"] = "xx"
                    row["sub"].extend(subscriberInfoMap.get(key)["currentSDInfos"])
                subscriberInfoMap[key] = row
                deviceInfoList.append(di)
        update_device_info(deviceInfoList, orgId, postgreSQL_pool)
    # NOTE(review): this loop runs after the file loop, so it only posts the
    # subscribers of the LAST csv file — verify that is intended.
    thread_pool = ThreadPoolExecutor(max_workers=10)
    for key in subscriberInfoMap.keys():
        thread_pool.map(create_billing, [rest_api], [args.url], [subscriberInfoMap.get(key)], [orgId])
    thread_pool.shutdown(wait=True)
    return
def main():
    """
    Entry point: parse CLI args, set up logging, open a DB pool and process
    the CSV files.

    :return: None
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--url", help="sub URL")
    parser.add_argument("--orgId", help="orgId")
    parser.add_argument("--env", help="Environment")
    parser.add_argument("--csv", help="csv files ", default='subscriber1-10000.csv,subscriber10001-20000.csv,subscriber20001-40000.csv')
    args = parser.parse_args()
    init_log()
    # fail fast on an unknown environment instead of the bare TypeError the
    # original raised from rds_map.get(args.env)["host"]
    rds = rds_map.get(args.env)
    if rds is None:
        logging.error("Unknown env %r; expected one of %s", args.env, sorted(rds_map))
        return
    logging.info("------------------------")
    logging.info(args)
    logging.info(rds["host"])
    logging.info("------------------------")
    rest_api = urllib3.PoolManager(maxsize=500)
    postgreSQL_pool = None
    try:
        postgreSQL_pool = psycopg2.pool.SimpleConnectionPool(1, 20,
                                                             user=rds["username"],
                                                             password=rds["password"],
                                                             host=rds["host"],
                                                             port=5432,
                                                             database=rds["db"])
        process(args, rest_api, postgreSQL_pool)
    except psycopg2.OperationalError as dberr:
        logging.error("Failed Connect to DB ,exception is :\r\n%s", dberr)
    except Exception as e:
        logging.error("process failed , exception : %s", e)
    finally:
        # always release pooled connections, even on failure
        if postgreSQL_pool:
            postgreSQL_pool.closeall()


if __name__ == '__main__':
    main()