# Avenger — back up wqhist logs into the Avenger database (full + incremental).
import AvengerAccessor
import os,datetime,subprocess,sys,re
# Log root directories to back up: the local wqhist directory plus the
# staging directories that hold logs fetched from the DEV20/DEV60 nodes.
Tmp_dev_50 = "/home/wqhist/"
Tmp_dev_20 = "/tmp/DEV20_WqhistLog/wqhist/"
Tmp_dev_60 = "/tmp/DEV60_WqhistLog/wqhist/"
def scpFileToRemoteNode(user, ip, password, local_dir, remote_dir, port=22):
    """Recursively copy remote_dir from the remote host into local_dir.

    Drives the scp password prompt with an `expect` script, so no key
    exchange is required (but the password travels through the shell
    command line — acceptable only on a trusted host).

    Args:
        user: SSH user name on the remote host.
        ip: Remote host address.
        password: SSH password fed to the expect script.
        local_dir: Local directory that receives the files; recreated
            from scratch on every call.
        remote_dir: Remote directory to copy (recursively).
        port: SSH port, default 22.
    """
    # BUG FIX: the original passed the literal string "local_dir" to the
    # shell instead of the variable's value, so the target directory was
    # never actually removed/created.
    os.system("rm -rf %s" % local_dir)
    # Recreate a local directory to hold the files pulled from the remote host.
    os.system("mkdir -p %s" % local_dir)
    # Single .format() pass: {{ }} yields the literal braces expect needs.
    # (The original formatted twice with quadruple braces for no benefit.)
    scp_cmd = r"""
expect -c "
set timeout 300 ;
spawn scp -P {port} -r {username}@{host}:{remote_dir} {local_dir};
expect *assword* {{ send {password}\r }} ;
expect *\r ;
expect \r ;
expect eof
"
""".format(username=user, password=password, host=ip,
           local_dir=local_dir, remote_dir=remote_dir, port=port)
    print("execute SCP_CMD: %s" % scp_cmd)
    # BUG FIX: the original launched the transfer twice (subprocess.Popen
    # followed by os.system on the same command); run it exactly once.
    p = subprocess.Popen(scp_cmd, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, shell=True)
    p.communicate()
# Pull the wqhist logs from both remote DEV nodes into local staging dirs.
for _node_ip, _staging_dir in (("172.16.20.20", "/tmp/DEV20_WqhistLog"),
                               ("172.16.20.60", "/tmp/DEV60_WqhistLog")):
    scpFileToRemoteNode("root", _node_ip, "passwd",
                        _staging_dir, "/home/wqhist/", 22)
# Log in to Avenger.
# NOTE(review): credentials are hard-coded in the script — consider
# loading them from the environment or a config file.
client = AvengerAccessor(None)
client.login("liaogang", "passwd")
def walk(file_list, *args):
    """Collect every log file under the given root directories.

    Recursively traverses each directory in *args and appends the full
    path of every file found to file_list, in place.

    Args:
        file_list: list that receives the discovered file paths.
        *args: root directories to traverse.
    """
    for root_dir in args:
        # print(root_dir)
        for dirpath, _dirnames, filenames in os.walk(root_dir):
            file_list.extend(os.path.join(dirpath, name)
                             for name in filenames)
# Gather every log file from all three log roots into one flat list.
files = []
walk(files, Tmp_dev_50, Tmp_dev_20, Tmp_dev_60)
# print(files)
def creat_source(aa):
    """Create the Avenger source named `aa`.

    Only needs to run once — creating a source with a duplicate name is
    not allowed.
    """
    # client.SourceManager.createSource(mySourceName="wqhist", shardKey={}, indexList=[], Some_Description="WQHIST" )
    shard_key = {}
    index_list = []
    description = "WQHIST"
    client.SourceManager.createSource(aa, shard_key, index_list, description)
# source = creat_source("wqhistory_command")
source_name = "wqhistory_command"
# List the existing sources so the operator can verify the target exists.
dataAndStatus = client.SourceManager.listSource()
print(dataAndStatus)
def full_bak():
    """Full backup: insert every collected log file into the Avenger source.

    Uploads only when the source is still empty, so the full backup
    effectively runs a single time.
    """
    # Query the source's metadata to see whether any log files already exist.
    full_query = client.queryDataFromSource(dataType="meta",
                                            source=source_name,
                                            isPandas=False,
                                            maxRecordNum=None,
                                            volatile=False,
                                            partitionList=None)
    if len(full_query) == 0:
        for log_file in files:
            # upsert=True would overwrite previous content; False appends.
            # FIX: the original used typographic quotes around 'Mongo'
            # (scraping corruption) — a syntax error in real Python.
            client.insertFile(avengerDataId=log_file,
                              metaDict={"File_key": "wqhistory"},
                              metaSource=source_name,
                              filePath=log_file,
                              upsert=False,
                              storageType='Mongo',
                              rsaPubPkcs1=None,
                              compressType=None,
                              serialType=None)
        print("Full backup complete!")
full_bak()
def LastWeek_list(days=7):
    """Return the previous `days` dates as "YYYY-MM-DD" strings.

    The list starts with yesterday and walks backwards; the strings match
    the dated subdirectory names under the wqhist log roots.

    Args:
        days: number of past days to list (default 7, i.e. the last week —
            the original hard-coded constant, now a parameter).

    Returns:
        List of `days` date strings, newest first.
    """
    today = datetime.date.today()
    return [(today - datetime.timedelta(days=i)).strftime("%Y-%m-%d")
            for i in range(1, days + 1)]
# Dated directory names for the previous seven days.
sevendays = LastWeek_list()
# print(sevendays)
def Add_bak(*args):
    """Incremental backup: upload the last 7 days of logs for each root.

    For each directory in *args, checks whether the root log from 7 days
    ago is already present in the source; if not, inserts every file
    found under the dated subdirectories of the previous seven days.

    Args:
        *args: log root directories, each containing YYYY-MM-DD subdirs.
    """
    today = datetime.date.today()
    lastweek_date = str(today - datetime.timedelta(days=7))
    for f_path in args:
        # FIX: the original used typographic quotes ('/', 'root_', '.log')
        # from blog scraping — syntax errors in real Python.
        lastweek_rootlog = f_path + lastweek_date + '/' + 'root_' + lastweek_date + '.log'
        # print(lastweek_rootlog)
        # Check whether the source already holds data from 7 days ago.
        query_lastweek = client.queryDataFromSource(dataType="meta",
                                                    _id=lastweek_rootlog,
                                                    source=source_name,
                                                    isPandas=False,
                                                    maxRecordNum=None,
                                                    volatile=False,
                                                    partitionList=None)
        # CONSISTENCY FIX: full_bak() tests emptiness with len(...) == 0,
        # but this used "is None" — an empty result list is not None, so
        # the incremental upload would never run. Treat None and an empty
        # result the same way.
        if not query_lastweek:
            sevendays_path_list = [f_path + day + '/' for day in sevendays]
            for sevendays_path in sevendays_path_list:
                for entry in os.listdir(sevendays_path):
                    file_path = sevendays_path + entry
                    # print(file_path)
                    # Insert the logs from the previous 7 days;
                    # upsert=False appends instead of overwriting.
                    client.insertFile(avengerDataId=file_path,
                                      metaDict={"File_key": "wqhistory"},
                                      metaSource=source_name,
                                      filePath=file_path,
                                      upsert=False,
                                      storageType='Mongo',
                                      rsaPubPkcs1=None,
                                      compressType=None,
                                      serialType=None)
    print("Incremental backup complete!")
# Run the incremental backup over every log root directory.
# FIX: a stray bare call to LastWeek_list() was removed — its return
# value was discarded, so it had no effect.
Add_bak(Tmp_dev_50, Tmp_dev_20, Tmp_dev_60)
# The log paths to back up are passed as arguments.
# Python: back up files into a database (full backup + incremental backup).
# Tags: process, time, tmp, cannot, remote host, sap, key, cli, dict