I. Requirements
1. Download data from a MySQL table to the server;
2. Store the data as a CSV file and process the format: add a header row, separate both the header and the data fields with pipe ("|") delimiters, and append a trailing delimiter at the end of every line (see the sketch after this list);
3. Organize the file path into one folder per day; the file name contains the date and a sequence number that records the batch whenever the same file is re-downloaded and re-sent on the same day;
4. Compress the file into .gz format;
5. Transfer the file to a fixed location on the remote server via SFTP.
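As a quick illustration of requirements 2 and 3, the sketch below builds one pipe-delimited line with a trailing separator and a file name in the same form the script below uses (ZXSEND_0112_082_&lt;date&gt;_&lt;sequence&gt;_001). The sample row values here are only placeholders; the real data comes from MySQL.

import time

# Hypothetical sample values; the real script reads rows from MySQL.
header = ('w_end', 'item_id', 'item_count', 'rk')
row = ('2021-01-01 00:00:00', '1001', '25', '1')

# Requirement 2: pipe-delimited fields with a trailing "|" on every line.
header_line = '|'.join(header) + '|'
data_line = '|'.join(row) + '|'
print(header_line)   # w_end|item_id|item_count|rk|
print(data_line)     # 2021-01-01 00:00:00|1001|25|1|

# Requirement 3: date and a two-digit sequence number in the file name.
date = time.strftime("%Y%m%d", time.localtime())
xh = '00'  # first batch of the day; incremented on each re-send
send_file = 'ZXSEND_0112_082_' + date + '_' + xh + '_001'
print(send_file)     # e.g. ZXSEND_0112_082_20240101_00_001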
II. Full Python Code
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import gzip
import os
import stat
import time

import paramiko
import pymysql
def create_dir(sftp, sftpRemoteDir):
    """Create the remote directory if it does not already exist."""
    try:
        if stat.S_ISDIR(sftp.stat(sftpRemoteDir).st_mode):
            pass
    except Exception:
        sftp.mkdir(sftpRemoteDir)
def sftp_upload(sftp, sftpLocalDir, sftpRemoteDir):
    """Recursively upload a local directory (or a single file) to the remote path."""
    if os.path.isdir(sftpLocalDir):
        for file in os.listdir(sftpLocalDir):
            remoteDirTmp = os.path.join(sftpRemoteDir, file)
            localDirTmp = os.path.join(sftpLocalDir, file)
            if os.path.isdir(localDirTmp):
                create_dir(sftp, remoteDirTmp)
            # Recurse: a subdirectory is walked, a file falls through to the else branch.
            sftp_upload(sftp, localDirTmp, remoteDirTmp)
    else:
        print("upload file:", sftpLocalDir)
        try:
            sftp.put(sftpLocalDir, sftpRemoteDir)
        except Exception as e:
            print('upload error:', e)
if __name__ == '__main__':
    # Variables
    date = time.strftime("%Y%m%d", time.localtime())
    sftpHost = '192.168.220.104'
    sftpPort = 22
    sftpUser = 'sftpsun'
    sftpPassword = 'hadoophadoop'
    sftpLocalDir = '/home/hadoop/python_file/' + date
    sftpRemoteDir = '/upload'
    xhDir = '/home/hadoop/xh/' + date    # directory holding today's sequence-number file
    xhFile = xhDir + '/' + 'xhFile.txt'  # stores the two-digit batch sequence number
    # Sequence number: '00' for the first run of the day, incremented on every re-send
    if os.path.isdir(xhDir):
        if os.path.isfile(xhFile):
            with open(xhFile, 'r', encoding='utf-8') as f:
                xh = f.readline()
            xh = str(int(xh) + 1).zfill(2)
            with open(xhFile, 'w', encoding='utf-8') as f:
                f.write(xh)
        else:
            with open(xhFile, 'w', encoding='utf-8') as f:
                f.write('00')
    else:
        os.makedirs(xhDir)
        with open(xhFile, 'w', encoding='utf-8') as f:
            f.write('00')
    with open(xhFile, 'r', encoding='utf-8') as f:
        xh = f.readline()
    sendFile = 'ZXSEND_0112_082_' + date + '_' + xh + '_001'
    sftpLocalFile = sftpLocalDir + '/' + sendFile
    # Create today's local output directory if it does not exist
    if not os.path.isdir(sftpLocalDir):
        os.makedirs(sftpLocalDir)
    # Download data from MySQL
    conn = pymysql.connect(
        host="hadoop100",
        port=3306,
        user='root',
        password='Mysql123456#',
        database='flink_sql',
        charset='utf8')
    cursor = conn.cursor()
    sql = "select t.* from hot_item t"
    cursor.execute(sql)
    data = cursor.fetchall()
    print(data)
    cursor.close()
    conn.close()
    # Format the data and write it to a local file
    header = ('w_end', 'item_id', 'item_count', 'rk')
    with open(sftpLocalFile + '.csv', "w+", newline="", encoding='utf-8') as f:
        lines = csv.writer(f, delimiter="|")
        lines.writerow(header)
        for line in data:
            lines.writerow(line)
    # Rewrite as a .dat file with a trailing "|" on every line, then drop the .csv
    with open(sftpLocalFile + '.csv', 'r', encoding='utf-8') as f, \
            open(sftpLocalFile + '.dat', 'w', encoding='utf-8') as f2:
        for line in f:
            line = line.strip()
            if not line:
                continue
            if not line.endswith('|'):
                line += '|'
            f2.write(line + '\n')
    os.remove(sftpLocalFile + '.csv')
    # Compress the .dat file to .gz and remove the original
    with open(sftpLocalFile + '.dat', 'rb') as f:
        raw = f.read()
    with gzip.open(sftpLocalFile + '.dat' + '.gz', 'wb') as f:
        f.write(raw)
    os.remove(sftpLocalFile + '.dat')
    # Upload all of today's files via SFTP
    sf = paramiko.Transport((sftpHost, sftpPort))
    sf.connect(username=sftpUser, password=sftpPassword)
    sftp = paramiko.SFTPClient.from_transport(sf)
    sftp_upload(sftp, sftpLocalDir, sftpRemoteDir)
    sf.close()
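As a quick local check before (or after) the transfer, the short sketch below decompresses the generated .gz file and prints its first few lines; every line should end with the trailing "|". The path is only an example built the same way as sftpLocalFile + '.dat.gz' above; substitute your actual date and sequence number.

import gzip

# Hypothetical path following the script's naming pattern
path = '/home/hadoop/python_file/20240101/ZXSEND_0112_082_20240101_00_001.dat.gz'

with gzip.open(path, 'rt', encoding='utf-8') as f:
    for i, line in enumerate(f):
        print(line.rstrip('\n'))   # header first, then data rows, each ending in "|"
        if i >= 4:                 # only show the header plus a few rows
            break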