# Automatic File Backup

## Requirements

- Xiaoming has a cloud server running a website where his classmates keep notes. He is worried about data safety, so he plans to write an automatic file backup program.
- While running, the program compresses the `www` directory into the `backup` directory as a zip archive named after the current date and time (`YYYYMMDDHHMMSS.zip`), and repeats this every minute (see the short sketch after this list for the naming scheme).
- If the new archive has the same md5 as an existing backup, delete the newest backup.
- Record the time, path, size, and md5 of each backup in `logs.json`.
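
The zip file name in the second requirement is simply the current time formatted as a string. A minimal sketch of that naming scheme, following the `backup-` prefix used in the reference code below (the example output is illustrative):

```python
from datetime import datetime

# Format the current time as YYYYMMDDHHMMSS and build the archive name.
timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
print(f"backup-{timestamp}.zip")  # e.g. backup-20240501120000.zip
```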

## Reference Code

```python
import hashlib
import json
import os
import shutil
import time
from datetime import datetime

# Source directory, backup directory and log file path
source_dir = 'www'
backup_dir = 'backup'
log_file = 'logs.json'

# Make sure the backup directory exists
os.makedirs(backup_dir, exist_ok=True)


def get_timestamp():
    """Return the current date and time formatted as YYYYMMDDHHMMSS."""
    return datetime.now().strftime("%Y%m%d%H%M%S")


def zip_directory(source, destination, filename):
    """Compress the source directory into destination/filename.zip and
    return the archive path without the .zip extension."""
    zip_path = os.path.join(destination, filename)
    # shutil.make_archive appends the .zip extension itself
    shutil.make_archive(zip_path, 'zip', source)
    return zip_path


def get_file_md5(file_path):
    """Compute the md5 of a file, reading it in 4 KiB chunks."""
    hash_md5 = hashlib.md5()
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def get_file_size(file_path):
    """Return the file size in bytes."""
    return os.path.getsize(file_path)


def log_backup(logs, backup_path, md5, file_size):
    """Append a new entry and write the whole log back to logs.json."""
    logs.append({
        'timestamp': get_timestamp(),
        'backup_path': backup_path,
        'md5': md5,
        'size': file_size
    })
    with open(log_file, 'w', encoding="utf-8") as f:
        json.dump(logs, f, indent=4, ensure_ascii=False)
    print(f"Logged backup: {backup_path}.zip, size {file_size} bytes, md5 {md5}")


def remove_latest_backup(backup_path):
    """Delete the newest backup archive."""
    os.remove(backup_path)


def main():
    """Create a backup every minute; drop duplicates detected by md5."""
    while True:
        # Build the backup file name from the current timestamp
        timestamp = get_timestamp()
        backup_filename = f"backup-{timestamp}"
        # Compress the source directory
        backup_path = zip_directory(source_dir, backup_dir, backup_filename)
        # Compute the md5 and size of the new archive
        backup_file_path = f"{backup_path}.zip"
        md5 = get_file_md5(backup_file_path)
        file_size = get_file_size(backup_file_path)

        # Load the existing log and check whether this md5 was seen before
        if os.path.exists(log_file):
            with open(log_file, 'r', encoding="utf-8") as f:
                logs = json.load(f)
        else:
            logs = []

        if any(log['md5'] == md5 for log in logs):
            # Same content as an earlier backup: delete the newest archive
            # and do not log it
            print(f"Duplicate md5, removing the newest backup: {backup_file_path}")
            remove_latest_backup(backup_file_path)
        else:
            log_backup(logs, backup_path, md5, file_size)

        # Wait 60 seconds before the next backup
        time.sleep(60)


if __name__ == '__main__':
    main()
```
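
Once the script has produced a few backups, you can check that the logging works by reading `logs.json` back. A minimal sketch, assuming the key names used in the reference code above and that the file already exists:

```python
import json

# Print a one-line summary of every backup recorded in logs.json.
with open('logs.json', encoding='utf-8') as f:
    logs = json.load(f)

for entry in logs:
    print(f"{entry['timestamp']}  {entry['backup_path']}.zip  "
          f"{entry['size']} bytes  md5={entry['md5']}")
```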

><span style="color: red; background: yellow; padding: 2px 5px; font-size: 22px;">What to submit for Assignment 8.1</span>
>
>- Understand how the program works
>- A screenshot of the program running successfully, sent individually to your group leader