Update server2.py
Browse files- server2.py +54 -56
server2.py
CHANGED
|
@@ -230,79 +230,78 @@ zip_file_path = '/a/sillytavern.tar.gz' # 输出的 tar.gz 文件路径
|
|
| 230 |
metadata_file_path = '/a/dataset-metadata.json'
|
| 231 |
last_uploaded_size = -1
|
| 232 |
|
|
|
|
| 233 |
def update_last_uploaded_size(size):
|
| 234 |
global last_uploaded_size
|
| 235 |
last_uploaded_size = size
|
| 236 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 237 |
def compress_folder():
|
| 238 |
global last_uploaded_size # 声明使用全局变量
|
| 239 |
-
|
| 240 |
# 清理并创建目录 /a
|
| 241 |
if os.path.exists('/a'):
|
| 242 |
subprocess.run(['rm', '-r', '/a/'])
|
| 243 |
-
|
| 244 |
os.makedirs('/a', exist_ok=True)
|
| 245 |
-
|
| 246 |
with lock: # 使用锁
|
| 247 |
-
|
| 248 |
-
|
| 249 |
-
|
| 250 |
-
|
| 251 |
-
print('开始压缩数据')
|
| 252 |
-
#result = subprocess.run(['7z', 'a', '-tzip', '-mx=1','-mmt=2', zip_file_path, data_folder])
|
| 253 |
-
# 7z a -tzip -mx=1 -mmt=2 /a/sillytavern.zip /sillytavern/*
|
| 254 |
-
# tar -cvf - /sillytavern | pigz -p 6 -1 > /a/sillytavern.tar.gz
|
| 255 |
-
# result = subprocess.run(
|
| 256 |
-
# f'tar -cvf - {data_folder} | pigz -p 6 -1 > {zip_file_path} > /dev/null',
|
| 257 |
-
# shell=True,
|
| 258 |
-
# check=True
|
| 259 |
-
# )
|
| 260 |
-
try:
|
| 261 |
-
result = subprocess.run(
|
| 262 |
-
f'tar -cvf - {data_folder} | pigz -p 2 -1 > {zip_file_path} 2> /dev/null',
|
| 263 |
-
shell=True,
|
| 264 |
-
check=True,
|
| 265 |
-
stdout=subprocess.DEVNULL, # 将标准输出重定向到 /dev/null
|
| 266 |
-
stderr=subprocess.DEVNULL # 将标准错误也重定向到 /dev/null
|
| 267 |
-
)
|
| 268 |
-
except subprocess.CalledProcessError as e:
|
| 269 |
-
print(f"Command failed with return code {e.returncode}")
|
| 270 |
|
| 271 |
-
|
| 272 |
-
|
| 273 |
-
|
| 274 |
-
|
| 275 |
-
subprocess.run(['mv', zip_file_path,file_path])
|
| 276 |
-
if result.returncode != 0:
|
| 277 |
-
print("压缩失败,请检查7z命令是否正确")
|
| 278 |
return
|
| 279 |
|
| 280 |
-
|
| 281 |
-
|
| 282 |
-
|
| 283 |
-
|
| 284 |
-
|
| 285 |
-
|
| 286 |
-
|
| 287 |
-
|
| 288 |
-
|
| 289 |
-
|
| 290 |
-
|
| 291 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 292 |
if last_uploaded_size == -1:
|
| 293 |
print("首次运行,正在执行上传...")
|
| 294 |
-
update_last_uploaded_size(file_size)
|
| 295 |
|
| 296 |
with open(metadata_file_path, 'w') as json_fid:
|
| 297 |
json.dump(kaggle_metadata, json_fid)
|
| 298 |
-
|
| 299 |
-
|
| 300 |
-
# kaggle datasets version -m 'new version message' -p /a
|
| 301 |
last_uploaded_size = file_size
|
| 302 |
-
|
| 303 |
-
|
| 304 |
-
|
| 305 |
-
update_last_uploaded_size(file_size) # 更新元数据中的最后一次上传大小
|
| 306 |
|
| 307 |
with open(metadata_file_path, 'w') as json_fid:
|
| 308 |
json.dump(kaggle_metadata, json_fid)
|
|
@@ -311,14 +310,13 @@ def compress_folder():
|
|
| 311 |
last_uploaded_size = file_size
|
| 312 |
|
| 313 |
else:
|
| 314 |
-
print(f"
|
| 315 |
-
|
| 316 |
|
| 317 |
def repeat_task():
|
| 318 |
print('------打包线程启动-------------')
|
| 319 |
while True:
|
| 320 |
print('等待打包')
|
| 321 |
-
time.sleep(
|
| 322 |
compress_folder()
|
| 323 |
print('------打包线程结束-------------')
|
| 324 |
# repeat_task()
|
|
|
|
metadata_file_path = '/a/dataset-metadata.json'  # Kaggle dataset metadata file written before each upload
last_uploaded_size = -1  # size in bytes recorded at the last upload; -1 means "never uploaded yet"


def update_last_uploaded_size(size: int) -> None:
    """Record *size* (bytes) as the most recently uploaded archive size.

    Mutates the module-level ``last_uploaded_size`` used by
    ``compress_folder`` to decide whether a new upload is needed.
    """
    global last_uploaded_size
    last_uploaded_size = size
|
| 238 |
+
def get_folder_size(folder):
    """Return the total size of *folder* in bytes, as reported by ``du -sb``.

    When ``du`` exits with a non-zero status the error text is printed and
    0 is returned instead of raising.
    """
    result = subprocess.run(
        ['du', '-sb', folder],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    # Guard clause: report the failure and fall back to a zero size.
    if result.returncode != 0:
        print(f"Error calculating size: {result.stderr.decode()}")
        return 0
    # `du -sb` prints "<bytes>\t<path>"; the first whitespace-separated
    # field is the byte count.
    return int(result.stdout.split()[0])
|
| 247 |
+
|
| 248 |
def compress_folder():
    """Archive ``data_folder`` with tar+pigz and publish it as a Kaggle dataset version.

    Skips the whole cycle when the source folder is under 800MB, and skips
    the upload when the resulting archive is under 800MB.  Relies on module
    globals defined elsewhere in this file: ``lock``, ``data_folder``,
    ``zip_file_path``, ``metadata_file_path``, ``kaggle_metadata`` and
    ``last_uploaded_size``.
    """
    global last_uploaded_size  # updated below with the size of the uploaded archive

    # Wipe and recreate the staging directory /a.
    if os.path.exists('/a'):
        subprocess.run(['rm', '-r', '/a/'])

    os.makedirs('/a', exist_ok=True)

    with lock:  # serialize with other threads that touch /a
        # Measure the current (uncompressed) size of the data folder.
        current_folder_size = get_folder_size(data_folder)
        print(f"当前文件夹大小: {current_folder_size} 字节")

        # Nothing to do until the folder reaches 800MB (800 * 1024**2 bytes).
        if current_folder_size < (800 * 1024 ** 2):
            print("文件夹小于800MB,不进行上传和压缩。")
            return

        # Compress on first run, or whenever the folder size differs from the
        # size recorded at the last upload.
        # NOTE(review): `last_uploaded_size` is later assigned the *compressed*
        # archive size, yet here it is compared against the *uncompressed*
        # folder size — the units don't match; confirm intent.
        if last_uploaded_size == -1 or current_folder_size != last_uploaded_size:
            print('开始压缩数据')
            try:
                # tar the folder and pipe it through pigz (2 threads, level 1 =
                # fastest compression), writing the archive to zip_file_path.
                result = subprocess.run(
                    f'tar -cvf - {data_folder} | pigz -p 2 -1 > {zip_file_path} 2> /dev/null',
                    shell=True,
                    check=True,
                    stdout=subprocess.DEVNULL,  # discard the pipeline's stdout
                    stderr=subprocess.DEVNULL   # discard the pipeline's stderr
                )
            except subprocess.CalledProcessError as e:
                print(f"命令失败,返回代码: {e.returncode}")
                return

            print('压缩完成')
            # Rename the archive to the fixed name the uploader expects.
            file_path = "/a/sillytavern.ctk"
            subprocess.run(['mv', zip_file_path, file_path])
            print(f"数据压缩为 {file_path}")

            file_size = os.path.getsize(file_path)  # size of the compressed archive
            print('文件大小:', file_size)

            if file_size < (800 * 1024 ** 2):  # archives under 800MB are not uploaded
                print("压缩文件小于800MB,不进行上传。")
                return

            # Upload.
            if last_uploaded_size == -1:
                print("首次运行,正在执行上传...")
                update_last_uploaded_size(file_size)

                with open(metadata_file_path, 'w') as json_fid:
                    json.dump(kaggle_metadata, json_fid)

                # Publish a new dataset version from the staging directory.
                subprocess.run(['kaggle', 'datasets', 'version', '-m', 'new version message', '-p', '/a'])
                # NOTE(review): redundant — update_last_uploaded_size() above
                # already set this global.
                last_uploaded_size = file_size
            else:
                print(f"新文件大小 {file_size} 大于上次上传的大小,正在执行上传...")
                update_last_uploaded_size(file_size)
                # NOTE(review): this branch prints "executing upload" but never
                # invokes the `kaggle datasets version` command — confirm
                # whether the upload call is missing here.

                with open(metadata_file_path, 'w') as json_fid:
                    json.dump(kaggle_metadata, json_fid)

                last_uploaded_size = file_size

        else:
            print(f"新文件夹大小 {current_folder_size} 不大于上次上传的大小,跳过")
|
|
|
| 314 |
|
| 315 |
def repeat_task():
    """Background worker loop: once an hour, run one compress/upload cycle.

    Intended to run on its own thread; never returns.
    """
    print('------打包线程启动-------------')
    while True:
        print('等待打包')
        time.sleep(3600)  # wait one hour between packaging attempts
        compress_folder()
    # NOTE(review): unreachable — the `while True` loop above never exits,
    # so this end-of-thread marker is dead code.
    print('------打包线程结束-------------')
|
| 322 |
# repeat_task()
|