-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathccp.py
More file actions
672 lines (643 loc) · 36.5 KB
/
ccp.py
File metadata and controls
672 lines (643 loc) · 36.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
"""
CodeCraft PMS Backend Project
파일명 : ccp.py
생성자 : 김창환
생성일 : 2025/01/10
업데이트 : 2025/03/23
설명 : 프로젝트 Import/Export API 엔드포인트 정의
"""
# Standard library
import io
import json
import logging
import os
import shutil
import struct
import sys
import tarfile
import traceback
from collections import defaultdict
from typing import Optional
from urllib.parse import quote

# Third-party
import httpx
import requests
from cryptography.fernet import Fernet
from dotenv import load_dotenv
from fastapi import APIRouter, HTTPException, File, UploadFile
from pydantic import BaseModel

# Local
from logger import logger
router = APIRouter()
sys.path.append(os.path.abspath('/data/Database Project')) # Database Project와 연동하기 위해 사용
import csv_DB
import project_DB
import push
class ccp_payload(BaseModel):
    """Shared request payload for the CCP import/export endpoints.

    Every field is optional because each endpoint reads only the subset
    it needs (e.g. /ccp/del_history uses only `pid`).  The fields were
    previously annotated `int = None` / `str = None`, which relies on
    pydantic v1's implicit-Optional behavior and is rejected by v2;
    `Optional[...]` makes the contract explicit and is backward-compatible.
    """
    pid: Optional[int] = None       # project id
    univ_id: Optional[int] = None   # requester's university (student) id
    msg: Optional[str] = None       # history message to record
    ver: Optional[int] = None       # history version to restore (import only)
    is_removed: Optional[int] = None  # only used when restoring a deleted project; 1 skips the export/backup steps
def handle_db_result(result):
    """Normalize a value returned by the database layer.

    The DB helpers sometimes hand back an exception object instead of
    raising; log it and report failure as False.  Any other value is
    passed through unchanged.
    """
    if not isinstance(result, Exception):
        return result
    logging.error(f"Database error: {result}", exc_info=True)
    return False
def create_project_info():
    """Placeholder — project-info creation is not implemented yet."""
    return None
async def pull_storage_server(pid: int, output_path: str):
    """Fetch a project's archived files from the Storage server and unpack them.

    POSTs the pid to the storage server's push endpoint, writes the
    returned tar.gz under `output_path`, extracts it in place, then
    deletes the archive.  All failures are reported through the
    RESULT_CODE/RESULT_MSG dict rather than raised.
    """
    push_endpoint = f"http://192.168.50.84:10080/api/ccp/push"
    client_timeout = httpx.Timeout(60.0, connect=5.0)
    async with httpx.AsyncClient(timeout=client_timeout) as client:
        try:
            response = await client.post(push_endpoint, params={"pid": pid})
            if response.status_code != 200:
                logging.error(f"Failed to download from storage server for project {pid}. Status code: {response.status_code}")
                return {"RESULT_CODE": 500, "RESULT_MSG": f"Failed to download from storage server. Status code: {response.status_code}"}
            # Success path: persist the archive, extract it, then discard it.
            os.makedirs(output_path, exist_ok=True)
            archive_path = os.path.join(output_path, f"{pid}_output.tar.gz")
            with open(archive_path, 'wb') as f:
                f.write(response.content)
            logging.info(f"Downloaded archive for project {pid}: {archive_path}")
            with tarfile.open(archive_path, 'r:gz') as tar:
                tar.extractall(path=output_path)
            os.remove(archive_path)
            logging.info(f"Extraction completed and archive removed for project {pid}")
            return {"RESULT_CODE": 200, "RESULT_MSG": f"Files for project {pid} downloaded successfully."}
        except Exception as e:
            logging.error(f"Error while pulling from storage server for project {pid}: {str(e)}", exc_info=True)
            return {"RESULT_CODE": 500, "RESULT_MSG": f"Error while pulling from storage server: {str(e)}"}
# Load the .env file and build the module-level Fernet cipher used to
# encrypt/decrypt .ccp archives.  CCP_KEY must hold a url-safe base64
# Fernet key; if it is unset, Fernet(None) raises here at import time.
load_dotenv()
key = os.getenv('CCP_KEY')
cipher = Fernet(key)
def encrypt_ccp_file(pid):
    """Pack /data/ccp/<pid>/ into a tar stream, encrypt it, and write /data/ccp/<pid>.ccp.

    On-disk .ccp layout: a plaintext header (4-byte big-endian entry
    count, then per-entry records of 4-byte name length, UTF-8 name,
    4-byte size) followed by the Fernet-encrypted tar data.  Returns
    True on success, False on any failure (never raises).

    NOTE(review): the header counts only the top level via os.listdir()
    (so the DATABASE/ and OUTPUT/ directory entries are counted and
    os.path.getsize() is taken on directories), while the tar archives
    every nested file via os.walk().  decrypt_ccp_file() parses the
    header with the same convention — keep both sides in sync.
    """
    try:
        logging.info(f"------ Start encryption process for PID {pid} ------")
        input_dir = f'/data/ccp/{pid}/'
        output_dir = f'/data/ccp/'
        # In-memory buffer receiving the tar stream.
        compressed_file = io.BytesIO()
        # 'w|' writes a plain (uncompressed) tar stream.
        with tarfile.open(fileobj=compressed_file, mode='w|') as tar:
            for root, dirs, files in os.walk(input_dir):
                for file in files:
                    file_path = os.path.join(root, file)
                    arcname = os.path.relpath(file_path, input_dir)
                    tar.add(file_path, arcname=arcname)
        logging.info(f"Files in {input_dir} compressed successfully.")
        # Rewind and read the complete tar payload.
        compressed_file.seek(0)
        compressed_data = compressed_file.read()
        # Encrypt the tar payload with the module-level Fernet cipher.
        encrypted_data = cipher.encrypt(compressed_data)
        logging.info(f"Data encryption completed for PID {pid}.")
        # Build the plaintext header: number of top-level entries.
        num_files = len(os.listdir(input_dir))
        header = struct.pack('!I', num_files)  # entry count (4 bytes)
        # One metadata record per top-level entry.
        # NOTE(review): len(file) is the character count, but decrypt
        # reads that many *bytes* — non-ASCII names would desync the
        # header.  Assumes ASCII file names; confirm.
        for file in os.listdir(input_dir):
            file_path = os.path.join(input_dir, file)
            file_size = os.path.getsize(file_path)
            file_name_length = len(file)
            header += struct.pack('!I', file_name_length)  # name length (4 bytes)
            header += file.encode('utf-8')                 # file name (UTF-8)
            header += struct.pack('!I', file_size)         # file size (4 bytes)
        logging.info(f"Header creation completed for PID {pid}. Number of files: {num_files}")
        # Write header + encrypted payload as /data/ccp/<pid>.ccp.
        encrypted_file_path = os.path.join(output_dir, f'{pid}.ccp')
        with open(encrypted_file_path, 'wb') as encrypted_file:
            encrypted_file.write(header)          # plaintext header
            encrypted_file.write(encrypted_data)  # encrypted tar payload
        logging.info(f"Encrypted CCP file saved successfully: {encrypted_file_path}")
        logging.info(f"------ End of encryption process for PID {pid} ------")
        return True
    except Exception as e:
        logging.error(f"Error occurred during encryption process for PID {pid}: {str(e)}", exc_info=True)
        return False
def decrypt_ccp_file(pid):
    """Decrypt /data/ccp/<pid>.ccp and restore its contents under /data/ccp/<pid>/.

    Parses the plaintext header written by encrypt_ccp_file() (4-byte
    big-endian entry count, then per-entry name-length/name/size
    records), Fernet-decrypts the remaining bytes into a tar stream,
    persists it as ccp_decrypted.tar and extracts every member.
    Returns a RESULT_CODE/RESULT_MSG dict; failures yield a 500 dict
    rather than an exception.

    NOTE(review): files_metadata is collected but never used afterwards
    — extraction relies entirely on the tar contents — and the
    intermediate ccp_decrypted.tar is left behind in the output dir.
    """
    try:
        logging.info(f"------ Start decryption process for PID {pid} ------")
        input_file_path = f'/data/ccp/{pid}.ccp'
        output_dir = f'/data/ccp/{pid}/'
        if not os.path.exists(input_file_path):
            raise Exception(f"CCP file {input_file_path} does not exist")
        logging.info(f"CCP file found: {input_file_path}")
        with open(input_file_path, 'rb') as encrypted_file:
            # Header: first 4 bytes hold the number of top-level entries.
            header = encrypted_file.read(4)
            if len(header) < 4:
                raise Exception(f"Failed to read header, insufficient data. Read {len(header)} bytes")
            num_files = struct.unpack('!I', header)[0]
            logging.info(f"Number of files in CCP: {num_files}")
            # Prepare the output directory skeleton.
            os.makedirs(output_dir, exist_ok=True)
            os.makedirs(os.path.join(output_dir, 'DATABASE'), exist_ok=True)
            os.makedirs(os.path.join(output_dir, 'OUTPUT'), exist_ok=True)
            # Read the per-entry metadata records (name length, name, size).
            files_metadata = []
            for _ in range(num_files):
                file_name_length_data = encrypted_file.read(4)
                if len(file_name_length_data) < 4:
                    raise Exception(f"Failed to read file name length, insufficient data. Read {len(file_name_length_data)} bytes")
                file_name_length = struct.unpack('!I', file_name_length_data)[0]
                file_name = encrypted_file.read(file_name_length).decode('utf-8')
                file_size_data = encrypted_file.read(4)
                if len(file_size_data) < 4:
                    raise Exception(f"Failed to read file size, insufficient data. Read {len(file_size_data)} bytes")
                file_size = struct.unpack('!I', file_size_data)[0]
                files_metadata.append((file_name, file_size))
            logging.info(f"Metadata extraction completed for {num_files} files.")
            # Everything after the header is the Fernet-encrypted tar stream.
            encrypted_data = encrypted_file.read()
            decrypted_data = cipher.decrypt(encrypted_data)
            logging.info(f"Data decryption completed for PID {pid}")
            # Persist the decrypted tar so it can be re-opened for extraction.
            decrypted_tar_path = os.path.join(output_dir, 'ccp_decrypted.tar')
            with open(decrypted_tar_path, 'wb') as decrypted_file:
                decrypted_file.write(decrypted_data)
            logging.info(f"Decrypted tar file saved: {decrypted_tar_path}")
            # Extract members, routing OUTPUT/ and DATABASE/ entries into
            # their matching subfolders.
            with open(decrypted_tar_path, 'rb') as decrypted_tar:
                with tarfile.open(fileobj=decrypted_tar) as tar:
                    for member in tar.getmembers():
                        member_path = os.path.join(output_dir, member.name)
                        # NOTE(review): member.name is joined without
                        # sanitization; a crafted archive using '..' could
                        # escape output_dir — confirm archives are trusted.
                        if member.name.startswith('OUTPUT/'):
                            member_path = os.path.join(output_dir, 'OUTPUT', os.path.relpath(member.name, 'OUTPUT'))
                        elif member.name.startswith('DATABASE/'):
                            member_path = os.path.join(output_dir, 'DATABASE', os.path.relpath(member.name, 'DATABASE'))
                        # Create directories; write regular files byte-for-byte.
                        if member.isdir():
                            os.makedirs(member_path, exist_ok=True)
                        else:
                            os.makedirs(os.path.dirname(member_path), exist_ok=True)
                            with open(member_path, 'wb') as f:
                                f.write(tar.extractfile(member).read())
        logging.info(f"Decryption and extraction completed for PID {pid}")
        logging.info(f"------ End of decryption process for PID {pid} ------")
        return {"RESULT_CODE": 200, "RESULT_MSG": f"Decryption successful for project {pid}"}
    except Exception as e:
        logging.error(f"Error during decryption process for PID {pid}: {str(e)}", exc_info=True)
        return {"RESULT_CODE": 500, "RESULT_MSG": f"Decryption failed: {str(e)}"}
def build_csv_dict(pid):
    """Map CSV files in the project's DATABASE staging folder to DB table keys.

    Scans /data/ccp/<pid>/DATABASE for *.csv files and, by longest-prefix
    match on each file name, builds {table_key: "/var/lib/mysql/csv/<file>"}
    so the DB layer can bulk-load each file into the right table.

    Returns:
        dict[str, str]: table key -> server-side CSV path.

    Raises:
        FileNotFoundError: if the staging DATABASE folder is missing.
        Exception: re-raises anything else after logging.
    """
    source_dir = f"/data/ccp/{pid}/DATABASE"
    target_prefix = "/var/lib/mysql/csv/"
    # File-name prefix -> logical table key.  Longest prefixes are tried
    # first so e.g. "doc_rep" wins over "doc_r" and "project_user" over
    # "project".
    prefix_mapping = {
        "project_user": "project_user",
        "student": "student",
        "professor": "professor",
        "project": "project",
        "permission": "permission",
        "work": "work",
        "progress": "progress",
        "doc_rep": "doc_report",
        "doc_s": "doc_summary",
        "doc_r": "doc_require",
        "doc_m": "doc_meeting",
        "doc_t": "doc_test",
        "doc_o": "doc_other",
        "grade": "grade",
        "doc_a": "doc_attach"
    }
    csv_dict = {}
    try:
        logging.info(f"------ Start building CSV dictionary for PID {pid} ------")
        if not os.path.exists(source_dir):
            raise FileNotFoundError(f"Directory {source_dir} does not exist.")
        files = os.listdir(source_dir)
        logging.info(f"Found {len(files)} files in {source_dir}.")
        sorted_prefixes = sorted(prefix_mapping.keys(), key=len, reverse=True)
        for filename in files:
            if not filename.endswith(".csv"):
                continue
            for prefix in sorted_prefixes:
                if filename.startswith(prefix):
                    key = prefix_mapping[prefix]
                    csv_dict[key] = os.path.join(target_prefix, filename)
                    # Fixed: log the actual file name (was a literal
                    # "(unknown)" placeholder, which made the log useless).
                    logging.info(f"Mapped file {filename} to key {key}.")
                    break
        logging.info(f"CSV dictionary built successfully for PID {pid}. Total mappings: {len(csv_dict)}")
        logging.info(f"------ End of CSV dictionary build for PID {pid} ------")
        return csv_dict
    except Exception as e:
        logging.error(f"Error during CSV dictionary build for PID {pid}: {str(e)}", exc_info=True)
        raise
@router.post("/ccp/import")
async def api_project_import(payload: ccp_payload):
    """Restore a project to a selected history version.

    Flow: verify the requested version exists (Step 1); unless
    payload.is_removed == 1, back up the current state as a new history
    version first (Steps 2-5); then download, decrypt and extract the
    selected .ccp (Steps 6-7), push its CSV files back into the DB
    (Steps 8/8.5), re-upload its OUTPUT files to the Storage server
    (Step 9), and drop the project from deleted_project.json if present.
    Any failure raises HTTPException(500).
    """
    logging.info(f"------ Start project import process for PID {payload.pid} ------")
    try:
        # Step 1: Retrieve version history and validate the requested version.
        logging.info(f"Step 1: Retrieving version history for project {payload.pid}")
        history = csv_DB.fetch_csv_history(payload.pid)
        if not history:
            raise Exception(f"No history records found for project {payload.pid}")
        # Version number the upcoming backup will receive (max existing + 1).
        highest_ver = str(int(max(record['ver'] for record in history)) + 1)
        logging.info(f"Highest version: {highest_ver}")
        selected_version = next((record['ver'] for record in history if record['ver'] == payload.ver), None)
        if selected_version is None:
            raise Exception(f"Version {payload.ver} not found in project history")
        logging.info(f"Selected version {payload.ver} found in history")
        # Backup (export) phase: skipped when restoring a deleted project
        # (is_removed == 1), since there is no live state to back up.
        if payload.is_removed != 1:
            # Step 2: Back up the current project state into /data/ccp/<pid>/.
            logging.info(f"Step 2: Backing up current project {payload.pid}")
            os.makedirs(f'/data/ccp/{payload.pid}/DATABASE', exist_ok=True)
            os.makedirs(f'/data/ccp/{payload.pid}/OUTPUT', exist_ok=True)
            result = csv_DB.export_csv(payload.pid)
            if not handle_db_result(result):
                raise Exception("Failed to export DB during backup")
            result = await pull_storage_server(payload.pid, f'/data/ccp/{payload.pid}/OUTPUT')
            if result['RESULT_CODE'] != 200:
                raise Exception(result['RESULT_MSG'])
            if not encrypt_ccp_file(payload.pid):
                raise Exception(f"Failed to encrypt project folder for backup")
            # Step 3: Record the backup in the history table.
            logging.info("Saving backup record to DB history")
            payload.msg = f"Revert {highest_ver} to {payload.ver}"
            backup_ver = csv_DB.insert_csv_history(payload.pid, payload.univ_id, payload.msg)
            if backup_ver is None:
                raise Exception("Failed to insert backup history record")
            logging.info(f"Backup history recorded as version {backup_ver}")
            # Step 4: Upload the backup CCP file to the Storage server.
            # History is re-fetched so the name carries the just-inserted version.
            history = csv_DB.fetch_csv_history(payload.pid)
            version = str(max(record['ver'] for record in history))
            ccp_file_path = f"/data/ccp/{payload.pid}.ccp"
            ccp_file_name = f"{payload.pid}_{version}.ccp"
            storage_url = "http://192.168.50.84:10080/api/ccp/pull"
            logging.info(f"Uploading backup CCP file to Storage Server: {ccp_file_name}")
            with open(ccp_file_path, "rb") as file:
                files = {"file": (ccp_file_name, file, "application/octet-stream")}
                form_data = {"pid": str(payload.pid), "name": ccp_file_name}
                response = requests.post(storage_url, files=files, data=form_data)
                if response.status_code != 200:
                    raise Exception("Failed to upload backup CCP file")
            # Step 5: Remove the temporary backup artifacts.
            shutil.rmtree(f'/data/ccp/{payload.pid}', ignore_errors=True)
            os.remove(f'/data/ccp/{payload.pid}.ccp')
            logging.info("Backup completed and temporary files removed")
        else:
            logging.info("Export function is disabled (is_removed=1): skipping steps 2 to 5.")
        # Step 6: Download the selected CCP version from the Storage server.
        logging.info(f"Step 6: Downloading CCP file for version {payload.ver} from Storage Server")
        selected_ccp_url = "http://192.168.50.84:10080/api/ccp/push_ccp"
        timeout = httpx.Timeout(60.0, connect=5.0)
        async with httpx.AsyncClient(timeout=timeout) as client:
            response = await client.post(selected_ccp_url, params={"pid": payload.pid, "ver": payload.ver})
            if response.status_code != 200:
                raise Exception(f"Storage server returned status {response.status_code}")
            selected_ccp_file_path = f"/data/ccp/{payload.pid}_{payload.ver}.ccp"
            with open(selected_ccp_file_path, "wb") as f:
                f.write(response.content)
        # Step 7: Decrypt and extract the downloaded CCP file.
        # Renamed to <pid>.ccp because decrypt_ccp_file expects that path.
        logging.info("Step 7: Decrypting and extracting the downloaded CCP file")
        os.rename(selected_ccp_file_path, f"/data/ccp/{payload.pid}.ccp")
        result = decrypt_ccp_file(payload.pid)
        if result.get("RESULT_CODE", 500) != 200:
            raise Exception(result.get("RESULT_MSG", "Unknown error during decryption"))
        # Step 8: Push the restored CSV files to the DB server.
        logging.info(f"Step 8: Pushing DATABASE CSV files to DB server for project {payload.pid}")
        db_push_url = "http://192.168.50.84:70/api/ccp/push_db"
        database_dir = f"/data/ccp/{payload.pid}/DATABASE"
        if not os.path.exists(database_dir):
            raise Exception("DATABASE folder not found in extracted files")
        try:
            files_transferred = []
            for filename in os.listdir(database_dir):
                if filename.endswith(".csv"):
                    file_path = os.path.join(database_dir, filename)
                    with open(file_path, "rb") as f:
                        files_payload = {"file": (filename, f, "application/octet-stream")}
                        data_payload = {"pid": str(payload.pid)}
                        response = requests.post(db_push_url, files=files_payload, data=data_payload, timeout=15)
                        # NOTE(review): this error message contains the literal
                        # "(unknown)" instead of the file name — looks like lost
                        # interpolation; confirm intent before changing.
                        if response.status_code != 200:
                            raise Exception(f"Failed to push file (unknown): {response.text}")
                        else:
                            files_transferred.append(filename)
            logging.info(f"Successfully pushed files to DB server: {files_transferred}")
        except Exception as e:
            logging.error(f"Failed to push DATABASE CSV files to DB server for project {payload.pid}: {str(e)}")
            raise HTTPException(status_code=500, detail=f"Failed to push DATABASE CSV files to DB server: {str(e)}")
        # Step 8.5: Have the DB layer import the pushed CSVs into tables.
        logging.info(f"Step 8.5: Restoring DATABASE CSV files for project {payload.pid}")
        try:
            csv_files = build_csv_dict(payload.pid)
            logging.info(f"CSV files to import: {csv_files}")
            import_result = csv_DB.import_csv(csv_files, payload.pid)
            if import_result is not True:
                raise Exception("DB import_csv function returned failure")
            logging.info("DATABASE CSV files restored successfully")
        except Exception as e:
            logging.error(f"Failed to restore DATABASE CSV files for project {payload.pid}: {str(e)}")
            raise HTTPException(status_code=500, detail=f"Failed to restore DATABASE CSV files: {str(e)}")
        # Step 9: Re-upload the restored OUTPUT files to the Storage server.
        logging.info(f"Step 9: Restoring OUTPUT files for project {payload.pid}")
        output_folder = f"/data/ccp/{payload.pid}/OUTPUT"
        target_folder = os.path.join(output_folder, str(payload.pid))
        if os.path.exists(target_folder):
            archive_path = f"/data/ccp/{payload.pid}_output.tar.gz"
            with tarfile.open(archive_path, "w:gz") as tar:
                for root, _, files in os.walk(target_folder):
                    for file in files:
                        full_path = os.path.join(root, file)
                        rel_path = os.path.relpath(full_path, target_folder)
                        tar.add(full_path, arcname=rel_path)
            pull_output_url = "http://192.168.50.84:10080/api/ccp/pull_output"
            with open(archive_path, "rb") as file:
                multipart_form = {
                    "file": (f"{payload.pid}_output.tar.gz", file, "application/gzip"),
                    "pid": (None, str(payload.pid)),
                    "name": (None, f"{payload.pid}_output.tar.gz")
                }
                response = requests.post(pull_output_url, files=multipart_form)
                if response.status_code != 200:
                    raise Exception("Failed to upload OUTPUT archive to Storage Server")
            os.remove(archive_path)
        else:
            logging.info("No OUTPUT files found, skipping restore process.")
        # A restored project is no longer "deleted": drop its entry from
        # deleted_project.json (best-effort; failures only log a warning).
        deleted_file = "deleted_project.json"
        pid_str = str(payload.pid)
        if os.path.exists(deleted_file):
            try:
                with open(deleted_file, "r", encoding="utf-8") as f:
                    deleted_data = json.load(f)
                if pid_str in deleted_data:
                    del deleted_data[pid_str]
                    with open(deleted_file, "w", encoding="utf-8") as f:
                        json.dump(deleted_data, f, ensure_ascii=False, indent=2)
                    logging.info(f"Deleted project entry for PID {payload.pid} removed from deleted_project.json")
            except Exception as e:
                logging.warning(f"Failed to clean up deleted_project.json for PID {payload.pid}: {e}")
        logging.info(f"------ Project import process completed successfully for PID {payload.pid} ------")
        return {"RESULT_CODE": 200, "RESULT_MSG": f"Project {payload.pid} imported successfully."}
    except Exception as e:
        logging.error(f"Error during project import process for PID {payload.pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error during project import: {str(e)}")
def initialize_folder(pid: int):
    """Create the staging tree /data/ccp/<pid>/{DATABASE,OUTPUT} for backup/export."""
    try:
        for subdir in ('DATABASE', 'OUTPUT'):
            os.makedirs(f'/data/ccp/{pid}/{subdir}', exist_ok=True)
        logging.info(f"Folder structure initialized successfully for project {pid}")
    except Exception as e:
        logging.error(f"Failed to initialize folder for project {pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to initialize folder: {str(e)}")
def export_database_csv(payload: ccp_payload):
    """Dump the project's DB tables to CSV via the DB layer.

    Returns the DB layer's result on success; maps any failure to
    HTTPException(500).
    """
    try:
        exported = csv_DB.export_csv(payload.pid)
        if not handle_db_result(exported):
            raise Exception("Failed to export DB")
        logging.info(f"Database export successful for project {payload.pid}")
        return exported
    except Exception as e:
        logging.error(f"Failed to export database for project {payload.pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to export database: {str(e)}")
async def download_output_files(pid: int):
    """Pull the project's OUTPUT files from the Storage server into /data/ccp/<pid>/OUTPUT.

    Returns the pull result dict on success; maps any failure to
    HTTPException(500).
    """
    try:
        pull_result = await pull_storage_server(pid, f'/data/ccp/{pid}/OUTPUT')
        if pull_result['RESULT_CODE'] != 200:
            raise Exception(pull_result['RESULT_MSG'])
        logging.info(f"OUTPUT files downloaded successfully for project {pid}")
        return pull_result
    except Exception as e:
        logging.error(f"Failed to download OUTPUT files from Storage server for project {pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to download OUTPUT files: {str(e)}")
def encrypt_project_folder(pid: int):
    """Encrypt the project's staging folder into its CCP file.

    Thin wrapper around encrypt_ccp_file() that converts a False/failed
    outcome into HTTPException(500).
    """
    try:
        outcome = encrypt_ccp_file(pid)
        if not outcome:
            raise Exception(f"Failed to encrypt project folder for pid {pid}")
        logging.info(f"Encryption successful for project {pid}")
        return outcome
    except Exception as e:
        logging.error(f"Error during encryption process for pid {pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error during encryption: {str(e)}")
def save_history_record(payload: ccp_payload) -> int:
    """Insert a history record for the project and return the new version number.

    Maps a None insert result (DB-layer failure) or any exception to
    HTTPException(500).
    """
    try:
        new_ver = csv_DB.insert_csv_history(payload.pid, payload.univ_id, payload.msg)
        if new_ver is None:
            raise Exception("Failed to insert history record")
        logging.info(f"History record saved successfully with version {new_ver} for project {payload.pid}")
        return new_ver
    except Exception as e:
        logging.error(f"Failed to save history record for project {payload.pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to insert history record: {str(e)}")
def upload_ccp_file(payload: ccp_payload, ver: int):
    """Upload the generated CCP archive to the Storage server.

    The local /data/ccp/<pid>.ccp is sent under the versioned name
    <pid>_<ver>.ccp.  Failures map to HTTPException: 404 when the local
    file is missing, 500 for transport or server errors.
    """
    logging.info(f"------ Starting CCP file upload for project {payload.pid} (version {ver}) ------")
    local_path = f"/data/ccp/{payload.pid}.ccp"
    remote_name = f"{payload.pid}_{ver}.ccp"
    storage_url = "http://192.168.50.84:10080/api/ccp/pull"
    try:
        logging.info(f"Reading CCP file: {local_path}")
        with open(local_path, "rb") as fh:
            multipart = {"file": (remote_name, fh, "application/octet-stream")}
            form = {"pid": str(payload.pid), "name": remote_name}
            logging.info(f"Sending CCP file to Storage Server: {storage_url}")
            reply = requests.post(storage_url, files=multipart, data=form, timeout=30)
        if reply.status_code != 200:
            logging.error(f"Failed to upload CCP file for project {payload.pid}: {reply.text}", exc_info=True)
            raise HTTPException(status_code=500, detail="Failed to upload CCP file to Storage Server")
        logging.info(f"CCP file uploaded successfully: {remote_name}")
        logging.info(f"------ CCP file upload completed for project {payload.pid} ------")
    except FileNotFoundError:
        logging.error(f"CCP file not found: {local_path}", exc_info=True)
        raise HTTPException(status_code=404, detail="CCP file not found")
    except requests.exceptions.RequestException as e:
        logging.error(f"Request error during CCP file upload for project {payload.pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Request to storage server failed: {str(e)}")
    except Exception as e:
        logging.error(f"Unexpected error during CCP file upload for project {payload.pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error during CCP file upload: {str(e)}")
def cleanup_project_folder(pid: int):
    """Best-effort removal of the staging folder and generated CCP file.

    Never raises: a missing file logs a warning, anything else logs an
    error.
    """
    staging_dir = f'/data/ccp/{pid}'
    ccp_path = f'/data/ccp/{pid}.ccp'
    try:
        shutil.rmtree(staging_dir, ignore_errors=True)
        os.remove(ccp_path)
        logging.info(f"Cleanup completed successfully for project {pid}")
    except FileNotFoundError:
        logging.warning(f"Some files for project {pid} were not found during cleanup.")
    except Exception as e:
        logging.error(f"Failed to delete folder or CCP file for project {pid}: {str(e)}", exc_info=True)
@router.post("/ccp/export")
async def api_project_export(payload: ccp_payload):
    """Export (back up) a project as an encrypted CCP archive.

    Steps: prepare the staging folders, dump the DB tables to CSV, pull
    the CSVs from the API server and clean them up there, pull the
    project's OUTPUT files from the Storage server, encrypt everything
    into <pid>.ccp, record a history entry, upload the archive to the
    Storage server, and finally delete the local staging files.  Each
    step maps failures to its own HTTPException.
    """
    logging.info(f"------ Start project export process for PID {payload.pid} ------")
    # Step 1: Initialize the staging folder structure.
    try:
        logging.info(f"Initializing folder structure for project {payload.pid}")
        os.makedirs(f'/data/ccp/{payload.pid}/DATABASE', exist_ok=True)
        os.makedirs(f'/data/ccp/{payload.pid}/OUTPUT', exist_ok=True)
    except Exception as e:
        logging.error(f"Failed to initialize folder for project {payload.pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to initialize folder: {str(e)}")
    # Step 2: Export the database to CSV.
    try:
        logging.info(f"Exporting database to CSV for project {payload.pid}")
        result = csv_DB.export_csv(payload.pid)
        if not handle_db_result(result):
            raise Exception("Failed to export database")
        logging.info("Database exported successfully")
    except Exception as e:
        logging.error(f"Failed to export database for project {payload.pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to export database: {str(e)}")
    # Step 3: Download the CSV files from the API server.
    try:
        logging.info("Downloading CSV files from API Server")
        api_url = "http://192.168.50.84:70/api/ccp/pull_db"
        response = requests.post(api_url, json={"pid": payload.pid})
        if response.status_code != 200:
            raise Exception(response.json().get("message", "Unknown error"))
        logging.info("CSV files downloaded successfully")
    except Exception as e:
        logging.error(f"Failed to download CSV files: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to download CSV files: {str(e)}")
    # Step 4: Clean up the CSV folder on the API server.
    try:
        logging.info("Cleaning up the CSV folder from API Server")
        cleanup_url = "http://192.168.50.84:70/api/ccp/clean_db"
        response = requests.post(cleanup_url, json={"pid": payload.pid})
        if response.status_code != 200:
            raise Exception(response.json().get("message", "Unknown error"))
        logging.info("CSV folder cleaned up successfully")
    except Exception as e:
        logging.error(f"Failed to clean up CSV folder: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to clean up CSV folder: {str(e)}")
    # Step 5: Download the OUTPUT files from the Storage server.
    try:
        logging.info("Downloading OUTPUT files from Storage Server")
        result = await pull_storage_server(payload.pid, f'/data/ccp/{payload.pid}/OUTPUT')
        if result['RESULT_CODE'] != 200:
            raise Exception(result['RESULT_MSG'])
        logging.info("OUTPUT files downloaded successfully")
    except Exception as e:
        logging.error(f"Failed to download OUTPUT files: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to download OUTPUT files: {str(e)}")
    # Step 6: Encrypt the staging folder into <pid>.ccp.
    try:
        logging.info("Encrypting project folder")
        if not encrypt_ccp_file(payload.pid):
            raise Exception("Failed to encrypt project folder")
        logging.info("Project folder encrypted successfully")
    except Exception as e:
        logging.error(f"Error during encryption: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error during encryption: {str(e)}")
    # Step 7: Record the export in the history table.
    try:
        logging.info("Saving backup history to DB")
        backup_ver = csv_DB.insert_csv_history(payload.pid, payload.univ_id, payload.msg)
        if backup_ver is None:
            raise Exception("Failed to insert backup history record")
        logging.info(f"Backup history recorded successfully as version {backup_ver}")
    except Exception as e:
        logging.error(f"Failed to save backup history: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to save backup history: {str(e)}")
    # Step 8: Upload the CCP file to the Storage server as <pid>_<version>.ccp,
    # where version is the latest history entry just inserted.
    try:
        logging.info("Uploading backup CCP file to Storage Server")
        history = csv_DB.fetch_csv_history(payload.pid)
        version = str(max(record['ver'] for record in history))
        ccp_file_path = f"/data/ccp/{payload.pid}.ccp"
        ccp_file_name = f"{payload.pid}_{version}.ccp"
        storage_url = "http://192.168.50.84:10080/api/ccp/pull"
        with open(ccp_file_path, "rb") as file:
            files = {"file": (ccp_file_name, file, "application/octet-stream")}
            response = requests.post(storage_url, files=files, data={"pid": str(payload.pid), "name": ccp_file_name})
            if response.status_code != 200:
                raise Exception("Failed to upload backup CCP file")
        logging.info(f"Backup CCP file uploaded successfully: {ccp_file_name}")
    except FileNotFoundError:
        logging.error(f"Backup CCP file not found: {ccp_file_path}", exc_info=True)
        raise HTTPException(status_code=404, detail="Backup CCP file not found")
    except requests.exceptions.RequestException as e:
        logging.error(f"Request error during CCP file upload: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Request error during CCP file upload: {str(e)}")
    except Exception as e:
        logging.error(f"Unexpected error during CCP file upload: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error during CCP file upload: {str(e)}")
    # Step 9: Remove the temporary staging folder and CCP file (best-effort;
    # failures are logged but do not fail the export).
    try:
        logging.info("Cleaning up temporary project folder and CCP file")
        shutil.rmtree(f'/data/ccp/{payload.pid}', ignore_errors=True)
        os.remove(f'/data/ccp/{payload.pid}.ccp')
        logging.info("Temporary files deleted successfully")
    except FileNotFoundError:
        logging.warning(f"Some files were already deleted, skipping cleanup.")
    except Exception as e:
        logging.error(f"Failed to clean up project folder: {str(e)}", exc_info=True)
    logging.info(f"------ Project export process completed successfully for PID {payload.pid} ------")
    return {"RESULT_CODE": 200, "RESULT_MSG": f"Project {payload.pid} exported successfully."}
@router.post("/ccp/del_history")
async def api_delete_history(payload: ccp_payload):
    """Delete every history record of the given project."""
    try:
        deleted = csv_DB.delete_csv_history(payload.pid)
        if not deleted:
            raise Exception(f"Failed to delete history for project {payload.pid}")
        logging.info(f"History successfully deleted for project {payload.pid}")
    except Exception as e:
        logging.error(f"Error occurred while deleting history for project {payload.pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error during deletion process: {str(e)}")
    return {"RESULT_CODE": 200, "RESULT_MSG": "History deleted successfully"}
@router.post("/ccp/load_history")
async def api_load_history(payload: ccp_payload):
    """Load the full version history of the given project."""
    try:
        records = csv_DB.fetch_csv_history(payload.pid)
        if not records:
            raise Exception(f"Failed to load history for project {payload.pid}")
        logging.info(f"History successfully loaded for project {payload.pid}, total records: {len(records)}")
    except Exception as e:
        logging.error(f"Error occurred while loading history for project {payload.pid}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error during history load: {str(e)}")
    return {"RESULT_CODE": 200, "RESULT_MSG": "History loaded successfully", "PAYLOAD": records}
@router.post("/ccp/load_history_id")
async def api_load_history_by_univid(payload: ccp_payload):
    """Load restore-history for every project a user participates in, grouped by project.

    For each history row the project name is resolved from (in priority
    order) deleted_project.json, then the user's active project list;
    otherwise it stays None.  Each project's history is sorted newest
    version first.
    """
    try:
        result = csv_DB.fetch_csv_history_by_univid(payload.univ_id)
        # An empty or non-list result is treated as a hard failure (500).
        if not result or not isinstance(result, list):
            raise Exception(f"Failed to load history for user {payload.univ_id}")
        # Best-effort load of the deleted-projects registry; a malformed
        # file just means no deleted-project names are available.
        deleted_file = "deleted_project.json"
        deleted_data = {}
        if os.path.exists(deleted_file):
            try:
                with open(deleted_file, "r", encoding="utf-8") as f:
                    deleted_data = json.load(f)
            except json.JSONDecodeError:
                logging.warning("deleted_project.json is malformed or empty.")
        # Best-effort load of the user's active projects for name lookup.
        try:
            all_projects = project_DB.fetch_project_info(payload.univ_id)
        except Exception as e:
            all_projects = []
            logging.warning(f"Failed to load active project info for univ_id {payload.univ_id}: {e}")
        pname_lookup = {p["p_no"]: p["p_name"] for p in all_projects if "p_no" in p and "p_name" in p}
        # Group history rows by project number (stringified for JSON keys).
        grouped = defaultdict(lambda: {"pname": None, "history": []})
        for entry in result:
            p_no = str(entry["p_no"])
            history_item = {
                "ver": entry["ver"],
                "date": entry["date"],
                "s_no": entry["s_no"],
                "msg": entry["msg"]
            }
            # Name resolution: deleted registry first, then active projects.
            # NOTE(review): deleted_data is keyed by str(p_no) while
            # pname_lookup is keyed by the raw entry["p_no"] — assumes the
            # DB returns p_no in a form matching pname_lookup's keys; confirm.
            if p_no in deleted_data:
                grouped[p_no]["pname"] = deleted_data[p_no].get("pname")
            elif entry["p_no"] in pname_lookup:
                grouped[p_no]["pname"] = pname_lookup[entry["p_no"]]
            else:
                grouped[p_no]["pname"] = None
            grouped[p_no]["history"].append(history_item)
        # Newest version first within each project.
        for p_data in grouped.values():
            p_data["history"].sort(key=lambda x: x["ver"], reverse=True)
        logging.info(f"History successfully loaded for user {payload.univ_id}, total projects: {len(grouped)}")
    except Exception as e:
        logging.error(f"Error occurred while loading history for user {payload.univ_id}: {str(e)}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error during history load: {str(e)}")
    return {
        "RESULT_CODE": 200,
        "RESULT_MSG": "History loaded successfully",
        "PAYLOAD": grouped
    }