Initial commit (Clean history)
This commit is contained in:
0
app/__init__.py
Normal file
0
app/__init__.py
Normal file
BIN
app/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
app/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/__init__.cpython-312.pyc
Normal file
BIN
app/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/__init__.cpython-313.pyc
Normal file
BIN
app/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/admin.cpython-310.pyc
Normal file
BIN
app/__pycache__/admin.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/admin.cpython-312.pyc
Normal file
BIN
app/__pycache__/admin.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/admin.cpython-313.pyc
Normal file
BIN
app/__pycache__/admin.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/api_workflow.cpython-312.pyc
Normal file
BIN
app/__pycache__/api_workflow.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/api_workflow.cpython-313.pyc
Normal file
BIN
app/__pycache__/api_workflow.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/apps.cpython-310.pyc
Normal file
BIN
app/__pycache__/apps.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/apps.cpython-312.pyc
Normal file
BIN
app/__pycache__/apps.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/apps.cpython-313.pyc
Normal file
BIN
app/__pycache__/apps.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/backup.cpython-310.pyc
Normal file
BIN
app/__pycache__/backup.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/backup.cpython-312.pyc
Normal file
BIN
app/__pycache__/backup.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/backup.cpython-313.pyc
Normal file
BIN
app/__pycache__/backup.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/basic.cpython-312.pyc
Normal file
BIN
app/__pycache__/basic.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/basic.cpython-313.pyc
Normal file
BIN
app/__pycache__/basic.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/cleardata.cpython-310.pyc
Normal file
BIN
app/__pycache__/cleardata.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/cleardata.cpython-312.pyc
Normal file
BIN
app/__pycache__/cleardata.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/cleardata.cpython-313.pyc
Normal file
BIN
app/__pycache__/cleardata.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/cob.cpython-310.pyc
Normal file
BIN
app/__pycache__/cob.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/cob.cpython-312.pyc
Normal file
BIN
app/__pycache__/cob.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/cob.cpython-313.pyc
Normal file
BIN
app/__pycache__/cob.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/consumers.cpython-313.pyc
Normal file
BIN
app/__pycache__/consumers.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/contract.cpython-310.pyc
Normal file
BIN
app/__pycache__/contract.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/contract.cpython-312.pyc
Normal file
BIN
app/__pycache__/contract.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/contract.cpython-313.pyc
Normal file
BIN
app/__pycache__/contract.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/document_generator.cpython-310.pyc
Normal file
BIN
app/__pycache__/document_generator.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/document_generator.cpython-312.pyc
Normal file
BIN
app/__pycache__/document_generator.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/document_generator.cpython-313.pyc
Normal file
BIN
app/__pycache__/document_generator.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/email.cpython-310.pyc
Normal file
BIN
app/__pycache__/email.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/email.cpython-312.pyc
Normal file
BIN
app/__pycache__/email.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/email.cpython-313.pyc
Normal file
BIN
app/__pycache__/email.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/importdata.cpython-312.pyc
Normal file
BIN
app/__pycache__/importdata.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/importdata.cpython-313.pyc
Normal file
BIN
app/__pycache__/importdata.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/job.cpython-310.pyc
Normal file
BIN
app/__pycache__/job.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/jobcob.cpython-310.pyc
Normal file
BIN
app/__pycache__/jobcob.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/jobemail.cpython-310.pyc
Normal file
BIN
app/__pycache__/jobemail.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/jobemail.cpython-312.pyc
Normal file
BIN
app/__pycache__/jobemail.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/jobemail.cpython-313.pyc
Normal file
BIN
app/__pycache__/jobemail.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/jobs.cpython-312.pyc
Normal file
BIN
app/__pycache__/jobs.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/jobs.cpython-313.pyc
Normal file
BIN
app/__pycache__/jobs.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/message.cpython-310.pyc
Normal file
BIN
app/__pycache__/message.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/message.cpython-313.pyc
Normal file
BIN
app/__pycache__/message.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/middleware.cpython-310.pyc
Normal file
BIN
app/__pycache__/middleware.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/middleware.cpython-312.pyc
Normal file
BIN
app/__pycache__/middleware.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/middleware.cpython-313.pyc
Normal file
BIN
app/__pycache__/middleware.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/models.cpython-310.pyc
Normal file
BIN
app/__pycache__/models.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/models.cpython-312.pyc
Normal file
BIN
app/__pycache__/models.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/models.cpython-313.pyc
Normal file
BIN
app/__pycache__/models.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/payment.cpython-310.pyc
Normal file
BIN
app/__pycache__/payment.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/payment.cpython-312.pyc
Normal file
BIN
app/__pycache__/payment.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/payment.cpython-313.pyc
Normal file
BIN
app/__pycache__/payment.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/querydict.cpython-310.pyc
Normal file
BIN
app/__pycache__/querydict.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/querydict.cpython-312.pyc
Normal file
BIN
app/__pycache__/querydict.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/querydict.cpython-313.pyc
Normal file
BIN
app/__pycache__/querydict.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/routing.cpython-313.pyc
Normal file
BIN
app/__pycache__/routing.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/schedule.cpython-310.pyc
Normal file
BIN
app/__pycache__/schedule.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/schedule.cpython-312.pyc
Normal file
BIN
app/__pycache__/schedule.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/schedule.cpython-313.pyc
Normal file
BIN
app/__pycache__/schedule.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/server.cpython-310.pyc
Normal file
BIN
app/__pycache__/server.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/server.cpython-312.pyc
Normal file
BIN
app/__pycache__/server.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/server.cpython-313.pyc
Normal file
BIN
app/__pycache__/server.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/signals.cpython-310.pyc
Normal file
BIN
app/__pycache__/signals.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/signals.cpython-312.pyc
Normal file
BIN
app/__pycache__/signals.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/signals.cpython-313.pyc
Normal file
BIN
app/__pycache__/signals.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/socket_handlers.cpython-313.pyc
Normal file
BIN
app/__pycache__/socket_handlers.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/views.cpython-310.pyc
Normal file
BIN
app/__pycache__/views.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/views.cpython-312.pyc
Normal file
BIN
app/__pycache__/views.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/views.cpython-313.pyc
Normal file
BIN
app/__pycache__/views.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/vnpay.cpython-310.pyc
Normal file
BIN
app/__pycache__/vnpay.cpython-310.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflow_actions.cpython-312.pyc
Normal file
BIN
app/__pycache__/workflow_actions.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflow_actions.cpython-313.pyc
Normal file
BIN
app/__pycache__/workflow_actions.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflow_engine.cpython-312.pyc
Normal file
BIN
app/__pycache__/workflow_engine.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflow_engine.cpython-313.pyc
Normal file
BIN
app/__pycache__/workflow_engine.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflow_registry.cpython-312.pyc
Normal file
BIN
app/__pycache__/workflow_registry.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflow_registry.cpython-313.pyc
Normal file
BIN
app/__pycache__/workflow_registry.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflow_utils.cpython-312.pyc
Normal file
BIN
app/__pycache__/workflow_utils.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflow_utils.cpython-313.pyc
Normal file
BIN
app/__pycache__/workflow_utils.cpython-313.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflows.cpython-312.pyc
Normal file
BIN
app/__pycache__/workflows.cpython-312.pyc
Normal file
Binary file not shown.
BIN
app/__pycache__/workflows.cpython-313.pyc
Normal file
BIN
app/__pycache__/workflows.cpython-313.pyc
Normal file
Binary file not shown.
3
app/admin.py
Normal file
3
app/admin.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from django.contrib import admin
|
||||
|
||||
# Register your models here.
|
||||
28
app/api_workflow.py
Normal file
28
app/api_workflow.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from app.workflow_engine import run_workflow
|
||||
from datetime import datetime # Thêm import
|
||||
|
||||
@api_view(["POST"])
def execute_workflow(request):
    """Run a named workflow with the request payload as its context.

    Expects ``workflow_code`` and ``trigger`` in the POST body; every other
    key is passed through to the workflow engine as context, plus an injected
    ``current_date`` (YYYY-MM-DD).

    Returns 400 with an ``error`` message when required fields are missing or
    the workflow raises.
    """
    try:
        workflow_code = request.data.get("workflow_code")
        trigger = request.data.get("trigger")

        # Copy the request data so the workflow context can be mutated freely.
        context = dict(request.data)

        # FIX: inject the current date so serializers can resolve a 'date' field.
        context["current_date"] = datetime.now().strftime("%Y-%m-%d")

        if not workflow_code or not trigger:
            # FIX: use the imported status constant instead of the bare 400
            # literal (the `status` import was otherwise unused).
            return Response(
                {"error": "workflow_code & trigger are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        result = run_workflow(workflow_code, trigger, context)
        return Response({"success": True, "result": result})

    except Exception as e:
        # Surface the failure reason to the caller.
        return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
12
app/apps.py
Normal file
12
app/apps.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class AppConfig(AppConfig):
    """Django application configuration for the ``app`` package.

    NOTE(review): the class deliberately reuses (and shadows) the name of the
    imported ``django.apps.AppConfig`` base; renaming it could break existing
    INSTALLED_APPS / app-config references, so the name is kept as-is.
    """
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'app'

    def ready(self):
        """Run import-time registrations once the app registry is ready."""
        # Importing the module registers workflow actions as a side effect.
        import app.workflow_actions

        # Wire up model signal handlers.
        from . import signals
        signals.connect_signals()
|
||||
181
app/backup.py
Normal file
181
app/backup.py
Normal file
@@ -0,0 +1,181 @@
|
||||
import os, subprocess, logging
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from app.models import *
|
||||
from django.db import close_old_connections
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
static_folder = os.path.join(BASE_DIR, "static")
|
||||
|
||||
#==========================================================================================
|
||||
def backup_postgres(db_name, db_user, db_host, db_port):
    """Backup a PostgreSQL database using pg_dump.

    Dumps the database as plain SQL into ``static/database`` and records the
    run in a ``Backup`` row (Task_Status pk=4 on success, pk=3 on failure).

    Args:
        db_name (str): Name of the database to backup
        db_user (str): Database user
        db_host (str): Database host
        db_port (str): Database port

    Returns:
        dict: ``{"status": "success"}`` on success, otherwise
        ``{"status": "error", "text": <details>}``.
    """
    # Drop stale DB connections before a potentially long-running task.
    close_old_connections()

    # Configure logging for this run.
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s',
        filename="{}/{}/backup_postgres.log".format(static_folder, 'log')
    )
    output_dir = "{}/database".format(static_folder)

    # FIX: bind start_time before the try block so the except handlers below
    # can always reference it, even when an early step (e.g. makedirs) fails.
    start_time = datetime.now()
    try:
        # Create output directory if it doesn't exist
        os.makedirs(output_dir, exist_ok=True)

        # Generate timestamp for backup file
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        backup_file = os.path.join(output_dir, f'{db_name}_backup_{timestamp}.sql')

        # Construct pg_dump command
        pg_dump_cmd = [
            'pg_dump',
            '-U', db_user,
            '-h', db_host,
            '-p', db_port,
            '-F', 'p',  # Plain SQL format
            '-b',       # Include large objects
            '-v',       # Verbose output
            db_name
        ]

        # SECURITY(review): hard-coded database password in source; this
        # should come from settings or an environment variable.
        # Set PGPASSWORD so pg_dump does not prompt interactively.
        os.environ['PGPASSWORD'] = "V59yNLN42a9Q7xT"

        # Execute pg_dump and save output to file
        logging.info(f"Starting backup of database {db_name} to {backup_file}")
        with open(backup_file, 'w') as f:
            subprocess.run(
                pg_dump_cmd,
                stdout=f,
                stderr=subprocess.PIPE,
                universal_newlines=True,
                check=True
            )

        logging.info(f"Backup completed successfully: {backup_file}")
        print(f"Backup saved to {backup_file}")
        backup = Backup(code="DB{}".format(datetime.now().strftime('%Y%m%d%H%M')), name="database-backup",
                        status=Task_Status.objects.get(pk=4), start_time=start_time, end_time=datetime.now(), file=f'{db_name}_backup_{timestamp}.sql')
        backup.save()
        return {"status": "success"}

    except subprocess.CalledProcessError as e:
        logging.error(f"Backup failed: {e.stderr}")
        print(f"Error during backup: {e.stderr}")
        backup = Backup(code="DB{}".format(datetime.now().strftime('%Y%m%d%H%M')), name="database-backup",
                        status=Task_Status.objects.get(pk=3), start_time=start_time, end_time=datetime.now())
        backup.save()
        return {"status": "error", "text": str(e.stderr)}
    except Exception as e:
        logging.error(f"Unexpected error: {str(e)}")
        print(f"Unexpected error: {str(e)}")
        backup = Backup(code="DB{}".format(datetime.now().strftime('%Y%m%d%H%M')), name="database-backup",
                        status=Task_Status.objects.get(pk=3), start_time=start_time, end_time=datetime.now())
        backup.save()
        return {"status": "error", "text": str(e)}
    finally:
        # FIX: always scrub the password from the environment (mirrors
        # restore_postgres, which already did this).
        os.environ.pop('PGPASSWORD', None)
|
||||
|
||||
#==========================================================================================
|
||||
def restore_postgres(db_name, db_user, db_host, db_port, backup_file):
    """Restore a PostgreSQL database from a .sql backup file using psql.

    Args:
        db_name (str): Name of the database to restore
        db_user (str): Database user
        db_host (str): Database host
        db_port (str): Database port
        backup_file (str): Path to the backup .sql file
    """
    # Log to the shared backup/restore log file.
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s',
        filename="{}/{}/backup_postgres.log".format(static_folder, 'log')
    )
    try:
        # The dump must exist before we hand it to psql.
        if not os.path.exists(backup_file):
            raise FileNotFoundError(f"Backup file not found: {backup_file}")

        restore_cmd = [
            'psql',
            '-U', db_user,
            '-h', db_host,
            '-p', db_port,
            '-d', db_name,
            '-f', backup_file,
        ]

        # Provide the password via the environment so psql does not prompt.
        os.environ['PGPASSWORD'] = "V59yNLN42a9Q7xT"  # input("Enter database password: ")

        logging.info(f"Starting restore of database {db_name} from {backup_file}")
        subprocess.run(
            restore_cmd,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            check=True,
        )

        logging.info(f"Restore completed successfully for database {db_name}")
        print(f"Database {db_name} restored successfully from {backup_file}")

    except subprocess.CalledProcessError as e:
        logging.error(f"Restore failed: {e.stderr}")
        print(f"Error during restore: {e.stderr}")
    except FileNotFoundError as e:
        logging.error(str(e))
        print(str(e))
    except Exception as e:
        logging.error(f"Unexpected error: {str(e)}")
        print(f"Unexpected error: {str(e)}")
    finally:
        # Always remove the password from the process environment.
        os.environ.pop('PGPASSWORD', None)
|
||||
|
||||
#==========================================================================================
|
||||
def delete_old_dbbackup():
    """Delete successful database-backup files older than seven days.

    Each removed row is annotated with a note so it is not processed again.
    """
    # Drop stale DB connections first.
    close_old_connections()

    # Anything created on or before this date is eligible for deletion.
    cutoff_date = (datetime.now() - timedelta(days=7)).date()
    old_backups = Backup.objects.filter(name='database-backup', create_time__date__lte=cutoff_date, status=4)
    for backup in old_backups:
        path = "{}/database/{}".format(static_folder, backup.file)
        try:
            os.remove(path)
            print("Đã xóa file:", path)
            backup.note = 'file was deleted'
            backup.save()
        except FileNotFoundError:
            print("File không tồn tại:", path)
        except PermissionError:
            print("Không có quyền xóa file:", path)
        except Exception as e:
            print("Lỗi khác:", e)
|
||||
|
||||
#==========================================================================================
|
||||
@api_view(['GET', 'POST'])
def backup_database(request):
    """API endpoint that triggers a database backup.

    GET uses the built-in default connection parameters; POST reads
    db_name/db_user/db_host/db_port from the request body.
    """
    if request.method == 'GET':
        result = backup_postgres("y99", "postgres", "107.155.65.79", "5423")
    else:
        data = request.data
        result = backup_postgres(data["db_name"], data["db_user"], data["db_host"], data["db_port"])
    return Response(result)
|
||||
158
app/basic.py
Executable file
158
app/basic.py
Executable file
@@ -0,0 +1,158 @@
|
||||
from datetime import datetime
|
||||
import csv
|
||||
|
||||
def empty(value):
    """Return True when *value* is None or an empty string."""
    # FIX: the original compared against both '' and "" (identical literals)
    # and used `== None`; collapse to the idiomatic identity/equality check.
    return value is None or value == ''
|
||||
|
||||
|
||||
def isnumber(value):
    """Return True when *value* is non-empty and can be parsed as a float."""
    if empty(value):
        return False
    try:
        float(value)
        return True
    except (TypeError, ValueError):
        # FIX: narrow the bare `except:` to the errors float() actually
        # raises, so KeyboardInterrupt/SystemExit are no longer swallowed.
        return False
|
||||
|
||||
|
||||
def isdate(value):
    """Return True when the first 10 characters of *value* parse as YYYY-MM-DD."""
    try:
        datetime.strptime(value[0:10], '%Y-%m-%d')
    except Exception as e:
        # Same behaviour as the original: report the parse failure and say no.
        print(e)
        return False
    return True
|
||||
|
||||
|
||||
def formatdate(value):
    """Return the YYYY-MM-DD date prefix of *value*, or None when unparseable."""
    try:
        parsed = datetime.strptime(value[0:10], '%Y-%m-%d')
    except (TypeError, ValueError):
        # FIX: narrow the bare `except:` to the errors slicing/parsing raise.
        return None
    return str(parsed.date())
|
||||
|
||||
|
||||
def getattr(obj, attr):
    """Return ``obj[attr]`` when the key is present, else None.

    NOTE(review): deliberately shadows the builtin ``getattr`` and performs a
    mapping lookup, not attribute access; callers in this module (isvalid)
    rely on the dict-style behaviour, so the name is kept.
    """
    if attr in obj:
        return obj[attr]
    return None
|
||||
|
||||
|
||||
def isvalid(ele, obj):
    """Return True when *ele* matches every key/value pair in *obj*."""
    for key, expected in obj.items():
        # `getattr` here is this module's dict-style lookup, not the builtin.
        if getattr(ele, key) != expected:
            return False
    return True
|
||||
|
||||
|
||||
def find(arr, obj, attr=None):
    """Return the first element of *arr* matching *obj*, or None if none does."""
    return next((ele for ele in arr if isvalid(ele, obj)), None)
|
||||
|
||||
|
||||
def find_index(arr, obj, attr=None):
    """Return the index of the first element of *arr* matching *obj*, or None."""
    # FIX: use enumerate instead of a manually maintained counter.
    for index, ele in enumerate(arr):
        if isvalid(ele, obj):
            return index
    return None
|
||||
|
||||
|
||||
def filter(arr, obj, attr=None):
    """Return every element of *arr* matching *obj*.

    NOTE(review): shadows the builtin ``filter``; the name is kept for
    backward compatibility with existing callers.
    """
    return [ele for ele in arr if isvalid(ele, obj)]
|
||||
|
||||
|
||||
def write_csv(file, data, header):
    """Write *data* (an iterable of mappings) to *file* as CSV.

    Args:
        file: destination path.
        data: iterable of mappings keyed by the entries of *header*.
        header: ordered list of column names; also written as the first row.
    """
    # FIX: open with newline='' as the csv module requires (prevents blank
    # rows on Windows); drop the stray debug print and the redundant
    # f.close() that sat inside the `with` block.
    with open(file, 'w', encoding='UTF8', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(header)
        for row in data:
            writer.writerow([row[column] for column in header])
|
||||
|
||||
def execute_data_query(name, params):
    """
    Execute a data query for the model named *name* using the options in
    *params* (filter / values / summary / paging / sort / ...), reusing the
    same query-building logic as the API view.

    Returns a dict ``{'total_rows', 'full_data', 'rows'}``, or None when the
    model name is unknown.
    """
    from app.views import get_serializer, base_query, final_result, get_limit_rows
    from django.db.models import Q, F
    import ast

    Model, serializer_class = get_serializer(name)
    if Model is None:
        return None

    # Pull the query options out of the `params` dict.
    # NOTE(review): string-valued options ('filter', 'exclude', ...) are
    # parsed with ast.literal_eval below, i.e. they are expected to be
    # Python-literal dicts, not JSON.
    filter_str = params.get('filter')
    values = params.get('values')
    values = values if values==None else values.split(',')
    summary = params.get('summary')
    page = int(params.get('page', 1))
    onpage = int(params.get('perpage')) if params.get('perpage') != None else None
    sort = params.get('sort')
    sort = None if sort==None else sort.split(',')
    distinct_values = params.get('distinct_values')
    filter_or = params.get('filter_or')
    exclude = params.get('exclude')
    calculation = params.get('calculation')
    final_filter = params.get('final_filter')
    final_exclude = params.get('final_exclude')

    # Build the combined filter expression: OR clauses first, then AND.
    filter_list = Q()
    if filter_or != None:
        for key, value in ast.literal_eval(filter_or).items():
            filter_list.add(Q(**{key: value}), Q.OR)

    if filter_str != None:
        for key, value in ast.literal_eval(filter_str).items():
            if isinstance(value, dict) == True:
                # {'type': 'F', 'field': ...} means "compare against another
                # model field" (F-expression).
                if value['type'] == 'F':
                    filter_list.add(Q(**{key: F(value['field'])}), Q.AND)
            else:
                filter_list.add(Q(**{key: value}), Q.AND)

    # Run the base queryset, then apply exclusions (same F-expression
    # convention as above).
    rows = Model.objects.all() if len(filter_list) == 0 else Model.objects.filter(filter_list)
    if exclude != None:
        exclude_list = Q()
        for key, value in ast.literal_eval(exclude).items():
            if isinstance(value, dict) == True:
                if value['type'] == 'F':
                    exclude_list.add(Q(**{key: F(value['field'])}), Q.AND)
            else:
                exclude_list.add(Q(**{key: value}), Q.AND)
        rows = rows.exclude(exclude_list)

    # Delegate projection/aggregation and post-processing to the view helpers.
    rows, need_serializer = base_query(rows, values, summary, distinct_values)
    rows = final_result(rows, calculation, final_filter, final_exclude, sort)

    # Initialize total_rows and full_data
    total_rows = 0
    full_data = False

    if summary == 'count':
        # For a count query `rows` is already the scalar count.
        total_rows = rows
        full_data = True
        rows = total_rows
    elif summary == 'aggregate':
        total_rows = 1
        full_data = True
    else:
        # Regular listing: apply pagination.
        total_rows, full_data, rows = get_limit_rows(rows, page, onpage)

    if need_serializer == True:
        rows = serializer_class(rows, many=True).data
    else:
        # values()/aggregate results bypass the serializer; round-trip through
        # DjangoJSONEncoder so dates/Decimals become JSON-safe values.
        import json
        from django.core.serializers.json import DjangoJSONEncoder
        rows = json.loads(json.dumps(list(rows), cls=DjangoJSONEncoder))

    return {'total_rows': total_rows, 'full_data': full_data, 'rows': rows}
|
||||
94
app/cleardata.py
Normal file
94
app/cleardata.py
Normal file
@@ -0,0 +1,94 @@
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from app.models import *
|
||||
|
||||
|
||||
#==========================================================================================
|
||||
# @api_view(['POST'])
|
||||
# def data_deletion(request):
|
||||
# if request.method != 'POST':
|
||||
# return Response(status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# #check password
|
||||
# try:
|
||||
# password = request.data['password']
|
||||
# hash = request.data['hash']
|
||||
# if password != "igyRr8HOW5e7G4PE09hso3nrLKPMPcGqpYlnkZ2zmqKBxoo4Pg":
|
||||
# return Response(status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# if hash != "05c62b974e358cb9a61d0418e5ccb271b9e9a76b240e878f4509fc1e7536114e":
|
||||
# return Response(status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# Customer_Note.objects.all().delete()
|
||||
# Contract.objects.all().delete()
|
||||
# Customer_File.objects.all().delete()
|
||||
# Customer_People.objects.all().delete()
|
||||
# Customer.objects.all().delete()
|
||||
# Email_Sent.objects.all().delete()
|
||||
# Entry_File.objects.all().delete()
|
||||
# Internal_Entry.objects.all().delete()
|
||||
# Account_Book.objects.all().delete()
|
||||
|
||||
# Staff_File.objects.all().delete()
|
||||
# Staff.objects.all().delete()
|
||||
|
||||
# Message_Receiver.objects.all().delete()
|
||||
# Message.objects.all().delete()
|
||||
# Email_Sent.objects.all().delete()
|
||||
|
||||
# Branch.objects.all().update(manager=None, signature=None)
|
||||
|
||||
# People_File.objects.all().delete()
|
||||
# People.objects.all().delete()
|
||||
# Company.objects.all().delete()
|
||||
|
||||
# default_user = User.objects.filter(username='support@y99.vn').first()
|
||||
# News.objects.all().update(user=default_user)
|
||||
|
||||
# Token.objects.all().delete()
|
||||
# User_Apps.objects.exclude(user__username__in=['bigdatatech', 'support@y99.vn', 'admin@y99.vn']).delete()
|
||||
|
||||
# File.objects.exclude(user__username__in=['bigdatatech', 'support@y99.vn', 'admin@y99.vn']).delete()
|
||||
# User.objects.exclude(username__in=['bigdatatech', 'support@y99.vn', 'admin@y99.vn']).delete()
|
||||
|
||||
# for row in Internal_Account.objects.all():
|
||||
# row.balance = 0
|
||||
# row.save()
|
||||
|
||||
# return Response(status=status.HTTP_200_OK)
|
||||
# except Exception as e:
|
||||
# print(e)
|
||||
# return Response(status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
#==========================================================================================
|
||||
@api_view(['GET', 'POST'])
def data_deletion(request):
    """Irreversibly wipe transactional data and reset account balances.

    Deletes sent emails, accounting entries, account books, contracts,
    bookings and transactions, then zeroes every Internal_Account balance.

    WARNING(review): this endpoint is reachable via plain GET and performs no
    password/hash verification (unlike the disabled variant above) — confirm
    this is intentional before deploying.
    """
    #check password
    try:
        # Customer-related deletions are currently disabled.
        # Customer_Note.objects.all().delete()
        # Contract.objects.all().delete()
        # Customer_File.objects.all().delete()
        # Customer_People.objects.all().delete()
        # Customer.objects.all().delete()
        Email_Sent.objects.all().delete()
        Entry_File.objects.all().delete()
        Internal_Entry.objects.all().delete()
        Account_Book.objects.all().delete()
        Email_Sent.objects.all().delete()  # NOTE(review): duplicate of the delete above; redundant but harmless.

        Contract.objects.all().delete()
        Product_Booked.objects.all().delete()
        Transaction_File.objects.all().delete()
        Transaction_Detail.objects.all().delete()
        Transaction.objects.all().delete()

        # Reset every internal account balance to zero.
        for row in Internal_Account.objects.all():
            row.balance = 0
            row.save()

        return Response(status=status.HTTP_200_OK)
    except Exception as e:
        print(e)
        return Response(status=status.HTTP_400_BAD_REQUEST)
|
||||
91
app/cob.py
Normal file
91
app/cob.py
Normal file
@@ -0,0 +1,91 @@
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from app.models import *
|
||||
from django.db.models import F, Q, Count, Sum, FloatField
|
||||
from datetime import datetime, timedelta
|
||||
from django.db import close_old_connections
|
||||
|
||||
#==========================================================================================
|
||||
def account_book(system_date):
    """Write or refresh the Account_Book row of every internal account for
    *system_date*, aggregating that day's credit (type=1) and debit (type=2)
    entries.
    """
    for account in Internal_Account.objects.all():
        credit_filter = {'date': system_date, 'account': account.id, 'type': 1}
        debit_filter = {'date': system_date, 'account': account.id, 'type': 2}
        # FIX: the aggregate keyword is `output_field`, not `output` — Django
        # stored `output` as an unused extra template kwarg, so the intended
        # float coercion never happened.
        aggregates = Internal_Entry.objects.all().values('account').annotate(
            credit = Sum(F('amount'), filter=Q(**credit_filter), output_field=FloatField()),
            debit = Sum(F('amount'), filter=Q(**debit_filter), output_field=FloatField()),
            number_credit = Count(F('id'), filter=Q(**credit_filter), output_field=FloatField()),
            number_debit = Count(F('id'), filter=Q(**debit_filter), output_field=FloatField())
        )
        entry = next(iter(aggregates), None)
        book = Account_Book.objects.filter(system_date=system_date, account=account.id).first()
        if book:
            # Update the existing row for this day.
            book.end_balance = account.balance
            book.update_time = datetime.now()
            if entry:
                book.credit = entry['credit']
                book.debit = entry['debit']
                book.number_credit = entry['number_credit']
                book.number_debit = entry['number_debit']
            book.save()
        else:
            # First write for this day: carry forward yesterday's end balance.
            previous_date = system_date - timedelta(days=1)
            previous = Account_Book.objects.filter(system_date=previous_date, account=account.id).first()
            start_balance = previous.end_balance if previous else 0
            book = Account_Book(system_date=system_date, account=account, current_date=datetime.now().date(), start_balance=start_balance,
                                end_balance=account.balance, create_time=datetime.now(), update_time=datetime.now())
            if entry:
                book.credit = entry['credit']
                book.debit = entry['debit']
                book.number_credit = entry['number_credit']
                book.number_debit = entry['number_debit']
            book.save()
|
||||
|
||||
#==========================================================================================
|
||||
def run_cob():
    """Run close-of-business: roll the system date forward one day at a time
    (writing account books and batch logs) until it catches up with the
    current calendar date (or the next day, after 23:00).
    """
    # close old connections
    close_old_connections()
    try:
        print(f'===START COB===: {str(datetime.now())}')
        mode = System_Setting.objects.filter(category='system', classify='mode', code='status').first()
        # Switch the system into batch mode while COB runs.
        mode.detail = 'cob'
        mode.save()
        row = System_Setting.objects.filter(category='system', classify='current', code='date').first()
        system_date = row.detail
        sys_date = datetime.strptime(system_date, "%Y-%m-%d").date()
        current_date = datetime.now().date()
        # After 23:00, treat the next calendar day as "today".
        if datetime.now().hour >= 23:
            current_date = current_date + timedelta(days=1)
        # Number of days the system date lags behind.
        days = (sys_date - current_date).days
        # FIX: initialise `log` so the post-loop bookkeeping cannot raise
        # NameError when the system date is already up to date (loop skipped).
        log = None
        while days <= 0:
            # Record a batch-log row for this processing day.
            log = Batch_Log(system_date=sys_date, start_time=datetime.now(), status=Task_Status.objects.filter(code='running').first())
            log.save()
            # write account book
            account_book(sys_date)
            row.detail = str(sys_date)
            row.save()
            log.status = Task_Status.objects.filter(code='success').first()
            log.save()
            sys_date = sys_date + timedelta(days=1)
            days = (sys_date - current_date).days
        # Return the system to active mode.
        mode.detail = 'active'
        mode.save()
        if log is not None:
            log.end_time = datetime.now()
            log.duration = int((log.end_time - log.start_time).total_seconds())
            # NOTE(review): `log.task` looks like it may be a typo for
            # `log.status` (already set to 'success' above) — confirm against
            # the Batch_Log model before changing.
            log.task = Task_Status.objects.filter(code='success').first()
            log.save()
        print(f'===END COB===: {str(datetime.now())}')

    except Exception as e:
        print(f"Error RUN-COB: {e}")
|
||||
|
||||
#==========================================================================================
|
||||
@api_view(['GET', 'POST'])
def close_of_business(request):
    """API endpoint that triggers the close-of-business batch synchronously.

    Always returns HTTP 200; errors inside run_cob are logged there.
    """
    run_cob()
    return Response(status=status.HTTP_200_OK)
|
||||
107
app/consumers.py
Normal file
107
app/consumers.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import json
|
||||
from channels.generic.websocket import AsyncJsonWebsocketConsumer
|
||||
from channels.db import database_sync_to_async
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from .basic import execute_data_query
|
||||
|
||||
class DataConsumer(AsyncJsonWebsocketConsumer):
    """WebSocket consumer serving per-model data subscriptions.

    A client sends {"action": "subscribe", "payload": {"name": ..., "params": ...}}
    and receives the initial query result, then realtime updates broadcast by
    model signals through the channel-layer group "model_<name>_updates".
    """

    async def encode_json(self, content):
        """
        Encode the given content as JSON, using Django's encoder
        to handle dates, decimals, etc.
        """
        return json.dumps(content, cls=DjangoJSONEncoder)

    async def connect(self):
        # Per-connection subscription state.
        self.subscribed_groups = set()
        self.subscription_params = {}  # e.g., {'Product': {'filter': '...', 'values': '...'}}
        await self.accept()

    async def disconnect(self, close_code):
        # Leave every channel-layer group joined during this connection.
        for group_name in self.subscribed_groups:
            await self.channel_layer.group_discard(group_name, self.channel_name)

    async def receive_json(self, content):
        # Only the "subscribe" action is supported; anything else is ignored.
        action = content.get("action")
        if action == "subscribe":
            await self.handle_subscribe(content.get("payload", {}), content.get("request_id"))

    async def handle_subscribe(self, payload, request_id):
        """Register a subscription, send the initial data set, and join the update group."""
        model_name = payload.get("name")
        params = payload.get("params", {})

        if not model_name:
            await self.send_json({"type": "error", "request_id": request_id, "message": "Model name is required."})
            return

        # Store subscription params for this client
        self.subscription_params[model_name] = params

        # Run the initial data query
        data = await database_sync_to_async(execute_data_query)(model_name, params)

        # Send the initial result back to the client
        await self.send_json({
            "type": "subscription_response",
            "request_id": request_id,
            "data": data
        })

        # Join the group using a lowercase model name to match the signal
        group_name = f"model_{model_name.lower()}_updates"
        if group_name not in self.subscribed_groups:
            await self.channel_layer.group_add(group_name, self.channel_name)
            self.subscribed_groups.add(group_name)

    async def realtime_update(self, event):
        """Channel-layer handler: forward a model update to this client when it
        matches the client's subscription filter, trimmed to requested values."""
        # Move imports inside the method to prevent AppRegistryNotReady error on startup
        import ast
        from django.db.models import Q
        from .views import get_serializer

        payload = event["payload"]
        record = payload["record"]
        model_name_lower = payload["name"]
        # NOTE(review): subscriptions are keyed by the name the client sent;
        # capitalize() is assumed to reproduce that key — confirm this holds
        # for multi-word model names.
        model_name_capitalized = model_name_lower.capitalize()

        # 1. Get this client's subscription parameters for the specific model
        client_params = self.subscription_params.get(model_name_capitalized)
        if not client_params:
            return  # This client is not subscribed to this model.

        # 2. Check if the updated record matches the client's filter
        filter_str = client_params.get('filter')
        if filter_str:
            try:
                Model, _ = get_serializer(model_name_lower)
                if not Model:
                    return

                # Build an AND of all filter conditions from the literal dict
                # string the client supplied at subscribe time.
                filter_q = Q()
                filter_dict = ast.literal_eval(filter_str)
                for key, value in filter_dict.items():
                    filter_q.add(Q(**{key: value}), Q.AND)

                matches = await database_sync_to_async(
                    Model.objects.filter(pk=record["id"]).filter(filter_q).exists
                )()

                if not matches:
                    return  # Record does not match the client's filter, so don't send.
            except Exception:
                return  # Fail silently if filter is invalid or DB check fails.

        # 3. Create a tailored payload, respecting the 'values' parameter
        payload_for_client = payload.copy()
        values_str = client_params.get('values')
        if values_str:
            requested_values = values_str.split(',')
            # The record from the signal contains all fields. Filter it down.
            filtered_record = {key: record.get(key) for key in requested_values if key in record}
            payload_for_client['record'] = filtered_record

        # 4. Send the final, tailored payload to the client
        await self.send_json({
            "type": "realtime_update",
            "payload": payload_for_client
        })
|
||||
344
app/contract.py
Normal file
344
app/contract.py
Normal file
@@ -0,0 +1,344 @@
|
||||
import os
|
||||
from docx import Document
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from app.models import *
|
||||
from datetime import datetime
|
||||
from num2words import num2words
|
||||
from docx.shared import Inches, Pt
|
||||
from docx.enum.text import WD_ALIGN_PARAGRAPH
|
||||
import numpy as np
|
||||
import mammoth
|
||||
import subprocess
|
||||
|
||||
#=============================================================================
|
||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
static_folder = os.path.join(BASE_DIR, "static")
|
||||
|
||||
#=============================================================================
|
||||
def replace_text(doc, old_text, new_text):
    """Replace old_text with new_text in every body-paragraph run and in
    footer runs; PAGE-number fields in footers are left untouched.

    Note: a placeholder split across multiple runs will not be matched.
    """
    for paragraph in doc.paragraphs:
        for run in paragraph.runs:
            if old_text in run.text:
                run.text = run.text.replace(old_text, new_text)

    for section in doc.sections:
        for paragraph in section.footer.paragraphs:
            for run in paragraph.runs:
                # Skip PAGE fields (detected via the run's raw XML).
                if "PAGE" in run._element.xml:
                    continue
                if old_text in run.text:
                    run.text = run.text.replace(old_text, new_text)
|
||||
|
||||
#=============================================================================
|
||||
def base_replace(doc, date):
    """Fill the common date placeholders ([day], [month], [year], [start_date])."""
    replacements = {
        "[day]": str(date.day),
        "[month]": str(date.month),
        "[year]": str(date.year),
        "[start_date]": date.strftime("%d/%m/%Y"),
    }
    for placeholder, value in replacements.items():
        replace_text(doc, placeholder, value)
|
||||
|
||||
#=============================================================================
|
||||
def convert_docx_to_html(docx_path, html_path):
    """Convert a .docx file to a standalone UTF-8 HTML file using mammoth."""
    with open(docx_path, "rb") as docx_file:
        html = mammoth.convert_to_html(docx_file).value  # HTML fragment

    # Wrap the fragment in a minimal document with an explicit UTF-8 charset.
    html_with_encoding = f"""<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
</head>
<body>
{html}
</body>
</html>"""
    with open(html_path, "w", encoding="utf-8") as html_file:
        html_file.write(html_with_encoding)
|
||||
|
||||
#=============================================================================
|
||||
def docx_to_pdf(input_path, output_dir=None):
    """Convert input_path to PDF with headless LibreOffice.

    The PDF is written next to the source file unless output_dir is given.
    Raises subprocess.CalledProcessError when the conversion command fails.
    """
    if output_dir is None:
        output_dir = os.path.dirname(os.path.abspath(input_path))

    command = [
        "libreoffice",
        "--headless",          # run without opening the GUI
        "--convert-to", "pdf",
        "--outdir", output_dir,
        input_path,
    ]
    subprocess.run(command, check=True)
|
||||
|
||||
#=============================================================================
|
||||
def insert_image_after_keyword(doc, keywords, image_path, full_name, time):
    """Insert a signature image, the signer's bold full name, and a timestamp
    into every table cell whose text contains one of *keywords*.

    Failures are logged and swallowed so document generation can continue.

    Fixes: stray ``break;`` semicolon removed; the image file's existence is
    checked up front with a clear message (consistent with the version in
    document_generator.py) instead of raising inside python-docx.
    """
    # Guard: python-docx raises when the image file is missing; fail early
    # with an explicit message instead.
    if not os.path.exists(image_path):
        print("==INSERT IMAGE ERROR==", "File not found:", image_path)
        return

    try:
        for table in doc.tables:
            for row in table.rows:
                for cell in row.cells:
                    for para in cell.paragraphs:
                        for keyword in keywords:
                            if keyword in para.text:
                                print('==TEXT FOUNDED==', keyword, image_path, full_name)
                                # Signature image, centered.
                                p = cell.add_paragraph()
                                p.alignment = WD_ALIGN_PARAGRAPH.CENTER
                                p.paragraph_format.space_before = Pt(2)
                                p.add_run().add_picture(image_path, width=Inches(2))

                                # Signer's full name in bold.
                                p = cell.add_paragraph()
                                p.alignment = WD_ALIGN_PARAGRAPH.CENTER
                                p.paragraph_format.space_before = Pt(2)
                                run = p.add_run(full_name)
                                run.bold = True

                                # Signing date.
                                p = cell.add_paragraph()
                                p.alignment = WD_ALIGN_PARAGRAPH.CENTER
                                p.paragraph_format.space_before = Pt(2)
                                p.add_run(time)

                                break  # at most one insertion per paragraph
    except Exception as e:
        print("==INSERT IMAGE ERROR==", e)
|
||||
|
||||
#=============================================================================
|
||||
@api_view(['GET'])
def create_contract(request):
    """Generate the contract document set for an application.

    Query params:
        code: Application.code identifying the application (required).

    Fills the contract/agreement/commitment/pawn/confirmation .docx templates
    with application, collateral and related-person data, stamps the
    customer's signature image when one exists, converts every output to PDF,
    and stores the produced file manifest on the application's Contract row.

    Returns 400 when the code is missing or unknown, 200 on success.

    Fixes: ``== None``/``!= None`` replaced with identity comparisons, the
    dead commented-out 'unsecured' branch and its vacuous ``if True:`` wrapper
    removed, local ``singed`` renamed to ``signed``, and the five-fold
    duplicated replace/insert call sites consolidated into loops.
    """
    code = request.query_params.get('code')
    if code is None:
        return Response(status=status.HTTP_400_BAD_REQUEST)

    # Application fields pulled into a flat dict; each name doubles as the
    # [placeholder] token replaced inside the templates.
    columns = ['code', 'fullname', 'phone', 'create_time', 'customer__dob', 'legal_code', 'issue_date', 'issue_place', 'product__type__code',
               'address', 'approve_amount', 'approve_term', 'address', 'loanapp__code', 'loanapp__rate_info', 'customer',
               'loanapp__valid_from', 'loanapp__valid_to', 'product__category__code', 'loanapp__beneficiary_account', 'loanapp__beneficiary_bank']
    application = Application.objects.filter(code=code).values(*columns).first()
    if application is None:
        return Response(status=status.HTTP_400_BAD_REQUEST)

    # An existing contract (if any) carries the customer's captured signature.
    contract = Contract.objects.filter(application__code=code).first()
    cust_people = Customer_People.objects.filter(customer=application['customer'])
    signed = contract.signature if contract is not None else None
    keywords = ["Sign, full name)"]

    # contract
    docx_path = static_folder + '/contract/contract.docx'
    output_path = static_folder + "/contract/{}_contract.docx".format(code)
    doc = Document(docx_path)
    base_replace(doc, application['loanapp__valid_from'])

    # agreement: phones/laptops use a dedicated template
    docx_agree = static_folder + '/contract/agreement.docx'
    if application['product__category__code'] == 'phone' or application['product__category__code'] == 'laptop':
        docx_agree = static_folder + '/contract/agreement_phone.docx'

    output_agree = static_folder + "/contract/{}_agreement.docx".format(code)
    doc_agree = Document(docx_agree)
    base_replace(doc_agree, application['loanapp__valid_from'])

    # commitment
    docx_commit = static_folder + '/contract/commitment.docx'
    output_commit = static_folder + "/contract/{}_commitment.docx".format(code)
    doc_commit = Document(docx_commit)
    base_replace(doc_commit, application['loanapp__valid_from'])

    # pawn
    docx_pawn = static_folder + '/contract/pawn.docx'
    output_pawn = static_folder + "/contract/{}_pawn.docx".format(code)
    doc_pawn = Document(docx_pawn)
    base_replace(doc_pawn, application['loanapp__valid_from'])

    # confirmation
    docx_confirm = static_folder + '/contract/confirmation.docx'
    output_confirm = static_folder + "/contract/{}_confirmation.docx".format(code)
    doc_confirm = Document(docx_confirm)
    base_replace(doc_confirm, application['loanapp__valid_from'])

    # All five documents, in the order they were previously processed.
    all_docs = (doc, doc_agree, doc_commit, doc_pawn, doc_confirm)

    # Replace every application placeholder in every document.
    for text in columns:
        value = application[text]
        if text.find('date') >= 0 or text.find('dob') >= 0 or text.find('__valid') >= 0:
            value = value.strftime("%d/%m/%Y")
        elif text.find('amount') >= 0:
            # Vietnamese thousands separator: 1,000,000 -> 1.000.000
            value = "{:,}".format(value).replace(",", ".")
        elif text == 'create_time':
            value = application['loanapp__valid_from'].strftime("%d/%m/%Y")
        elif text == 'approve_term':
            # Approximate term in months between the loan validity dates.
            value = str(int(round(((application['loanapp__valid_to'] - application['loanapp__valid_from']).days) / 30, 0)))

        value = ' ' if value is None else value
        for d in all_docs:
            replace_text(d, "[{}]".format(text), str(value))

    # Amount in words (Vietnamese everywhere; English only on the pawn form).
    replace_text(doc, '[inword]', num2words(application['approve_amount'], lang='vi'))
    replace_text(doc_confirm, '[inword]', num2words(application['approve_amount'], lang='vi'))
    replace_text(doc_pawn, '[inword]', num2words(application['approve_amount'], lang='vi'))
    replace_text(doc_pawn, '[inword_en]', num2words(application['approve_amount'], lang='en'))

    # Disbursement checkboxes: bank transfer vs. payout at the office.
    if application['loanapp__beneficiary_account']:
        replace_text(doc_confirm, '[tickoffice]', "☐")
        replace_text(doc_confirm, '[tickbank]', "✓")
    else:
        replace_text(doc_confirm, '[tickoffice]', "✓")
        replace_text(doc_confirm, '[tickbank]', "☐")

    # Collateral placeholders (blanked out when the loan has no collateral).
    mapping = ['collateral__seri_number', 'collateral__code', 'collateral__appraisal_value', 'collateral__type__name',
               'collateral__detail', 'collateral__status__name', 'collateral__vehicle_number', 'collateral__engine_number', 'collateral__year_mfg']
    collateral = Loan_Collateral.objects.filter(loan__code=application['loanapp__code']).values(*mapping).first()
    if collateral:
        for text in mapping:
            value = collateral[text]
            if text.find('value') >= 0:
                value = "{:,}".format(np.int64(value)).replace(",", ".")
            value = '' if value is None else value
            for d in all_docs:
                replace_text(d, "[{}]".format(text), str(value))
    else:
        for text in mapping:
            for d in all_docs:
                replace_text(d, "[{}]".format(text), ' ')

    # Related-person placeholders on the pawn form (up to 4 contacts).
    for i in [1, 2, 3, 4]:
        text1 = '[fullname{}]'.format(i)
        text2 = '[phone{}]'.format(i)
        text3 = '[relation{}]'.format(i)
        people_info = cust_people[i-1] if i <= cust_people.count() else None
        if people_info:
            replace_text(doc_pawn, text1, people_info.people.fullname)
            replace_text(doc_pawn, text2, people_info.people.phone)
            replace_text(doc_pawn, text3, people_info.relation.name)
        else:
            replace_text(doc_pawn, text1, ' ')
            replace_text(doc_pawn, text2, ' ')
            replace_text(doc_pawn, text3, ' ')

    # Stamp the captured signature (image + name + date) into every document.
    if signed:
        file_name = static_folder + '/files/' + contract.signature.file
        print("Signed", file_name)
        for d in all_docs:
            insert_image_after_keyword(d, keywords, file_name, application['fullname'], contract.update_time.strftime("%d/%m/%Y"))

    # Save the .docx outputs, then convert each one to PDF.
    doc.save(output_path)
    doc_agree.save(output_agree)
    doc_commit.save(output_commit)
    doc_pawn.save(output_pawn)
    doc_confirm.save(output_confirm)

    docx_to_pdf(output_path)
    docx_to_pdf(output_agree)
    docx_to_pdf(output_commit)
    docx_to_pdf(output_pawn)
    docx_to_pdf(output_confirm)

    # Manifest of the generated files, stored on the Contract row.
    arr = [{"code": 'contract', "name": 'Hợp đồng', "en": "Contract", "file": "{}_contract.docx".format(code), "pdf": "{}_contract.pdf".format(code)},
           {"code": 'agreement', "name": 'Thỏa thuận', "en": "Agreement", "file": "{}_agreement.docx".format(code), "pdf": "{}_agreement.pdf".format(code)},
           {"code": 'commitment', "name": 'Cam kết', "en": "Commitment", "file": "{}_commitment.docx".format(code), "pdf": "{}_commitment.pdf".format(code)},
           {"code": 'pawn', "name": 'Cầm cố', "en": "Pawn", "file": "{}_pawn.docx".format(code), "pdf": "{}_pawn.pdf".format(code)},
           {"code": 'confirmation', "name": 'Xác nhận', "en": "Confirmation", "file": "{}_confirmation.docx".format(code), "pdf": "{}_confirmation.pdf".format(code)}]

    # Create or update the Contract row with the document manifest.
    contract = Contract.objects.filter(application__code=code).first()
    if contract:
        contract.document = arr
    else:
        application = Application.objects.filter(code=code).first()
        contract = Contract(application=application, content="{} contract".format(code), document=arr)
    contract.save()
    return Response(status=status.HTTP_200_OK)
|
||||
415
app/document_generator.py
Normal file
415
app/document_generator.py
Normal file
@@ -0,0 +1,415 @@
|
||||
import os
|
||||
import subprocess
|
||||
from datetime import datetime
|
||||
|
||||
import numpy as np
|
||||
from docx import Document
|
||||
from docx.enum.text import WD_ALIGN_PARAGRAPH
|
||||
from docx.shared import Inches, Pt
|
||||
from django.apps import apps
|
||||
from num2words import num2words
|
||||
from django.conf import settings
|
||||
from app.models import Document_Configuration
|
||||
|
||||
# =============================================================================
|
||||
# Constants
|
||||
# =============================================================================
|
||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
static_folder = os.path.join(settings.BASE_DIR, "static")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Utility Functions
|
||||
# =============================================================================
|
||||
|
||||
def replace_text_in_doc(doc, old_text, new_text):
    """Replace occurrences of old_text with new_text throughout the document,
    correctly handling placeholders that are split across multiple runs.

    Covers body paragraphs, table cells, and footers (footer paragraphs
    containing PAGE fields are skipped entirely).
    """
    new_text = str(new_text) if new_text is not None else ""

    def replace_in_paragraph(para):
        # Work on the concatenated text of all runs so a placeholder split
        # over run boundaries is still found.
        runs = list(para.runs)
        full_text = ''.join(run.text for run in runs)
        if old_text not in full_text:
            return

        start_idx = full_text.find(old_text)
        if start_idx == -1:
            return

        # Collect every run overlapping [start_idx, start_idx + len(old_text)).
        current_pos = 0
        runs_to_modify = []
        for run in runs:
            run_len = len(run.text)
            run_start = current_pos
            run_end = current_pos + run_len
            current_pos = run_end

            if run_start < start_idx + len(old_text) and run_end > start_idx:
                runs_to_modify.append(run)

        if not runs_to_modify:
            return

        first_run = runs_to_modify[0]
        first_run_index = next(i for i, r in enumerate(runs) if r is first_run)

        # Offset of the match within the first affected run's text.
        local_start = start_idx - sum(len(runs[i].text) for i in range(first_run_index))

        remaining_old = old_text

        # Strip the matched characters out of each affected run, keeping any
        # text before/after the match in place (formatting of each run is kept).
        for i, run in enumerate(runs_to_modify):
            run_text = run.text
            if i == 0:
                prefix = run_text[:local_start]
                remove_len = min(len(remaining_old), len(run_text) - local_start)
                suffix = run_text[local_start + remove_len:]
                run.text = prefix + suffix
                remaining_old = remaining_old[remove_len:]
            else:
                remove_len = min(len(remaining_old), len(run_text))
                suffix = run_text[remove_len:]
                run.text = suffix
                remaining_old = remaining_old[remove_len:]

        # Insert the replacement at the match position in the first run,
        # inheriting that run's formatting.
        first_run = runs_to_modify[0]
        first_run.text = first_run.text[:local_start] + new_text + first_run.text[local_start:]

        # Recurse to replace any further occurrences in this paragraph.
        # NOTE(review): if new_text itself contains old_text this recursion
        # would not terminate — confirm callers never pass such values.
        replace_in_paragraph(para)

    for para in doc.paragraphs:
        replace_in_paragraph(para)

    for table in doc.tables:
        for row in table.rows:
            for cell in row.cells:
                for para in cell.paragraphs:
                    replace_in_paragraph(para)

    for section in doc.sections:
        footer = section.footer
        for para in footer.paragraphs:
            # Leave paragraphs holding PAGE-number fields untouched.
            if any("PAGE" in run._element.xml for run in para.runs):
                continue
            replace_in_paragraph(para)
|
||||
|
||||
|
||||
def docx_to_pdf(input_path, output_dir=None):
    """Converts a .docx file to .pdf using LibreOffice, handling non-zero exit codes gracefully."""
    if output_dir is None:
        output_dir = os.path.dirname(os.path.abspath(input_path))

    # Expected output location: same basename with a .pdf extension.
    pdf_path = os.path.join(output_dir, os.path.basename(input_path).replace(".docx", ".pdf"))

    command = [
        "libreoffice",
        "--headless",
        "--convert-to",
        "pdf",
        "--outdir",
        output_dir,
        input_path,
    ]

    try:
        result = subprocess.run(command, timeout=60, capture_output=True, text=True)

        if result.returncode != 0:
            # Log the warning/error from LibreOffice
            print(f"WARNING: libreoffice command returned non-zero exit code ({result.returncode}) for {input_path}.")
            print(f" STDOUT: {result.stdout}")
            print(f" STDERR: {result.stderr}")

            # Check if the PDF was created anyway
            produced = os.path.exists(pdf_path) and os.path.getsize(pdf_path) > 0
            if not produced:
                # This is a real failure
                raise Exception(f"PDF conversion failed and output file was not created. STDERR: {result.stderr}")
            print(f"INFO: PDF file was created successfully despite the non-zero exit code.")

    except FileNotFoundError:
        print("ERROR: libreoffice command not found. Please ensure it is installed and in your PATH.")
        raise
    except Exception as e:
        # Re-raise other exceptions (like timeout)
        print(f"ERROR: An unexpected error occurred during PDF conversion for {input_path}. Error: {e}")
        raise
|
||||
|
||||
|
||||
def insert_image_after_keyword(doc, keywords, image_path, full_name, time):
    """Finds a keyword in a table and inserts an image and text after it."""
    if not os.path.exists(image_path):
        print(f"==INSERT IMAGE ERROR== File not found: {image_path}")
        return

    def _stamp(cell):
        # Image, bold name, then timestamp — each centered in its own paragraph.
        img_para = cell.add_paragraph()
        img_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
        img_para.add_run().add_picture(image_path, width=Inches(1.5))

        name_para = cell.add_paragraph()
        name_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
        name_para.add_run(full_name).bold = True

        time_para = cell.add_paragraph()
        time_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
        time_para.add_run(time)

    try:
        for table in doc.tables:
            for row in table.rows:
                for cell in row.cells:
                    for para in cell.paragraphs:
                        if any(keyword in para.text for keyword in keywords):
                            _stamp(cell)
                            # Only the first matching cell is stamped.
                            return
    except Exception as e:
        print(f"==INSERT IMAGE ERROR== {e}")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Document Generator Class
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class DocumentGenerator:
|
||||
    def __init__(self, document_code, context_pks: dict):
        """Prepare a generator for one configured document.

        Args:
            document_code: code of the Document_Configuration row to load.
            context_pks: primary-key values supplied by the API caller, keyed
                by the names the configuration's mappings reference.

        Raises:
            ValueError: if no configuration exists for document_code
                (raised by _get_config).
        """
        self.document_code = document_code
        self.context_pks = context_pks
        self.config = self._get_config()
        # Fetched model instances/lists, keyed by mapping alias (see fetch_data).
        self.data_context = {}
        # Placeholder replacements; populated by later steps not shown in this chunk.
        self.replacements = {}
|
||||
|
||||
    def _get_config(self):
        """Load this generator's Document_Configuration row.

        Raises:
            ValueError: if no configuration with self.document_code exists.
        """
        try:
            return Document_Configuration.objects.get(code=self.document_code)
        except Document_Configuration.DoesNotExist:
            raise ValueError(f"Document configuration '{self.document_code}' not found.")
|
||||
|
||||
    def _get_model(self, model_string):
        """Resolve an 'app_label.ModelName' string to its model class via Django's app registry."""
        app_label, model_name = model_string.split(".")
        return apps.get_model(app_label, model_name)
|
||||
|
||||
def _resolve_lookup_value(self, lookup_from):
|
||||
if lookup_from in self.context_pks:
|
||||
return self.context_pks[lookup_from]
|
||||
|
||||
try:
|
||||
alias, field_path = lookup_from.split(".", 1)
|
||||
if alias not in self.data_context:
|
||||
raise ValueError(f"Alias '{alias}' not found in data context.")
|
||||
|
||||
source_object = self.data_context.get(alias)
|
||||
return self._get_value_from_object(source_object, field_path)
|
||||
except ValueError:
|
||||
raise ValueError(f"Could not resolve '{lookup_from}'. It is not a valid API parameter or a reference to another data source.")
|
||||
|
||||
def _get_value_from_object(self, obj, field_path):
|
||||
if obj is None:
|
||||
return None
|
||||
value = obj
|
||||
for part in field_path.replace("__", ".").split("."):
|
||||
if value is None:
|
||||
return None
|
||||
value = getattr(value, part, None)
|
||||
return value
|
||||
|
||||
def fetch_data(self):
|
||||
if not isinstance(self.config.mappings, list):
|
||||
raise TypeError("Document configuration 'mappings' must be a list.")
|
||||
|
||||
for mapping in self.config.mappings:
|
||||
model_cls = self._get_model(mapping["model"])
|
||||
lookup_field = mapping["lookup_field"]
|
||||
lookup_value = self._resolve_lookup_value(mapping["lookup_value_from"])
|
||||
alias = mapping["alias"]
|
||||
|
||||
if lookup_value is None:
|
||||
self.data_context[alias] = None if mapping["type"] == "object" else []
|
||||
continue
|
||||
|
||||
queryset = model_cls.objects.filter(**{lookup_field: lookup_value})
|
||||
|
||||
if mapping["type"] == "object":
|
||||
self.data_context[alias] = queryset.first()
|
||||
elif mapping["type"] == "list":
|
||||
self.data_context[alias] = list(queryset)
|
||||
|
||||
def _format_value(self, value, format_config, obj=None):
    """Format ``value`` for template insertion per ``format_config``.

    ``format_config`` is either a format-type string or a dict with a
    "type" key; dicts may chain further formats via a "next" key (e.g.
    an "expression" whose numeric result is then "currency"-formatted).
    ``obj`` supplies attribute values for "expression" formats.
    Always returns a string; None becomes "".
    """
    if value is None:
        return ""

    def apply_format(val, fmt, obj):
        # Apply a single format step; unknown types return val unchanged.
        fmt_type = fmt.get("type") if isinstance(fmt, dict) else fmt
        if fmt_type == "currency":
            try:
                # Ensure val is numeric before formatting, rounding to an integer
                num_val = int(round(float(val), 0))
                # Vietnamese-style currency (dot as thousands separator)
                return "{:,}".format(num_val).replace(",", ".")
            except Exception:
                return str(val)
        if fmt_type == "date":
            # Translate config-style tokens (dd/mm/YYYY) to strftime codes.
            date_format = fmt.get("format", "%d/%m/%Y").replace("dd", "%d").replace("mm", "%m").replace("YYYY", "%Y")
            try:
                return val.strftime(date_format)
            except Exception:
                return str(val)
        if fmt_type == "number_to_words":
            try:
                return num2words(val, lang=fmt.get("lang", "en"))
            except Exception:
                return str(val)
        if fmt_type == "conditional":
            return fmt.get("true_value") if val else fmt.get("false_value")
        if fmt_type == "computed_months":
            # Approximate month count between two looked-up dates (days / 30).
            start_date = self._resolve_lookup_value(fmt.get("start_date_from"))
            end_date = self._resolve_lookup_value(fmt.get("end_date_from"))
            if start_date and end_date:
                # Type-check to avoid errors on non-date values
                import datetime
                if not isinstance(start_date, datetime.date): return ""
                if not isinstance(end_date, datetime.date): return ""

                return str(int(round(((end_date - start_date).days) / 30, 0)))
            return ""
        if fmt_type == "expression":
            # Evaluate an arithmetic expression over attributes of ``obj``.
            # NOTE(review): this uses eval(); builtins are stripped, but the
            # expression comes from document config — confirm that config is
            # trusted (admin-only), otherwise this is an injection risk.
            expr = fmt.get("expr")
            if not expr:
                return ""
            import re
            tokens = re.findall(r"[a-zA-Z0-9_\.]+", expr)
            local_dict = {}
            for token in tokens:
                # Dotted/double-underscore tokens traverse relations;
                # bare tokens are direct attributes of obj.
                if "__" in token or "." in token:
                    val2 = self._get_value_from_object(obj, token)
                else:
                    val2 = getattr(obj, token, None)

                try:
                    val_str = str(val2) if val2 is not None else ""

                    val_to_parse = val_str.replace(',', '')

                    # Empty string parses as 0
                    local_dict[token] = float(val_to_parse) if val_to_parse else 0

                except Exception:
                    local_dict[token] = 0

            try:
                result = eval(expr, {"__builtins__": None}, local_dict)

                # Round the result to an integer (per earlier requirement).
                # Return the numeric result so a chained 'next' formatter
                # (e.g. currency) can process it further.
                return int(round(result, 0)) if isinstance(result, (int, float)) else result

            except Exception as e:
                return 0
        return val

    # Apply nested formats via the 'next' key.
    cur_fmt = format_config
    val = value

    # This loop guarantees an 'expression' format runs first and its raw
    # numeric result is passed on to the chained format (e.g. 'currency').
    while isinstance(cur_fmt, dict) and cur_fmt.get("next") is not None:
        val = apply_format(val, cur_fmt, obj)
        cur_fmt = cur_fmt["next"]

    val = apply_format(val, cur_fmt, obj)
    return str(val)
def prepare_replacements(self):
    """Build the placeholder -> replacement-text map for the template.

    Fills ``self.replacements`` from today's date plus every mapping's
    fetched data in ``self.data_context`` (populated by fetch_data()).
    """
    # Set base date replacements
    today = datetime.now()
    self.replacements['[day]'] = str(today.day)
    self.replacements['[month]'] = str(today.month)
    self.replacements['[year]'] = str(today.year)
    self.replacements['[date]'] = today.strftime("%d/%m/%Y")

    for mapping in self.config.mappings:
        alias = mapping["alias"]
        data = self.data_context.get(alias)

        if mapping["type"] == "object":
            if data is None:
                # Missing object: blank out every placeholder it owns.
                for placeholder in mapping["fields"]:
                    self.replacements[placeholder] = ""
                continue
            for placeholder, config in mapping["fields"].items():
                # A dict config carries a source path plus a format spec;
                # a plain string config is just the source path.
                if isinstance(config, dict):
                    value = self._get_value_from_object(data, config["source"])
                    self.replacements[placeholder] = self._format_value(value, config["format"], data)
                else:
                    value = self._get_value_from_object(data, config)
                    self.replacements[placeholder] = str(value) if value is not None else ""

        elif mapping["type"] == "list":
            items = data or []
            max_items = mapping.get("max_items", 4)
            # Emit exactly max_items slots; slots past the list's length
            # get empty strings so unused template rows are blanked.
            for i in range(max_items):
                item = items[i] if i < len(items) else None
                for p_template, config in mapping["fields"].items():
                    # '{index}' in the placeholder becomes the 1-based slot number.
                    placeholder = p_template.replace("{index}", str(i + 1))
                    if item is None:
                        self.replacements[placeholder] = ""
                        continue
                    if isinstance(config, dict):
                        value = self._get_value_from_object(item, config["source"])
                        self.replacements[placeholder] = self._format_value(value, config["format"], item)
                    else:
                        value = self._get_value_from_object(item, config)
                        self.replacements[placeholder] = str(value) if value is not None else ""
def generate(self, signature_info=None, output_filename=None):
    """Fetch data, fill the DOCX template, and save DOCX + PDF outputs.

    signature_info: optional dict with 'keywords', 'file_path',
        'full_name', 'timestamp' used to insert a signature image.
    output_filename: optional caller-chosen name; '.docx' is enforced.

    Returns a dict with the document code, name, and output file names.
    Raises FileNotFoundError if the template file is missing.
    """
    self.fetch_data()
    self.prepare_replacements()

    # Remove leading slashes from template_path to prevent os.path.join issues
    clean_template_path = self.config.template_path.lstrip('/')
    template_full_path = os.path.join(static_folder, clean_template_path)
    if not os.path.exists(template_full_path):
        raise FileNotFoundError(f"Template file not found at: {template_full_path}")

    # --- FILENAME LOGIC ---
    if output_filename:
        # Use user-provided filename, ensure it has the correct extension
        if not output_filename.endswith(".docx"):
            base_name = os.path.splitext(output_filename)[0]
            output_filename = f"{base_name}.docx"
    else:
        # Use a more descriptive output filename (original logic)
        pk_values = "_".join(self.context_pks.values())
        output_filename = f"{self.document_code}_{pk_values}_{int(datetime.now().timestamp())}.docx"

    output_dir = os.path.join(static_folder, "contract")
    os.makedirs(output_dir, exist_ok=True)
    output_path = os.path.join(output_dir, output_filename)
    pdf_filename = output_filename.replace(".docx", ".pdf")

    doc = Document(template_full_path)

    # Substitute every prepared placeholder throughout the document.
    for old_text, new_text in self.replacements.items():
        replace_text_in_doc(doc, old_text, new_text)

    if signature_info:
        insert_image_after_keyword(
            doc,
            signature_info["keywords"],
            signature_info["file_path"],
            signature_info["full_name"],
            signature_info["timestamp"],
        )

    doc.save(output_path)
    # Convert the saved DOCX to a PDF alongside it.
    docx_to_pdf(output_path, output_dir)

    return {
        "code": self.document_code,
        "name": self.config.name,
        "file": output_filename,
        "pdf": pdf_filename,
    }
149
app/email.py
Normal file
149
app/email.py
Normal file
@@ -0,0 +1,149 @@
|
||||
from app.views import get_serializer
|
||||
from app.models import *
|
||||
from email.mime.text import MIMEText
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from email.utils import formataddr
|
||||
import smtplib
|
||||
|
||||
|
||||
#=============================================================================
|
||||
def send_via_zeptomail(receiver, subject, content, sender):
    """Send an HTML email to each ';'-separated recipient via ZeptoMail SMTP.

    Logs the attempt as an Email_Sent row whose status is updated:
    pk=1 pending, pk=2 sent, pk=3 failed. The display name / from-address
    come from the Email_Setup row identified by ``sender``.
    Returns True on success, False on any failure.
    """
    port = 587
    smtp_server = "smtp.zeptomail.com"
    username = "emailapikey"
    # SECURITY: this API key is committed to source control — rotate the
    # credential and load it from settings/environment instead.
    password = "wSsVR60l/0LwDf8szz2vc+o/mF5WB1PyRE4ri1Wm7nevGvjK9cdtlUWbV1LyG6UdEG8/HDZBp7x8kBoH1joKjNwkzFFUCiiF9mqRe1U4J3x17qnvhDzMWGhbmxCIKY4JxglskmZhFs1u"

    # Log the attempt with the initial "pending" status.
    obj = Email_Sent(subject=subject, receiver=receiver, content=content, status=Send_Status.objects.get(pk=1))
    try:
        info = Email_Setup.objects.get(pk=sender)
        recipients = receiver.split(";")

        obj.save()
        sent_list = []
        for email in recipients:
            message = MIMEMultipart("alternative")
            message["Subject"] = subject
            message["From"] = formataddr((info.display_name, info.email))
            message["To"] = email
            # HTML body part; clients render the last attached part first.
            message.attach(MIMEText(content, "html"))

            # FIX: the original opened one SMTP connection per recipient and
            # never closed any of them — the context manager QUITs/closes.
            with smtplib.SMTP(smtp_server, port) as server:
                server.starttls()
                server.login(username, password)
                server.send_message(message)
            sent_list.append(email)

        obj.status = Send_Status.objects.get(pk=2)
        obj.save()
        result = True

    except Exception as ex:
        print(ex)
        obj.status = Send_Status.objects.get(pk=3)
        obj.save()
        result = False

    return result
|
||||
#=============================================================================
|
||||
def send_one_by_one(receiver, subject, content, sender):
    """Send the HTML email separately to each ';'-separated recipient
    using the SMTP settings of the Email_Setup row ``sender``.

    Logs the attempt as an Email_Sent row whose status is updated:
    pk=1 pending, pk=2 sent, pk=3 failed.
    Returns True on success, False on any failure.
    """
    # Log the attempt with the initial "pending" status.
    obj = Email_Sent(subject=subject, receiver=receiver, content=content, status=Send_Status.objects.get(pk=1))
    try:
        info = Email_Setup.objects.get(pk=sender)
        recipients = receiver.split(";")

        obj.save()
        sent_list = []
        for email in recipients:
            message = MIMEMultipart("alternative")
            message["Subject"] = subject
            message["From"] = formataddr((info.display_name, info.email))
            message["To"] = email
            # HTML body part; clients render the last attached part first.
            message.attach(MIMEText(content, "html"))

            # FIX: the original opened one SMTP connection per recipient and
            # never closed any of them — the context manager QUITs/closes.
            with smtplib.SMTP(info.smtp, info.port) as server:
                server.starttls()
                server.login(info.email, info.password)
                server.sendmail(info.email, email, message.as_string())
            sent_list.append(email)

        obj.status = Send_Status.objects.get(pk=2)
        obj.save()
        result = True

    except Exception as ex:
        print(ex)
        obj.status = Send_Status.objects.get(pk=3)
        obj.save()
        result = False

    return result
||||
|
||||
|
||||
#=============================================================================
|
||||
def send_all(receiver, subject, content, sender):
    """Send one HTML email with all ';'-separated addresses on the To header.

    Logs the result through the Email_Sent serializer (status 2 = sent,
    3 = failed). Returns True on success, False on any failure.
    """
    # FIX: keep a printable recipient string available even if building the
    # message fails — the original read message["To"] after the except
    # block, which raised NameError when the exception fired early.
    to_header = receiver
    try:
        info = Email_Setup.objects.get(pk=sender)
        message = MIMEMultipart("alternative")
        message["Subject"] = subject
        message["From"] = formataddr((info.display_name, info.email))
        recipients = receiver.split(";")
        to_header = ", ".join(recipients)
        message["To"] = to_header

        # HTML body part; clients render the last attached part first.
        message.attach(MIMEText(content, "html"))

        # FIX: close the SMTP connection when done (the original leaked it).
        with smtplib.SMTP(info.smtp, info.port) as server:
            server.starttls()
            server.login(info.email, info.password)
            server.sendmail(info.email, recipients, message.as_string())
        result = True
    except Exception as ex:
        print(ex)
        result = False

    # Record the outcome regardless of success/failure.
    Model, serializer_class = get_serializer('Email_Sent')
    data = {"subject": subject, "receiver": to_header, "content": content, "status": 2 if result == True else 3}
    serializer = serializer_class(data=data)
    if serializer.is_valid():
        serializer.save()
    else:
        print(serializer.errors)
    return result
||||
#=============================================================================
|
||||
@api_view(['POST'])
def send_email(request):
    """POST endpoint: dispatch an email via ZeptoMail and return 204."""
    payload = request.data
    subject = payload.get('subject')
    receiver = payload.get('to')
    content = payload.get('content')
    # Sender defaults to Email_Setup pk=1 unless the request names one.
    sender = payload.get('sender') if 'sender' in payload else 1

    # send email
    send_via_zeptomail(receiver, subject, content, sender)
    return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
58
app/email_base_template.html
Normal file
58
app/email_base_template.html
Normal file
@@ -0,0 +1,58 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>{{subject}}</title>
|
||||
</head>
|
||||
<body>
|
||||
<div style="padding-top: 20px; padding-bottom: 20px; width: 100%; background-color: #eeeeee; font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;">
|
||||
<table role="presentation" cellspacing="0" cellpadding="0" border="0" style="width: 100%; max-width: 100%; margin: 0 auto;">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td>
|
||||
<table role="presentation" cellspacing="0" cellpadding="0" border="0" style="margin: 0 auto; width: 100%; max-width: 680px; background-color: #ffffff; border-radius: 16px; box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1); overflow: hidden;">
|
||||
<tbody>
|
||||
<tr class="header-row">
|
||||
<td style="padding: 0;">
|
||||
<div class="header-image" style="background: #f3f4f6; position: relative; width: 100%; height: 425px; overflow: hidden;">
|
||||
<img src="{{image_url}}" alt="image" style="width: 100%; height: 100%; object-fit: cover; display: block;" />
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td style="padding: 0px 25px 20px;">
|
||||
<div style="border: 1px solid #e5e7eb; border-radius: 12px; background-color: #ffffff; overflow: hidden;">
|
||||
<div style="padding: 15px;">
|
||||
<div style="font-size: 16px; margin: 0; color: #374151; line-height: 1.6; word-break: break-word;">
|
||||
{{body_content}}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td style="padding: 0px 25px 20px;">
|
||||
<div class="company-info" style="background: linear-gradient(to right, #000000, #0f9b0f); color: #ffffff; border-radius: 12px;">
|
||||
<div style="width: 100%; height: 100%; margin-bottom: 15px; background-color: #ffffff; border-radius: 8px; overflow: hidden;">
|
||||
<img src="https://api.bigdatatech.vn/static/files/20251113051227-1.png" alt="Utopia Footer" style="width: 100%; height: 100%; object-fit: cover; display: block;" />
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td style="background-color: #f9fafb; padding: 20px; border-top: 1px solid #e5e7eb; text-align: center;">
|
||||
<p style="color: #9ca3af; font-size: 11px; margin: 0; line-height: 1.5;">
|
||||
©2025 Utopia. Tất cả các quyền được bảo lưu.
|
||||
</p>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
408
app/importdata.py
Executable file
408
app/importdata.py
Executable file
@@ -0,0 +1,408 @@
|
||||
import os, pandas, json
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from app.views import get_serializer
|
||||
from app.basic import *
|
||||
from datetime import datetime
|
||||
|
||||
#=============================================================================
|
||||
# Project root (two levels up from this file) and upload locations.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
static_folder = os.path.join(BASE_DIR, "static")
# Trailing slash is relied on by callers doing ``upload_folder + filename``.
upload_folder = static_folder + "/files/"
||||
|
||||
#=============================================================================
|
||||
def check_fields(columns, fields):
    """Return the names in ``fields`` that are missing from ``columns``.

    ``columns`` is the uploaded sheet's schema field list; ``fields`` are
    the required field names. Uses the project ``find`` helper to match
    by the 'name' key.
    """
    missing = []
    for name in fields:
        # FIX: replaced the `x.append(...) if cond else True` expression-
        # statement antipattern with a plain conditional.
        if find(columns, {'name': name}) is None:
            missing.append(name)
    return missing
||||
|
||||
|
||||
def field_related(name, columns):
    """Attach the related model name to each API-backed column config.

    ``columns`` maps field name -> config dict; for every config with a
    truthy 'api' entry, sets config['model'] to the name of the model the
    corresponding field of ``name`` relates to. Returns ``columns``.
    """
    Model, serializer_class = get_serializer(name)
    fields = []
    for field in Model._meta.get_fields():
        obj = {"name": field.name, "related": field.related_model.__name__ if field.related_model else None}
        fields.append(obj)

    for key, value in columns.items():
        # FIX: value is a dict, so getattr(value, 'api') raised
        # AttributeError — use dict.get instead.
        if value.get('api'):
            found = find(fields, {"name": key})
            value['model'] = found['related']
    return columns
||||
|
||||
|
||||
def check_data(data, fields):
    """Validate and normalize imported rows in place against field configs.

    Empty cells become None; number/date cells are checked and converted.
    On the first failing row, marks it with an 'error' message (Vietnamese,
    shown to the user) and returns ('error', [row]); otherwise (None, data).
    """
    for key, value in fields.items():
        for row in data:
            row[key] = None if empty(row[key]) == True else row[key]
            # FIX: field configs and rows are dicts — the original used
            # getattr() on them, which raises AttributeError; use .get().
            if (value.get('empty') == 'no' or value.get('api') != None) and empty(row[key]) == True:
                row['error'] = key + ' không được bỏ trống'

            elif value.get('type') == 'number':
                if empty(row[key]) == False and isnumber(row[key]) == False:
                    row['error'] = key + ' không phải là số'
                elif value.get('api') != None:
                    # API-related ids must be integers for the FK lookup.
                    row[key] = int(row[key])

            elif value.get('type') == 'date':
                if empty(row[key]) == False and isdate(row[key]) == False:
                    row['error'] = key + ' không đúng định dạng YYYY-MM-DD'
                else:
                    row[key] = formatdate(row[key])

            # Stop at the first invalid row and surface just that row.
            if row.get('error'):
                return 'error', [row]
    return None, data
||||
|
||||
|
||||
def validate_key(name, data, columns):
    """Resolve related-model references and existing-record ids for rows.

    For API-backed columns, replaces the human value with the related
    row's id (keeping the original under '_<key>' for duplicate checks).
    For key columns, looks up whether the record already exists and sets
    row['id'] accordingly. Returns (error, data, keys).
    """
    keys = []
    related = []
    for key, value in columns.items():
        # FIX: column configs are dicts — the original called getattr()
        # on them, which raises AttributeError; use dict.get instead.
        if value.get('key') == 'yes':
            keys.append({"key": key, "value": value})
        if value.get('api') != None:
            related.append({"key": key, "value": value})

    Model, serializer_class = get_serializer(name)
    fields = []
    for field in Model._meta.get_fields():
        obj = {"name": field.name, "related": field.related_model.__name__ if field.related_model else None}
        fields.append(obj)

    error = False
    if len(related) > 0:
        for row in data:
            for obj in related:
                key = obj['key']
                value = obj['value']
                name = value['column']
                f = {name: row[key]}
                field = find(fields, {'name': value['field']})
                Model1, serializer_class = get_serializer(field['related'])
                obj = Model1.objects.filter(**f).values('id').first()
                if obj:
                    # Keep the original value under '_<key>' for duplicate checks.
                    row['_' + key] = str(row[key])
                    row[key] = obj['id']
                else:
                    row['error'] = 'Không tồn tại ' + key + ' = ' + str(row[key])
                    error = True

    if error == False and len(keys) > 0:
        for row in data:
            f = {}
            for obj in keys:
                key = obj['key']
                value = obj['value']
                if 'api' in value:
                    name = value['field']
                    f[name] = row[key]
                else:
                    f[key] = row[key]
            obj = Model.objects.filter(**f).values('id').first()
            row['id'] = obj['id'] if obj != None else None
    return error, data, keys
||||
|
||||
|
||||
def validate_duplicate(data, keys):
    """Return new rows (id is None) that share identical key values.

    Builds a '#'-joined signature per row from the key columns (using the
    preserved '_<key>' value for API-backed columns), counts occurrences,
    and collects the rows behind any signature seen more than once,
    tagging the first with an error message.
    """
    arr = [o for o in data if o['id'] == None]
    ele = {}
    for row in arr:
        attr = ''
        for o in keys:
            # FIX: o['value'] is a dict — getattr() raised AttributeError;
            # use dict.get instead.
            key = '_' + o['key'] if o['value'].get('api') != None else o['key']
            attr += str(row[key]) + '#'
        # Drop the trailing '#'.
        attr = attr[0: len(attr) - 1]
        # FIX: ele is a dict — getattr(ele, attr) raised AttributeError.
        counter = 0 if ele.get(attr) == None else ele.get(attr)
        ele[attr] = counter + 1
    array = []
    for key, value in ele.items():
        if value > 1:
            # Rebuild the per-column filter from the signature parts.
            arr1 = key.split('#')
            f = {}
            count = 0
            for o in keys:
                key = '_' + o['key'] if o['value'].get('api') != None else o['key']
                f[key] = int(arr1[count]) if o['value'].get('type') == 'number' else arr1[count]
                count += 1
            arr2 = filter(arr, f)
            if len(arr2) > 0:
                arr2[0]['error'] = str(arr1) + ' trùng ' + str(len(arr2)) + ' dòng'
                array += arr2
    return array
||||
|
||||
|
||||
def bulk_insert(data, fields, logcode):
    """Insert new rows (id is None) in batches of 500 via the serializer.

    Updates the Import_Log row ``logcode`` with running counters.
    Returns (error, error_count, success_count, count).
    """
    Model, serializer_class = get_serializer(logcode.model)
    count = 0
    batch = []
    success_count = 0
    error_count = 0
    arr = [o for o in data if o['id'] == None]
    total_records = len(arr)
    error = None
    try:
        for row in arr:
            count += 1
            batch.append(row)
            # Flush every 500 rows, and once more for the final partial batch.
            if count % 500 == 0 or count == total_records:
                print(count)
                serializer = serializer_class(data = batch, many=True)
                if serializer.is_valid():
                    serializer.save()
                    success_count += len(batch)
                else:
                    # A failed batch counts entirely as errors; the last
                    # error set wins in ``error``.
                    print(serializer.errors)
                    error = serializer.errors
                    error_count += len(batch)
                batch = []
                # Persist running counters so progress is visible live.
                logcode.success_count = success_count
                logcode.error_count = error_count
                logcode.total = count
                logcode.save()

    except Exception as e:
        print('error', e)
        error = e
    return error, error_count, success_count, count
||||
|
||||
|
||||
def bulk_update(data, fields, logcode):
    """Update existing rows (id is not None) in batches of 500.

    Counters on the Import_Log row ``logcode`` accumulate on top of what
    bulk_insert already recorded. Returns
    (error, error_count, success_count, count).
    """
    Model, serializer_class = get_serializer(logcode.model)
    count = 0
    batch = []
    array = []
    success_count = 0
    error_count = 0
    error = None
    # Start from the counters bulk_insert left behind.
    succ_count = 0 if logcode.success_count == None else logcode.success_count
    err_count = 0 if logcode.error_count == None else logcode.error_count
    total = 0 if logcode.total == None else logcode.total
    arr = [o for o in data if o['id'] != None]
    total_records = len(arr)
    try:
        for obj in arr:
            count += 1
            batch.append(obj)
            # Flush every 500 rows, and once more for the final partial batch.
            if count % 500 == 0 or count == total_records:
                qs = Model.objects.filter(id__in= [o['id'] for o in batch])
                for row in qs:
                    found = find(batch, {'id': row.id})
                    for key, value in fields.items():
                        # FIX: field configs are dicts — the original used
                        # getattr(value, 'api'), which raises AttributeError;
                        # use dict.get instead.
                        if value.get('api') != None:
                            # FK column: assign the related instance.
                            Model1, serializer_class1 = get_serializer(value['model'])
                            setattr(row, key, Model1.objects.get(id=found[key]))
                        else:
                            setattr(row, key, found[key])
                    array.append(row)
                print(count)
                try:
                    Model.objects.bulk_update(array, list(fields.keys()))
                    success_count += len(batch)
                except Exception as e:
                    error_count += len(batch)
                    error = e
                batch = []
                array = []
                # Persist running counters so progress is visible live.
                logcode.success_count = succ_count + success_count
                logcode.error_count = err_count + error_count
                logcode.total = total + count
                logcode.save()
    except Exception as e:
        print('error', e)
        error = e
    return error, error_count, success_count, count
||||
|
||||
|
||||
def open_file(model, filename, fields, isfull=False):
    """Read an uploaded Excel file and validate it against ``fields``.

    Returns a dict carrying either an 'error' key ('lack-fields' or
    'data-error') or the validated rows — capped at 1000 unless
    ``isfull`` is True. Implicitly returns None when reading raises;
    callers must be prepared for that.
    """
    try:
        # NOTE(review): reads from "Upload/" while bulk_upload saves to
        # upload_folder (static/files/) — confirm these resolve to the same
        # place (e.g. symlink/cwd), otherwise this never finds the file.
        df = pandas.read_excel("Upload/" + filename, dtype=str, na_filter=False)
        # Round-trip through JSON 'table' orient to get schema + rows.
        data = df.to_json(path_or_buf= None, orient='table', force_ascii=False)
        data = json.loads(data)
        lack = check_fields(data['schema']['fields'], fields)
        if len(lack) > 0:
            return {'error': 'lack-fields', 'fields': lack}
        fields = field_related(model, fields)
        result, rows = check_data(data['data'], fields)
        if result != None:
            return {'error': 'data-error', 'data': rows, 'fields': data['schema']['fields']}
        else:
            return {'data': rows if isfull else rows[:1000], 'fields': data['schema']['fields'], 'total': len(rows)}
    except Exception as e:
        # Swallows the error and falls through to an implicit None return.
        print(e)
||||
|
||||
|
||||
def perform_import(code):
    """Run the import pipeline for the Import_Log row identified by ``code``.

    Steps: re-read the uploaded file, resolve keys/related references,
    check duplicates, bulk-insert new rows, then bulk-update existing
    ones — appending to the log's progress after each step. Returns the
    serialized log on success; on failure sets status='error' and returns
    None.
    """
    Model, serializer_class = get_serializer('Import_Log')
    logcode = Model.objects.filter(code = code).first()
    progress = [{'code': 1, 'message': 'Bắt đầu kiểm tra bản ghi mới / tồn tại' + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'waiting'}]
    logcode.status = 'executing'
    logcode.progress = progress
    logcode.save()
    rs = open_file(logcode.model, logcode.file, logcode.fields, True)
    error, rows, keys = validate_key(logcode.model, rs['data'], logcode.fields)
    if error == True:
        # FIX: rows are plain dicts — the original getattr(o, 'error')
        # raised AttributeError; use dict.get instead.
        arr = [o.get('error') for o in rows if o.get('error') != None][:10]
        logcode.note = ', '.join(arr)
        logcode.status = 'error'
        progress.append({'code': 2, 'message': 'Dữ liệu có lỗi: ' + logcode.note, 'type': 'error'})
        logcode.save()
        return

    # inform
    progress.append({'code': 3, 'message': 'Hoàn tất kiểm tra dữ liệu mới / tồn tại' + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'success'})
    progress.append({'code': 3.1, 'message': 'Bắt đầu kiểm tra dữ liệu trùng lặp' + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'waiting'})
    logcode.save()

    # check duplicate
    arr = validate_duplicate(rows, keys)
    if len(arr) > 0:
        # FIX: same getattr-on-dict bug as above.
        arr = [o.get('error') for o in arr if o.get('error') != None][:10]
        logcode.note = ', '.join(arr)
        logcode.status = 'error'
        progress.append({'code': 3.2, 'message': 'Dữ liệu bị trùng lặp, cần loại bỏ trùng lặp' + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'error'})
        logcode.save()
        return

    progress.append({'code': 3, 'message': 'Hoàn tất dữ liệu trùng lặp' + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'success'})
    progress.append({'code': 4, 'message': 'Bắt đầu insert dữ liệu' + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'waiting'})
    logcode.save()

    # insert
    error, error_count, success_count, count = bulk_insert(rows, logcode.fields, logcode)
    if error != None:
        logcode.note = error
        logcode.status = 'error'
        progress.append({'code': 5, 'message': 'Insert dữ liệu có lỗi: ' + str(error_count) + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'error'})
        logcode.save()
        return

    progress.append({'code': 6, 'message': 'Insert dữ liệu thành công: ' + str(success_count) + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'success'})
    progress.append({'code': 7, 'message': 'Bắt đầu cập nhật dữ liệu' + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'waiting'})
    logcode.save()
    error, error_count, success_count, count = bulk_update(rows, logcode.fields, logcode)
    if error != None:
        logcode.note = error
        logcode.status = 'error'
        progress.append({'code': 8, 'message': 'Cập nhật dữ liệu có lỗi: ' + str(error_count) + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'error'})
    else:
        progress.append({'code': 9, 'message': 'Cập nhật dữ liệu thành công: ' + str(success_count) + ' - ' + datetime.now().strftime("%H:%M:%S"), 'type': 'success'})
        logcode.status = 'success'
    # save
    logcode.save()

    # update after import success
    return serializer_class(logcode).data
||||
|
||||
|
||||
@api_view(['POST'])
def bulk_import(request):
    """POST endpoint: run the import identified by request 'logcode'."""
    perform_import(request.data['logcode'])
    # FIX: the original did Response(status.HTTP_200_OK), which sent the
    # integer 200 as the response body; pass it as the status instead.
    return Response(status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@api_view(['POST'])
def bulk_upload(request):
    """POST endpoint: save an uploaded Excel file and preview/validate it.

    Expects multipart fields: 'file', 'name', 'fields' (JSON), 'model'.
    Returns the validation result from open_file, or an error message.
    """
    file = request.data['file']
    filename = request.data['name']
    fields = json.loads(request.data['fields'])
    model = request.data['model']
    try:
        with open(upload_folder + filename, 'wb+') as destination:
            for chunk in file.chunks():
                destination.write(chunk)
        result = open_file(model, filename, fields)
        return Response(result)
    except OSError as e:
        # FIX: IOError is an alias of OSError in Python 3 (the original's
        # second handler was dead code), and a raw exception object is not
        # JSON-serializable — return its message with an error status.
        return Response(str(e), status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
#=============================================================================
|
||||
@api_view(['GET'])
def read_excel(request):
    """GET endpoint: return an uploaded Excel file as table-oriented JSON.

    Query parameter 'name' identifies the file inside upload_folder.
    """
    try:
        filename = request.query_params['name']
        df = pandas.read_excel(upload_folder + filename, dtype=str, na_filter=False)
        val = df.to_json(path_or_buf= None, orient='table', force_ascii=False)
        return Response(val)

    except OSError as e:
        # FIX: IOError is an alias of OSError in Python 3 (the original's
        # second handler was dead code), and a raw exception object is not
        # JSON-serializable — return its message with an error status.
        return Response(str(e), status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
#=============================================================================
|
||||
@api_view(['GET'])
def model_fields(request, name):
    """GET endpoint: describe the fields of the model named ``name``.

    Returns a list of dicts with id, name, datatype, nullability,
    uniqueness, and related model name; 400 if the model is unknown.
    """
    Model, serializer_class = get_serializer(name)
    if Model is None:
        return Response(status=status.HTTP_400_BAD_REQUEST)

    arr = []
    # enumerate replaces the original manual counter.
    for count, field in enumerate(Model._meta.get_fields(), start=1):
        arr.append({
            "id": count,
            "name": field.name,
            "datatype": field.get_internal_type(),
            "null": field.null,
            # Reverse relations have no 'unique' attribute.
            "unique": field.unique if hasattr(field, 'unique') else False,
            "related": field.related_model.__name__ if field.related_model else None,
        })
    return Response(arr)
|
||||
|
||||
|
||||
#=============================================================================
|
||||
@api_view(['POST'])
def find_key(request):
    """POST endpoint: resolve related references and existing ids for rows.

    Request body: 'name' (model), 'keys', 'related', 'data'. Mirrors
    validate_key but operates on client-supplied rows and returns
    {"error": bool, "data": rows}.
    """
    Model, serializer_class = get_serializer(request.data['name'])
    fields = []
    for field in Model._meta.get_fields():
        obj = {"name": field.name, "related": field.related_model.__name__ if field.related_model else None}
        fields.append(obj)

    keys = request.data['keys']
    related = request.data['related']
    data = request.data['data']

    def find_field(name):
        # Locate the model field and its related model class by name.
        for field in fields:
            if field['name'] == name:
                RelModel, relserializer = get_serializer(field['related'])
                return field, RelModel
        # FIX: the original fell through and returned None on a miss,
        # making the caller's tuple unpack raise a bare TypeError.
        raise ValueError(f"Unknown field '{name}' for model {request.data['name']}")

    error = False
    if len(related) > 0:
        for row in data:
            for obj in related:
                key = obj['key']
                value = obj['value']
                name = value['column']
                f = {name: row[key]}
                field, Model1 = find_field(value['field'])
                obj = Model1.objects.filter(**f).values('id').first()
                if obj:
                    row[value['field']] = obj['id']
                else:
                    row['error'] = 'Không tồn tại ' + key + ' = ' + str(row[key])
                    error = True

    if error == False and len(keys) > 0:
        for row in data:
            f = {}
            for obj in keys:
                key = obj['key']
                value = obj['value']

                # Prefer the declared db field/column, falling back to the key.
                db_field = value.get('field') or value.get('column') or key

                f[db_field] = row[key]

            obj = Model.objects.filter(**f).values('id').first()
            row['id'] = obj['id'] if obj != None else None

    return Response({"error": error, "data": data})
|
||||
193
app/jobemail.py
Normal file
193
app/jobemail.py
Normal file
@@ -0,0 +1,193 @@
|
||||
import os
|
||||
from django.conf import settings
|
||||
import numpy as np
|
||||
from datetime import datetime
|
||||
from django.apps import apps
|
||||
from num2words import num2words
|
||||
|
||||
from app.models import Email_Template
|
||||
from app.email import send_via_zeptomail
|
||||
|
||||
|
||||
class EmailJobRunner:
|
||||
def __init__(self, template: Email_Template, context_pks: dict):
    """Prepare a runner for one email template and its trigger parameters.

    template: the Email_Template row whose JSON ``content`` drives the job.
    context_pks: primary keys supplied by the triggering API call.
    """
    self.template = template
    self.context_pks = context_pks
    # JSON config; its "mappings" list describes what data to fetch.
    self.config = self.template.content
    # alias -> fetched model instance(s); filled by fetch_data().
    self.data_context = {}
    # placeholder -> replacement string; filled later.
    self.replacements = {}
|
||||
def _get_model(self, model_string):
    """Look up the Django model class for an ``"app_label.ModelName"`` string."""
    pieces = model_string.split(".")
    label, model = pieces
    return apps.get_model(label, model)
|
||||
def _get_value_from_object(self, obj, field_path):
|
||||
if obj is None:
|
||||
return None
|
||||
value = obj
|
||||
for part in field_path.replace("__", ".").split("."):
|
||||
if value is None:
|
||||
return None
|
||||
value = getattr(value, part, None)
|
||||
return value
|
||||
|
||||
def _resolve_lookup_value(self, lookup_from):
|
||||
if lookup_from in self.context_pks:
|
||||
return self.context_pks[lookup_from]
|
||||
|
||||
try:
|
||||
alias, field_path = lookup_from.split(".", 1)
|
||||
if alias not in self.data_context:
|
||||
raise ValueError(f"Alias '{alias}' not found in data context.")
|
||||
|
||||
source_object = self.data_context.get(alias)
|
||||
return self._get_value_from_object(source_object, field_path)
|
||||
except ValueError:
|
||||
raise ValueError(f"Could not resolve '{lookup_from}'. It is not a valid API parameter or a reference to another data source.")
|
||||
|
||||
def fetch_data(self):
|
||||
mappings = self.config.get("mappings", [])
|
||||
if not isinstance(mappings, list):
|
||||
raise TypeError("Email template 'mappings' must be a list.")
|
||||
|
||||
trigger_model_mapping = next((m for m in mappings if m.get("is_trigger_object", False)), None)
|
||||
if trigger_model_mapping:
|
||||
model_cls = self._get_model(trigger_model_mapping["model"])
|
||||
lookup_field = trigger_model_mapping["lookup_field"]
|
||||
lookup_value = self._resolve_lookup_value(trigger_model_mapping["lookup_value_from"])
|
||||
alias = trigger_model_mapping["alias"]
|
||||
if lookup_value is not None:
|
||||
self.data_context[alias] = model_cls.objects.filter(**{lookup_field: lookup_value}).first()
|
||||
else:
|
||||
self.data_context[alias] = None
|
||||
|
||||
for mapping in mappings:
|
||||
if mapping.get("is_trigger_object", False):
|
||||
continue
|
||||
|
||||
model_cls = self._get_model(mapping["model"])
|
||||
lookup_field = mapping["lookup_field"]
|
||||
lookup_value = self._resolve_lookup_value(mapping["lookup_value_from"])
|
||||
alias = mapping["alias"]
|
||||
|
||||
if lookup_value is None:
|
||||
self.data_context[alias] = None if mapping.get("type") == "object" else []
|
||||
continue
|
||||
|
||||
queryset = model_cls.objects.filter(**{lookup_field: lookup_value})
|
||||
|
||||
if mapping.get("type") == "object":
|
||||
self.data_context[alias] = queryset.first()
|
||||
elif mapping.get("type") == "list":
|
||||
self.data_context[alias] = list(queryset)
|
||||
|
||||
def _format_value(self, value, format_config):
|
||||
"""Applies formatting to a value based on configuration."""
|
||||
if value is None:
|
||||
return ""
|
||||
|
||||
format_type = format_config.get("type")
|
||||
if not format_type:
|
||||
return str(value)
|
||||
|
||||
try:
|
||||
if format_type == "currency":
|
||||
return "{:,}".format(np.int64(value)).replace(",", ".")
|
||||
if format_type == "date":
|
||||
date_format = format_config.get("format", "dd/mm/YYYY").replace("dd", "%d").replace("mm", "%m").replace("YYYY", "%Y")
|
||||
return value.strftime(date_format)
|
||||
if format_type == "number_to_words":
|
||||
return num2words(value, lang=format_config.get("lang", "vi"))
|
||||
if format_type == "conditional":
|
||||
return format_config["true_value"] if value else format_config["false_value"]
|
||||
except Exception as e:
|
||||
print(f"Error formatting value '{value}' with config '{format_config}': {e}")
|
||||
return ""
|
||||
|
||||
return str(value)
|
||||
|
||||
def prepare_replacements(self):
|
||||
|
||||
today = datetime.now()
|
||||
self.replacements['[day]'] = str(today.day)
|
||||
self.replacements['[month]'] = str(today.month)
|
||||
self.replacements['[year]'] = str(today.year)
|
||||
self.replacements['[date]'] = today.strftime("%d/%m/%Y")
|
||||
|
||||
mappings = self.config.get("mappings", [])
|
||||
for mapping in mappings:
|
||||
alias = mapping["alias"]
|
||||
data = self.data_context.get(alias)
|
||||
fields = mapping.get("fields", {})
|
||||
|
||||
if mapping.get("type") == "object":
|
||||
if data is None:
|
||||
for placeholder in fields:
|
||||
self.replacements[placeholder] = ""
|
||||
continue
|
||||
|
||||
for placeholder, config in fields.items():
|
||||
if isinstance(config, dict):
|
||||
value = self._get_value_from_object(data, config["source"])
|
||||
self.replacements[placeholder] = self._format_value(value, config.get("format", {}))
|
||||
else:
|
||||
value = self._get_value_from_object(data, config)
|
||||
self.replacements[placeholder] = str(value) if value is not None else ""
|
||||
|
||||
def run(self):
|
||||
|
||||
try:
|
||||
print(f"Running email job for template: {self.template.name}")
|
||||
self.fetch_data()
|
||||
self.prepare_replacements()
|
||||
|
||||
subject_template = self.config.get("subject", "")
|
||||
body_template = self.config.get("content", "")
|
||||
recipient_placeholder = self.config.get("recipient_placeholder", "[customer.email]")
|
||||
sender_id = self.config.get("sender_id", 1)
|
||||
image_url = self.config.get("imageUrl", "https://api.bigdatatech.vn/static/files/20251113051227-1.png")
|
||||
|
||||
|
||||
final_subject = subject_template
|
||||
final_body = body_template
|
||||
for key, value in self.replacements.items():
|
||||
final_subject = final_subject.replace(key, str(value))
|
||||
final_body = final_body.replace(key, str(value))
|
||||
|
||||
recipient_email = self.replacements.get(recipient_placeholder)
|
||||
|
||||
if not recipient_email:
|
||||
print(f"Email job '{self.template.name}' failed: Recipient email not found for placeholder '{recipient_placeholder}'.")
|
||||
return False
|
||||
|
||||
# Load the base template
|
||||
template_path = os.path.join(settings.BASE_DIR, 'app', 'email_base_template.html')
|
||||
with open(template_path, 'r', encoding='utf-8') as f:
|
||||
full_email_content = f.read()
|
||||
|
||||
# Replace placeholders in the base template
|
||||
full_email_content = full_email_content.replace('{{subject}}', final_subject)
|
||||
full_email_content = full_email_content.replace('{{body_content}}', final_body)
|
||||
full_email_content = full_email_content.replace('{{image_url}}', image_url)
|
||||
|
||||
|
||||
print(f"Sending email to '{recipient_email}' with subject '{final_subject}'")
|
||||
success = send_via_zeptomail(
|
||||
receiver=recipient_email,
|
||||
subject=final_subject,
|
||||
content=full_email_content,
|
||||
sender=sender_id
|
||||
)
|
||||
|
||||
if success:
|
||||
print(f"Email job '{self.template.name}' completed successfully.")
|
||||
else:
|
||||
print(f"Email job '{self.template.name}' failed during sending.")
|
||||
|
||||
return success
|
||||
|
||||
except Exception as e:
|
||||
print(f"An unexpected error occurred in EmailJobRunner for template '{self.template.name}': {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
55
app/jobfile.py
Normal file
55
app/jobfile.py
Normal file
@@ -0,0 +1,55 @@
|
||||
import os
import subprocess
from datetime import datetime

from apscheduler.schedulers.background import BlockingScheduler
from django.db import close_old_connections

from app.models import *
|
||||
|
||||
#=====================================================================
|
||||
def sync_files():
    """Mirror the remote filestore to the local backup directory via rsync.

    Runs rsync over ssh (driven by sshpass) and records the outcome as a
    Backup row: Task_Status pk 4 on success, pk 3 on rsync failure.
    """
    # This runs inside a long-lived scheduler process: drop any stale
    # database connections before touching the ORM.
    close_old_connections()

    print("===Start sync files===", datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    # SECURITY: the ssh password was hard-coded in the repository. Prefer the
    # SYNC_SSH_PASSWORD environment variable; the legacy literal remains only
    # as a backward-compatible fallback. Rotate this credential and consider
    # switching to key-based auth.
    password = os.environ.get('SYNC_SSH_PASSWORD', 'qabeNMHRXhiHAiq')
    source = "bigdata@5.223.62.175:/home/bigdata/filestore/y99/"
    target = "./static/backup/y99prod"
    ssh_port = "234"

    # Argument-list form (no shell) avoids quoting/injection issues.
    cmd = [
        "sshpass", "-p", password,
        "rsync", "-avz", "--delete",
        "-e", f"ssh -p {ssh_port}",
        source, target
    ]
    # Capture the start time so both outcomes record the same window.
    start_time = datetime.now()
    try:
        subprocess.run(cmd, check=True)
        print("✅ Sync thành công")
        print("===End sync files===", datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        backup = Backup(code="FILE{}".format(datetime.now().strftime('%Y%m%d%H%M')), name="file-backup",
                        status=Task_Status.objects.get(pk=4), start_time=start_time, end_time=datetime.now())
        backup.save()

    except subprocess.CalledProcessError as e:
        # rsync returned non-zero: log and record a failed Backup row.
        print("❌ Lỗi khi sync:", e)
        backup = Backup(code="FILE{}".format(datetime.now().strftime('%Y%m%d%H%M')), name="file-backup",
                        status=Task_Status.objects.get(pk=3), start_time=start_time, end_time=datetime.now())
        backup.save()
|
||||
|
||||
#=====================================================================
|
||||
# scheduler
|
||||
# ---------------------------------------------------------------------
# Scheduler setup: run the file sync three times a day (07:00, 12:00 and
# 20:00 Vietnam time). BlockingScheduler.start() keeps this process alive.
scheduler = BlockingScheduler()

# Backup files on a cron trigger.
scheduler.add_job(
    func=sync_files,
    trigger='cron',
    hour='7,12,20',
    minute=0,
    timezone='Asia/Ho_Chi_Minh',
)

# ---------------------------------------------------------------------
scheduler.start()
|
||||
67
app/middleware.py
Normal file
67
app/middleware.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from urllib.parse import urlparse
|
||||
from django.http import JsonResponse
|
||||
import threading
|
||||
|
||||
# Per-thread storage for the user handling the current request.
_thread_locals = threading.local()


def get_current_user():
    """Return the user bound to the current thread, or None if unset."""
    return getattr(_thread_locals, 'user', None)


class CurrentUserMiddleware:
    """Binds the authenticated request user to thread-local storage.

    Lets code with no access to the request object (signals, model
    methods, background helpers on the same thread) read the acting
    user through get_current_user().
    """

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # Only an authenticated user is published; anything else maps to None.
        current = None
        if hasattr(request, 'user') and request.user.is_authenticated:
            current = request.user
        _thread_locals.user = current

        response = self.get_response(request)

        # Clear the binding so the user never leaks into the next request
        # served by this (possibly pooled) thread.
        if hasattr(_thread_locals, 'user'):
            del _thread_locals.user

        return response
|
||||
|
||||
# Exact origins that may make cross-origin requests.
ALLOWED_ORIGINS = [
    "http://localhost:3000",
    "https://biz.utopia.com.vn",
    "https://datamodel.bigdatatech.vn"
]

# Any origin whose hostname ends with one of these suffixes is also allowed.
ALLOWED_DOMAIN_SUFFIXES = [
    ".utopia.com.vn"
]

# Hosts that may be hit directly (requests carrying no Origin header).
ALLOWED_HOST = [
    "localhost:8000",
    "api.utopia.com.vn",
    "dev.api.utopia.com.vn"
]


class BlockUnauthorizedOriginsMiddleware:
    """Rejects requests from unknown origins, and direct (origin-less)
    requests to unknown hosts, with an HTTP 403 JSON response."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        origin = request.headers.get("Origin")
        host = request.get_host()
        if origin:
            parsed = urlparse(origin)
            domain = parsed.hostname
            # Guard against a malformed Origin where hostname is None —
            # previously this would raise AttributeError; now it is rejected.
            suffix_allowed = domain is not None and any(
                domain.endswith(suffix) for suffix in ALLOWED_DOMAIN_SUFFIXES
            )
            if origin not in ALLOWED_ORIGINS and not suffix_allowed:
                # BUG FIX: status was 4.3 (a float typo); an HTTP status
                # code is required — 403 Forbidden.
                return JsonResponse({"detail": "Forbidden origin"}, status=403)

        if not origin and host not in ALLOWED_HOST:
            return JsonResponse({"detail": "Direct access not allowed"}, status=403)

        return self.get_response(request)
|
||||
|
||||
1804
app/migrations/0001_initial.py
Normal file
1804
app/migrations/0001_initial.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,66 @@
|
||||
# Generated by Django 4.1.3 on 2022-11-12 07:52
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds avatar/fullname to Customer and
    # relaxes several Customer fields to null=True / FK null=True.
    # NOTE(review): migrations are historical state — never edit operations
    # here; further schema changes belong in a new migration.

    dependencies = [
        ('app', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='customer',
            name='avatar',
            field=models.CharField(max_length=200, null=True),
        ),
        migrations.AddField(
            model_name='customer',
            name='fullname',
            # 'xxx' is a throwaway default used only to backfill existing
            # rows; preserve_default=False drops it from the model afterwards.
            field=models.CharField(default='xxx', max_length=50),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='customer',
            name='dob',
            field=models.DateField(null=True),
        ),
        migrations.AlterField(
            model_name='customer',
            name='issued_date',
            field=models.DateField(null=True),
        ),
        migrations.AlterField(
            model_name='customer',
            name='issued_place',
            field=models.CharField(max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='customer',
            name='legal_id',
            field=models.CharField(max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='customer',
            name='legal_type',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.legal_type'),
        ),
        migrations.AlterField(
            model_name='customer',
            name='location',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.location'),
        ),
        migrations.AlterField(
            model_name='customer',
            name='phone',
            field=models.CharField(default='xxx', max_length=20),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='customer',
            name='sex',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.sex'),
        ),
    ]
|
||||
@@ -0,0 +1,135 @@
|
||||
# Generated by Django 4.1.3 on 2022-11-12 23:21
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Auto-generated schema migration introducing the loan domain:
    # lookup tables (asset_type, debt_group, loan_product, loan_type) plus
    # Loan and its child tables (loan_schedule, loan_payment).
    # NOTE(review): migrations are historical state — never edit operations
    # here; corrections belong in a new migration.

    dependencies = [
        ('app', '0002_customer_avatar_customer_fullname_alter_customer_dob_and_more'),
    ]

    operations = [
        migrations.CreateModel(
            # NOTE(review): model name 'Aseet_Type' is a typo of 'Asset_Type'
            # (the db_table is spelled correctly). Renaming requires a new
            # migration, not an edit here.
            name='Aseet_Type',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=30, unique=True)),
                ('name', models.CharField(max_length=200, null=True)),
                ('detail', models.TextField()),
                ('create_time', models.DateTimeField(auto_now_add=True, null=True)),
            ],
            options={
                'db_table': 'asset_type',
            },
        ),
        migrations.CreateModel(
            name='Debt_Group',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=30, unique=True)),
                ('name', models.CharField(max_length=200, null=True)),
                ('detail', models.TextField()),
                ('create_time', models.DateTimeField(auto_now_add=True, null=True)),
            ],
            options={
                'db_table': 'debt_group',
            },
        ),
        migrations.CreateModel(
            name='Loan',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('valid_from', models.DateField()),
                ('valid_to', models.DateField(null=True)),
                ('disbursement', models.FloatField()),
                ('principal', models.FloatField()),
                ('interest', models.FloatField()),
                ('accrued_period', models.FloatField()),
                ('outstanding', models.FloatField()),
                ('last_cal_itr', models.DateField(null=True)),
                ('last_pay_itr', models.FloatField(null=True)),
                ('last_pay_pcp', models.FloatField(null=True)),
                ('interest_collected', models.FloatField()),
                ('principal_collected', models.FloatField()),
                ('interest_overdue', models.IntegerField()),
                ('principal_overdue', models.IntegerField()),
                ('create_time', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_time', models.DateTimeField(null=True)),
                ('approver', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.user')),
                ('creator', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.user')),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.customer')),
            ],
            options={
                'db_table': 'loan',
            },
        ),
        migrations.CreateModel(
            name='Loan_Product',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=30, unique=True)),
                ('name', models.CharField(max_length=200, null=True)),
                ('detail', models.TextField()),
                ('create_time', models.DateTimeField(auto_now_add=True, null=True)),
            ],
            options={
                'db_table': 'loan_product',
            },
        ),
        migrations.CreateModel(
            name='Loan_Type',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=30, unique=True)),
                ('name', models.CharField(max_length=200, null=True)),
                ('detail', models.TextField()),
                ('create_time', models.DateTimeField(auto_now_add=True, null=True)),
            ],
            options={
                'db_table': 'loan_type',
            },
        ),
        migrations.CreateModel(
            name='Loan_Schedule',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pay_date', models.DateField(null=True)),
                # NOTE(review): pay_amount is declared as DateField — almost
                # certainly meant to be FloatField; fix via a new migration.
                ('pay_amount', models.DateField(null=True)),
                ('create_time', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_time', models.DateTimeField(null=True)),
                ('loan', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.loan')),
                ('payment_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.payment_type')),
                ('updater', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.user')),
            ],
            options={
                'db_table': 'loan_schedule',
            },
        ),
        migrations.CreateModel(
            name='Loan_Payment',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pay_date', models.DateField(null=True)),
                # NOTE(review): same suspicious DateField as Loan_Schedule's
                # pay_amount — verify and correct in a new migration.
                ('pay_amount', models.DateField(null=True)),
                ('create_time', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_time', models.DateTimeField(null=True)),
                ('loan', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.loan')),
                ('updater', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.user')),
            ],
            options={
                'db_table': 'loan_payment',
            },
        ),
        migrations.AddField(
            model_name='loan',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.loan_product'),
        ),
        migrations.AddField(
            model_name='loan',
            name='updater',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='+', to='app.user'),
        ),
    ]
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user