17c207a9 by 周伟奇

merge fix

2 parents d19ca8f1 c270100a
......@@ -520,7 +520,7 @@ OTHER_TUPLE = (None, None, None, None, None, None, None, None, None, None, None,
# "35":"针式打印-部分格线-竖版-邮储银行",
# "36":"针式打印-部分格线-竖版-邮储银行-绿卡",
# "38":"普通打印-无格线-农业银行-整数-特殊",
# "50":"普通打印-无格线-农业银行-整数-特殊",
CLASSIFY_LIST = [
('其他', OTHER_TUPLE),
......@@ -563,6 +563,18 @@ CLASSIFY_LIST = [
('针式打印-部分格线-竖版-邮储银行', (2, None, 5, 6, None, 4, None, 7, None, None, None, None, None)),
('针式打印-部分格线-竖版-邮储银行-绿卡', (2, None, 5, 6, None, 4, None, 7, None, None, None, None, None)),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('其他', OTHER_TUPLE),
('普通打印-无格线-农业银行-整数-特殊', (1, None, 3, 4, None, 2, None, 5, None, None, None, None, None)),
]
......@@ -608,6 +620,18 @@ CLASSIFY_HEADER_LIST = [
('序号', '交易日期', '交易渠道', '摘要', '交易金额', '账户余额', '对方账号/卡号/汇票号', '原子账号', '交易机构名称'),
('序号', '交易日期', '交易渠道', '摘要', '交易金额', '账户余额', '对方账号/卡号/汇票号', '原子账号', '交易机构名称'),
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
OTHER_TUPLE,
('交易日期', '摘要/附言', '交易金额', '账户余额', '对方账号和户名'),
]
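The blocks of repeated ('其他', OTHER_TUPLE) fillers keep CLASSIFY_LIST and CLASSIFY_HEADER_LIST positionally aligned with the classify ids, which is why the special 农业银行 class moves from 38 to 50 once 38 is reserved for DDA. A minimal sketch of the positional lookup this padding implies (the apps.doc.consts import path and the helper are assumptions for illustration only):

from apps.doc import consts  # assumed import path, following the app layout visible in this diff

def classify_meta(classify_id):
    # Illustrative helper: assumes classify ids index the padded lists positionally.
    name, field_tuple = consts.CLASSIFY_LIST[classify_id]
    headers = consts.CLASSIFY_HEADER_LIST[classify_id]
    return name, field_tuple, headers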
......@@ -812,7 +836,7 @@ MVI_FIELD_ORDER = (('发票代码', '发票代码'),
('主管税务机关及代码', '主管税务机关及代码'),
('吨位', '吨位'),
('限乘人数', '限乘人数'),)
IC_PID = VAT_PID = VATS_PID = MVC_PID = MVI_PID = None
IC_PID = VAT_PID = VATS_PID = MVC_PID = MVI_PID = RP_PID = None
# 营业执照
BL_CN_NAME = '营业执照'
......@@ -916,6 +940,10 @@ BC_FIELD_ORDER = (('BankName', '发卡行名称'),
('CardType', '银行卡类型'),
('Name', '持卡人姓名'),)
# DDA
DDA_CN_NAME = 'DDA'
DDA_CLASSIFY = 38
SUCCESS_CODE_SET = {'0', 0}
FIELD_ORDER_MAP = {
......@@ -941,7 +969,7 @@ MODEL_FIELD_VAT = 'vat_count'
LICENSE_ORDER = ((MVI_CLASSIFY, (MVI_PID, MVI_CN_NAME, MVI_FIELD_ORDER, False, False, MODEL_FIELD_MVI)),
(IC_CLASSIFY, (IC_PID, IC_CN_NAME, None, True, False, MODEL_FIELD_IC)),
(RP_CLASSIFY, (None, RP_CN_NAME, None, True, False, MODEL_FIELD_RP)),
(RP_CLASSIFY, (RP_PID, RP_CN_NAME, None, True, False, MODEL_FIELD_RP)),
(BC_CLASSIFY, (BC_PID, BC_CN_NAME, BC_FIELD_ORDER, False, False, MODEL_FIELD_BC)),
(BL_CLASSIFY, (BL_PID, BL_CN_NAME, BL_FIELD_ORDER, False, False, MODEL_FIELD_BL)),
(UCI_CLASSIFY, (UCI_PID, UCI_CN_NAME, UCI_FIELD_ORDER, False, False, MODEL_FIELD_UCI)),
......@@ -960,12 +988,12 @@ FOLDER_LICENSE_ORDER = ((MVI_CLASSIFY, (MVI_PID, MVI_CN_NAME, MVI_FIELD_ORDER, F
LICENSE_CLASSIFY_MAPPING = dict(LICENSE_ORDER)
OTHER_CLASSIFY_SET = {OTHER_CLASSIFY}
LICENSE_CLASSIFY_SET_1 = {IC_CLASSIFY, VAT_CLASSIFY, MVC_CLASSIFY, MVI_CLASSIFY, UCI_CLASSIFY}
LICENSE_CLASSIFY_SET_1 = {IC_CLASSIFY, VAT_CLASSIFY, MVC_CLASSIFY, MVI_CLASSIFY, UCI_CLASSIFY, DDA_CLASSIFY}
LICENSE_CLASSIFY_SET_2 = {BL_CLASSIFY, EEP_CLASSIFY, DL_CLASSIFY, PP_CLASSIFY, BC_CLASSIFY}
NYYH_CLASSIFY = {17, 18}
NYZS_CLASSIFY = 18
SPECIAL_NYZS_CLASSIFY = 38
SPECIAL_NYZS_CLASSIFY = 50
MS_CLASSIFY = 21
MS_ERROR_COL = (5, 6)
WECHART_CLASSIFY = 12
......@@ -1168,5 +1196,25 @@ CO_ORDER = ('customerType', 'customerChineseName', 'legalRepName', 'idNum', 'bus
PREFIX_MVC = 'GB'
PREFIX_DL = 'DL'
# --------------- DDA 保存图片 --------------------
DDA_FIELD = 'DDA'
IC_FIELD = 'ID'
BC_FIELD = 'BC'
IC_KEY_FIELD = ('姓名', '公民身份号码')
BC_KEY_FIELD = 'CardNum'
DDA_IC_NAME = 'customer_name'
DDA_IC_ID = 'customer_id'
DDA_BC_NAME = 'account_name'
DDA_BC_ID = 'account_id'
DDA_IMG_PATH = 'img_path'
DDA_PRO = 'pro'
DDA_MAPPING = [
(DDA_IC_NAME, IC_FIELD),
(DDA_IC_ID, IC_FIELD),
(DDA_BC_ID, BC_FIELD),
]
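For orientation, the shape of a single DDA OCR result that license1_process assembles from these constants (keys are the constants defined here; the values below are placeholders, and the 0.6 confidence cut-off appears later in the handler):

dda_ocr_result = {
    DDA_IC_NAME: '张三',                   # customer name read from the DDA form
    DDA_IC_ID: '110101199001011234',       # customer ID number
    DDA_BC_NAME: '张三',                   # account holder name
    DDA_BC_ID: '6222020200112233445',      # account / card number
    DDA_IMG_PATH: '/path/to/dda_page.jpg',
    DDA_PRO: 0.87,                         # OCR confidence; results below 0.6 are discarded
}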
......
......@@ -23,7 +23,7 @@ from apps.doc.ocr.edms import EDMS, rh
from apps.doc.named_enum import KeywordsType, FailureReason, WorkflowName, ProcessName, RequestTeam, RequestTrigger
from apps.doc.exceptions import EDMSException, OCR1Exception, OCR2Exception, OCR4Exception
from apps.doc.ocr.wb import BSWorkbook
from apps.doc.models import DocStatus, HILDoc, AFCDoc, Keywords, HILOCRResult, AFCOCRResult, HILOCRReport, AFCOCRReport
from apps.doc.models import DocStatus, HILDoc, AFCDoc, Keywords, HILOCRResult, AFCOCRResult, HILOCRReport, AFCOCRReport, DDARecords, IDBCRecords
from celery_compare.tasks import compare
......@@ -43,6 +43,10 @@ class Command(BaseCommand, LoggerMixin):
self.img_queue_size = int(conf.IMG_QUEUE_SIZE)
# 数据目录
self.data_dir = conf.DATA_DIR
# DDA目录
self.dda_dir = os.path.join(self.data_dir, 'HIL', 'SF5-CL-S-1', 'DDA')
self.dda_complete_dir = os.path.join(self.dda_dir, 'complete')
self.dda_wanting_dir = os.path.join(self.dda_dir, 'wanting')
# ocr相关
self.ocr_1_urls = conf.get_namespace('OCR_URL_1_')
self.ocr_url_2 = conf.OCR_URL_2
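For reference, the layout these paths describe. Per-application sub-folders are created on demand later in the handler, so the eager creation shown here is optional and only a sketch:

# <DATA_DIR>/HIL/SF5-CL-S-1/DDA/
# ├── complete/<application_id>/   # DDA, ID card and bank card all located
# └── wanting/<application_id>/    # at least one of the three still missing
os.makedirs(self.dda_complete_dir, exist_ok=True)  # optional; the handler creates dirs lazily
os.makedirs(self.dda_wanting_dir, exist_ok=True)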
......@@ -189,13 +193,25 @@ class Command(BaseCommand, LoggerMixin):
else:
res_list.append((pno, ino, part_idx, consts.RES_SUCCESS_EMPTY))
def license1_process(self, ocr_data, license_summary, classify, res_list, pno, ino, part_idx, img_path):
def license1_process(self, ocr_data, license_summary, classify, res_list, pno, ino, part_idx, img_path, do_dda, dda_id_bc_mapping):
# 类别:'0'身份证, '1'居住证
license_data = ocr_data.get('data', [])
license_data = ocr_data.get('data')
if not license_data:
res_list.append((pno, ino, part_idx, consts.RES_SUCCESS_EMPTY))
return
res_list.append((pno, ino, part_idx, consts.RES_SUCCESS))
if classify == consts.DDA_CLASSIFY: # DDA处理
pro = ocr_data.get('confidence', 0)
dda_ocr_result = {
consts.DDA_IC_NAME: license_data.get('result', {}).get(consts.DDA_IC_NAME, {}).get('words', ''),
consts.DDA_IC_ID: license_data.get('result', {}).get(consts.DDA_IC_ID, {}).get('words', ''),
consts.DDA_BC_NAME: license_data.get('result', {}).get(consts.DDA_BC_NAME, {}).get('words', ''),
consts.DDA_BC_ID: license_data.get('result', {}).get(consts.DDA_BC_ID, {}).get('words', ''),
consts.DDA_IMG_PATH: img_path,
consts.DDA_PRO: pro
}
license_summary.setdefault(classify, []).append(dda_ocr_result)
if classify == consts.MVC_CLASSIFY: # 车辆登记证 3/4页结果整合
for mvc_dict in license_data:
try:
......@@ -231,6 +247,8 @@ class Command(BaseCommand, LoggerMixin):
mvc_dict['解除抵押日期'].append(
register_info.get('details', {}).get('date', {}).get('words', ''))
del mvc_res
license_summary.setdefault(classify, []).extend(license_data)
if classify == consts.IC_CLASSIFY: # 身份证真伪
for id_card_dict in license_data:
try:
......@@ -277,9 +295,15 @@ class Command(BaseCommand, LoggerMixin):
'{0} [ocr_4 failed] [img_path={1}]'.format(self.log_base, img_path))
id_card_dict[consts.IC_TURE_OR_FALSE] = consts.IC_RES_MAPPING.get(card_type)
license_summary.setdefault(classify, []).extend(license_data)
def license2_process(self, ocr_res_2, license_summary, pid, classify, res_list, pno, ino, part_idx):
finally:
if do_dda and isinstance(id_card_dict.get(consts.IC_KEY_FIELD[0]), str) and isinstance(id_card_dict.get(consts.IC_KEY_FIELD[1]), str):
ic_name = id_card_dict[consts.IC_KEY_FIELD[0]].strip()
ic_id = id_card_dict[consts.IC_KEY_FIELD[1]].strip()
if len(ic_name) > 0 and len(ic_id) > 0:
dda_id_bc_mapping.setdefault(consts.IC_FIELD, []).append((ic_name, ic_id, img_path))
license_summary.setdefault(classify, []).extend(license_data)
def license2_process(self, ocr_res_2, license_summary, pid, classify, res_list, pno, ino, part_idx, img_path, do_dda, dda_id_bc_mapping):
if ocr_res_2.get('ErrorCode') in consts.SUCCESS_CODE_SET:
res_list.append((pno, ino, part_idx, consts.RES_SUCCESS))
if pid == consts.BC_PID:
......@@ -288,6 +312,10 @@ class Command(BaseCommand, LoggerMixin):
# for en_key, chn_key in consts.BC_FIELD:
# res_dict[chn_key] = ocr_res_2.get(en_key, '')
license_summary.setdefault(classify, []).append(ocr_res_2)
if do_dda and isinstance(ocr_res_2.get(consts.BC_KEY_FIELD), str):
bc_no = ocr_res_2[consts.BC_KEY_FIELD].strip()
if len(bc_no) > 0:
dda_id_bc_mapping.setdefault(consts.BC_FIELD, []).append((bc_no, img_path))
else:
# 营业执照等
for result_dict in ocr_res_2.get('ResultList', []):
......@@ -714,6 +742,8 @@ class Command(BaseCommand, LoggerMixin):
business_type, doc_id_str = task_str.split(consts.SPLIT_STR)
doc_id = int(doc_id_str)
doc_class = HILDoc if business_type == consts.HIL_PREFIX else AFCDoc
is_hil = business_type == consts.HIL_PREFIX
dda_id_bc_mapping = dict()
doc_data_path = os.path.join(self.data_dir, business_type, consts.TMP_DIR_NAME, doc_id_str)
excel_path = os.path.join(doc_data_path, '{0}.xlsx'.format(doc_id_str))
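dda_id_bc_mapping collects ID-card and bank-card evidence while pages are processed; its shape, per license1_process and license2_process above, is roughly as follows (placeholder values):

dda_id_bc_mapping = {
    consts.IC_FIELD: [('张三', '110101199001011234', '/path/to/id_page.jpg')],  # (name, ID number, image path)
    consts.BC_FIELD: [('6222020200112233445', '/path/to/bc_page.jpg')],         # (card number, image path)
}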
......@@ -726,6 +756,7 @@ class Command(BaseCommand, LoggerMixin):
# 'bs': None or normal or mobile,
# }
report_list = [None, False, None]
do_dda = is_hil and doc.document_scheme == consts.DOC_SCHEME_LIST[1]
except Exception as e:
self.online_log.error('{0} [process error (db filter)] [task={1}] [error={2}]'.format(
self.log_base, task_str, traceback.format_exc()))
......@@ -767,8 +798,8 @@ class Command(BaseCommand, LoggerMixin):
res_list.append((pno, ino, part_idx, consts.RES_SUCCESS_OTHER))
continue
elif classify in consts.LICENSE_CLASSIFY_SET_1: # 证件1
self.license1_process(ocr_data, license_summary, classify, res_list,
pno, ino, part_idx, img_path)
self.license1_process(ocr_data, license_summary, classify, res_list, pno,
ino, part_idx, img_path, do_dda, dda_id_bc_mapping)
elif classify in consts.LICENSE_CLASSIFY_SET_2: # 证件2
pid, _, _, _, _, _ = consts.LICENSE_CLASSIFY_MAPPING.get(classify)
file_data = ocr_data.get('section_img')
......@@ -813,7 +844,9 @@ class Command(BaseCommand, LoggerMixin):
card_name_res.get('data', {}).get('is_exists_name') == 0:
name = '无'
ocr_2_res['Name'] = name
self.license2_process(ocr_2_res, license_summary, pid, classify, res_list, pno, ino, part_idx)
self.license2_process(ocr_2_res, license_summary, pid, classify,
res_list, pno, ino, part_idx, img_path,
do_dda, dda_id_bc_mapping)
break
else:
res_list.append((pno, ino, part_idx, consts.RES_FAILED_2))
......@@ -930,6 +963,8 @@ class Command(BaseCommand, LoggerMixin):
os.remove(excel_path)
finally:
# TODO 识别结果存一张表,方便跑报表
# CA比对
if doc.document_scheme == consts.DOC_SCHEME_LIST[0]:
try:
# 更新OCR累计识别结果表
......@@ -978,6 +1013,204 @@ class Command(BaseCommand, LoggerMixin):
else:
self.online_log.info('{0} [comparison info send success] [task={1}] '
'[res_id={2}]'.format(self.log_base, task_str, res_obj.id))
# DDA处理
if do_dda:
# 入库
try:
dda_record = DDARecords.objects.filter(
application_id=doc.application_id).first()
if dda_record is None:
dda_record = DDARecords(application_id=doc.application_id)
except Exception as e:
self.online_log.error('{0} [process error (dda db get)] [task={1}] '
'[error={2}]'.format(self.log_base, task_str, traceback.format_exc()))
else:
try:
if not dda_record.all_found:
found_time = timezone.now()
move_img_path_dict = dict()
ic_res_list = dda_id_bc_mapping.get(consts.IC_FIELD, [])
bc_res_list = dda_id_bc_mapping.get(consts.BC_FIELD, [])
self.online_log.info('{0} [dda process] [task={1}] [ic={2}] '
'[bc={3}]'.format(self.log_base, task_str, ic_res_list,
bc_res_list))
if not dda_record.is_dda_found:
try:
# DDA过滤,获取有效DDA
best_dda_res = None
dda_res_list = license_summary.get(consts.DDA_CLASSIFY, [])
if len(dda_res_list) > 0:
dda_res_list.sort(key=lambda x: x.get(consts.DDA_PRO, 0),
reverse=True)
tmp_best_dda_res = dda_res_list[0]
if tmp_best_dda_res.get(consts.DDA_PRO, 0) >= 0.6:
best_dda_res = tmp_best_dda_res
self.online_log.info(
'{0} [dda process] [task={1}] [dda={2}]'.format(
self.log_base, task_str, dda_res_list))
except Exception as e:
best_dda_res = None
dda_record.is_dda_found = best_dda_res is not None
if dda_record.is_dda_found:
dda_path = best_dda_res.get(consts.DDA_IMG_PATH, '')
customer_name = best_dda_res.get(consts.DDA_IC_NAME, '')
customer_id = best_dda_res.get(consts.DDA_IC_ID, '')
account_id = best_dda_res.get(consts.DDA_BC_ID, '')
dda_record.dda_path = dda_path
dda_record.dda_found_time = found_time
dda_record.customer_name = customer_name
dda_record.customer_id = customer_id
dda_record.account_id = account_id
# move
move_img_path_dict.setdefault(
consts.DDA_FIELD, set()).add(dda_path)
if dda_record.is_dda_found:
try:
if not dda_record.is_id_found:
for ic_name, ic_id, ic_img_path in ic_res_list:
if ic_id == dda_record.customer_id \
or ic_name == dda_record.customer_name:
dda_record.is_id_found = True
dda_record.id_path = ic_img_path
dda_record.id_found_time = found_time
move_img_path_dict.setdefault(
consts.IC_FIELD, set()).add(ic_img_path)
break
else:
id_record = IDBCRecords.objects.filter(
application_id=doc.application_id,
target_id=dda_record.customer_id,
is_id=True).first()
if id_record is None:
id_record = IDBCRecords.objects.filter(
application_id=doc.application_id,
target_name=dda_record.customer_name,
is_id=True).first()
if id_record is not None:
dda_record.is_id_found = True
dda_record.id_path = id_record.file_path
dda_record.id_found_time = id_record.create_time
move_img_path_dict.setdefault(
consts.IC_FIELD, set()).add(id_record.file_path)
except Exception as e:
self.online_log.error(
'{0} [process error (dda id process)] [task={1}] '
'[error={2}]'.format(self.log_base, task_str,
traceback.format_exc()))
try:
if not dda_record.is_bc_found:
for bc_no, bc_img_path in bc_res_list:
if bc_no == dda_record.account_id:
dda_record.is_bc_found = True
dda_record.bc_path = bc_img_path
dda_record.bc_found_time = found_time
move_img_path_dict.setdefault(
consts.BC_FIELD, set()).add(bc_img_path)
break
else:
bc_record = IDBCRecords.objects.filter(
application_id=doc.application_id,
target_id=dda_record.account_id,
is_id=False).first()
if bc_record is not None:
dda_record.is_bc_found = True
dda_record.bc_path = bc_record.file_path
dda_record.bc_found_time = bc_record.create_time
move_img_path_dict.setdefault(
consts.BC_FIELD, set()).add(bc_record.file_path)
except Exception as e:
self.online_log.error(
'{0} [process error (dda bc process)] [task={1}] '
'[error={2}]'.format(self.log_base, task_str,
traceback.format_exc()))
if dda_record.is_dda_found and dda_record.is_id_found and dda_record.is_bc_found:
dda_record.all_found = True
dda_record.save()
# 图片移动
try:
if len(move_img_path_dict) > 0:
self.online_log.info(
'{0} [dda process] [task={1}] [move_img_path={2}]'.format(
self.log_base, task_str, move_img_path_dict))
wanting_dir = os.path.join(self.dda_wanting_dir, doc.application_id)
wanting_dir_exists = os.path.isdir(wanting_dir)
if dda_record.all_found:
target_dir = os.path.join(self.dda_complete_dir, doc.application_id)
if wanting_dir_exists:
shutil.move(wanting_dir, target_dir)
else:
os.makedirs(target_dir, exist_ok=True)
else:
target_dir = wanting_dir
if not wanting_dir_exists:
os.makedirs(target_dir, exist_ok=True)
for prefix, path_set in move_img_path_dict.items():
for idx, path in enumerate(path_set):
if os.path.isfile(path):
file_name = '{0}_{1}{2}'.format(
prefix, idx, os.path.splitext(path)[-1])
target_path = os.path.join(target_dir, file_name)
shutil.copyfile(path, target_path)
else:
self.online_log.warn(
'{0} [dda process] [img path not a file] [task={1}] '
'[path={2}]'.format(self.log_base, task_str, path))
except Exception as e:
self.online_log.error(
'{0} [process error (dda img move)] [task={1}] '
'[error={2}]'.format(self.log_base, task_str, traceback.format_exc()))
# id & bc 入库
try:
if not dda_record.is_dda_found and not dda_record.is_id_found:
ic_set = set()
for ic_name, ic_id, ic_img_path in ic_res_list:
query_str = '{0}{1}'.format(ic_name, ic_id)
if query_str in ic_set:
continue
ic_set.add(query_str)
IDBCRecords.objects.create(
application_id=doc.application_id,
target_name=ic_name,
target_id=ic_id,
is_id=True,
file_path=ic_img_path)
if not dda_record.is_dda_found and not dda_record.is_bc_found:
bc_set = set()
for bc_no, bc_img_path in bc_res_list:
if bc_no in bc_set:
continue
bc_set.add(bc_no)
IDBCRecords.objects.create(
application_id=doc.application_id,
target_id=bc_no,
is_id=False,
file_path=bc_img_path)
except Exception as e:
self.online_log.error(
'{0} [process error (dda id&bc db save)] [task={1}] '
'[error={2}]'.format(self.log_base, task_str, traceback.format_exc()))
# TODO report
except Exception as e:
self.online_log.error(
'{0} [process error (dda process)] [task={1}] '
'[error={2}]'.format(self.log_base, task_str, traceback.format_exc()))
finally:
# report_dict = {
# 'process': None or pdf or excel or edms
......
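The heart of the DDA branch is the confidence filter: the highest-confidence DDA result is kept only if it scores at least 0.6. A condensed, illustrative restatement (not part of the change):

def pick_best_dda(dda_res_list, threshold=0.6):
    # Illustrative helper mirroring the filter above: highest confidence wins,
    # but only if it clears the threshold.
    if not dda_res_list:
        return None
    best = max(dda_res_list, key=lambda x: x.get(consts.DDA_PRO, 0))
    return best if best.get(consts.DDA_PRO, 0) >= threshold else None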
......@@ -387,3 +387,51 @@ class AFCCompareOfflineReport(models.Model):
situ_db_label = 'afc'
# DDA ID & BC ----> HIL SE 专有
class DDARecords(models.Model):
id = models.AutoField(primary_key=True, verbose_name="id") # 主键
application_id = models.CharField(max_length=64, verbose_name="申请id") # 索引
is_dda_found = models.BooleanField(default=False, verbose_name="DDA是否找到")
is_id_found = models.BooleanField(default=False, verbose_name="身份证是否找到")
is_bc_found = models.BooleanField(default=False, verbose_name="银行卡是否找到")
all_found = models.BooleanField(default=False, verbose_name="是否全找到")
dda_path = models.CharField(null=True, max_length=1024, verbose_name="DDA图片路径")
id_path = models.CharField(null=True, max_length=1024, verbose_name="身份证图片路径")
bc_path = models.CharField(null=True, max_length=1024, verbose_name="银行卡图片路径")
customer_name = models.CharField(null=True, max_length=1024, verbose_name="DDA身份证姓名")
customer_id = models.CharField(null=True, max_length=1024, verbose_name="DDA身份证号码")
account_id = models.CharField(null=True, max_length=1024, verbose_name="DDA银行卡号")
dda_found_time = models.DateTimeField(null=True, verbose_name='DDA时间')
id_found_time = models.DateTimeField(null=True, verbose_name='身份证时间')
bc_found_time = models.DateTimeField(null=True, verbose_name='银行卡时间')
update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') # 索引
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间') # 索引
class Meta:
managed = False
db_table = 'dda_records'
class IDBCRecords(models.Model):
id = models.AutoField(primary_key=True, verbose_name="id") # 主键
application_id = models.CharField(max_length=64, verbose_name="申请id") # 索引
target_name = models.CharField(null=True, max_length=1024, verbose_name="DDA身份证姓名") # 与申请号联合索引
target_id = models.CharField(max_length=1024, verbose_name="DDA身份证号码or银行卡号") # 与申请号联合索引
is_id = models.BooleanField(default=True, verbose_name="身份证or银行卡")
file_path = models.CharField(max_length=1024, verbose_name="图片路径")
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
class Meta:
managed = False
db_table = 'idbc_records'
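Both models set managed = False, so Django migrations will not create or alter these tables; the pyodbc script at the end of this merge creates them on the HIL database. Minimal ORM usage mirroring the handler above (the application id is a placeholder):

dda_record = DDARecords.objects.filter(application_id='HIL-000001').first()
if dda_record is None:
    dda_record = DDARecords(application_id='HIL-000001')
IDBCRecords.objects.create(application_id='HIL-000001', target_id='6222020200112233445',
                           is_id=False, file_path='/path/to/bc_page.jpg')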
......
......@@ -708,7 +708,7 @@ class BSWorkbook(Workbook):
self.remove(self.get_sheet_by_name('Sheet'))
def rebuild(self, bs_summary, license_summary, res_list, document_scheme):
count_list = [(consts.MODEL_FIELD_BS, len(self.sheetnames) - 1)]
count_list = [(consts.MODEL_FIELD_BS, len(bs_summary))]
if document_scheme == consts.DOC_SCHEME_LIST[1]:
self.license_rebuild(license_summary, document_scheme, count_list)
self.bs_rebuild(bs_summary)
......
import pyodbc
hil_sql = """
create table dda_records
(
id bigint identity
primary key,
application_id nvarchar(64) not null,
is_dda_found bit default 0 not null,
is_id_found bit default 0 not null,
is_bc_found bit default 0 not null,
all_found bit default 0 not null,
dda_path nvarchar(1024),
id_path nvarchar(1024),
bc_path nvarchar(1024),
customer_name nvarchar(1024),
customer_id nvarchar(1024),
account_id nvarchar(1024),
dda_found_time datetime,
id_found_time datetime,
bc_found_time datetime,
update_time datetime not null,
create_time datetime not null,
);
create index dda_records_application_id_index
on dda_records (application_id);
create index dda_records_update_time_index
on dda_records (update_time);
create index dda_records_create_time_index
on dda_records (create_time);
create table idbc_records
(
id bigint identity
primary key,
application_id nvarchar(64) not null,
target_name nvarchar(1024),
target_id nvarchar(1024) not null,
is_id bit default 1 not null,
file_path nvarchar(1024) not null,
create_time datetime not null,
);
create index idbc_records_application_id_index
on idbc_records (application_id);
create index idbc_records_application_id_target_name_index
on idbc_records (application_id, target_name);
create index idbc_records_application_id_target_id_index
on idbc_records (application_id, target_id);
"""
hil_cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True)
hil_cursor = hil_cnxn.cursor()
hil_cursor.execute(hil_sql)
hil_cursor.close()
hil_cnxn.close()
# afc_cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True)
# afc_cursor = afc_cnxn.cursor()
# afc_cursor.execute(afc_sql)
# afc_cursor.close()
# afc_cnxn.close()
\ No newline at end of file
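Note that the connect call above names only the driver; a working connection string also needs the server, database and credentials (or a trusted connection). A sketch with placeholder values:

hil_cnxn = pyodbc.connect(
    'DRIVER={ODBC Driver 17 for SQL Server};'
    'SERVER=hil-db-host;DATABASE=hil;UID=hil_user;PWD=***',  # placeholders
    autocommit=True,
)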