30d85698 by chenyao

将uat-tmp所有内容合并到uat-tmp-cy分支上

2 parents cbe57bd2 ea1e7a1e
......@@ -55,6 +55,7 @@ class LoginView(ObtainJSONWebToken, GenericView):
'role': user_role.role if user_role else -1
}
rh.set_token(res.data.get('token')[-10:], user.username)
rh.set_token(res.data.get('token')[-11:], user_role.role if user_role else -1)
return response.ok(data=data)
......@@ -85,9 +86,10 @@ class IWALoginView(IWABaseView, GenericView):
is_valid, data = self.validate(q_number)
if is_valid:
rh.set_token(data.get('token')[-10:], data.get('user_name'))
user_role = UserRole.objects.filter(auth_user_id=data.get('user_id')).first()
data['role'] = user_role.role if user_role else -1
rh.set_token(data.get('token')[-10:], data.get('user_name'))
rh.set_token(data.get('token')[-11:], user_role.role if user_role else -1)
return response.ok(data=data)
else:
self.no_permission(data)
......
......@@ -4,5 +4,10 @@ from . import views
# URL routes for this app; every endpoint is a class-based view.
urlpatterns = [
    path(r'', views.DocView.as_view()),
    path(r'query/employee', views.EmployeeView.as_view()),
    # Green Book history files: metadata search and file download.
    path(r'query/greenBookHistoryFile', views.SearchGBHistoryFileView.as_view()),
    path(r'download/greenBookHistoryFile', views.DownloadGBHistoryFileView.as_view()),
    # Invoice endpoints: Excel export and info query.
    path(r'invoice/downloadExcel', views.InvoiceExcelView.as_view()),
    path(r'invoice/queryInfo', views.InvoiceQueryInfoView.as_view()),
    path(r'contract/v1', views.SEContractView.as_view()),
]
......
......@@ -162,7 +162,7 @@ class Command(BaseCommand, LoggerMixin):
@staticmethod
def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir):
time_stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
time_stamp = datetime.now().strftime('%Y-%m-%d_%H_%M_%S')
new_name = '{0}_{1}'.format(time_stamp, name)
img_save_path = os.path.join(img_output_dir, new_name)
pdf_save_path = os.path.join(pdf_output_dir, new_name)
......
......@@ -320,7 +320,7 @@ class Command(BaseCommand, LoggerMixin):
true_file_set.add(os_error_filename_set.pop())
for name in true_file_set:
time.sleep(10)
unique_folder_name = '{0}_{1}'.format(datetime.now().strftime('%Y-%m-%d_%H:%M:%S'), name)
unique_folder_name = '{0}_{1}'.format(datetime.now().strftime('%Y-%m-%d_%H_%M_%S'), name)
path = os.path.join(input_dir, name)
try:
......
......@@ -272,7 +272,7 @@ class Command(BaseCommand, LoggerMixin):
@staticmethod
def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir, seperate_dir_map):
time_stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
time_stamp = datetime.now().strftime('%Y-%m-%d_%H_%M_%S')
new_name = '{0}_{1}'.format(time_stamp, name)
img_save_path = os.path.join(img_output_dir, new_name)
pdf_save_path = os.path.join(pdf_output_dir, new_name)
......
......@@ -186,7 +186,7 @@ class Command(BaseCommand, LoggerMixin):
@staticmethod
def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir):
time_stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
time_stamp = datetime.now().strftime('%Y-%m-%d_%H_%M_%S')
new_name = '{0}_{1}'.format(time_stamp, name)
img_save_path = os.path.join(img_output_dir, new_name)
pdf_save_path = os.path.join(pdf_output_dir, new_name)
......
......@@ -409,7 +409,7 @@ class Command(BaseCommand, LoggerMixin):
@staticmethod
def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir):
time_stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
time_stamp = datetime.now().strftime('%Y-%m-%d_%H_%M_%S')
new_name = '{0}_{1}'.format(time_stamp, name)
img_save_path = os.path.join(img_output_dir, new_name)
pdf_save_path = os.path.join(pdf_output_dir, new_name)
......
......@@ -336,6 +336,9 @@ class AFCOCRResult(models.Model):
fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同")
fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同")
fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活")
fs_ocr = models.TextField(null=True, verbose_name="财务报表")
fss_ocr = models.TextField(null=True, verbose_name="财务情况说明书")
dp_ocr = models.TextField(null=True, verbose_name="首付款支付承诺书")
update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间')
......@@ -379,6 +382,9 @@ class HILOCRResult(models.Model):
fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同")
fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同")
fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活")
fs_ocr = models.TextField(null=True, verbose_name="财务报表")
fss_ocr = models.TextField(null=True, verbose_name="财务情况说明书")
dp_ocr = models.TextField(null=True, verbose_name="首付款支付承诺书")
update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间')
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
......@@ -420,6 +426,9 @@ class AFCSEOCRResult(models.Model):
fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同")
fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同")
fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活")
fs_ocr = models.TextField(null=True, verbose_name="财务报表")
fss_ocr = models.TextField(null=True, verbose_name="财务情况说明书")
dp_ocr = models.TextField(null=True, verbose_name="首付款支付承诺书")
update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间')
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
......@@ -461,6 +470,9 @@ class HILSEOCRResult(models.Model):
fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同")
fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同")
fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活")
fs_ocr = models.TextField(null=True, verbose_name="财务报表")
fss_ocr = models.TextField(null=True, verbose_name="财务情况说明书")
dp_ocr = models.TextField(null=True, verbose_name="首付款支付承诺书")
update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间')
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
......@@ -1118,4 +1130,46 @@ class DealerMapping(models.Model):
class Meta:
managed = False
db_table = 'dealer_mapping'
\ No newline at end of file
db_table = 'dealer_mapping'
class HILGreenBookHistoryFile(models.Model):
    """History record of a Green Book file fetched from ECM (HIL business line).

    Maps onto the pre-existing ``hil_gb_history_file`` table
    (``managed = False`` — Django never creates or migrates it).
    """
    id = models.AutoField(primary_key=True, verbose_name="id")  # primary key
    object_id = models.CharField(max_length=64, verbose_name="文件唯一ID")  # unique file id in ECM
    object_name = models.CharField(max_length=255, verbose_name="文件名称")  # file name
    application_no = models.CharField(max_length=64, verbose_name="申请号")  # application number
    object_type = models.CharField(max_length=64, verbose_name="文件类型")  # file type
    customer_name = models.CharField(max_length=64, verbose_name="customer_name")
    content_size = models.CharField(max_length=64, verbose_name="文件大小")  # file size (stored as text)
    owner_name = models.CharField(max_length=64, verbose_name="owner_name")
    input_date = models.DateTimeField(verbose_name="上传时间")  # upload time reported by ECM
    modify_date = models.DateTimeField(verbose_name="修改时间")  # last modification time in ECM
    location = models.CharField(max_length=255, verbose_name="文件位置")  # file location/path in ECM
    # Download progress flag; defaults to 0 (not yet downloaded).
    download_finish = models.SmallIntegerField(null=False, default=0, verbose_name="是否下载完成")
    update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间')  # row update time
    create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')  # row creation time

    class Meta:
        managed = False
        db_table = 'hil_gb_history_file'
class AFCGreenBookHistoryFile(models.Model):
    """History record of a Green Book file fetched from ECM (AFC business line).

    Maps onto the pre-existing ``afc_gb_history_file`` table
    (``managed = False`` — Django never creates or migrates it).
    """
    id = models.AutoField(primary_key=True, verbose_name="id")  # primary key
    object_id = models.CharField(max_length=64, verbose_name="文件唯一ID")  # unique file id in ECM
    object_name = models.CharField(max_length=255, verbose_name="文件名称")  # file name
    application_no = models.CharField(max_length=64, verbose_name="申请号")  # application number
    object_type = models.CharField(max_length=64, verbose_name="文件类型")  # file type
    customer_name = models.CharField(max_length=64, verbose_name="customer_name")
    content_size = models.CharField(max_length=64, verbose_name="文件大小")  # file size (stored as text)
    owner_name = models.CharField(max_length=64, verbose_name="owner_name")
    input_date = models.DateTimeField(verbose_name="上传时间")  # upload time reported by ECM
    modify_date = models.DateTimeField(verbose_name="修改时间")  # last modification time in ECM
    location = models.CharField(max_length=255, verbose_name="文件位置")  # file location/path in ECM
    # Fix: was BooleanField(default=True), which (a) mismatches the backing
    # column declared as "int NOT NULL" in the DDL, and (b) marks every new
    # row as already downloaded. Aligned with the HIL twin model:
    # SmallIntegerField defaulting to 0 (not yet downloaded).
    download_finish = models.SmallIntegerField(null=False, default=0, verbose_name="是否下载完成")
    update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间')  # row update time
    create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')  # row creation time

    class Meta:
        managed = False
        db_table = 'afc_gb_history_file'
        situ_db_label = 'afc'
......
......@@ -102,9 +102,14 @@ class ECM(GenericView):
"docbase": self.doc_base_map.get(business_type),
"documentType": doc_type,
"objectId": object_id,
"b_input_date": time.strftime("%m/%d/%Y %X"),
"b_credit_signing_date": time.strftime("%m/%d/%Y %X"),
"b_credit_check": True,
"b_id_number": '',
}
header_info = self.get_headers()
self.running_log.info("{0} download header_info:{1}".format(self.log_base, header_info))
self.running_log.info("{0} download args_info:{1}".format(self.log_base, download_json))
response = requests.post(self.download_url, headers=header_info, json=download_json, verify=False)
if response.status_code != 200:
raise ECMException('ECM download failed with code: {0}'.format(response.status_code))
......@@ -142,6 +147,9 @@ class ECM(GenericView):
"b_region": "0",
"b_region_name": self.b_region_name_map.get(business_type),
"b_input_date": time.strftime("%m/%d/%Y %X"),
"b_credit_signing_date": time.strftime("%m/%d/%Y %X"),
"b_credit_check": True,
"b_id_number": '',
# "file_base64_content": "",
}
for key in self.upload_fields:
......@@ -153,9 +161,28 @@ class ECM(GenericView):
args['file_base64_content'] = file_data
header_info = self.get_headers()
self.running_log.info("{0} upload header_info:{1}".format(self.log_base, header_info))
self.running_log.info("{0} upload args_info:{1}".format(self.log_base, args))
response = requests.post(self.upload_url, headers=header_info, json=args, verify=False)
if response.status_code != 200:
raise ECMException('ECM upload failed with code: {0} , with headers: {1} , with content: {2}'.format(
response.status_code, response.headers, response.text))
if 'ns6:createResponse' not in response.json().get('S:Envelope', {}).get('S:Body', {}):
if 'ns12:createResponse' not in response.json().get('S:Envelope', {}).get('S:Body', {}):
raise ECMException('ECM upload failed: {0} , with headers: {1}'.format(response.json(), response.headers))
def search_doc_info_list(self, filePath, business_type):
    """Query ECM via DQL for metadata of all green_book documents at *filePath*.

    Returns the parsed JSON response; raises ECMException on a non-200 reply.
    """
    args = {
        # "userName" has a capital N, unlike the other interfaces, because
        # the API gateway does not normalise field names.
        "userName": self.username,
        "password": self.pwd,
        "docbase": self.doc_base_map.get(business_type),
        "documentType": "green_book",
        # NOTE(review): filePath is interpolated straight into the DQL string —
        # confirm it is never user-controlled (DQL-injection risk).
        "dql":"select r_object_id, object_name,b_application_no, r_object_type,b_customer_name,r_content_size, owner_name, b_input_date, r_modify_date, b_location from green_book where b_location = '{}'" .format(filePath),
    }
    header_info = self.get_headers()
    self.running_log.info("{0} search header_info:{1}".format(self.log_base, header_info))
    self.running_log.info("{0} search args_info:{1}".format(self.log_base, args))
    response = requests.post(self.search_url, headers=header_info, json=args, verify=False)
    if response.status_code != 200:
        raise ECMException('ECM search failed with code: {0}'.format(response.status_code))
    # self.running_log.info("{0} search response.json():{1}".format(self.log_base, response.json()))
    return response.json()
\ No newline at end of file
......
......@@ -827,6 +827,80 @@ class BSWorkbook(Workbook):
ws.append(row)
ws.append((None, ))
def financial_rebuild(self, financial_statement_dict):
    """Rebuild the financial-statement sheet.

    Each entry of *financial_statement_dict* maps a section key
    ("code" / "stamp") to a dict of per-table values; one row is appended
    per recognised table. No sheet is created when the input is empty.
    """
    if not financial_statement_dict:
        return
    sheet = self.create_sheet(consts.FINANCIAL_SHEET_NAME)
    # Suffix appended to the table label, keyed by section.
    suffix_by_section = {"code": "识别码", "stamp": "印章"}
    # Row-label prefix for each recognised table key; others are skipped.
    prefix_by_table = {
        "balance_sheet": "资产负债表",
        "income_statement": "利润表",
        "cash_flow_statement": "现金流量表",
    }
    for section_key, tables in financial_statement_dict.items():
        suffix = suffix_by_section.get(section_key, "识别码")
        for table_key, table_value in tables.items():
            prefix = prefix_by_table.get(table_key)
            if prefix is not None:
                sheet.append([prefix + suffix, str(table_value)])
def financial_explanation_rebuild(self, financial_explanation_dict):
    """Rebuild the financial-statement-explanation sheet.

    Appends one row per entry of *financial_explanation_dict*; the label
    depends on the key ("title" / "stamp"). No sheet is created when the
    input is empty.
    """
    if not financial_explanation_dict:
        return
    sheet = self.create_sheet(consts.FINANCIAL_EXPLANATION_SHEET_NAME)
    label_by_key = {"title": "公司名称", "stamp": "印章"}
    for item_key, item_value in financial_explanation_dict.items():
        label = label_by_key.get(item_key, "公司名称")
        sheet.append(["财报情况说明" + label, str(item_value)])
def down_payment_rebuild(self, down_payment_dict):
    """Rebuild the down-payment-commitment sheet.

    Appends one row per field of *down_payment_dict* whose key has a known
    Chinese label; unmapped keys are skipped. No sheet is created when the
    input is empty.

    Bug fix: the original else-branch evaluated
    ``english_chinese_dict[dp_key]`` for keys that were just tested to be
    absent from the dict, so any unexpected key raised KeyError and aborted
    the whole rebuild. Unknown keys are now simply ignored.
    """
    if not down_payment_dict:
        return
    ws = self.create_sheet(consts.DOWN_PAYMENT_SHEET_NAME)
    english_chinese_dict = {
        "financial_org_name": "渠道",
        "main_borrower_name": "姓名",
        "main_borrower_id_no": "证件号码",
        "apply_no": "合同编号",
        "contract_name": "合同名称",
        "promisor_signature": "承诺人签字-电子",
        "promisor_signature_date": "承诺人签字日期-电子"
    }
    for dp_key, dp_value in down_payment_dict.items():
        label = english_chinese_dict.get(dp_key)
        if label is None:
            continue  # fix: was english_chinese_dict[dp_key] -> KeyError
        ws.append([label, str(dp_value)])
@staticmethod
def remove_yuan(amount_key_set, key, src_str):
if key in amount_key_set and isinstance(src_str, str):
......@@ -926,7 +1000,7 @@ class BSWorkbook(Workbook):
if len(self.sheetnames) > 1:
self.remove(self.get_sheet_by_name('Sheet'))
def rebuild(self, bs_summary, license_summary, res_list, document_scheme, contract_result, metadata):
def rebuild(self, bs_summary, license_summary, res_list, document_scheme, contract_result, metadata, financial_statement_dict, financial_explanation_dict, down_payment_dict):
res_count_tuple = self.res_sheet(res_list)
count_list = [(consts.MODEL_FIELD_BS, len(bs_summary))]
......@@ -934,10 +1008,16 @@ class BSWorkbook(Workbook):
self.license_rebuild(license_summary, document_scheme, count_list)
self.contract_rebuild(contract_result)
self.bs_rebuild(bs_summary, res_count_tuple, metadata)
self.financial_rebuild(financial_statement_dict)
self.financial_explanation_rebuild(financial_explanation_dict)
self.down_payment_rebuild(down_payment_dict)
else:
self.bs_rebuild(bs_summary, res_count_tuple, metadata)
self.license_rebuild(license_summary, document_scheme, count_list)
self.contract_rebuild(contract_result, True)
self.financial_rebuild(financial_statement_dict)
self.financial_explanation_rebuild(financial_explanation_dict)
self.down_payment_rebuild(down_payment_dict)
self.move_res_sheet()
self.remove_base_sheet()
return count_list, self.need_follow
......
......@@ -8,4 +8,6 @@ broker = conf.CELERY_BROKER_URL
app = Celery('celery_compare', broker=broker, include=['celery_compare.tasks'])
app.conf.update(worker_max_tasks_per_child=5, timezone='Asia/Shanghai')
# worker_max_tasks_per_child ,worker执行了几次任务就会死
#app.conf.update(worker_max_tasks_per_child=10, timezone='Asia/Shanghai')
app.conf.update(timezone='Asia/Shanghai')
......
......@@ -19,10 +19,18 @@ class HMHRetriever:
def get_target_fields(self, pdf_text_list):
result = dict()
is_find_name_id_company, is_find_application_no, is_find_name_date = False, False, False
for bbox, text in pdf_text_list.pop(str(0), []):
# print(text)
# for bbox, text in pdf_text_list.pop(str(0), []):
pdf_text_items = pdf_text_list.pop(str(0), [])
for i in range(len(pdf_text_items)):
bbox, text = pdf_text_items[i]
combined_text = text
if i < len(pdf_text_items) - 1:
combined_text += pdf_text_items[i + 1][1]
if not is_find_name_id_company:
name_id_company_list = re.findall(r'姓名(.*)证件号码(.*)与(.*公司)', text)
# name_id_company_list = re.findall(r'姓名(.*?)证件号码(.*?)与(.*?公司|.*)', combined_text)
name_id_company_list = re.findall(r'姓名(.*)证件号码(.*)与(.*公司)', combined_text)
for name_id_company_tuple in name_id_company_list:
if len(name_id_company_tuple) == 3:
result[self.search_fields_list[0][0]] = {
......@@ -40,7 +48,7 @@ class HMHRetriever:
is_find_name_id_company = True
break
if not is_find_application_no:
application_no_list = re.findall(r'合同编号.*(CH-B\d*-\d*).*', text)
application_no_list = re.findall(r'合同编号.*(CH-B\d*-\d*).*', combined_text)
if len(application_no_list) == 1:
result[self.search_fields_list[3][0]] = {
self.words_str: application_no_list[0],
......@@ -48,7 +56,7 @@ class HMHRetriever:
}
is_find_application_no = True
if not is_find_name_date:
name_date_list = re.findall(r'(.*).*签署日期.*(\d{4}-\d{2}-\d{2})', text)
name_date_list = re.findall(r'(.*).*签署日期.*(\d{4}-\d{2}-\d{2})', combined_text)
for name_date_tuple in name_date_list:
if len(name_date_tuple) == 2:
result[self.search_fields_list[4][0]] = {
......
#这个有问题
from Crypto.Cipher import AES
from base64 import b64encode, b64decode
def encrypt_ecb(data, key):
    """AES-encrypt *data* (str) with *key* (str) and return base64 text.

    NOTE(review): despite the name, this uses CBC mode with an all-zero
    16-byte IV, not ECB — confirm which mode the peer system expects.
    Input is padded to a 32-byte boundary via pad() defined below.
    """
    data = data.encode()
    key = key.encode()
    aes = AES.new(key, AES.MODE_CBC, bytes(16))  # zero IV
    res = aes.encrypt(pad(data, 32))
    return b64encode(res).decode()
def decrypt(data, key, iv):
    """AES-CBC decrypt base64 *data* with *key*/*iv* (str) and return text.

    Unpadding uses a 32-byte block size via unpad() defined below, matching
    the pad(data, 32) call in encrypt_ecb above.
    """
    key = key.encode()
    iv = iv.encode()
    # aes = AES.new(key, AES.MODE_CBC, bytes(16))
    aes = AES.new(key, AES.MODE_CBC, iv)
    res = aes.decrypt(b64decode(data))
    return unpad(res, 32).decode()
def unpad(padded_data, block_size, style='pkcs7'):
    """Strip padding from *padded_data* (bytes) and return the payload.

    Supported styles: 'pkcs7', 'x923', 'iso7816' (mirrors
    Crypto.Util.Padding.unpad). Raises ValueError on empty input, input not
    a multiple of *block_size*, invalid padding, or an unknown style.

    Improvement: the bord()/bchr() Py2-compat shims are inlined — indexing
    bytes already yields an int on Python 3, and bchr(n) is bytes([n]) —
    making this function self-contained.
    """
    pdata_len = len(padded_data)
    if pdata_len == 0:
        raise ValueError("Zero-length input cannot be unpadded")
    if pdata_len % block_size:
        raise ValueError("Input data is not padded")
    if style in ('pkcs7', 'x923'):
        padding_len = padded_data[-1]  # bytes indexing yields int on Py3
        if padding_len < 1 or padding_len > min(block_size, pdata_len):
            raise ValueError("Padding is incorrect.")
        if style == 'pkcs7':
            # Every padding byte must equal the padding length.
            if padded_data[-padding_len:] != bytes([padding_len]) * padding_len:
                raise ValueError("PKCS#7 padding is incorrect.")
        else:
            # X.923: zero filler bytes followed by the padding length.
            if padded_data[-padding_len:-1] != bytes(padding_len - 1):
                raise ValueError("ANSI X.923 padding is incorrect.")
    elif style == 'iso7816':
        # ISO 7816-4: a 0x80 marker byte followed by zero bytes.
        padding_len = pdata_len - padded_data.rfind(bytes([128]))
        if padding_len < 1 or padding_len > min(block_size, pdata_len):
            raise ValueError("Padding is incorrect.")
        if padding_len > 1 and padded_data[1 - padding_len:] != bytes(padding_len - 1):
            raise ValueError("ISO 7816-4 padding is incorrect.")
    else:
        raise ValueError("Unknown padding style")
    return padded_data[:-padding_len]
def pad(data_to_pad, block_size, style='pkcs7'):
    """Pad *data_to_pad* (bytes) up to a multiple of *block_size*.

    Supported styles: 'pkcs7', 'x923', 'iso7816' (mirrors
    Crypto.Util.Padding.pad); always appends at least one byte.
    Raises ValueError for an unknown style.

    Improvement: the bchr() Py2-compat shim is inlined (bchr(n) is
    bytes([n]) on Python 3), making this function self-contained.
    """
    padding_len = block_size - len(data_to_pad) % block_size
    if style == 'pkcs7':
        # Each padding byte equals the padding length.
        padding = bytes([padding_len]) * padding_len
    elif style == 'x923':
        # Zero filler bytes, terminated by the padding length.
        padding = bytes(padding_len - 1) + bytes([padding_len])
    elif style == 'iso7816':
        # 0x80 marker byte followed by zero bytes.
        padding = bytes([128]) + bytes(padding_len - 1)
    else:
        raise ValueError("Unknown padding style")
    return data_to_pad + padding
def bord(s):
    """Return *s* unchanged.

    Python-2 compatibility shim: on Python 3, indexing bytes already yields
    an int, so this is the identity function here.
    """
    return s
def bchr(s):
    """Return the single byte whose ordinal value is *s* (an int in 0..255)."""
    return bytes((s,))
if __name__ == '__main__':
    # Manual smoke test: decrypt a sample ciphertext with the UAT key/IV.
    decrypt_data = decrypt('QkjNiuixpmtcxxqxaIZ30A==', 'm0XsOHC52YZ5KtakhpuMSZtF7DhwudmG', 'OCRocr2024UATocr')
    print('解密:', decrypt_data)
\ No newline at end of file
from Crypto.Cipher import AES
from base64 import b64encode, b64decode
def aes_encrypt_cbc(data, key, iv):
    """AES-CBC encrypt *data* with *key* and *iv* and return raw ciphertext.

    Assumes *data*, *key* and *iv* are bytes and *data* is already padded
    to a 16-byte multiple — TODO confirm at call sites (no padding is
    applied here).
    """
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return cipher.encrypt(data)
def aes_decrypt_cbc(data, key, iv):
    """Base64-decode *data* and AES-CBC decrypt it with *key*/*iv* (str).

    Returns the decoded utf-8 text, or '' on any failure (deliberate
    best-effort: the broad except swallows all errors).

    NOTE(review): "padding removal" only strips 0x0e characters, i.e. it
    assumes PKCS#7 padding of exactly length 14 — confirm this matches the
    encryptor; other padding lengths would leak into the result.
    """
    res = ''
    try:
        cipher = AES.new(key.encode(), AES.MODE_CBC, iv.encode())
        res = cipher.decrypt(b64decode(data))
        res = res.decode('utf-8').replace('\x0e', '')
    except Exception as e:
        res = ''
    return res
# Example usage: decrypt a sample UAT ciphertext.
key = 'm0XsOHC52YZ5KtakhpuMSZtF7DhwudmG'  # key length must be 16, 24 or 32 bytes
iv = 'OCRocr2024UATocr'  # 16-byte IV
decrypted_data = aes_decrypt_cbc('QkjNiuixpmtcxxqxaIZ30A==', key, iv)
print("解密:", decrypted_data)
\ No newline at end of file
......@@ -12,6 +12,7 @@ import logging
compare_log = logging.getLogger('compare')
class Comparison:
def __init__(self):
......@@ -192,7 +193,7 @@ class Comparison:
def se_input_list_compare(self, input_list, ocr_str, **kwargs):
if isinstance(input_list, list) and len(input_list) > 0 and isinstance(ocr_str, str):
ocr_str = ocr_str.translate(self.KH_TRANS)
for input_str in input_list:
input_str = input_str.translate(self.KH_TRANS)
compare_log.info('[se_input_list_compare] [input_str {0}] [ocr_str {1}]'.format(input_str, ocr_str))
......@@ -221,7 +222,7 @@ class Comparison:
for idx in range(len(src_str)):
if src_str[idx].isdigit():
replace_char_list.append(src_str[idx])
elif idx == len(src_str)-3:
elif idx == len(src_str) - 3:
replace_char_list.append('.')
return ''.join(replace_char_list)
......@@ -323,6 +324,9 @@ class Comparison:
return self.RESULT_Y
if kwargs.get('remove_space', False):
input_str = input_str.replace(' ', '')
if kwargs.get('remove_all_space', False):
input_str = input_str.replace(' ', '')
ocr_str = ocr_str.replace(' ', '')
if kwargs.get('brackets_replace', False):
input_str = input_str.translate(self.KH_TRANS)
ocr_str = ocr_str.translate(self.KH_TRANS)
......@@ -603,6 +607,33 @@ class Comparison:
except Exception as e:
return self.RESULT_N
def se_bd_date_2_compare(self, input_str, ocr_str, **kwargs):
    """Validate a 'YYYY-MM-DD' date (*ocr_str*) against a window around today.

    kwargs['start'] truthy -> start date must be earlier than the day after
    tomorrow (exclusive); otherwise -> end date must be later than yesterday
    (exclusive). Returns self.RESULT_Y / self.RESULT_N; any parse failure
    yields RESULT_N. *input_str* is unused (kept for the shared compare
    signature).

    Improvement: uses stdlib datetime.timedelta instead of
    dateutil.relativedelta — for plain day offsets they are equivalent, and
    this drops the third-party dependency.
    """
    from datetime import timedelta  # stdlib replacement for relativedelta(days=n)
    try:
        ocr_date = datetime.strptime(ocr_str, "%Y-%m-%d").date()
        today_date = datetime.today().date()
        if kwargs.get('start', False):
            # Start date < day after tomorrow (exclusive).
            if ocr_date < today_date + timedelta(days=2):
                return self.RESULT_Y
        else:
            # End date > yesterday (exclusive).
            if ocr_date > today_date - timedelta(days=1):
                return self.RESULT_Y
        return self.RESULT_N
    except Exception as e:
        # Any malformed date (or other error) fails the comparison.
        return self.RESULT_N
def se_bs_print_date_compare(self, input_str, ocr_str, **kwargs):
try:
input_date = datetime.strptime(input_str, "%Y-%m-%d")
......@@ -661,7 +692,7 @@ class Comparison:
# input_str = input_str.replace('-', '')
return self.is_after_today_pre(ocr_str)
def se_qrs_compare(self, input_str, ocr_str_or_list, **kwargs):
try:
target_count_str, application_id = input_str.split('_')
......@@ -676,7 +707,58 @@ class Comparison:
except Exception as e:
return self.RESULT_N
def hash_code_compare(self, input_str, ocr_dict, **kwargs):
    """Return RESULT_Y only when all three statement hashes in *ocr_dict*
    ('balance_sheet', 'income_statement', 'cash_flow_statement') equal
    *input_str*; RESULT_N otherwise or on any error.
    """
    try:
        statement_keys = ('balance_sheet', 'income_statement', 'cash_flow_statement')
        if all(ocr_dict.get(key, '') == input_str for key in statement_keys):
            return self.RESULT_Y
        return self.RESULT_N
    except Exception as e:
        return self.RESULT_N
cp = Comparison()
def stamp_dict_compare(self, input_str, ocr_dict, **kwargs):
    """Return RESULT_Y only when every statement in *ocr_dict* has a stamp
    flag equal to the int 1; RESULT_N otherwise or on any error.
    *input_str* is unused (kept for the shared compare signature).
    """
    try:
        for table in ('balance_sheet', 'income_statement', 'cash_flow_statement'):
            if ocr_dict.get(table, '') != 1:
                return self.RESULT_N
        return self.RESULT_Y
    except Exception as e:
        return self.RESULT_N
def stamp_str_compare(self, input_str, ocr_str, **kwargs):
    """Return RESULT_Y when *ocr_str* equals the int 1 (stamp present),
    RESULT_N otherwise or on any error. *input_str* is unused.
    """
    try:
        return self.RESULT_Y if ocr_str == 1 else self.RESULT_N
    except Exception as e:
        return self.RESULT_N
def fiscal_year_compare(self, input_str, ocr_list, **kwargs):
    """Return RESULT_Y when str(*input_str*) is the current or previous
    calendar year, RESULT_N otherwise or on any error.
    *ocr_list* is unused (kept for the shared compare signature).
    """
    try:
        current_year = datetime.now().year
        accepted = {str(current_year), str(current_year - 1)}
        return self.RESULT_Y if str(input_str) in accepted else self.RESULT_N
    except Exception as e:
        return self.RESULT_N
def input_list_not_zero_compare(self, input_list, ocr_list, **kwargs):
    """Return RESULT_N when any entry of *input_list* converts to float 0
    (or conversion fails); RESULT_Y otherwise — including an empty list.
    *ocr_list* is unused (kept for the shared compare signature).
    """
    try:
        if any(float(entry) == 0 for entry in input_list):
            return self.RESULT_N
        return self.RESULT_Y
    except Exception as e:
        return self.RESULT_N
cp = Comparison()
......
import pyodbc

# One-off DDL migration: add financial-statement (fs_ocr) and
# financial-statement-explanation (fss_ocr) text columns to the HIL and
# AFC OCR result tables.
hil_sql = """
ALTER TABLE hil_ocr_result ADD fs_ocr nvarchar(max);
ALTER TABLE hil_se_ocr_result ADD fs_ocr nvarchar(max);
ALTER TABLE hil_ocr_result ADD fss_ocr nvarchar(max);
ALTER TABLE hil_se_ocr_result ADD fss_ocr nvarchar(max);
"""
afc_sql = """
ALTER TABLE afc_ocr_result ADD fs_ocr nvarchar(max);
ALTER TABLE afc_se_ocr_result ADD fs_ocr nvarchar(max);
ALTER TABLE afc_ocr_result ADD fss_ocr nvarchar(max);
ALTER TABLE afc_se_ocr_result ADD fss_ocr nvarchar(max);
"""

# Run each batch on its own autocommit connection (DDL needs no transaction);
# HIL first, then AFC, exactly as before.
for ddl_batch in (hil_sql, afc_sql):
    connection = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True)
    cursor = connection.cursor()
    cursor.execute(ddl_batch)
    cursor.close()
    connection.close()
import pyodbc


def _run_ddl(statement_batch):
    # Execute one DDL batch on its own autocommit connection, then close.
    connection = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True)
    cursor = connection.cursor()
    cursor.execute(statement_batch)
    cursor.close()
    connection.close()


# One-off DDL migration: add the down-payment (dp_ocr) text column to the
# HIL and AFC OCR result tables.
hil_sql = """
ALTER TABLE hil_ocr_result ADD dp_ocr nvarchar(max);
ALTER TABLE hil_se_ocr_result ADD dp_ocr nvarchar(max);
"""
afc_sql = """
ALTER TABLE afc_ocr_result ADD dp_ocr nvarchar(max);
ALTER TABLE afc_se_ocr_result ADD dp_ocr nvarchar(max);
"""

_run_ddl(hil_sql)  # HIL first, then AFC, exactly as before
_run_ddl(afc_sql)
import pyodbc

# One-off DDL migration: create the Green Book history tables and add a
# unique constraint on object_id, for both the HIL and AFC databases.
#
# Bug fix: the original embedded the SSMS batch separator "GO;" inside the
# SQL strings. "GO" is a client-tool keyword, not T-SQL, so executing the
# batch through pyodbc raises a syntax error and neither the CREATE TABLE
# nor the ALTER TABLE runs. The two statements are now executed separately.

hil_create_sql = """
CREATE TABLE [dbo].[hil_gb_history_file] (
[id] int IDENTITY(1,1) NOT NULL,
[object_id] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NOT NULL,
[object_name] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[application_no] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[object_type] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[customer_name] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[content_size] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[owner_name] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[input_date] datetime NULL,
[modify_date] datetime NULL,
[location] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[download_finish] int NOT NULL,
[update_time] datetime NULL,
[create_time] datetime NULL
)
"""
hil_constraint_sql = "alter table hil_gb_history_file ADD CONSTRAINT unique_object_id unique(object_id)"

afc_create_sql = """
CREATE TABLE [dbo].[afc_gb_history_file] (
[id] int IDENTITY(1,1) NOT NULL,
[object_id] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NOT NULL,
[object_name] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[application_no] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[object_type] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[customer_name] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[content_size] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[owner_name] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[input_date] datetime NULL,
[modify_date] datetime NULL,
[location] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL,
[download_finish] int NOT NULL,
[update_time] datetime NULL,
[create_time] datetime NULL
)
"""
afc_constraint_sql = "alter table afc_gb_history_file ADD CONSTRAINT unique_object_id unique(object_id)"

# HIL first, then AFC, each on its own autocommit connection as before.
for create_sql, constraint_sql in (
    (hil_create_sql, hil_constraint_sql),
    (afc_create_sql, afc_constraint_sql),
):
    connection = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True)
    cursor = connection.cursor()
    cursor.execute(create_sql)      # create the table
    cursor.execute(constraint_sql)  # then add the unique constraint
    cursor.close()
    connection.close()
......@@ -100,9 +100,9 @@ DATABASES = {
for db_setting in DATABASES.values():
db_setting['OPTIONS'] = {
'driver': 'ODBC Driver 17 for SQL Server',
'extra_params': "odbc_cursortype=2"
'extra_params': "odbc_cursortype=2;TrustServerCertificate=yes;Encrypt=yes"
}
db_setting['CONN_MAX_AGE'] = 0
# set this to False if you want to turn off pyodbc's connection pooling
DATABASE_CONNECTION_POOLING = True
......
......@@ -16,3 +16,4 @@ BASE_URL = https://sfocr-prod.bmwgroup.net
DELAY_SECONDS = 60
BD_PRICE = 950
\ No newline at end of file
......
......@@ -14,4 +14,6 @@ DEALER_CODE = ocr_situ_group
BASE_URL = https://staging-bmw-ocr.situdata.com
DELAY_SECONDS = 60
\ No newline at end of file
DELAY_SECONDS = 60
BD_PRICE = 950
\ No newline at end of file
......
......@@ -14,4 +14,6 @@ DEALER_CODE = ocr_situ_group
BASE_URL = https://sfocr-uat.bmwgroup.net
DELAY_SECONDS = 60
\ No newline at end of file
DELAY_SECONDS = 60
BD_PRICE = 950
\ No newline at end of file
......
Styling with Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!