Merge all content from uat-tmp into the uat-tmp-cy branch
Showing 26 changed files with 501 additions and 23 deletions
| ... | @@ -55,6 +55,7 @@ class LoginView(ObtainJSONWebToken, GenericView): | ... | @@ -55,6 +55,7 @@ class LoginView(ObtainJSONWebToken, GenericView): |
| 55 | 'role': user_role.role if user_role else -1 | 55 | 'role': user_role.role if user_role else -1 |
| 56 | } | 56 | } |
| 57 | rh.set_token(res.data.get('token')[-10:], user.username) | 57 | rh.set_token(res.data.get('token')[-10:], user.username) |
| 58 | rh.set_token(res.data.get('token')[-11:], user_role.role if user_role else -1) | ||
| 58 | return response.ok(data=data) | 59 | return response.ok(data=data) |
| 59 | 60 | ||
| 60 | 61 | ||
| ... | @@ -85,9 +86,10 @@ class IWALoginView(IWABaseView, GenericView): | ... | @@ -85,9 +86,10 @@ class IWALoginView(IWABaseView, GenericView): |
| 85 | is_valid, data = self.validate(q_number) | 86 | is_valid, data = self.validate(q_number) |
| 86 | 87 | ||
| 87 | if is_valid: | 88 | if is_valid: |
| 88 | rh.set_token(data.get('token')[-10:], data.get('user_name')) | ||
| 89 | user_role = UserRole.objects.filter(auth_user_id=data.get('user_id')).first() | 89 | user_role = UserRole.objects.filter(auth_user_id=data.get('user_id')).first() |
| 90 | data['role'] = user_role.role if user_role else -1 | 90 | data['role'] = user_role.role if user_role else -1 |
| 91 | rh.set_token(data.get('token')[-10:], data.get('user_name')) | ||
| 92 | rh.set_token(data.get('token')[-11:], user_role.role if user_role else -1) | ||
| 91 | return response.ok(data=data) | 93 | return response.ok(data=data) |
| 92 | else: | 94 | else: |
| 93 | self.no_permission(data) | 95 | self.no_permission(data) | ... | ... |
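Both login paths now cache two values per issued token: the last 10 characters of the JWT map to the username and the last 11 characters map to the role (-1 when no UserRole row exists). A minimal sketch of how a later request could read both back, assuming `rh` wraps a Redis-style store and exposes a `get_token(key)` counterpart to `set_token` (the getter name is an assumption, not taken from this diff):

```python
def get_cached_identity(token, rh):
    """Hypothetical helper mirroring the two set_token() writes made at login."""
    username = rh.get_token(token[-10:])   # last 10 chars were keyed to the username
    role = rh.get_token(token[-11:])       # last 11 chars were keyed to the role, -1 if none
    return username, role
```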
| ... | @@ -4,5 +4,10 @@ from . import views | ... | @@ -4,5 +4,10 @@ from . import views |
| 4 | 4 | ||
| 5 | urlpatterns = [ | 5 | urlpatterns = [ |
| 6 | path(r'', views.DocView.as_view()), | 6 | path(r'', views.DocView.as_view()), |
| 7 | path(r'query/employee', views.EmployeeView.as_view()), | ||
| 8 | path(r'query/greenBookHistoryFile', views.SearchGBHistoryFileView.as_view()), | ||
| 9 | path(r'download/greenBookHistoryFile', views.DownloadGBHistoryFileView.as_view()), | ||
| 10 | path(r'invoice/downloadExcel', views.InvoiceExcelView.as_view()), | ||
| 11 | path(r'invoice/queryInfo', views.InvoiceQueryInfoView.as_view()), | ||
| 7 | path(r'contract/v1', views.SEContractView.as_view()), | 12 | path(r'contract/v1', views.SEContractView.as_view()), |
| 8 | ] | 13 | ] | ... | ... |
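The new routes cover employee lookup, green-book history file search/download, and invoice Excel export/query. A hedged example of calling the history-file query endpoint, assuming the app is mounted under an `/api/doc/` prefix and authenticated with the JWT from login (prefix, parameter name, and auth scheme are assumptions; only the relative paths come from this diff):

```python
import requests

BASE_URL = "https://staging-bmw-ocr.situdata.com"      # staging host from the conf files below
headers = {"Authorization": "JWT <token>"}              # auth scheme assumed

resp = requests.get(
    f"{BASE_URL}/api/doc/query/greenBookHistoryFile",   # mount prefix assumed
    params={"application_no": "CH-B000000000-0"},       # illustrative query parameter
    headers=headers,
)
print(resp.status_code, resp.json())
```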
| ... | @@ -162,7 +162,7 @@ class Command(BaseCommand, LoggerMixin): | ... | @@ -162,7 +162,7 @@ class Command(BaseCommand, LoggerMixin): |
| 162 | 162 | ||
| 163 | @staticmethod | 163 | @staticmethod |
| 164 | def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir): | 164 | def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir): |
| 165 | time_stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S') | 165 | time_stamp = datetime.now().strftime('%Y-%m-%d_%H_%M_%S') |
| 166 | new_name = '{0}_{1}'.format(time_stamp, name) | 166 | new_name = '{0}_{1}'.format(time_stamp, name) |
| 167 | img_save_path = os.path.join(img_output_dir, new_name) | 167 | img_save_path = os.path.join(img_output_dir, new_name) |
| 168 | pdf_save_path = os.path.join(pdf_output_dir, new_name) | 168 | pdf_save_path = os.path.join(pdf_output_dir, new_name) | ... | ... |
| ... | @@ -320,7 +320,7 @@ class Command(BaseCommand, LoggerMixin): | ... | @@ -320,7 +320,7 @@ class Command(BaseCommand, LoggerMixin): |
| 320 | true_file_set.add(os_error_filename_set.pop()) | 320 | true_file_set.add(os_error_filename_set.pop()) |
| 321 | for name in true_file_set: | 321 | for name in true_file_set: |
| 322 | time.sleep(10) | 322 | time.sleep(10) |
| 323 | unique_folder_name = '{0}_{1}'.format(datetime.now().strftime('%Y-%m-%d_%H:%M:%S'), name) | 323 | unique_folder_name = '{0}_{1}'.format(datetime.now().strftime('%Y-%m-%d_%H_%M_%S'), name) |
| 324 | path = os.path.join(input_dir, name) | 324 | path = os.path.join(input_dir, name) |
| 325 | 325 | ||
| 326 | try: | 326 | try: | ... | ... |
| ... | @@ -272,7 +272,7 @@ class Command(BaseCommand, LoggerMixin): | ... | @@ -272,7 +272,7 @@ class Command(BaseCommand, LoggerMixin): |
| 272 | 272 | ||
| 273 | @staticmethod | 273 | @staticmethod |
| 274 | def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir, seperate_dir_map): | 274 | def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir, seperate_dir_map): |
| 275 | time_stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S') | 275 | time_stamp = datetime.now().strftime('%Y-%m-%d_%H_%M_%S') |
| 276 | new_name = '{0}_{1}'.format(time_stamp, name) | 276 | new_name = '{0}_{1}'.format(time_stamp, name) |
| 277 | img_save_path = os.path.join(img_output_dir, new_name) | 277 | img_save_path = os.path.join(img_output_dir, new_name) |
| 278 | pdf_save_path = os.path.join(pdf_output_dir, new_name) | 278 | pdf_save_path = os.path.join(pdf_output_dir, new_name) | ... | ... |
| ... | @@ -186,7 +186,7 @@ class Command(BaseCommand, LoggerMixin): | ... | @@ -186,7 +186,7 @@ class Command(BaseCommand, LoggerMixin): |
| 186 | 186 | ||
| 187 | @staticmethod | 187 | @staticmethod |
| 188 | def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir): | 188 | def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir): |
| 189 | time_stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S') | 189 | time_stamp = datetime.now().strftime('%Y-%m-%d_%H_%M_%S') |
| 190 | new_name = '{0}_{1}'.format(time_stamp, name) | 190 | new_name = '{0}_{1}'.format(time_stamp, name) |
| 191 | img_save_path = os.path.join(img_output_dir, new_name) | 191 | img_save_path = os.path.join(img_output_dir, new_name) |
| 192 | pdf_save_path = os.path.join(pdf_output_dir, new_name) | 192 | pdf_save_path = os.path.join(pdf_output_dir, new_name) | ... | ... |
| ... | @@ -409,7 +409,7 @@ class Command(BaseCommand, LoggerMixin): | ... | @@ -409,7 +409,7 @@ class Command(BaseCommand, LoggerMixin): |
| 409 | 409 | ||
| 410 | @staticmethod | 410 | @staticmethod |
| 411 | def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir): | 411 | def get_path(name, img_output_dir, wb_output_dir, pdf_output_dir): |
| 412 | time_stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S') | 412 | time_stamp = datetime.now().strftime('%Y-%m-%d_%H_%M_%S') |
| 413 | new_name = '{0}_{1}'.format(time_stamp, name) | 413 | new_name = '{0}_{1}'.format(time_stamp, name) |
| 414 | img_save_path = os.path.join(img_output_dir, new_name) | 414 | img_save_path = os.path.join(img_output_dir, new_name) |
| 415 | pdf_save_path = os.path.join(pdf_output_dir, new_name) | 415 | pdf_save_path = os.path.join(pdf_output_dir, new_name) | ... | ... |
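The same one-character change appears in every management command's get_path (and in the unique folder-name builder): the timestamp separator moves from ':' to '_'. Colons are not allowed in Windows file names and are awkward in URLs and some shell tools, so paths built from the old format could fail. A quick illustration of the difference:

```python
from datetime import datetime

ts = datetime(2024, 5, 6, 14, 30, 5)
print(ts.strftime('%Y-%m-%d_%H:%M:%S'))  # 2024-05-06_14:30:05 - ':' is illegal in Windows file names
print(ts.strftime('%Y-%m-%d_%H_%M_%S'))  # 2024-05-06_14_30_05 - safe as a file or folder name
```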
| ... | @@ -336,6 +336,9 @@ class AFCOCRResult(models.Model): | ... | @@ -336,6 +336,9 @@ class AFCOCRResult(models.Model): |
| 336 | fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同") | 336 | fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同") |
| 337 | fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同") | 337 | fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同") |
| 338 | fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活") | 338 | fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活") |
| 339 | fs_ocr = models.TextField(null=True, verbose_name="财务报表") | ||
| 340 | fss_ocr = models.TextField(null=True, verbose_name="财务情况说明书") | ||
| 341 | dp_ocr = models.TextField(null=True, verbose_name="首付款支付承诺书") | ||
| 339 | 342 | ||
| 340 | 343 | ||
| 341 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') | 344 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') |
| ... | @@ -379,6 +382,9 @@ class HILOCRResult(models.Model): | ... | @@ -379,6 +382,9 @@ class HILOCRResult(models.Model): |
| 379 | fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同") | 382 | fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同") |
| 380 | fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同") | 383 | fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同") |
| 381 | fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活") | 384 | fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活") |
| 385 | fs_ocr = models.TextField(null=True, verbose_name="财务报表") | ||
| 386 | fss_ocr = models.TextField(null=True, verbose_name="财务情况说明书") | ||
| 387 | dp_ocr = models.TextField(null=True, verbose_name="首付款支付承诺书") | ||
| 382 | 388 | ||
| 383 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') | 389 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') |
| 384 | create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间') | 390 | create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间') |
| ... | @@ -420,6 +426,9 @@ class AFCSEOCRResult(models.Model): | ... | @@ -420,6 +426,9 @@ class AFCSEOCRResult(models.Model): |
| 420 | fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同") | 426 | fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同") |
| 421 | fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同") | 427 | fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同") |
| 422 | fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活") | 428 | fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活") |
| 429 | fs_ocr = models.TextField(null=True, verbose_name="财务报表") | ||
| 430 | fss_ocr = models.TextField(null=True, verbose_name="财务情况说明书") | ||
| 431 | dp_ocr = models.TextField(null=True, verbose_name="首付款支付承诺书") | ||
| 423 | 432 | ||
| 424 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') | 433 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') |
| 425 | create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间') | 434 | create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间') |
| ... | @@ -461,6 +470,9 @@ class HILSEOCRResult(models.Model): | ... | @@ -461,6 +470,9 @@ class HILSEOCRResult(models.Model): |
| 461 | fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同") | 470 | fsm_sc_ocr = models.TextField(null=True, verbose_name="汽车销售合同") |
| 462 | fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同") | 471 | fsm_sc2_ocr = models.TextField(null=True, verbose_name="汽车销售合同补充合同") |
| 463 | fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活") | 472 | fsm_activited = models.IntegerField(null=False, default=0, verbose_name="fsm激活状态 1:激活") |
| 473 | fs_ocr = models.TextField(null=True, verbose_name="财务报表") | ||
| 474 | fss_ocr = models.TextField(null=True, verbose_name="财务情况说明书") | ||
| 475 | dp_ocr = models.TextField(null=True, verbose_name="首付款支付承诺书") | ||
| 464 | 476 | ||
| 465 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') | 477 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') |
| 466 | create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间') | 478 | create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间') |
| ... | @@ -1118,4 +1130,46 @@ class DealerMapping(models.Model): | ... | @@ -1118,4 +1130,46 @@ class DealerMapping(models.Model): |
| 1118 | 1130 | ||
| 1119 | class Meta: | 1131 | class Meta: |
| 1120 | managed = False | 1132 | managed = False |
| 1121 | db_table = 'dealer_mapping' | ||
| ... | \ No newline at end of file | ... | \ No newline at end of file |
| 1133 | db_table = 'dealer_mapping' | ||
| 1134 | |||
| 1135 | class HILGreenBookHistoryFile(models.Model): | ||
| 1136 | id = models.AutoField(primary_key=True, verbose_name="id") # 主键 | ||
| 1137 | object_id = models.CharField(max_length=64, verbose_name="文件唯一ID") | ||
| 1138 | object_name = models.CharField(max_length=255, verbose_name="文件名称") | ||
| 1139 | application_no = models.CharField(max_length=64, verbose_name="申请号") | ||
| 1140 | object_type = models.CharField(max_length=64, verbose_name="文件类型") | ||
| 1141 | customer_name = models.CharField(max_length=64, verbose_name="customer_name") | ||
| 1142 | content_size = models.CharField(max_length=64, verbose_name="文件大小") | ||
| 1143 | owner_name = models.CharField(max_length=64, verbose_name="owner_name") | ||
| 1144 | input_date = models.DateTimeField(verbose_name="上传时间") | ||
| 1145 | modify_date = models.DateTimeField(verbose_name="修改时间") | ||
| 1146 | location = models.CharField(max_length=255, verbose_name="文件位置") | ||
| 1147 | download_finish = models.SmallIntegerField(null=False, default=0, verbose_name="是否下载完成") | ||
| 1148 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') | ||
| 1149 | create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间') | ||
| 1150 | |||
| 1151 | class Meta: | ||
| 1152 | managed = False | ||
| 1153 | db_table = 'hil_gb_history_file' | ||
| 1154 | |||
| 1155 | |||
| 1156 | class AFCGreenBookHistoryFile(models.Model): | ||
| 1157 | id = models.AutoField(primary_key=True, verbose_name="id") # 主键 | ||
| 1158 | object_id = models.CharField(max_length=64, verbose_name="文件唯一ID") | ||
| 1159 | object_name = models.CharField(max_length=255, verbose_name="文件名称") | ||
| 1160 | application_no = models.CharField(max_length=64, verbose_name="申请号") | ||
| 1161 | object_type = models.CharField(max_length=64, verbose_name="文件类型") | ||
| 1162 | customer_name = models.CharField(max_length=64, verbose_name="customer_name") | ||
| 1163 | content_size = models.CharField(max_length=64, verbose_name="文件大小") | ||
| 1164 | owner_name = models.CharField(max_length=64, verbose_name="owner_name") | ||
| 1165 | input_date = models.DateTimeField(verbose_name="上传时间") | ||
| 1166 | modify_date = models.DateTimeField(verbose_name="修改时间") | ||
| 1167 | location = models.CharField(max_length=255, verbose_name="文件位置") | ||
| 1168 | download_finish = models.BooleanField(default=True, verbose_name="是否下载完成") | ||
| 1169 | update_time = models.DateTimeField(auto_now=True, verbose_name='修改时间') | ||
| 1170 | create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间') | ||
| 1171 | |||
| 1172 | class Meta: | ||
| 1173 | managed = False | ||
| 1174 | db_table = 'afc_gb_history_file' | ||
| 1175 | situ_db_label = 'afc' | ... | ... |
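Each OCR result table gains three text columns (fs_ocr, fss_ocr, dp_ocr) for the financial statement, the financial explanation, and the down-payment promissory letter, and two new unmanaged models map the green-book history tables created by mssql_script30.py further down. A hedged sketch of how the download job might pick up pending history rows (the import path is assumed; the field names come from the models above):

```python
from apps.doc.models import HILGreenBookHistoryFile   # import path assumed

# Rows whose file has not been pulled from ECM yet (download_finish stored as 0).
pending = (HILGreenBookHistoryFile.objects
           .filter(download_finish=0)
           .order_by('input_date'))
for record in pending:
    print(record.object_id, record.object_name, record.location)
```

Note that the HIL model declares download_finish as a SmallIntegerField defaulting to 0 while the AFC model declares it as a BooleanField defaulting to True, even though both tables define the column as int NOT NULL, so the two sides may not want different defaults.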
| ... | @@ -102,9 +102,14 @@ class ECM(GenericView): | ... | @@ -102,9 +102,14 @@ class ECM(GenericView): |
| 102 | "docbase": self.doc_base_map.get(business_type), | 102 | "docbase": self.doc_base_map.get(business_type), |
| 103 | "documentType": doc_type, | 103 | "documentType": doc_type, |
| 104 | "objectId": object_id, | 104 | "objectId": object_id, |
| 105 | "b_input_date": time.strftime("%m/%d/%Y %X"), | ||
| 106 | "b_credit_signing_date": time.strftime("%m/%d/%Y %X"), | ||
| 107 | "b_credit_check": True, | ||
| 108 | "b_id_number": '', | ||
| 105 | } | 109 | } |
| 106 | header_info = self.get_headers() | 110 | header_info = self.get_headers() |
| 107 | self.running_log.info("{0} download header_info:{1}".format(self.log_base, header_info)) | 111 | self.running_log.info("{0} download header_info:{1}".format(self.log_base, header_info)) |
| 112 | self.running_log.info("{0} download args_info:{1}".format(self.log_base, download_json)) | ||
| 108 | response = requests.post(self.download_url, headers=header_info, json=download_json, verify=False) | 113 | response = requests.post(self.download_url, headers=header_info, json=download_json, verify=False) |
| 109 | if response.status_code != 200: | 114 | if response.status_code != 200: |
| 110 | raise ECMException('ECM download failed with code: {0}'.format(response.status_code)) | 115 | raise ECMException('ECM download failed with code: {0}'.format(response.status_code)) |
| ... | @@ -142,6 +147,9 @@ class ECM(GenericView): | ... | @@ -142,6 +147,9 @@ class ECM(GenericView): |
| 142 | "b_region": "0", | 147 | "b_region": "0", |
| 143 | "b_region_name": self.b_region_name_map.get(business_type), | 148 | "b_region_name": self.b_region_name_map.get(business_type), |
| 144 | "b_input_date": time.strftime("%m/%d/%Y %X"), | 149 | "b_input_date": time.strftime("%m/%d/%Y %X"), |
| 150 | "b_credit_signing_date": time.strftime("%m/%d/%Y %X"), | ||
| 151 | "b_credit_check": True, | ||
| 152 | "b_id_number": '', | ||
| 145 | # "file_base64_content": "", | 153 | # "file_base64_content": "", |
| 146 | } | 154 | } |
| 147 | for key in self.upload_fields: | 155 | for key in self.upload_fields: |
| ... | @@ -153,9 +161,28 @@ class ECM(GenericView): | ... | @@ -153,9 +161,28 @@ class ECM(GenericView): |
| 153 | args['file_base64_content'] = file_data | 161 | args['file_base64_content'] = file_data |
| 154 | header_info = self.get_headers() | 162 | header_info = self.get_headers() |
| 155 | self.running_log.info("{0} upload header_info:{1}".format(self.log_base, header_info)) | 163 | self.running_log.info("{0} upload header_info:{1}".format(self.log_base, header_info)) |
| 164 | self.running_log.info("{0} upload args_info:{1}".format(self.log_base, args)) | ||
| 156 | response = requests.post(self.upload_url, headers=header_info, json=args, verify=False) | 165 | response = requests.post(self.upload_url, headers=header_info, json=args, verify=False) |
| 157 | if response.status_code != 200: | 166 | if response.status_code != 200: |
| 158 | raise ECMException('ECM upload failed with code: {0} , with headers: {1} , with content: {2}'.format( | 167 | raise ECMException('ECM upload failed with code: {0} , with headers: {1} , with content: {2}'.format( |
| 159 | response.status_code, response.headers, response.text)) | 168 | response.status_code, response.headers, response.text)) |
| 160 | if 'ns6:createResponse' not in response.json().get('S:Envelope', {}).get('S:Body', {}): | 169 | if 'ns12:createResponse' not in response.json().get('S:Envelope', {}).get('S:Body', {}): |
| 161 | raise ECMException('ECM upload failed: {0} , with headers: {1}'.format(response.json(), response.headers)) | 170 | raise ECMException('ECM upload failed: {0} , with headers: {1}'.format(response.json(), response.headers)) |
| 171 | |||
| 172 | def search_doc_info_list(self, filePath, business_type): | ||
| 173 | args = { | ||
| 174 | #userName n大写,和其他接口不一样,是因为apigateway没有做统一 | ||
| 175 | "userName": self.username, | ||
| 176 | "password": self.pwd, | ||
| 177 | "docbase": self.doc_base_map.get(business_type), | ||
| 178 | "documentType": "green_book", | ||
| 179 | "dql":"select r_object_id, object_name,b_application_no, r_object_type,b_customer_name,r_content_size, owner_name, b_input_date, r_modify_date, b_location from green_book where b_location = '{}'" .format(filePath), | ||
| 180 | } | ||
| 181 | header_info = self.get_headers() | ||
| 182 | self.running_log.info("{0} search header_info:{1}".format(self.log_base, header_info)) | ||
| 183 | self.running_log.info("{0} search args_info:{1}".format(self.log_base, args)) | ||
| 184 | response = requests.post(self.search_url, headers=header_info, json=args, verify=False) | ||
| 185 | if response.status_code != 200: | ||
| 186 | raise ECMException('ECM search failed with code: {0}'.format(response.status_code)) | ||
| 187 | #self.running_log.info("{0} search response.json():{1}".format(self.log_base, response.json())) | ||
| 188 | return response.json() | ||
| ... | \ No newline at end of file | ... | \ No newline at end of file | ... | ... |
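The new search_doc_info_list method issues a DQL query against the green_book document type, filtered by b_location, and returns the raw JSON from the ECM search endpoint; download and upload also now log their request bodies and send the extra b_input_date / b_credit_signing_date / b_credit_check / b_id_number fields. A hedged usage sketch, assuming an ECM instance already configured with username, pwd and search_url as the other methods expect (the location path and business_type value are illustrative):

```python
ecm = ECM()
# Returns the gateway's raw JSON; the caller is expected to dig the row list
# out of the response envelope, which is not shown in this diff.
result = ecm.search_doc_info_list('/AFC/Green Book/2024', 'AFC')
print(result)
```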
| ... | @@ -827,6 +827,80 @@ class BSWorkbook(Workbook): | ... | @@ -827,6 +827,80 @@ class BSWorkbook(Workbook): |
| 827 | ws.append(row) | 827 | ws.append(row) |
| 828 | ws.append((None, )) | 828 | ws.append((None, )) |
| 829 | 829 | ||
| 830 | |||
| 831 | def financial_rebuild(self, financial_statement_dict): | ||
| 832 | # 如果 financial_statement_dict 为空,则不创建表 | ||
| 833 | if not financial_statement_dict: | ||
| 834 | return | ||
| 835 | # 如果 financial_statement_dict 不为空,则创建表 | ||
| 836 | ws = self.create_sheet(consts.FINANCIAL_SHEET_NAME) | ||
| 837 | for fin_key, fin_value in financial_statement_dict.items(): | ||
| 838 | table_str = "识别码" | ||
| 839 | if fin_key == "code": | ||
| 840 | table_str = "识别码" | ||
| 841 | elif fin_key == "stamp": | ||
| 842 | table_str = "印章" | ||
| 843 | |||
| 844 | for table_key, table_value in fin_value.items(): | ||
| 845 | if table_key == "balance_sheet": | ||
| 846 | row = ["资产负债表" + table_str, str(table_value)] | ||
| 847 | ws.append(row) | ||
| 848 | elif table_key == "income_statement": | ||
| 849 | row = ["利润表" + table_str, str(table_value)] | ||
| 850 | ws.append(row) | ||
| 851 | elif table_key == "cash_flow_statement": | ||
| 852 | row = ["现金流量表" + table_str, str(table_value)] | ||
| 853 | ws.append(row) | ||
| 854 | |||
| 855 | |||
| 856 | def financial_explanation_rebuild(self, financial_explanation_dict): | ||
| 857 | """ | ||
| 858 | Desc: | ||
| 859 | 重构财报情况说明sheet | ||
| 860 | """ | ||
| 861 | # 如果 financial_explanation_dict 为空,则不创建sheet | ||
| 862 | if not financial_explanation_dict: | ||
| 863 | return | ||
| 864 | # 如果 financial_explanation_dict 不为空, 则创建sheet | ||
| 865 | ws = self.create_sheet(consts.FINANCIAL_EXPLANATION_SHEET_NAME) | ||
| 866 | for fin_key, fin_value in financial_explanation_dict.items(): | ||
| 867 | table_str = "公司名称" | ||
| 868 | if fin_key == "title": | ||
| 869 | table_str = "公司名称" | ||
| 870 | elif fin_key == "stamp": | ||
| 871 | table_str = "印章" | ||
| 872 | |||
| 873 | row = ["财报情况说明" + table_str, str(fin_value)] | ||
| 874 | ws.append(row) | ||
| 875 | |||
| 876 | |||
| 877 | def down_payment_rebuild(self, down_payment_dict): | ||
| 878 | """ | ||
| 879 | Desc: | ||
| 880 | 重构首付款支付承诺书sheet | ||
| 881 | """ | ||
| 882 | # 如果 down_payment_dict 为空, 则不创建sheet | ||
| 883 | if not down_payment_dict: | ||
| 884 | return | ||
| 885 | # 如果 down_payment_dict 不为空, 则创建sheet | ||
| 886 | ws = self.create_sheet(consts.DOWN_PAYMENT_SHEET_NAME) | ||
| 887 | english_chinese_dict = { | ||
| 888 | "financial_org_name": "渠道", | ||
| 889 | "main_borrower_name": "姓名", | ||
| 890 | "main_borrower_id_no": "证件号码", | ||
| 891 | "apply_no": "合同编号", | ||
| 892 | "contract_name": "合同名称", | ||
| 893 | "promisor_signature": "承诺人签字-电子", | ||
| 894 | "promisor_signature_date": "承诺人签字日期-电子" | ||
| 895 | } | ||
| 896 | for dp_key, dp_value in down_payment_dict.items(): | ||
| 897 | if dp_key in english_chinese_dict.keys(): | ||
| 898 | row = [english_chinese_dict[dp_key], str(dp_value)] | ||
| 899 | ws.append(row) | ||
| 900 | else: | ||
| 901 | row = [english_chinese_dict[dp_key], ""] | ||
| 902 | ws.append(row) | ||
| 903 | |||
| 830 | @staticmethod | 904 | @staticmethod |
| 831 | def remove_yuan(amount_key_set, key, src_str): | 905 | def remove_yuan(amount_key_set, key, src_str): |
| 832 | if key in amount_key_set and isinstance(src_str, str): | 906 | if key in amount_key_set and isinstance(src_str, str): |
| ... | @@ -926,7 +1000,7 @@ class BSWorkbook(Workbook): | ... | @@ -926,7 +1000,7 @@ class BSWorkbook(Workbook): |
| 926 | if len(self.sheetnames) > 1: | 1000 | if len(self.sheetnames) > 1: |
| 927 | self.remove(self.get_sheet_by_name('Sheet')) | 1001 | self.remove(self.get_sheet_by_name('Sheet')) |
| 928 | 1002 | ||
| 929 | def rebuild(self, bs_summary, license_summary, res_list, document_scheme, contract_result, metadata): | 1003 | def rebuild(self, bs_summary, license_summary, res_list, document_scheme, contract_result, metadata, financial_statement_dict, financial_explanation_dict, down_payment_dict): |
| 930 | res_count_tuple = self.res_sheet(res_list) | 1004 | res_count_tuple = self.res_sheet(res_list) |
| 931 | 1005 | ||
| 932 | count_list = [(consts.MODEL_FIELD_BS, len(bs_summary))] | 1006 | count_list = [(consts.MODEL_FIELD_BS, len(bs_summary))] |
| ... | @@ -934,10 +1008,16 @@ class BSWorkbook(Workbook): | ... | @@ -934,10 +1008,16 @@ class BSWorkbook(Workbook): |
| 934 | self.license_rebuild(license_summary, document_scheme, count_list) | 1008 | self.license_rebuild(license_summary, document_scheme, count_list) |
| 935 | self.contract_rebuild(contract_result) | 1009 | self.contract_rebuild(contract_result) |
| 936 | self.bs_rebuild(bs_summary, res_count_tuple, metadata) | 1010 | self.bs_rebuild(bs_summary, res_count_tuple, metadata) |
| 1011 | self.financial_rebuild(financial_statement_dict) | ||
| 1012 | self.financial_explanation_rebuild(financial_explanation_dict) | ||
| 1013 | self.down_payment_rebuild(down_payment_dict) | ||
| 937 | else: | 1014 | else: |
| 938 | self.bs_rebuild(bs_summary, res_count_tuple, metadata) | 1015 | self.bs_rebuild(bs_summary, res_count_tuple, metadata) |
| 939 | self.license_rebuild(license_summary, document_scheme, count_list) | 1016 | self.license_rebuild(license_summary, document_scheme, count_list) |
| 940 | self.contract_rebuild(contract_result, True) | 1017 | self.contract_rebuild(contract_result, True) |
| 1018 | self.financial_rebuild(financial_statement_dict) | ||
| 1019 | self.financial_explanation_rebuild(financial_explanation_dict) | ||
| 1020 | self.down_payment_rebuild(down_payment_dict) | ||
| 941 | self.move_res_sheet() | 1021 | self.move_res_sheet() |
| 942 | self.remove_base_sheet() | 1022 | self.remove_base_sheet() |
| 943 | return count_list, self.need_follow | 1023 | return count_list, self.need_follow | ... | ... |
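rebuild() now accepts three extra dictionaries and hands them to the new sheet builders in both branches. The shapes those builders iterate over can be inferred from the loops above; a minimal sketch with illustrative values (only the key names are taken from the diff):

```python
financial_statement_dict = {
    "code":  {"balance_sheet": "A1B2C3", "income_statement": "A1B2C3", "cash_flow_statement": "A1B2C3"},
    "stamp": {"balance_sheet": 1, "income_statement": 1, "cash_flow_statement": 1},
}
financial_explanation_dict = {"title": "某某有限公司", "stamp": 1}
down_payment_dict = {
    "financial_org_name": "渠道A",
    "main_borrower_name": "张三",
    "apply_no": "CH-B000000000-0",
}

count_list, need_follow = wb.rebuild(
    bs_summary, license_summary, res_list, document_scheme,
    contract_result, metadata,
    financial_statement_dict, financial_explanation_dict, down_payment_dict)
```

As written, the else branch in down_payment_rebuild still indexes english_chinese_dict[dp_key] for keys outside the mapping, which would raise a KeyError, so the sketch above only passes mapped keys.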
| ... | @@ -8,4 +8,6 @@ broker = conf.CELERY_BROKER_URL | ... | @@ -8,4 +8,6 @@ broker = conf.CELERY_BROKER_URL |
| 8 | 8 | ||
| 9 | app = Celery('celery_compare', broker=broker, include=['celery_compare.tasks']) | 9 | app = Celery('celery_compare', broker=broker, include=['celery_compare.tasks']) |
| 10 | 10 | ||
| 11 | app.conf.update(worker_max_tasks_per_child=5, timezone='Asia/Shanghai') | 11 | # worker_max_tasks_per_child ,worker执行了几次任务就会死 |
| 12 | #app.conf.update(worker_max_tasks_per_child=10, timezone='Asia/Shanghai') | ||
| 13 | app.conf.update(timezone='Asia/Shanghai') | ... | ... |
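worker_max_tasks_per_child makes Celery recycle a worker process after it has executed that many tasks, which is mainly a guard against memory growth; with the setting commented out, compare workers now run for the life of the worker. If recycling turns out to be needed again, it is a one-line change:

```python
# Re-enable process recycling (e.g. if compare workers start leaking memory):
app.conf.update(worker_max_tasks_per_child=10, timezone='Asia/Shanghai')
```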
| ... | @@ -19,10 +19,18 @@ class HMHRetriever: | ... | @@ -19,10 +19,18 @@ class HMHRetriever: |
| 19 | def get_target_fields(self, pdf_text_list): | 19 | def get_target_fields(self, pdf_text_list): |
| 20 | result = dict() | 20 | result = dict() |
| 21 | is_find_name_id_company, is_find_application_no, is_find_name_date = False, False, False | 21 | is_find_name_id_company, is_find_application_no, is_find_name_date = False, False, False |
| 22 | for bbox, text in pdf_text_list.pop(str(0), []): | 22 | # for bbox, text in pdf_text_list.pop(str(0), []): |
| 23 | # print(text) | 23 | pdf_text_items = pdf_text_list.pop(str(0), []) |
| 24 | |||
| 25 | for i in range(len(pdf_text_items)): | ||
| 26 | bbox, text = pdf_text_items[i] | ||
| 27 | combined_text = text | ||
| 28 | if i < len(pdf_text_items) - 1: | ||
| 29 | combined_text += pdf_text_items[i + 1][1] | ||
| 30 | |||
| 24 | if not is_find_name_id_company: | 31 | if not is_find_name_id_company: |
| 25 | name_id_company_list = re.findall(r'姓名(.*)证件号码(.*)与(.*公司)', text) | 32 | # name_id_company_list = re.findall(r'姓名(.*?)证件号码(.*?)与(.*?公司|.*)', combined_text) |
| 33 | name_id_company_list = re.findall(r'姓名(.*)证件号码(.*)与(.*公司)', combined_text) | ||
| 26 | for name_id_company_tuple in name_id_company_list: | 34 | for name_id_company_tuple in name_id_company_list: |
| 27 | if len(name_id_company_tuple) == 3: | 35 | if len(name_id_company_tuple) == 3: |
| 28 | result[self.search_fields_list[0][0]] = { | 36 | result[self.search_fields_list[0][0]] = { |
| ... | @@ -40,7 +48,7 @@ class HMHRetriever: | ... | @@ -40,7 +48,7 @@ class HMHRetriever: |
| 40 | is_find_name_id_company = True | 48 | is_find_name_id_company = True |
| 41 | break | 49 | break |
| 42 | if not is_find_application_no: | 50 | if not is_find_application_no: |
| 43 | application_no_list = re.findall(r'合同编号.*(CH-B\d*-\d*).*', text) | 51 | application_no_list = re.findall(r'合同编号.*(CH-B\d*-\d*).*', combined_text) |
| 44 | if len(application_no_list) == 1: | 52 | if len(application_no_list) == 1: |
| 45 | result[self.search_fields_list[3][0]] = { | 53 | result[self.search_fields_list[3][0]] = { |
| 46 | self.words_str: application_no_list[0], | 54 | self.words_str: application_no_list[0], |
| ... | @@ -48,7 +56,7 @@ class HMHRetriever: | ... | @@ -48,7 +56,7 @@ class HMHRetriever: |
| 48 | } | 56 | } |
| 49 | is_find_application_no = True | 57 | is_find_application_no = True |
| 50 | if not is_find_name_date: | 58 | if not is_find_name_date: |
| 51 | name_date_list = re.findall(r'(.*).*签署日期.*(\d{4}-\d{2}-\d{2})', text) | 59 | name_date_list = re.findall(r'(.*).*签署日期.*(\d{4}-\d{2}-\d{2})', combined_text) |
| 52 | for name_date_tuple in name_date_list: | 60 | for name_date_tuple in name_date_list: |
| 53 | if len(name_date_tuple) == 2: | 61 | if len(name_date_tuple) == 2: |
| 54 | result[self.search_fields_list[4][0]] = { | 62 | result[self.search_fields_list[4][0]] = { | ... | ... |
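Previously each PDF text fragment was matched on its own, so a field split across two adjacent fragments (name on one line, ID number and company on the next) could never satisfy the regex. The change concatenates each fragment with its successor before matching. A small self-contained illustration of the idea:

```python
import re

# Two adjacent fragments as they might come out of the PDF text extraction.
pdf_text_items = [
    ((0, 0, 100, 20), "姓名张三证件号码1101"),
    ((0, 20, 100, 40), "0119900101001X与某某汽车金融有限公司"),
]

for i in range(len(pdf_text_items)):
    _bbox, text = pdf_text_items[i]
    combined_text = text
    if i < len(pdf_text_items) - 1:
        combined_text += pdf_text_items[i + 1][1]      # join with the next fragment
    match = re.findall(r'姓名(.*)证件号码(.*)与(.*公司)', combined_text)
    if match:
        print(match[0])   # ('张三', '11010119900101001X', '某某汽车金融有限公司')
        break
```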
src/common/tools/aes.py
0 → 100644
| 1 | #这个有问题 | ||
| 2 | from Crypto.Cipher import AES | ||
| 3 | from base64 import b64encode, b64decode | ||
| 4 | |||
| 5 | |||
| 6 | def encrypt_ecb(data, key): | ||
| 7 | data = data.encode() | ||
| 8 | key = key.encode() | ||
| 9 | aes = AES.new(key, AES.MODE_CBC, bytes(16)) | ||
| 10 | res = aes.encrypt(pad(data, 32)) | ||
| 11 | return b64encode(res).decode() | ||
| 12 | |||
| 13 | |||
| 14 | def decrypt(data, key, iv): | ||
| 15 | key = key.encode() | ||
| 16 | iv = iv.encode() | ||
| 17 | # aes = AES.new(key, AES.MODE_CBC, bytes(16)) | ||
| 18 | aes = AES.new(key, AES.MODE_CBC, iv) | ||
| 19 | res = aes.decrypt(b64decode(data)) | ||
| 20 | return unpad(res, 32).decode() | ||
| 21 | |||
| 22 | |||
| 23 | def unpad(padded_data, block_size, style='pkcs7'): | ||
| 24 | pdata_len = len(padded_data) | ||
| 25 | if pdata_len == 0: | ||
| 26 | raise ValueError("Zero-length input cannot be unpadded") | ||
| 27 | if pdata_len % block_size: | ||
| 28 | raise ValueError("Input data is not padded") | ||
| 29 | if style in ('pkcs7', 'x923'): | ||
| 30 | padding_len = bord(padded_data[-1]) | ||
| 31 | if padding_len<1 or padding_len>min(block_size, pdata_len): | ||
| 32 | raise ValueError("Padding is incorrect.") | ||
| 33 | if style == 'pkcs7': | ||
| 34 | if padded_data[-padding_len:]!=bchr(padding_len)*padding_len: | ||
| 35 | raise ValueError("PKCS#7 padding is incorrect.") | ||
| 36 | else: | ||
| 37 | if padded_data[-padding_len:-1]!=bchr(0)*(padding_len-1): | ||
| 38 | raise ValueError("ANSI X.923 padding is incorrect.") | ||
| 39 | elif style == 'iso7816': | ||
| 40 | padding_len = pdata_len - padded_data.rfind(bchr(128)) | ||
| 41 | if padding_len<1 or padding_len>min(block_size, pdata_len): | ||
| 42 | raise ValueError("Padding is incorrect.") | ||
| 43 | if padding_len>1 and padded_data[1-padding_len:]!=bchr(0)*(padding_len-1): | ||
| 44 | raise ValueError("ISO 7816-4 padding is incorrect.") | ||
| 45 | else: | ||
| 46 | raise ValueError("Unknown padding style") | ||
| 47 | return padded_data[:-padding_len] | ||
| 48 | |||
| 49 | def pad(data_to_pad, block_size, style='pkcs7'): | ||
| 50 | padding_len = block_size-len(data_to_pad)%block_size | ||
| 51 | if style == 'pkcs7': | ||
| 52 | padding = bchr(padding_len)*padding_len | ||
| 53 | elif style == 'x923': | ||
| 54 | padding = bchr(0)*(padding_len-1) + bchr(padding_len) | ||
| 55 | elif style == 'iso7816': | ||
| 56 | padding = bchr(128) + bchr(0)*(padding_len-1) | ||
| 57 | else: | ||
| 58 | raise ValueError("Unknown padding style") | ||
| 59 | return data_to_pad + padding | ||
| 60 | |||
| 61 | def bord(s): | ||
| 62 | return s | ||
| 63 | def bchr(s): | ||
| 64 | return bytes([s]) | ||
| 65 | |||
| 66 | if __name__ == '__main__': | ||
| 67 | |||
| 68 | decrypt_data = decrypt('QkjNiuixpmtcxxqxaIZ30A==', 'm0XsOHC52YZ5KtakhpuMSZtF7DhwudmG', 'OCRocr2024UATocr') | ||
| 69 | print('解密:', decrypt_data) | ||
| ... | \ No newline at end of file | ... | \ No newline at end of file |
src/common/tools/aes_util.py
0 → 100644
| 1 | from Crypto.Cipher import AES | ||
| 2 | from base64 import b64encode, b64decode | ||
| 3 | |||
| 4 | def aes_encrypt_cbc(data, key, iv): | ||
| 5 | cipher = AES.new(key, AES.MODE_CBC, iv) | ||
| 6 | return cipher.encrypt(data) | ||
| 7 | |||
| 8 | def aes_decrypt_cbc(data, key, iv): | ||
| 9 | res = '' | ||
| 10 | try: | ||
| 11 | cipher = AES.new(key.encode(), AES.MODE_CBC, iv.encode()) | ||
| 12 | res = cipher.decrypt(b64decode(data)) | ||
| 13 | res = res.decode('utf-8').replace('\x0e', '') | ||
| 14 | except Exception as e: | ||
| 15 | res = '' | ||
| 16 | return res | ||
| 17 | |||
| 18 | |||
| 19 | # 示例使用 | ||
| 20 | key = 'm0XsOHC52YZ5KtakhpuMSZtF7DhwudmG' # 密钥长度必须是16、24或32字节 | ||
| 21 | iv = 'OCRocr2024UATocr' | ||
| 22 | decrypted_data = aes_decrypt_cbc('QkjNiuixpmtcxxqxaIZ30A==', key, iv) | ||
| 23 | print("解密:", decrypted_data) | ||
| ... | \ No newline at end of file | ... | \ No newline at end of file |
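aes.py is flagged as problematic in its own first line (encrypt_ecb actually builds a CBC cipher with a zero IV and pads to a 32-byte boundary), and aes_util.py strips padding by deleting the literal '\x0e' byte, which only works when the PKCS#7 padding happens to be 14 bytes long. A more general sketch using pycryptodome's own padding helpers, assuming the data was encrypted with AES-CBC and PKCS#7 padding (the key and IV are the test values from the snippets above):

```python
from base64 import b64decode, b64encode

from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad


def aes_cbc_encrypt(plaintext: str, key: str, iv: str) -> str:
    """AES-CBC with PKCS#7 padding, returning base64 text."""
    cipher = AES.new(key.encode(), AES.MODE_CBC, iv.encode())
    return b64encode(cipher.encrypt(pad(plaintext.encode(), AES.block_size))).decode()


def aes_cbc_decrypt(ciphertext_b64: str, key: str, iv: str) -> str:
    """Reverse of aes_cbc_encrypt; raises ValueError if the padding is not valid PKCS#7."""
    cipher = AES.new(key.encode(), AES.MODE_CBC, iv.encode())
    return unpad(cipher.decrypt(b64decode(ciphertext_b64)), AES.block_size).decode()


if __name__ == '__main__':
    key = 'm0XsOHC52YZ5KtakhpuMSZtF7DhwudmG'   # 32-byte key from the test calls above
    iv = 'OCRocr2024UATocr'                    # 16-byte IV from the test calls above
    print(aes_cbc_decrypt('QkjNiuixpmtcxxqxaIZ30A==', key, iv))
```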
| ... | @@ -12,6 +12,7 @@ import logging | ... | @@ -12,6 +12,7 @@ import logging |
| 12 | 12 | ||
| 13 | compare_log = logging.getLogger('compare') | 13 | compare_log = logging.getLogger('compare') |
| 14 | 14 | ||
| 15 | |||
| 15 | class Comparison: | 16 | class Comparison: |
| 16 | 17 | ||
| 17 | def __init__(self): | 18 | def __init__(self): |
| ... | @@ -192,7 +193,7 @@ class Comparison: | ... | @@ -192,7 +193,7 @@ class Comparison: |
| 192 | def se_input_list_compare(self, input_list, ocr_str, **kwargs): | 193 | def se_input_list_compare(self, input_list, ocr_str, **kwargs): |
| 193 | if isinstance(input_list, list) and len(input_list) > 0 and isinstance(ocr_str, str): | 194 | if isinstance(input_list, list) and len(input_list) > 0 and isinstance(ocr_str, str): |
| 194 | ocr_str = ocr_str.translate(self.KH_TRANS) | 195 | ocr_str = ocr_str.translate(self.KH_TRANS) |
| 195 | 196 | ||
| 196 | for input_str in input_list: | 197 | for input_str in input_list: |
| 197 | input_str = input_str.translate(self.KH_TRANS) | 198 | input_str = input_str.translate(self.KH_TRANS) |
| 198 | compare_log.info('[se_input_list_compare] [input_str {0}] [ocr_str {1}]'.format(input_str, ocr_str)) | 199 | compare_log.info('[se_input_list_compare] [input_str {0}] [ocr_str {1}]'.format(input_str, ocr_str)) |
| ... | @@ -221,7 +222,7 @@ class Comparison: | ... | @@ -221,7 +222,7 @@ class Comparison: |
| 221 | for idx in range(len(src_str)): | 222 | for idx in range(len(src_str)): |
| 222 | if src_str[idx].isdigit(): | 223 | if src_str[idx].isdigit(): |
| 223 | replace_char_list.append(src_str[idx]) | 224 | replace_char_list.append(src_str[idx]) |
| 224 | elif idx == len(src_str)-3: | 225 | elif idx == len(src_str) - 3: |
| 225 | replace_char_list.append('.') | 226 | replace_char_list.append('.') |
| 226 | return ''.join(replace_char_list) | 227 | return ''.join(replace_char_list) |
| 227 | 228 | ||
| ... | @@ -323,6 +324,9 @@ class Comparison: | ... | @@ -323,6 +324,9 @@ class Comparison: |
| 323 | return self.RESULT_Y | 324 | return self.RESULT_Y |
| 324 | if kwargs.get('remove_space', False): | 325 | if kwargs.get('remove_space', False): |
| 325 | input_str = input_str.replace(' ', '') | 326 | input_str = input_str.replace(' ', '') |
| 327 | if kwargs.get('remove_all_space', False): | ||
| 328 | input_str = input_str.replace(' ', '') | ||
| 329 | ocr_str = ocr_str.replace(' ', '') | ||
| 326 | if kwargs.get('brackets_replace', False): | 330 | if kwargs.get('brackets_replace', False): |
| 327 | input_str = input_str.translate(self.KH_TRANS) | 331 | input_str = input_str.translate(self.KH_TRANS) |
| 328 | ocr_str = ocr_str.translate(self.KH_TRANS) | 332 | ocr_str = ocr_str.translate(self.KH_TRANS) |
| ... | @@ -603,6 +607,33 @@ class Comparison: | ... | @@ -603,6 +607,33 @@ class Comparison: |
| 603 | except Exception as e: | 607 | except Exception as e: |
| 604 | return self.RESULT_N | 608 | return self.RESULT_N |
| 605 | 609 | ||
| 610 | def se_bd_date_2_compare(self, input_str, ocr_str, **kwargs): | ||
| 611 | try: | ||
| 612 | # Convert strings to date objects | ||
| 613 | ocr_date = datetime.strptime(ocr_str, "%Y-%m-%d").date() | ||
| 614 | |||
| 615 | # Get today's date | ||
| 616 | today_date = datetime.today().date() | ||
| 617 | ''' | ||
| 618 | 开始时间<后天(不包含), 结束时间>昨天(不包含) | ||
| 619 | ''' | ||
| 620 | if kwargs.get('start', False): | ||
| 621 | # 开始时间 < 后天(不包含) | ||
| 622 | day_after_tomorrow_date = today_date + relativedelta(days=2) | ||
| 623 | if ocr_date < day_after_tomorrow_date: | ||
| 624 | return self.RESULT_Y | ||
| 625 | else: | ||
| 626 | # 结束时间>昨天(不包含) | ||
| 627 | yesterday_date = today_date + relativedelta(days=-1) | ||
| 628 | if ocr_date > yesterday_date: | ||
| 629 | return self.RESULT_Y | ||
| 630 | |||
| 631 | # Default return value if conditions are not met | ||
| 632 | return self.RESULT_N | ||
| 633 | except Exception as e: | ||
| 634 | # Return RESULT_N in case of any exception | ||
| 635 | return self.RESULT_N | ||
| 636 | |||
| 606 | def se_bs_print_date_compare(self, input_str, ocr_str, **kwargs): | 637 | def se_bs_print_date_compare(self, input_str, ocr_str, **kwargs): |
| 607 | try: | 638 | try: |
| 608 | input_date = datetime.strptime(input_str, "%Y-%m-%d") | 639 | input_date = datetime.strptime(input_str, "%Y-%m-%d") |
| ... | @@ -661,7 +692,7 @@ class Comparison: | ... | @@ -661,7 +692,7 @@ class Comparison: |
| 661 | # input_str = input_str.replace('-', '') | 692 | # input_str = input_str.replace('-', '') |
| 662 | 693 | ||
| 663 | return self.is_after_today_pre(ocr_str) | 694 | return self.is_after_today_pre(ocr_str) |
| 664 | 695 | ||
| 665 | def se_qrs_compare(self, input_str, ocr_str_or_list, **kwargs): | 696 | def se_qrs_compare(self, input_str, ocr_str_or_list, **kwargs): |
| 666 | try: | 697 | try: |
| 667 | target_count_str, application_id = input_str.split('_') | 698 | target_count_str, application_id = input_str.split('_') |
| ... | @@ -676,7 +707,58 @@ class Comparison: | ... | @@ -676,7 +707,58 @@ class Comparison: |
| 676 | except Exception as e: | 707 | except Exception as e: |
| 677 | return self.RESULT_N | 708 | return self.RESULT_N |
| 678 | 709 | ||
| 710 | def hash_code_compare(self, input_str, ocr_dict, **kwargs): | ||
| 711 | try: | ||
| 712 | balance_sheet_hash = ocr_dict.get('balance_sheet','') | ||
| 713 | income_statement_hash = ocr_dict.get('income_statement','') | ||
| 714 | cash_flow_statement_hash = ocr_dict.get('cash_flow_statement','') | ||
| 715 | if balance_sheet_hash != input_str or income_statement_hash != input_str or cash_flow_statement_hash != input_str: | ||
| 716 | return self.RESULT_N | ||
| 717 | else: | ||
| 718 | return self.RESULT_Y | ||
| 719 | except Exception as e: | ||
| 720 | return self.RESULT_N | ||
| 679 | 721 | ||
| 680 | cp = Comparison() | 722 | def stamp_dict_compare(self, input_str, ocr_dict, **kwargs): |
| 723 | try: | ||
| 724 | balance_sheet_stamp = ocr_dict.get('balance_sheet','') | ||
| 725 | income_statement_stamp = ocr_dict.get('income_statement','') | ||
| 726 | cash_flow_statement_stamp = ocr_dict.get('cash_flow_statement','') | ||
| 727 | if balance_sheet_stamp != 1 or income_statement_stamp != 1 or cash_flow_statement_stamp != 1: | ||
| 728 | return self.RESULT_N | ||
| 729 | else: | ||
| 730 | return self.RESULT_Y | ||
| 731 | except Exception as e: | ||
| 732 | return self.RESULT_N | ||
| 733 | |||
| 734 | def stamp_str_compare(self, input_str, ocr_str, **kwargs): | ||
| 735 | try: | ||
| 736 | if ocr_str != 1: | ||
| 737 | return self.RESULT_N | ||
| 738 | else: | ||
| 739 | return self.RESULT_Y | ||
| 740 | except Exception as e: | ||
| 741 | return self.RESULT_N | ||
| 742 | |||
| 743 | def fiscal_year_compare(self, input_str, ocr_list, **kwargs): | ||
| 744 | try: | ||
| 745 | this_year_str = datetime.now().strftime('%Y') | ||
| 746 | this_year = int(this_year_str) | ||
| 747 | last_year = this_year - 1 | ||
| 748 | if str(input_str) != str(this_year) and str(input_str) != str(last_year): | ||
| 749 | return self.RESULT_N | ||
| 750 | return self.RESULT_Y | ||
| 751 | except Exception as e: | ||
| 752 | return self.RESULT_N | ||
| 753 | |||
| 754 | def input_list_not_zero_compare(self, input_list, ocr_list, **kwargs): | ||
| 755 | try: | ||
| 756 | for item in input_list: | ||
| 757 | if float(item) == 0: | ||
| 758 | return self.RESULT_N | ||
| 759 | return self.RESULT_Y | ||
| 760 | except Exception as e: | ||
| 761 | return self.RESULT_N | ||
| 681 | 762 | ||
| 682 | 763 | ||
| 764 | cp = Comparison() | ... | ... |
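The new comparators follow the existing convention: each returns RESULT_Y or RESULT_N and collapses any exception to RESULT_N. A few hedged call sketches against the module-level cp instance (all values are made up; only the method names and shapes come from the diff):

```python
cp.hash_code_compare('A1B2C3', {'balance_sheet': 'A1B2C3',
                                'income_statement': 'A1B2C3',
                                'cash_flow_statement': 'A1B2C3'})     # Y only if all three hashes match the input

cp.stamp_dict_compare('', {'balance_sheet': 1,
                           'income_statement': 1,
                           'cash_flow_statement': 1})                 # Y only if every stamp flag equals 1

cp.fiscal_year_compare('2024', [])                      # Y if the year is the current or previous year
cp.input_list_not_zero_compare(['100.0', '2500'], [])   # N as soon as any item parses to 0
cp.se_bd_date_2_compare('', '2024-05-06', start=True)   # start date must fall before the day after tomorrow
```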
src/common/tools/mssql_script28.py
0 → 100644
| 1 | import pyodbc | ||
| 2 | |||
| 3 | hil_sql = """ | ||
| 4 | ALTER TABLE hil_ocr_result ADD fs_ocr nvarchar(max); | ||
| 5 | ALTER TABLE hil_se_ocr_result ADD fs_ocr nvarchar(max); | ||
| 6 | ALTER TABLE hil_ocr_result ADD fss_ocr nvarchar(max); | ||
| 7 | ALTER TABLE hil_se_ocr_result ADD fss_ocr nvarchar(max); | ||
| 8 | """ | ||
| 9 | |||
| 10 | afc_sql = """ | ||
| 11 | ALTER TABLE afc_ocr_result ADD fs_ocr nvarchar(max); | ||
| 12 | ALTER TABLE afc_se_ocr_result ADD fs_ocr nvarchar(max); | ||
| 13 | ALTER TABLE afc_ocr_result ADD fss_ocr nvarchar(max); | ||
| 14 | ALTER TABLE afc_se_ocr_result ADD fss_ocr nvarchar(max); | ||
| 15 | """ | ||
| 16 | |||
| 17 | hil_cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True) | ||
| 18 | |||
| 19 | hil_cursor = hil_cnxn.cursor() | ||
| 20 | hil_cursor.execute(hil_sql) | ||
| 21 | |||
| 22 | hil_cursor.close() | ||
| 23 | hil_cnxn.close() | ||
| 24 | |||
| 25 | afc_cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True) | ||
| 26 | |||
| 27 | afc_cursor = afc_cnxn.cursor() | ||
| 28 | afc_cursor.execute(afc_sql) | ||
| 29 | |||
| 30 | afc_cursor.close() | ||
| 31 | afc_cnxn.close() |
src/common/tools/mssql_script29.py
0 → 100644
| 1 | import pyodbc | ||
| 2 | |||
| 3 | hil_sql = """ | ||
| 4 | ALTER TABLE hil_ocr_result ADD dp_ocr nvarchar(max); | ||
| 5 | ALTER TABLE hil_se_ocr_result ADD dp_ocr nvarchar(max); | ||
| 6 | """ | ||
| 7 | |||
| 8 | afc_sql = """ | ||
| 9 | ALTER TABLE afc_ocr_result ADD dp_ocr nvarchar(max); | ||
| 10 | ALTER TABLE afc_se_ocr_result ADD dp_ocr nvarchar(max); | ||
| 11 | """ | ||
| 12 | |||
| 13 | hil_cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True) | ||
| 14 | |||
| 15 | hil_cursor = hil_cnxn.cursor() | ||
| 16 | hil_cursor.execute(hil_sql) | ||
| 17 | |||
| 18 | hil_cursor.close() | ||
| 19 | hil_cnxn.close() | ||
| 20 | |||
| 21 | afc_cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True) | ||
| 22 | |||
| 23 | afc_cursor = afc_cnxn.cursor() | ||
| 24 | afc_cursor.execute(afc_sql) | ||
| 25 | |||
| 26 | afc_cursor.close() | ||
| 27 | afc_cnxn.close() |
src/common/tools/mssql_script30.py
0 → 100644
| 1 | import pyodbc | ||
| 2 | |||
| 3 | hil_sql = """ | ||
| 4 | CREATE TABLE [dbo].[hil_gb_history_file] ( | ||
| 5 | [id] int IDENTITY(1,1) NOT NULL, | ||
| 6 | [object_id] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NOT NULL, | ||
| 7 | [object_name] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 8 | [application_no] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 9 | [object_type] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 10 | [customer_name] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 11 | [content_size] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 12 | [owner_name] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 13 | [input_date] datetime NULL, | ||
| 14 | [modify_date] datetime NULL, | ||
| 15 | [location] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 16 | [download_finish] int NOT NULL, | ||
| 17 | [update_time] datetime NULL, | ||
| 18 | [create_time] datetime NULL | ||
| 19 | ) | ||
| 20 | GO; | ||
| 21 | |||
| 22 | alter table hil_gb_history_file ADD CONSTRAINT unique_object_id unique(object_id) | ||
| 23 | |||
| 24 | """ | ||
| 25 | |||
| 26 | afc_sql = """ | ||
| 27 | CREATE TABLE [dbo].[afc_gb_history_file] ( | ||
| 28 | [id] int IDENTITY(1,1) NOT NULL, | ||
| 29 | [object_id] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NOT NULL, | ||
| 30 | [object_name] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 31 | [application_no] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 32 | [object_type] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 33 | [customer_name] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 34 | [content_size] varchar(64) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 35 | [owner_name] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 36 | [input_date] datetime NULL, | ||
| 37 | [modify_date] datetime NULL, | ||
| 38 | [location] varchar(255) COLLATE SQL_Latin1_General_CP1_CI_AS NULL, | ||
| 39 | [download_finish] int NOT NULL, | ||
| 40 | [update_time] datetime NULL, | ||
| 41 | [create_time] datetime NULL | ||
| 42 | ) | ||
| 43 | GO; | ||
| 44 | |||
| 45 | alter table afc_gb_history_file ADD CONSTRAINT unique_object_id unique(object_id) | ||
| 46 | |||
| 47 | """ | ||
| 48 | |||
| 49 | hil_cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True) | ||
| 50 | |||
| 51 | hil_cursor = hil_cnxn.cursor() | ||
| 52 | hil_cursor.execute(hil_sql) | ||
| 53 | |||
| 54 | hil_cursor.close() | ||
| 55 | hil_cnxn.close() | ||
| 56 | |||
| 57 | afc_cnxn = pyodbc.connect('DRIVER={ODBC Driver 17 for SQL Server};', autocommit=True) | ||
| 58 | |||
| 59 | afc_cursor = afc_cnxn.cursor() | ||
| 60 | afc_cursor.execute(afc_sql) | ||
| 61 | |||
| 62 | afc_cursor.close() | ||
| 63 | afc_cnxn.close() |
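Like the earlier mssql_script*.py files, these migration scripts open one pyodbc connection per database with autocommit and run the DDL directly; as written the connection strings name only the driver, so the server, database, and credentials are presumably supplied by a DSN or the environment when they are actually run. A hedged sketch of a fully-specified connection for running one of the statements by hand (host, database, and credentials are placeholders):

```python
import pyodbc

conn_str = (
    'DRIVER={ODBC Driver 17 for SQL Server};'
    'SERVER=db-host,1433;DATABASE=hil_db;UID=ocr_user;PWD=***;'   # placeholders
    'TrustServerCertificate=yes;'
)
cnxn = pyodbc.connect(conn_str, autocommit=True)
cursor = cnxn.cursor()
cursor.execute("ALTER TABLE hil_ocr_result ADD dp_ocr nvarchar(max);")  # one statement from mssql_script29.py
cursor.close()
cnxn.close()
```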
| ... | @@ -100,9 +100,9 @@ DATABASES = { | ... | @@ -100,9 +100,9 @@ DATABASES = { |
| 100 | for db_setting in DATABASES.values(): | 100 | for db_setting in DATABASES.values(): |
| 101 | db_setting['OPTIONS'] = { | 101 | db_setting['OPTIONS'] = { |
| 102 | 'driver': 'ODBC Driver 17 for SQL Server', | 102 | 'driver': 'ODBC Driver 17 for SQL Server', |
| 103 | 'extra_params': "odbc_cursortype=2" | 103 | 'extra_params': "odbc_cursortype=2;TrustServerCertificate=yes;Encrypt=yes" |
| 104 | } | 104 | } |
| 105 | 105 | db_setting['CONN_MAX_AGE'] = 0 | |
| 106 | # set this to False if you want to turn off pyodbc's connection pooling | 106 | # set this to False if you want to turn off pyodbc's connection pooling |
| 107 | DATABASE_CONNECTION_POOLING = True | 107 | DATABASE_CONNECTION_POOLING = True |
| 108 | 108 | ... | ... |
| ... | @@ -14,4 +14,6 @@ DEALER_CODE = ocr_situ_group | ... | @@ -14,4 +14,6 @@ DEALER_CODE = ocr_situ_group |
| 14 | 14 | ||
| 15 | BASE_URL = https://staging-bmw-ocr.situdata.com | 15 | BASE_URL = https://staging-bmw-ocr.situdata.com |
| 16 | 16 | ||
| 17 | DELAY_SECONDS = 60 | ||
| ... | \ No newline at end of file | ... | \ No newline at end of file |
| 17 | DELAY_SECONDS = 60 | ||
| 18 | |||
| 19 | BD_PRICE = 950 | ||
| ... | \ No newline at end of file | ... | \ No newline at end of file | ... | ... |
| ... | @@ -14,4 +14,6 @@ DEALER_CODE = ocr_situ_group | ... | @@ -14,4 +14,6 @@ DEALER_CODE = ocr_situ_group |
| 14 | 14 | ||
| 15 | BASE_URL = https://sfocr-uat.bmwgroup.net | 15 | BASE_URL = https://sfocr-uat.bmwgroup.net |
| 16 | 16 | ||
| 17 | DELAY_SECONDS = 60 | ||
| ... | \ No newline at end of file | ... | \ No newline at end of file |
| 17 | DELAY_SECONDS = 60 | ||
| 18 | |||
| 19 | BD_PRICE = 950 | ||
| ... | \ No newline at end of file | ... | \ No newline at end of file | ... | ... |