refine code to prevent exceptions (#1231)

### What problem does this PR solve?


### Type of change

- [x] Refactoring
This commit is contained in:
KevinHuSh 2024-06-21 14:06:46 +08:00 committed by GitHub
parent 18f4a6b35c
commit 15bf9f8c25
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -153,7 +153,7 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
txt += l
sections = []
for sec in txt.split("\n"):
if num_tokens_from_string(sec) > 10 * parser_config.get("chunk_token_num", 128):
if num_tokens_from_string(sec) > 10 * int(parser_config.get("chunk_token_num", 128)):
sections.append((sec[:int(len(sec)/2)], ""))
sections.append((sec[int(len(sec)/2):], ""))
else:
@ -169,7 +169,7 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
elif re.search(r"\.json$", filename, re.IGNORECASE):
callback(0.1, "Start to parse.")
sections = JsonParser(parser_config.get("chunk_token_num", 128))(binary)
sections = JsonParser(int(parser_config.get("chunk_token_num", 128)))(binary)
sections = [(l, "") for l in sections if l]
callback(0.8, "Finish parsing.")
@ -187,8 +187,8 @@ def chunk(filename, binary=None, from_page=0, to_page=100000,
st = timer()
chunks = naive_merge(
sections, parser_config.get(
"chunk_token_num", 128), parser_config.get(
sections, int(parser_config.get(
"chunk_token_num", 128)), parser_config.get(
"delimiter", "\n!?。;!?"))
res.extend(tokenize_chunks(chunks, doc, eng, pdf_parser))