Commit 1cd5d5eb authored by jyong

add qa thread control

parent 6a564e2d
@@ -272,6 +272,7 @@ class DatasetDocumentListApi(Resource):
         parser.add_argument('duplicate', type=bool, nullable=False, location='json')
         parser.add_argument('original_document_id', type=str, required=False, location='json')
         parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
+        parser.add_argument('doc_language', type=str, default='English', required=False, nullable=False, location='json')
         args = parser.parse_args()
         if not dataset.indexing_technique and not args['indexing_technique']:
@@ -317,6 +318,7 @@ class DatasetInitApi(Resource):
         parser.add_argument('data_source', type=dict, required=True, nullable=True, location='json')
         parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
         parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
+        parser.add_argument('doc_language', type=str, default='English', required=False, nullable=False, location='json')
         args = parser.parse_args()
         # validate args
...
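With the two parsers above now accepting a `doc_language` field, a document-creation request can carry the language alongside `doc_form`. The sketch below is illustrative only: the endpoint path, host, and auth header are assumptions, and only the JSON field names come from the parser arguments in this diff.

```python
# Hypothetical client call; endpoint path, host, and token are assumptions,
# the field names mirror the parser arguments shown in the diff above.
import requests

payload = {
    "data_source": {"type": "upload_file", "info_list": {}},  # structure assumed
    "process_rule": {"mode": "automatic"},                     # structure assumed
    "indexing_technique": "high_quality",
    "doc_form": "qa_model",        # parser default is 'text_model'
    "doc_language": "English",     # new field introduced by this commit
}

resp = requests.post(
    "http://localhost:5001/console/api/datasets/<dataset_id>/documents",  # path assumed
    json=payload,
    headers={"Authorization": "Bearer <console-token>"},
)
print(resp.status_code, resp.json())
```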
@@ -498,18 +498,8 @@ class DocumentService:
                         document_data["doc_form"],
                         data_source_info, created_from, position,
                         account, page['page_name'], batch)
-                    # if page['type'] == 'database':
-                    # document.splitting_completed_at = datetime.datetime.utcnow()
-                    # document.cleaning_completed_at = datetime.datetime.utcnow()
-                    # document.parsing_completed_at = datetime.datetime.utcnow()
-                    # document.completed_at = datetime.datetime.utcnow()
-                    # document.indexing_status = 'completed'
-                    # document.word_count = 0
-                    # document.tokens = 0
-                    # document.indexing_latency = 0
                     db.session.add(document)
                     db.session.flush()
-                    # if page['type'] != 'database':
                     document_ids.append(document.id)
                     documents.append(document)
                     position += 1
@@ -521,8 +511,7 @@ class DocumentService:
         db.session.commit()
         # trigger async task
-        #document_index_created.send(dataset.id, document_ids=document_ids)
-        document_indexing_task.delay(dataset.id, document_ids)
+        document_indexing_task.delay(dataset.id, document_ids, document_data['doc_language'])
         return documents, batch
...
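Passing `document_data['doc_language']` as a third positional argument implies that the Celery task now accepts a language parameter. Below is a minimal sketch of what the receiving task signature might look like after this change; the decorator, queue name, and body are assumptions, only the (dataset_id, document_ids, doc_language) argument order is implied by the `.delay(...)` call above.

```python
# Hypothetical task-side signature; only the argument order is taken from
# the .delay(...) call shown in the diff above.
from celery import shared_task


@shared_task(queue='dataset')  # queue name is an assumption
def document_indexing_task(dataset_id: str, document_ids: list, doc_language: str = 'English'):
    """Index the given documents, threading the requested language through so that
    QA-style segments can be generated in that language."""
    for document_id in document_ids:
        # a real implementation would load the document and run the indexing pipeline;
        # this placeholder only shows where doc_language becomes available
        print(f"indexing document {document_id} in dataset {dataset_id} (language={doc_language})")
```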