Commit 586c90c9 authored by StyleZhang

merge main

parents 511de822 54f3bbbf
......@@ -45,6 +45,11 @@ docker compose up -d
After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and begin the initialization process.
### Helm Chart
A big thanks to @BorisPolonsky for providing us with a [Helm Chart](https://helm.sh/) version, which allows Dify to be deployed on Kubernetes.
You can go to https://github.com/BorisPolonsky/dify-helm for deployment information.
### Configuration
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and manually set the environment configuration. After making the changes, please run `docker-compose up -d` again.
......
......@@ -47,6 +47,11 @@ docker compose up -d
After running, you can access [http://localhost/install](http://localhost/install) in your browser to enter the Dify console and begin the initialization process.
### Helm Chart
Many thanks to @BorisPolonsky for providing a [Helm Chart](https://helm.sh/) version, which allows Dify to be deployed on Kubernetes.
You can visit https://github.com/BorisPolonsky/dify-helm for deployment information.
### Configuration
If you need to customize the configuration, please refer to the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and set the environment configuration manually. After making the changes, run `docker-compose up -d` again.
......
......@@ -46,6 +46,11 @@ docker compose up -d
After running it, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and begin the initialization process.
### Helm Chart
A big thanks to @BorisPolonsky for providing a [Helm Chart](https://helm.sh/) version, which allows Dify to be deployed on Kubernetes.
You can go to https://github.com/BorisPolonsky/dify-helm for deployment information.
### Configuration
If you need to customize the configuration, check the comments in our [docker-compose.yml](docker/docker-compose.yaml) file and set the environment configuration manually. After making the changes, run `docker-compose up -d` again.
......
......@@ -46,6 +46,11 @@ docker compose up -d
After running, you can access [http://localhost/install](http://localhost/install) in your browser and begin the initial installation process.
### Helm Chart
A big thank-you to @BorisPolonsky, who provided a [Helm Chart](https://helm.sh/) version for deploying Dify on Kubernetes.
See https://github.com/BorisPolonsky/dify-helm for deployment information.
### Configuration
If you need to customize the configuration, refer to the comments in the [docker-compose.yml](docker/docker-compose.yaml) file and set the environment configuration manually. After making changes, run `docker-compose up -d` again.
......
......@@ -85,3 +85,9 @@ SENTRY_DSN=
# DEBUG
DEBUG=false
SQLALCHEMY_ECHO=false
# Notion import configuration; supports 'public' and 'internal' integration types
NOTION_INTEGRATION_TYPE=public
NOTION_CLIENT_SECRET=your-client-secret
NOTION_CLIENT_ID=your-client-id
NOTION_INTERNAL_SECRET=your-internal-secret
\ No newline at end of file
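For orientation, the API reads these variables through its `Config` class (that change appears further down in this commit). A minimal sketch of the pattern, with `get_env` simplified to a plain environment lookup:

```python
import os

def get_env(key: str, default: str = '') -> str:
    # simplified stand-in for the config helper the API uses
    return os.environ.get(key, default)

# 'public' runs the regular Notion OAuth flow; 'internal' skips OAuth and
# stores NOTION_INTERNAL_SECRET directly as the access token (see the
# OAuthDataSource change below).
NOTION_INTEGRATION_TYPE = get_env('NOTION_INTEGRATION_TYPE')
NOTION_CLIENT_ID = get_env('NOTION_CLIENT_ID')
NOTION_CLIENT_SECRET = get_env('NOTION_CLIENT_SECRET')
NOTION_INTERNAL_SECRET = get_env('NOTION_INTERNAL_SECRET')
```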
......@@ -17,6 +17,11 @@
```bash
openssl rand -base64 42
```
3.5 If you use Anaconda, create a new environment and activate it
```bash
conda create --name dify python=3.10
conda activate dify
```
4. Install dependencies
```bash
pip install -r requirements.txt
......
......@@ -79,7 +79,7 @@ class Config:
self.CONSOLE_URL = get_env('CONSOLE_URL')
self.API_URL = get_env('API_URL')
self.APP_URL = get_env('APP_URL')
self.CURRENT_VERSION = "0.3.3"
self.CURRENT_VERSION = "0.3.4"
self.COMMIT_SHA = get_env('COMMIT_SHA')
self.EDITION = "SELF_HOSTED"
self.DEPLOY_ENV = get_env('DEPLOY_ENV')
......@@ -190,6 +190,9 @@ class Config:
# notion import setting
self.NOTION_CLIENT_ID = get_env('NOTION_CLIENT_ID')
self.NOTION_CLIENT_SECRET = get_env('NOTION_CLIENT_SECRET')
self.NOTION_INTEGRATION_TYPE = get_env('NOTION_INTEGRATION_TYPE')
self.NOTION_INTERNAL_SECRET = get_env('NOTION_INTERNAL_SECRET')
class CloudEditionConfig(Config):
......
......@@ -39,9 +39,21 @@ class OAuthDataSource(Resource):
print(vars(oauth_provider))
if not oauth_provider:
return {'error': 'Invalid provider'}, 400
        if current_app.config.get('NOTION_INTEGRATION_TYPE') == 'internal':
            internal_secret = current_app.config.get('NOTION_INTERNAL_SECRET')
            oauth_provider.save_internal_access_token(internal_secret)
            return redirect(f'{current_app.config.get("CONSOLE_URL")}?oauth_data_source=success')
        else:
            auth_url = oauth_provider.get_authorization_url()
            return redirect(auth_url)
class OAuthDataSourceCallback(Resource):
......
......@@ -219,7 +219,11 @@ class DataSourceNotionApi(Resource):
@setup_required
@login_required
@account_initialization_required
    def get(self, workspace_id, page_id, page_type):
workspace_id = str(workspace_id)
page_id = str(page_id)
data_source_binding = DataSourceBinding.query.filter(
......@@ -233,7 +237,16 @@ class DataSourceNotionApi(Resource):
if not data_source_binding:
raise NotFound('Data source binding not found.')
reader = NotionPageReader(integration_token=data_source_binding.access_token)
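        # 'page' walks the page's block children; 'database' serializes every
        # row via NotionPageReader.query_database_data (added below in this commit)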
        if page_type == 'page':
            page_content = reader.read_page(page_id)
        elif page_type == 'database':
            page_content = reader.query_database_data(page_id)
        else:
            page_content = ""
return {
'content': page_content
}, 200
......@@ -291,7 +304,12 @@ class DataSourceNotionDocumentSyncApi(Resource):
api.add_resource(DataSourceApi, '/data-source/integrates', '/data-source/integrates/<uuid:binding_id>/<string:action>')
api.add_resource(DataSourceNotionListApi, '/notion/pre-import/pages')
api.add_resource(DataSourceNotionApi,
                 '/notion/workspaces/<uuid:workspace_id>/pages/<uuid:page_id>/<string:page_type>/preview',
'/datasets/notion-indexing-estimate')
api.add_resource(DataSourceNotionDatasetSyncApi, '/datasets/<uuid:dataset_id>/notion/sync')
api.add_resource(DataSourceNotionDocumentSyncApi, '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/notion/sync')
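For reference, the reshaped preview route can be exercised as below. This is a hedged sketch: the `/console/api` prefix, the port, and the bearer token are assumptions about a typical self-hosted deployment, not something this commit defines.

```python
import requests

CONSOLE_API = 'http://localhost:5001/console/api'  # assumed base URL
WORKSPACE_ID = '00000000-0000-0000-0000-000000000000'  # placeholder UUID
PAGE_ID = '00000000-0000-0000-0000-000000000001'  # placeholder UUID

# page_type is 'page' or 'database'; it selects read_page() vs. query_database_data()
res = requests.get(
    f'{CONSOLE_API}/notion/workspaces/{WORKSPACE_ID}/pages/{PAGE_ID}/database/preview',
    headers={'Authorization': 'Bearer <console-session-token>'},  # placeholder auth
)
print(res.json()['content'])
```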
......@@ -143,7 +143,14 @@ class FilePreviewApi(Resource):
with open(filepath, "rb") as fp:
data = fp.read()
encoding = chardet.detect(data)['encoding']
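            # chardet returns None when it cannot identify the encoding;
            # fall back to UTF-8 so the preview does not crash on such files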
            if encoding:
                text = data.decode(encoding=encoding).strip() if data else ''
            else:
                text = data.decode(encoding='utf-8').strip() if data else ''
text = text[0:PREVIEW_WORDS_LIMIT] if text else ''
return {'content': text}
......
"""Notion reader."""
import json
import logging
import os
from datetime import datetime
......@@ -14,6 +18,10 @@ BLOCK_CHILD_URL_TMPL = "https://api.notion.com/v1/blocks/{block_id}/children"
DATABASE_URL_TMPL = "https://api.notion.com/v1/databases/{database_id}/query"
SEARCH_URL = "https://api.notion.com/v1/search"
RETRIEVE_PAGE_URL_TMPL = "https://api.notion.com/v1/pages/{page_id}"
RETRIEVE_DATABASE_URL_TMPL = "https://api.notion.com/v1/databases/{database_id}"
HEADING_TYPE = ['heading_1', 'heading_2', 'heading_3']
logger = logging.getLogger(__name__)
......@@ -58,7 +66,11 @@ class NotionPageReader(BaseReader):
"GET", block_url, headers=self.headers, json=query_dict
)
data = res.json()
            if 'results' not in data or data["results"] is None:
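                # error payloads from the Notion API carry no 'results' key;
                # treat them the same as an exhausted cursor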
done = True
break
heading = ''
......@@ -82,7 +94,12 @@ class NotionPageReader(BaseReader):
heading = text
result_block_id = result["id"]
has_children = result["has_children"]
                block_type = result["type"]
                if has_children and block_type != 'child_page':
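                    # recurse into children, but not into child_page blocks --
                    # child pages are imported as documents of their own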
children_text = self._read_block(
result_block_id, num_tabs=num_tabs + 1
)
......@@ -182,7 +199,12 @@ class NotionPageReader(BaseReader):
result_block_id = result["id"]
has_children = result["has_children"]
            block_type = result["type"]
            if has_children and block_type != 'child_page':
children_text = self._read_block(
result_block_id, num_tabs=num_tabs + 1
)
......@@ -210,6 +232,50 @@ class NotionPageReader(BaseReader):
"""Read a page as documents."""
return self._read_parent_blocks(page_id)
def query_database_data(
self, database_id: str, query_dict: Dict[str, Any] = {}
) -> str:
"""Get all the pages from a Notion database."""
        res = requests.post(
            DATABASE_URL_TMPL.format(database_id=database_id),
            headers=self.headers,
            json=query_dict,
        )
data = res.json()
database_content_list = []
if 'results' not in data or data["results"] is None:
return ""
        for result in data["results"]:
            properties = result['properties']
            row_data = {}
            for property_name, property_value in properties.items():
                property_type = property_value['type']
                if property_type == 'multi_select':
                    value = []
                    multi_select_list = property_value[property_type]
                    for multi_select in multi_select_list:
                        value.append(multi_select['name'])
                elif property_type == 'rich_text' or property_type == 'title':
                    if len(property_value[property_type]) > 0:
                        value = property_value[property_type][0]['plain_text']
                    else:
                        value = ''
                elif property_type == 'select' or property_type == 'status':
                    if property_value[property_type]:
                        value = property_value[property_type]['name']
                    else:
                        value = ''
                else:
                    value = property_value[property_type]
                row_data[property_name] = value
            database_content_list.append(json.dumps(row_data))
return "\n\n".join(database_content_list)
def query_database(
self, database_id: str, query_dict: Dict[str, Any] = {}
) -> List[str]:
......@@ -288,10 +354,15 @@ class NotionPageReader(BaseReader):
docs = []
if database_id is not None:
# get all the pages in the database
            page_text = self.query_database_data(database_id)
            docs.append(Document(page_text))
else:
for page_id in page_ids:
page_text_list = self.read_page_as_documents(page_id)
......@@ -308,7 +379,20 @@ class NotionPageReader(BaseReader):
"GET", retrieve_page_url, headers=self.headers, json=query_dict
)
data = res.json()
return data["last_edited_time"]
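    # databases report last_edited_time on the database object itself,
    # so query the databases endpoint rather than the pages endpoint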
def get_database_last_edited_time(self, database_id: str) -> str:
retrieve_page_url = RETRIEVE_DATABASE_URL_TMPL.format(database_id=database_id)
query_dict: Dict[str, Any] = {}
res = requests.request(
"GET", retrieve_page_url, headers=self.headers, json=query_dict
)
data = res.json()
return data["last_edited_time"]
......
......@@ -235,6 +235,7 @@ class IndexingRunner:
if page['type'] == 'page':
page_ids = [page['page_id']]
documents = reader.load_data_as_documents(page_ids=page_ids)
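                # databases are estimated from their rows serialized by
                # query_database_data; unknown page types yield no documents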
elif page['type'] == 'database':
documents = reader.load_data_as_documents(database_id=page['page_id'])
else:
documents = []
processing_rule = DatasetProcessRule(
mode=tmp_processing_rule["mode"],
rules=json.dumps(tmp_processing_rule["rules"])
)
# get node parser for splitting
node_parser = self._get_node_parser(processing_rule)
# split to nodes
nodes = self._split_to_nodes(
text_docs=documents,
node_parser=node_parser,
processing_rule=processing_rule
)
total_segments += len(nodes)
for node in nodes:
if len(preview_texts) < 5:
preview_texts.append(node.get_text())
tokens += TokenCalculator.get_num_tokens(self.embedding_model_name, node.get_text())
return {
"total_segments": total_segments,
......@@ -287,6 +314,10 @@ class IndexingRunner:
raise ValueError("no notion page found")
workspace_id = data_source_info['notion_workspace_id']
page_id = data_source_info['notion_page_id']
        page_type = data_source_info['type']
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == document.tenant_id,
......@@ -297,9 +328,20 @@ class IndexingRunner:
).first()
if not data_source_binding:
raise ValueError('Data source binding not found.')
        if page_type == 'page':
            # add page last_edited_time to data_source_info
            self._get_notion_page_last_edited_time(page_id, data_source_binding.access_token, document)
            text_docs = self._load_page_data_from_notion(page_id, data_source_binding.access_token)
        elif page_type == 'database':
            # add database last_edited_time to data_source_info
            self._get_notion_database_last_edited_time(page_id, data_source_binding.access_token, document)
            text_docs = self._load_database_data_from_notion(page_id, data_source_binding.access_token)
# update document status to splitting
self._update_document_index_status(
document_id=document.id,
......@@ -341,12 +383,24 @@ class IndexingRunner:
return text_docs
    def _load_page_data_from_notion(self, page_id: str, access_token: str) -> List[Document]:
page_ids = [page_id]
reader = NotionPageReader(integration_token=access_token)
text_docs = reader.load_data_as_documents(page_ids=page_ids)
return text_docs
def _load_database_data_from_notion(self, database_id: str, access_token: str) -> List[Document]:
reader = NotionPageReader(integration_token=access_token)
text_docs = reader.load_data_as_documents(database_id=database_id)
return text_docs
def _get_notion_page_last_edited_time(self, page_id: str, access_token: str, document: Document):
reader = NotionPageReader(integration_token=access_token)
last_edited_time = reader.get_page_last_edited_time(page_id)
......@@ -359,6 +413,21 @@ class IndexingRunner:
Document.query.filter_by(id=document.id).update(update_params)
db.session.commit()
def _get_notion_database_last_edited_time(self, page_id: str, access_token: str, document: Document):
reader = NotionPageReader(integration_token=access_token)
last_edited_time = reader.get_database_last_edited_time(page_id)
data_source_info = document.data_source_info_dict
data_source_info['last_edited_time'] = last_edited_time
update_params = {
Document.data_source_info: json.dumps(data_source_info)
}
Document.query.filter_by(id=document.id).update(update_params)
db.session.commit()
def _get_node_parser(self, processing_rule: DatasetProcessRule) -> NodeParser:
"""
Get the NodeParser object according to the processing rule.
......
......@@ -26,6 +26,10 @@ class NotionOAuth(OAuthDataSource):
_TOKEN_URL = 'https://api.notion.com/v1/oauth/token'
_NOTION_PAGE_SEARCH = "https://api.notion.com/v1/search"
_NOTION_BLOCK_SEARCH = "https://api.notion.com/v1/blocks"
_NOTION_BOT_USER = "https://api.notion.com/v1/users/me"
def get_authorization_url(self):
params = {
......@@ -84,6 +88,44 @@ class NotionOAuth(OAuthDataSource):
db.session.add(new_data_source_binding)
db.session.commit()
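    # internal integrations have no OAuth handshake: the shared internal secret
    # is stored directly as the binding's access token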
def save_internal_access_token(self, access_token: str):
workspace_name = self.notion_workspace_name(access_token)
workspace_icon = None
workspace_id = current_user.current_tenant_id
# get all authorized pages
pages = self.get_authorized_pages(access_token)
source_info = {
'workspace_name': workspace_name,
'workspace_icon': workspace_icon,
'workspace_id': workspace_id,
'pages': pages,
'total': len(pages)
}
# save data source binding
data_source_binding = DataSourceBinding.query.filter(
db.and_(
DataSourceBinding.tenant_id == current_user.current_tenant_id,
DataSourceBinding.provider == 'notion',
DataSourceBinding.access_token == access_token
)
).first()
if data_source_binding:
data_source_binding.source_info = source_info
data_source_binding.disabled = False
db.session.commit()
else:
new_data_source_binding = DataSourceBinding(
tenant_id=current_user.current_tenant_id,
access_token=access_token,
source_info=source_info,
provider='notion'
)
db.session.add(new_data_source_binding)
db.session.commit()
def sync_data_source(self, binding_id: str):
# save data source binding
data_source_binding = DataSourceBinding.query.filter(
......@@ -128,6 +170,14 @@ class NotionOAuth(OAuthDataSource):
page_name = page_result['properties']['title']['title'][0]['plain_text']
else:
page_name = 'Untitled'
elif 'Title' in page_result['properties']:
if len(page_result['properties']['Title']['title']) > 0:
page_name = page_result['properties']['Title']['title'][0]['plain_text']
else:
page_name = 'Untitled'
else:
page_name = 'Untitled'
page_icon = page_result['icon']
......@@ -217,7 +267,14 @@ class NotionOAuth(OAuthDataSource):
}
response = requests.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers)
response_json = response.json()
        if 'results' in response_json:
            results = response_json['results']
        else:
            results = []
return results
def notion_block_parent_page_id(self, access_token: str, block_id: str):
......@@ -233,6 +290,23 @@ class NotionOAuth(OAuthDataSource):
return self.notion_block_parent_page_id(access_token, parent[parent_type])
return parent[parent_type]
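    # internal tokens never go through OAuth, so the workspace name is
    # recovered from the bot user endpoint (users/me) instead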
def notion_workspace_name(self, access_token: str):
headers = {
'Authorization': f"Bearer {access_token}",
'Notion-Version': '2022-06-28',
}
response = requests.get(url=self._NOTION_BOT_USER, headers=headers)
response_json = response.json()
if 'object' in response_json and response_json['object'] == 'user':
user_type = response_json['type']
user_info = response_json[user_type]
if 'workspace_name' in user_info:
return user_info['workspace_name']
return 'workspace'
def notion_database_search(self, access_token: str):
data = {
'filter': {
......@@ -247,5 +321,12 @@ class NotionOAuth(OAuthDataSource):
}
response = requests.post(url=self._NOTION_PAGE_SEARCH, json=data, headers=headers)
response_json = response.json()
        if 'results' in response_json:
            results = response_json['results']
        else:
            results = []
return results
......@@ -31,15 +31,4 @@ celery==5.2.7
redis~=4.5.4
pypdf==3.8.1
openpyxl==3.1.2
requests~=2.28.2
pydantic~=1.10.8
SQLAlchemy~=1.4.48
Werkzeug~=2.3.4
click~=8.1.3
blinker~=1.6.2
numpy~=1.24.3
itsdangerous~=2.1.2
botocore~=1.29.146
alembic~=1.11.1
pytz~=2022.7.1
chardet~=5.1.0
\ No newline at end of file
......@@ -78,7 +78,7 @@ class DatasetService:
raise DatasetNameDuplicateError(
f'Dataset with name {name} already exists.')
dataset = Dataset(name=name, indexing_technique=indexing_technique, data_source_type='upload_file')
dataset = Dataset(name=name, indexing_technique=indexing_technique)
# dataset = Dataset(name=name, provider=provider, config=config)
dataset.created_by = account.id
dataset.updated_by = account.id
......@@ -374,6 +374,11 @@ class DocumentService:
def save_document_with_dataset_id(dataset: Dataset, document_data: dict,
account: Account, dataset_process_rule: Optional[DatasetProcessRule] = None,
created_from: str = 'web'):
        # if the dataset has no data_source_type yet, adopt it from the incoming document
if not dataset.data_source_type:
dataset.data_source_type = document_data["data_source"]["type"]
db.session.commit()
if not dataset.indexing_technique:
if 'indexing_technique' not in document_data \
or document_data['indexing_technique'] not in Dataset.INDEXING_TECHNIQUE_LIST:
......@@ -474,6 +479,7 @@ class DocumentService:
document_data["data_source"]["type"],
data_source_info, created_from, position,
account, page['page_name'], batch)
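                    # with database rows now serialized into real document content,
                    # database pages go through the normal indexing pipeline instead
                    # of being short-circuited to 'completed' with zero tokens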
# if page['type'] == 'database':
# document.splitting_completed_at = datetime.datetime.utcnow()
# document.cleaning_completed_at = datetime.datetime.utcnow()
# document.parsing_completed_at = datetime.datetime.utcnow()
# document.completed_at = datetime.datetime.utcnow()
# document.indexing_status = 'completed'
# document.word_count = 0
# document.tokens = 0
# document.indexing_latency = 0
db.session.add(document)
db.session.flush()
# if page['type'] != 'database':
document_ids.append(document.id)
documents.append(document)
position += 1
else:
......
......@@ -2,7 +2,7 @@ version: '3.1'
services:
# API service
api:
image: langgenius/dify-api:0.3.3
image: langgenius/dify-api:0.3.4
restart: always
environment:
# Startup mode, 'api' starts the API server.
......@@ -110,7 +110,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.3.3
image: langgenius/dify-api:0.3.4
restart: always
environment:
# Startup mode, 'worker' starts the Celery worker for processing the queue.
......@@ -156,7 +156,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.3.3
image: langgenius/dify-web:0.3.4
restart: always
environment:
EDITION: SELF_HOSTED
......
......@@ -93,7 +93,7 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
const pathname = usePathname()
const hideSideBar = /documents\/create$/.test(pathname)
const { t } = useTranslation()
const { data: datasetRes, error } = useSWR({
const { data: datasetRes, error, mutate: mutateDatasetRes } = useSWR({
action: 'fetchDataDetail',
datasetId,
}, apiParams => fetchDataDetail(apiParams.datasetId))
......@@ -168,6 +168,7 @@ const DatasetDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
<DatasetDetailContext.Provider value={{
indexingTechnique: datasetRes?.indexing_technique,
dataset: datasetRes,
mutateDatasetRes: () => mutateDatasetRes(),
}}>
<div className="bg-white grow">{children}</div>
</DatasetDetailContext.Provider>
......
'use client'
import React, { FC, useState } from 'react'
import type { FC } from 'react'
import React from 'react'
import { useTranslation } from 'react-i18next'
import { PlusIcon } from '@heroicons/react/24/outline'
import { ReactSortable } from 'react-sortablejs'
import RemoveIcon from '../../base/icons/remove-icon'
import s from './style.module.css'
export type Options = string[]
export interface IConfigSelectProps {
export type IConfigSelectProps = {
options: Options
onChange: (options: Options) => void
}
const ConfigSelect: FC<IConfigSelectProps> = ({
options,
onChange
onChange,
}) => {
const { t } = useTranslation()
const optionList = options.map((content, index) => {
return ({
id: index,
name: content,
})
})
return (
<div>
{options.length > 0 && (
<div className='mb-1 space-y-1 '>
{options.map((o, index) => (
<div className={`${s.inputWrap} relative`}>
<input
key={index}
type="input"
value={o || ''}
onChange={e => {
let value = e.target.value
onChange(options.map((item, i) => {
if (index === i) {
return value
}
return item
}))
}}
className={`${s.input} w-full px-3 text-sm leading-9 text-gray-900 border-0 grow h-9 bg-transparent focus:outline-none cursor-pointer`}
/>
<RemoveIcon
className={`${s.deleteBtn} absolute top-1/2 translate-y-[-50%] right-1.5 items-center justify-center w-6 h-6 rounded-md cursor-pointer hover:bg-[#FEE4E2]`}
onClick={() => {
onChange(options.filter((_, i) => index !== i))
}}
/>
</div>
))}
<div className='mb-1'>
<ReactSortable
className="space-y-1"
list={optionList}
setList={list => onChange(list.map(item => item.name))}
handle='.handle'
ghostClass="opacity-50"
animation={150}
>
{options.map((o, index) => (
<div className={`${s.inputWrap} relative`} key={index}>
<div className='handle flex items-center justify-center w-4 h-4 cursor-grab'>
<svg width="6" height="10" viewBox="0 0 6 10" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" clipRule="evenodd" d="M1 2C1.55228 2 2 1.55228 2 1C2 0.447715 1.55228 0 1 0C0.447715 0 0 0.447715 0 1C0 1.55228 0.447715 2 1 2ZM1 6C1.55228 6 2 5.55228 2 5C2 4.44772 1.55228 4 1 4C0.447715 4 0 4.44772 0 5C0 5.55228 0.447715 6 1 6ZM6 1C6 1.55228 5.55228 2 5 2C4.44772 2 4 1.55228 4 1C4 0.447715 4.44772 0 5 0C5.55228 0 6 0.447715 6 1ZM5 6C5.55228 6 6 5.55228 6 5C6 4.44772 5.55228 4 5 4C4.44772 4 4 4.44772 4 5C4 5.55228 4.44772 6 5 6ZM2 9C2 9.55229 1.55228 10 1 10C0.447715 10 0 9.55229 0 9C0 8.44771 0.447715 8 1 8C1.55228 8 2 8.44771 2 9ZM5 10C5.55228 10 6 9.55229 6 9C6 8.44771 5.55228 8 5 8C4.44772 8 4 8.44771 4 9C4 9.55229 4.44772 10 5 10Z" fill="#98A2B3"/>
</svg>
</div>
<input
key={index}
type="input"
value={o || ''}
onChange={(e) => {
const value = e.target.value
onChange(options.map((item, i) => {
if (index === i)
return value
return item
}))
}}
className={`${s.input} w-full px-1.5 text-sm leading-9 text-gray-900 border-0 grow h-9 bg-transparent focus:outline-none cursor-pointer`}
/>
<RemoveIcon
className={`${s.deleteBtn} absolute top-1/2 translate-y-[-50%] right-1.5 items-center justify-center w-6 h-6 rounded-md cursor-pointer hover:bg-[#FEE4E2]`}
onClick={() => {
onChange(options.filter((_, i) => index !== i))
}}
/>
</div>
))}
</ReactSortable>
</div>
)}
......
.inputWrap {
display: flex;
align-items: center;
border-radius: 8px;
border: 1px solid #EAECF0;
padding-left: 10px;
cursor: pointer;
}
......
......@@ -9,6 +9,7 @@ import { fetchFilePreview } from '@/service/common'
type IProps = {
file?: File
notionPage?: any
hidePreview: () => void
}
......@@ -38,10 +39,8 @@ const FilePreview = ({
}
useEffect(() => {
if (file) {
setLoading(true)
if (file)
getPreviewContent(file.id)
}
}, [file])
return (
......
......@@ -14,6 +14,7 @@ type IFileUploaderProps = {
prepareFileList: (files: any[]) => void
onFileUpdate: (fileItem: any, progress: number, list: any[]) => void
onPreview: (file: FileEntity) => void
titleClassName?: string
}
const ACCEPTS = [
......@@ -36,6 +37,7 @@ const FileUploader = ({
prepareFileList,
onFileUpdate,
onPreview,
titleClassName,
}: IFileUploaderProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
......@@ -232,7 +234,7 @@ const FileUploader = ({
accept={ACCEPTS.join(',')}
onChange={fileChangeHandle}
/>
<div className={s.title}>{t('datasetCreation.stepOne.uploader.title')}</div>
<div className={cn(s.title, titleClassName)}>{t('datasetCreation.stepOne.uploader.title')}</div>
<div ref={dropRef} className={cn(s.uploader, dragging && s.dragging)}>
<div className='flex justify-center items-center h-6 mb-2'>
<span className={s.uploadIcon}/>
......
......@@ -29,6 +29,7 @@ const NotionPagePreview = ({
const res = await fetchNotionPagePreview({
workspaceID: currentPage.workspace_id,
pageID: currentPage.page_id,
pageType: currentPage.type,
})
setPreviewContent(res.content)
setLoading(false)
......
......@@ -12,6 +12,7 @@ import type { DataSourceNotionPage } from '@/models/common'
import { DataSourceType } from '@/models/datasets'
import Button from '@/app/components/base/button'
import { NotionPageSelector } from '@/app/components/base/notion-page-selector'
import { useDatasetDetailContext } from '@/context/dataset-detail'
type IStepOneProps = {
datasetId?: string
......@@ -60,11 +61,15 @@ const StepOne = ({
notionPages = [],
updateNotionPages,
}: IStepOneProps) => {
const [showModal, setShowModal] = useState(false)
const [currentFile, setCurrentFile] = useState<File | undefined>()
const { dataset } = useDatasetDetailContext()
const [showModal, setShowModal] = useState(false)
const [showFilePreview, setShowFilePreview] = useState(true)
const [currentNotionPage, setCurrentNotionPage] = useState<Page | undefined>()
const { t } = useTranslation()
const hidePreview = () => setShowFilePreview(false)
const modalShowHandle = () => setShowModal(true)
const modalCloseHandle = () => setShowModal(false)
......@@ -92,56 +97,65 @@ const StepOne = ({
return true
return false
}, [files])
const shouldShowDataSourceTypeList = !datasetId || (datasetId && !dataset?.data_source_type)
return (
<div className='flex w-full h-full'>
<div className='grow overflow-y-auto relative'>
<div className={s.stepHeader}>{t('datasetCreation.steps.one')}</div>
{
shouldShowDataSourceTypeList && (
<div className={s.stepHeader}>{t('datasetCreation.steps.one')}</div>
)
}
<div className={s.form}>
<div className={s.dataSourceTypeList}>
<div
className={cn(
s.dataSourceItem,
dataSourceType === DataSourceType.FILE && s.active,
dataSourceTypeDisable && dataSourceType !== DataSourceType.FILE && s.disabled,
)}
onClick={() => {
if (dataSourceTypeDisable)
return
changeType(DataSourceType.FILE)
hideFilePreview()
hideNotionPagePreview()
}}
>
<span className={cn(s.datasetIcon)} />
{t('datasetCreation.stepOne.dataSourceType.file')}
</div>
<div
className={cn(
s.dataSourceItem,
dataSourceType === DataSourceType.NOTION && s.active,
dataSourceTypeDisable && dataSourceType !== DataSourceType.NOTION && s.disabled,
)}
onClick={() => {
if (dataSourceTypeDisable)
return
changeType(DataSourceType.NOTION)
hideFilePreview()
hideNotionPagePreview()
}}
>
<span className={cn(s.datasetIcon, s.notion)} />
{t('datasetCreation.stepOne.dataSourceType.notion')}
</div>
<div
className={cn(s.dataSourceItem, s.disabled, dataSourceType === DataSourceType.WEB && s.active)}
// onClick={() => changeType(DataSourceType.WEB)}
>
<span className={s.comingTag}>Coming soon</span>
<span className={cn(s.datasetIcon, s.web)} />
{t('datasetCreation.stepOne.dataSourceType.web')}
</div>
</div>
{
shouldShowDataSourceTypeList && (
<div className={s.dataSourceTypeList}>
<div
className={cn(
s.dataSourceItem,
dataSourceType === DataSourceType.FILE && s.active,
dataSourceTypeDisable && dataSourceType !== DataSourceType.FILE && s.disabled,
)}
onClick={() => {
if (dataSourceTypeDisable)
return
changeType(DataSourceType.FILE)
hideFilePreview()
hideNotionPagePreview()
}}
>
<span className={cn(s.datasetIcon)} />
{t('datasetCreation.stepOne.dataSourceType.file')}
</div>
<div
className={cn(
s.dataSourceItem,
dataSourceType === DataSourceType.NOTION && s.active,
dataSourceTypeDisable && dataSourceType !== DataSourceType.NOTION && s.disabled,
)}
onClick={() => {
if (dataSourceTypeDisable)
return
changeType(DataSourceType.NOTION)
hideFilePreview()
hideNotionPagePreview()
}}
>
<span className={cn(s.datasetIcon, s.notion)} />
{t('datasetCreation.stepOne.dataSourceType.notion')}
</div>
<div
className={cn(s.dataSourceItem, s.disabled, dataSourceType === DataSourceType.WEB && s.active)}
// onClick={() => changeType(DataSourceType.WEB)}
>
<span className={s.comingTag}>Coming soon</span>
<span className={cn(s.datasetIcon, s.web)} />
{t('datasetCreation.stepOne.dataSourceType.web')}
</div>
</div>
)
}
{dataSourceType === DataSourceType.FILE && (
<>
<FileUploader
......@@ -149,6 +163,7 @@ const StepOne = ({
prepareFileList={updateFileList}
onFileUpdate={updateFile}
onPreview={updateCurrentFile}
titleClassName={(!shouldShowDataSourceTypeList) ? 'mt-[30px] !mb-[44px] !text-lg !font-semibold !text-gray-900' : undefined}
/>
<Button disabled={nextDisabled} className={s.submitButton} type='primary' onClick={onStepChange}>{t('datasetCreation.stepOne.button')}</Button>
</>
......
......@@ -24,6 +24,7 @@ import { formatNumber } from '@/utils/format'
import type { DataSourceNotionPage } from '@/models/common'
import { DataSourceType } from '@/models/datasets'
import NotionIcon from '@/app/components/base/notion-icon'
import { useDatasetDetailContext } from '@/context/dataset-detail'
type Page = DataSourceNotionPage & { workspace_id: string }
......@@ -70,6 +71,7 @@ const StepTwo = ({
onCancel,
}: StepTwoProps) => {
const { t } = useTranslation()
const { mutateDatasetRes } = useDatasetDetailContext()
const scrollRef = useRef<HTMLDivElement>(null)
const [scrolled, setScrolled] = useState(false)
const previewScrollRef = useRef<HTMLDivElement>(null)
......@@ -310,6 +312,8 @@ const StepTwo = ({
updateIndexingTypeCache && updateIndexingTypeCache(indexType)
updateResultCache && updateResultCache(res)
}
if (mutateDatasetRes)
mutateDatasetRes()
onStepChange && onStepChange(+1)
isSetting && onSave && onSave()
}
......
......@@ -220,7 +220,7 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
? <Loading type='app' />
: total > 0
? <List documents={documentsList || []} datasetId={datasetId} onUpdate={mutate} />
: <EmptyElement onClick={routeToDocCreate} />
: <EmptyElement onClick={routeToDocCreate} type={isDataSourceNotion ? 'sync' : 'upload'} />
}
{/* Show Pagination only if the total is more than the limit */}
{(total && total > limit)
......
......@@ -54,7 +54,7 @@
font-weight: 400;
line-height: 1.5;
word-wrap: break-word;
word-break: break-all;
word-break: break-word;
user-select: text;
}
......@@ -1042,4 +1042,4 @@
.markdown-body ::-webkit-calendar-picker-indicator {
filter: invert(50%);
}
\ No newline at end of file
}
import { createContext, useContext } from 'use-context-selector'
import type { DataSet } from '@/models/datasets'
const DatasetDetailContext = createContext<{ indexingTechnique?: string; dataset?: DataSet }>({})
const DatasetDetailContext = createContext<{ indexingTechnique?: string; dataset?: DataSet; mutateDatasetRes?: () => void }>({})
export const useDatasetDetailContext = () => useContext(DatasetDetailContext)
......
......@@ -111,7 +111,7 @@ const translation = {
normal: 'Normal',
normalTip: 'Only can use apps,can not build apps',
inviteTeamMember: 'Add team member',
inviteTeamMemberTip: 'He can access your team data directly after signing in.',
inviteTeamMemberTip: 'They can access your team data directly after signing in.',
email: 'Email',
emailInvalid: 'Invalid Email Format',
emailPlaceholder: 'Input Email',
......
{
"name": "dify-web",
"version": "0.3.3",
"version": "0.3.4",
"private": true,
"scripts": {
"dev": "next dev",
......@@ -62,6 +62,7 @@
"react-infinite-scroll-component": "^6.1.0",
"react-markdown": "^8.0.6",
"react-slider": "^2.0.4",
"react-sortablejs": "^6.1.4",
"react-syntax-highlighter": "^15.5.0",
"react-tooltip": "5.8.3",
"react-window": "^1.8.9",
......@@ -73,6 +74,7 @@
"sass": "^1.61.0",
"scheduler": "^0.23.0",
"server-only": "^0.0.1",
"sortablejs": "^1.15.0",
"swr": "^2.1.0",
"tailwindcss": "^3.2.7",
"typescript": "4.9.5",
......@@ -84,6 +86,7 @@
"@types/js-cookie": "^3.0.3",
"@types/negotiator": "^0.6.1",
"@types/qs": "^6.9.7",
"@types/sortablejs": "^1.15.1",
"eslint-plugin-react-hooks": "^4.6.0",
"lint-staged": "^13.2.2",
"miragejs": "^0.1.47",
......
......@@ -150,6 +150,6 @@ export const fetchFileIndexingEstimate: Fetcher<FileIndexingEstimateResponse, an
return post('/datasets/indexing-estimate', { body }) as Promise<FileIndexingEstimateResponse>
}
export const fetchNotionPagePreview: Fetcher<{ content: string }, { workspaceID: string; pageID: string }> = ({ workspaceID, pageID }) => {
return get(`notion/workspaces/${workspaceID}/pages/${pageID}/preview`) as Promise<{ content: string }>
export const fetchNotionPagePreview: Fetcher<{ content: string }, { workspaceID: string; pageID: string; pageType: string }> = ({ workspaceID, pageID, pageType }) => {
return get(`notion/workspaces/${workspaceID}/pages/${pageID}/${pageType}/preview`) as Promise<{ content: string }>
}