HuangHai 5 months ago
commit 51755ac79b

@@ -0,0 +1,32 @@
cd /usr/local
conda init
source /root/.bashrc
git clone https://gitee.com/kgdxpr/DB-GPT.git
cd DB-GPT/
conda create -n dbgpt_env python=3.10
conda activate dbgpt_env
pip install torch
pip install -e ".[default]"
cp .env.template .env
pip install dashscope
vi .env
LLM_MODEL=tongyi_proxyllm
TONGYI_PROXY_API_KEY=sk-01d13a39e09844038322108ecdbd1bbc
PROXY_SERVER_URL=https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions
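# (back in the shell) Optional: verify the DashScope key against the OpenAI-compatible
# endpoint before starting; the request shape follows the OpenAI convention and the
# model name "qwen-turbo" is an assumption -- use any model your account can access.
curl https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions \
  -H "Authorization: Bearer $TONGYI_PROXY_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"model": "qwen-turbo", "messages": [{"role": "user", "content": "ping"}]}'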
apt-get update
apt-get install sqlite3
apt-get install git-lfs
--Download the embedding (vector) model
mkdir models
cd models
git clone https://www.modelscope.cn/Jerry0/text2vec-large-chinese.git
--Start the server
cd /usr/local/DB-GPT
python dbgpt/app/dbgpt_server.py
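# Optional sanity check once the server is running (the port is an assumption --
# DB-GPT's web service commonly listens on 5670; confirm against the startup log or .env)
curl -I http://127.0.0.1:5670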

@@ -6,6 +6,9 @@ cd /usr/bin
rm -rf python3
rm -rf python
ln -s python3.12 python3
ln -s python3.12 python
cd /usr/local
wget https://bootstrap.pypa.io/get-pip.py
python3 get-pip.py -i https://mirrors.aliyun.com/pypi/simple/
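# Confirm the relinked interpreter and the freshly installed pip resolve as expected
python3 --version
python3 -m pip --version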

@@ -0,0 +1,956 @@
# coding=utf-8
"""
@project: maxkb
@Author
@file dataset_serializers.py
@date 2023/9/21 16:14
@desc:
"""
import io
import logging
import os.path
import re
import traceback
import uuid
import zipfile
from functools import reduce
from tempfile import TemporaryDirectory
from typing import Dict, List
from urllib.parse import urlparse
from celery_once import AlreadyQueued
from django.contrib.postgres.fields import ArrayField
from django.core import validators
from django.db import transaction, models
from django.db.models import QuerySet
from django.http import HttpResponse
from drf_yasg import openapi
from rest_framework import serializers
from application.models import ApplicationDatasetMapping
from common.config.embedding_config import VectorStore
from common.db.search import get_dynamics_model, native_page_search, native_search
from common.db.sql_execute import select_list
from common.event import ListenerManagement
from common.exception.app_exception import AppApiException
from common.mixins.api_mixin import ApiMixin
from common.util.common import post, flat_map, valid_license, parse_image
from common.util.field_message import ErrMessage
from common.util.file_util import get_file_content
from common.util.fork import ChildLink, Fork
from common.util.split_model import get_split_model
from dataset.models.data_set import DataSet, Document, Paragraph, Problem, Type, ProblemParagraphMapping, TaskType, \
State, File, Image
from dataset.serializers.common_serializers import list_paragraph, MetaSerializer, ProblemParagraphManage, \
get_embedding_model_by_dataset_id, get_embedding_model_id_by_dataset_id, write_image, zip_dir
from dataset.serializers.document_serializers import DocumentSerializers, DocumentInstanceSerializer
from dataset.task import sync_web_dataset, sync_replace_web_dataset
from embedding.models import SearchMode
from embedding.task import embedding_by_dataset, delete_embedding_by_dataset
from setting.models import AuthOperate, Model
from smartdoc.conf import PROJECT_DIR
from django.utils.translation import gettext_lazy as _
"""
# __exact        exactly equal                        LIKE 'aaa'
# __iexact       exactly equal, case-insensitive      ILIKE 'aaa'
# __contains     contains                             LIKE '%aaa%'
# __icontains    contains, case-insensitive           ILIKE '%aaa%' (on SQLite, contains behaves the same as icontains)
# __gt           greater than
# __gte          greater than or equal to
# __lt           less than
# __lte          less than or equal to
# __in           value is contained in a given list
# __startswith   starts with
# __istartswith  starts with, case-insensitive
# __endswith     ends with
# __iendswith    ends with, case-insensitive
# __range        within a range
# __year         year of a date field
# __month        month of a date field
# __day          day of a date field
# __isnull=True/False
"""
class DataSetSerializers(serializers.ModelSerializer):
class Meta:
model = DataSet
fields = ['id', 'name', 'desc', 'meta', 'create_time', 'update_time']
class Application(ApiMixin, serializers.Serializer):
user_id = serializers.UUIDField(required=True, error_messages=ErrMessage.char(_('user id')))
dataset_id = serializers.UUIDField(required=True, error_messages=ErrMessage.char(_('dataset id')))
@staticmethod
def get_request_params_api():
return [
openapi.Parameter(name='dataset_id',
in_=openapi.IN_PATH,
type=openapi.TYPE_STRING,
required=True,
description=_('dataset id')),
]
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['id', 'name', 'desc', 'model_id', 'multiple_rounds_dialogue', 'user_id', 'status',
'create_time',
'update_time'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title="", description=_('id')),
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('application name'),
description=_('application name')),
                'desc': openapi.Schema(type=openapi.TYPE_STRING, title=_('application description'),
                                       description=_('application description')),
'model_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('model id'),
description=_('model id')),
"multiple_rounds_dialogue": openapi.Schema(type=openapi.TYPE_BOOLEAN,
title=_('Whether to start multiple rounds of dialogue'),
description=_(
'Whether to start multiple rounds of dialogue')),
'prologue': openapi.Schema(type=openapi.TYPE_STRING, title=_('opening remarks'),
description=_('opening remarks')),
'example': openapi.Schema(type=openapi.TYPE_ARRAY, items=openapi.Schema(type=openapi.TYPE_STRING),
title=_('example'), description=_('example')),
'user_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('User id'), description=_('User id')),
'status': openapi.Schema(type=openapi.TYPE_BOOLEAN, title=_('Whether to publish'),
description=_('Whether to publish')),
'create_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('create time'),
description=_('create time')),
'update_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('update time'),
description=_('update time'))
}
)
class Query(ApiMixin, serializers.Serializer):
"""
        Query object
"""
name = serializers.CharField(required=False,
error_messages=ErrMessage.char(_('dataset name')),
max_length=64,
min_length=1)
desc = serializers.CharField(required=False,
error_messages=ErrMessage.char(_('dataset description')),
max_length=256,
min_length=1,
)
user_id = serializers.CharField(required=True)
select_user_id = serializers.CharField(required=False)
def get_query_set(self):
user_id = self.data.get("user_id")
query_set_dict = {}
query_set = QuerySet(model=get_dynamics_model(
{'temp.name': models.CharField(), 'temp.desc': models.CharField(),
"document_temp.char_length": models.IntegerField(), 'temp.create_time': models.DateTimeField(),
'temp.user_id': models.CharField(), 'temp.id': models.CharField()}))
if "desc" in self.data and self.data.get('desc') is not None:
query_set = query_set.filter(**{'temp.desc__icontains': self.data.get("desc")})
if "name" in self.data and self.data.get('name') is not None:
query_set = query_set.filter(**{'temp.name__icontains': self.data.get("name")})
if "select_user_id" in self.data and self.data.get('select_user_id') is not None:
query_set = query_set.filter(**{'temp.user_id__exact': self.data.get("select_user_id")})
query_set = query_set.order_by("-temp.create_time", "temp.id")
query_set_dict['default_sql'] = query_set
query_set_dict['dataset_custom_sql'] = QuerySet(model=get_dynamics_model(
{'dataset.user_id': models.CharField(),
})).filter(
**{'dataset.user_id': user_id}
)
query_set_dict['team_member_permission_custom_sql'] = QuerySet(model=get_dynamics_model(
{'user_id': models.CharField(),
'team_member_permission.auth_target_type': models.CharField(),
'team_member_permission.operate': ArrayField(verbose_name=_('permission'),
base_field=models.CharField(max_length=256,
blank=True,
choices=AuthOperate.choices,
default=AuthOperate.USE)
)})).filter(
**{'user_id': user_id, 'team_member_permission.operate__contains': ['USE'],
'team_member_permission.auth_target_type': 'DATASET'})
return query_set_dict
def page(self, current_page: int, page_size: int):
return native_page_search(current_page, page_size, self.get_query_set(), select_string=get_file_content(
os.path.join(PROJECT_DIR, "apps", "dataset", 'sql', 'list_dataset.sql')),
post_records_handler=lambda r: r)
def list(self):
return native_search(self.get_query_set(), select_string=get_file_content(
os.path.join(PROJECT_DIR, "apps", "dataset", 'sql', 'list_dataset.sql')))
@staticmethod
def get_request_params_api():
return [openapi.Parameter(name='name',
in_=openapi.IN_QUERY,
type=openapi.TYPE_STRING,
required=False,
description=_('dataset name')),
openapi.Parameter(name='desc',
in_=openapi.IN_QUERY,
type=openapi.TYPE_STRING,
required=False,
description=_('dataset description'))
]
@staticmethod
def get_response_body_api():
return DataSetSerializers.Operate.get_response_body_api()
class Create(ApiMixin, serializers.Serializer):
user_id = serializers.UUIDField(required=True, error_messages=ErrMessage.char(_('user id')), )
class CreateBaseSerializers(ApiMixin, serializers.Serializer):
"""
            Serializer for creating a generic dataset
"""
name = serializers.CharField(required=True,
error_messages=ErrMessage.char(_('dataset name')),
max_length=64,
min_length=1)
desc = serializers.CharField(required=True,
error_messages=ErrMessage.char(_('dataset description')),
max_length=256,
min_length=1)
embedding_mode_id = serializers.UUIDField(required=True,
error_messages=ErrMessage.uuid(_('embedding mode')))
documents = DocumentInstanceSerializer(required=False, many=True)
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
return True
class CreateQASerializers(serializers.Serializer):
"""
            Serializer for creating a QA dataset (from uploaded QA files)
"""
name = serializers.CharField(required=True,
error_messages=ErrMessage.char(_('dataset name')),
max_length=64,
min_length=1)
desc = serializers.CharField(required=True,
error_messages=ErrMessage.char(_('dataset description')),
max_length=256,
min_length=1)
embedding_mode_id = serializers.UUIDField(required=True,
error_messages=ErrMessage.uuid(_('embedding mode')))
file_list = serializers.ListSerializer(required=True,
error_messages=ErrMessage.list(_('file list')),
child=serializers.FileField(required=True,
error_messages=ErrMessage.file(
_('file list'))))
@staticmethod
def get_request_params_api():
return [openapi.Parameter(name='file',
in_=openapi.IN_FORM,
type=openapi.TYPE_ARRAY,
items=openapi.Items(type=openapi.TYPE_FILE),
required=True,
description=_('upload files ')),
openapi.Parameter(name='name',
in_=openapi.IN_FORM,
required=True,
type=openapi.TYPE_STRING, title=_('dataset name'),
description=_('dataset name')),
openapi.Parameter(name='desc',
in_=openapi.IN_FORM,
required=True,
type=openapi.TYPE_STRING, title=_('dataset description'),
description=_('dataset description')),
]
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['id', 'name', 'desc', 'user_id', 'char_length', 'document_count',
'update_time', 'create_time', 'document_list'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title="id",
description="id", default="xx"),
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset name'),
description=_('dataset name'), default=_('dataset name')),
'desc': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset description'),
description=_('dataset description'), default=_('dataset description')),
'user_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('user id'),
description=_('user id'), default="user_xxxx"),
'char_length': openapi.Schema(type=openapi.TYPE_STRING, title=_('char length'),
description=_('char length'), default=10),
'document_count': openapi.Schema(type=openapi.TYPE_STRING, title=_('document count'),
description=_('document count'), default=1),
'update_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('update time'),
description=_('update time'),
default="1970-01-01 00:00:00"),
'create_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('create time'),
description=_('create time'),
default="1970-01-01 00:00:00"
),
'document_list': openapi.Schema(type=openapi.TYPE_ARRAY, title=_('document list'),
description=_('document list'),
items=DocumentSerializers.Operate.get_response_body_api())
}
)
class CreateWebSerializers(serializers.Serializer):
"""
            Serializer for creating a web-site dataset
"""
name = serializers.CharField(required=True,
error_messages=ErrMessage.char(_('dataset name')),
max_length=64,
min_length=1)
desc = serializers.CharField(required=True,
error_messages=ErrMessage.char(_('dataset description')),
max_length=256,
min_length=1)
source_url = serializers.CharField(required=True, error_messages=ErrMessage.char(_('web source url')), )
embedding_mode_id = serializers.UUIDField(required=True,
error_messages=ErrMessage.uuid(_('embedding mode')))
selector = serializers.CharField(required=False, allow_null=True, allow_blank=True,
error_messages=ErrMessage.char(_('selector')))
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
source_url = self.data.get('source_url')
response = Fork(source_url, []).fork()
if response.status == 500:
raise AppApiException(500,
_('URL error, cannot parse [{source_url}]').format(source_url=source_url))
return True
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['id', 'name', 'desc', 'user_id', 'char_length', 'document_count',
'update_time', 'create_time', 'document_list'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title="id",
description="id", default="xx"),
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset name'),
description=_('dataset name'), default=_('dataset name')),
'desc': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset description'),
description=_('dataset description'), default=_('dataset description')),
'user_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('user id'),
description=_('user id'), default="user_xxxx"),
'char_length': openapi.Schema(type=openapi.TYPE_STRING, title=_('char length'),
description=_('char length'), default=10),
'document_count': openapi.Schema(type=openapi.TYPE_STRING, title=_('document count'),
description=_('document count'), default=1),
'update_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('update time'),
description=_('update time'),
default="1970-01-01 00:00:00"),
'create_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('create time'),
description=_('create time'),
default="1970-01-01 00:00:00"
),
'document_list': openapi.Schema(type=openapi.TYPE_ARRAY, title=_('document list'),
description=_('document list'),
items=DocumentSerializers.Operate.get_response_body_api())
}
)
@staticmethod
def get_request_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['name', 'desc', 'url'],
properties={
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset name'),
description=_('dataset name')),
'desc': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset description'),
description=_('dataset description')),
'embedding_mode_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('embedding mode'),
description=_('embedding mode')),
'source_url': openapi.Schema(type=openapi.TYPE_STRING, title=_('web source url'),
description=_('web source url')),
'selector': openapi.Schema(type=openapi.TYPE_STRING, title=_('selector'),
description=_('selector'))
}
)
@staticmethod
def post_embedding_dataset(document_list, dataset_id):
model_id = get_embedding_model_id_by_dataset_id(dataset_id)
            # Dispatch the vectorization (embedding) task
embedding_by_dataset.delay(dataset_id, model_id)
return document_list
def save_qa(self, instance: Dict, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
self.CreateQASerializers(data=instance).is_valid()
file_list = instance.get('file_list')
document_list = flat_map([DocumentSerializers.Create.parse_qa_file(file) for file in file_list])
dataset_instance = {'name': instance.get('name'), 'desc': instance.get('desc'), 'documents': document_list,
'embedding_mode_id': instance.get('embedding_mode_id')}
return self.save(dataset_instance, with_valid=True)
@valid_license(model=DataSet, count=5000,
message=_(
'The community version supports up to 5000 knowledge bases. If you need more knowledge bases, please contact us (https://fit2cloud.com/).'))
@post(post_function=post_embedding_dataset)
@transaction.atomic
def save(self, instance: Dict, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
self.CreateBaseSerializers(data=instance).is_valid()
dataset_id = uuid.uuid1()
user_id = self.data.get('user_id')
if QuerySet(DataSet).filter(user_id=user_id, name=instance.get('name')).exists():
raise AppApiException(500, _('Knowledge base name duplicate!'))
dataset = DataSet(
**{'id': dataset_id, 'name': instance.get("name"), 'desc': instance.get('desc'), 'user_id': user_id,
'embedding_mode_id': instance.get('embedding_mode_id')})
document_model_list = []
paragraph_model_list = []
problem_paragraph_object_list = []
            # Insert documents
for document in instance.get('documents') if 'documents' in instance else []:
document_paragraph_dict_model = DocumentSerializers.Create.get_document_paragraph_model(dataset_id,
document)
document_model_list.append(document_paragraph_dict_model.get('document'))
for paragraph in document_paragraph_dict_model.get('paragraph_model_list'):
paragraph_model_list.append(paragraph)
for problem_paragraph_object in document_paragraph_dict_model.get('problem_paragraph_object_list'):
problem_paragraph_object_list.append(problem_paragraph_object)
problem_model_list, problem_paragraph_mapping_list = (ProblemParagraphManage(problem_paragraph_object_list,
dataset_id)
.to_problem_model_list())
            # Insert the dataset (knowledge base)
dataset.save()
            # Insert documents
QuerySet(Document).bulk_create(document_model_list) if len(document_model_list) > 0 else None
            # Bulk insert paragraphs
QuerySet(Paragraph).bulk_create(paragraph_model_list) if len(paragraph_model_list) > 0 else None
            # Bulk insert problems
QuerySet(Problem).bulk_create(problem_model_list) if len(problem_model_list) > 0 else None
            # Bulk insert problem-paragraph mappings
QuerySet(ProblemParagraphMapping).bulk_create(problem_paragraph_mapping_list) if len(
problem_paragraph_mapping_list) > 0 else None
            # Response data
return {**DataSetSerializers(dataset).data,
'document_list': DocumentSerializers.Query(data={'dataset_id': dataset_id}).list(
with_valid=True)}, dataset_id
@staticmethod
def get_last_url_path(url):
parsed_url = urlparse(url)
if parsed_url.path is None or len(parsed_url.path) == 0:
return url
else:
return parsed_url.path.split("/")[-1]
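        # Illustrative behaviour of get_last_url_path (not from the original source):
        #   get_last_url_path('https://example.com/docs/guide.html') -> 'guide.html'
        #   get_last_url_path('https://example.com')                 -> 'https://example.com'  (empty path, URL returned as-is)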
def save_web(self, instance: Dict, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
self.CreateWebSerializers(data=instance).is_valid(raise_exception=True)
user_id = self.data.get('user_id')
if QuerySet(DataSet).filter(user_id=user_id, name=instance.get('name')).exists():
raise AppApiException(500, _('Knowledge base name duplicate!'))
dataset_id = uuid.uuid1()
dataset = DataSet(
**{'id': dataset_id, 'name': instance.get("name"), 'desc': instance.get('desc'), 'user_id': user_id,
'type': Type.web,
'embedding_mode_id': instance.get('embedding_mode_id'),
'meta': {'source_url': instance.get('source_url'), 'selector': instance.get('selector'),
'embedding_mode_id': instance.get('embedding_mode_id')}})
dataset.save()
sync_web_dataset.delay(str(dataset_id), instance.get('source_url'), instance.get('selector'))
return {**DataSetSerializers(dataset).data,
'document_list': []}
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['id', 'name', 'desc', 'user_id', 'char_length', 'document_count',
'update_time', 'create_time', 'document_list'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title="id",
description="id", default="xx"),
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset name'),
description=_('dataset name'), default=_('dataset name')),
'desc': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset description'),
description=_('dataset description'), default=_('dataset description')),
'user_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('user id'),
description=_('user id'), default="user_xxxx"),
'char_length': openapi.Schema(type=openapi.TYPE_STRING, title=_('char length'),
description=_('char length'), default=10),
'document_count': openapi.Schema(type=openapi.TYPE_STRING, title=_('document count'),
description=_('document count'), default=1),
'update_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('update time'),
description=_('update time'),
default="1970-01-01 00:00:00"),
'create_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('create time'),
description=_('create time'),
default="1970-01-01 00:00:00"
),
'document_list': openapi.Schema(type=openapi.TYPE_ARRAY, title=_('document list'),
description=_('document list'),
items=DocumentSerializers.Operate.get_response_body_api())
}
)
@staticmethod
def get_request_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['name', 'desc'],
properties={
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset name'),
description=_('dataset name')),
'desc': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset description'),
description=_('dataset description')),
'embedding_mode_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('embedding mode'),
description=_('embedding mode')),
'documents': openapi.Schema(type=openapi.TYPE_ARRAY, title=_('documents'),
description=_('documents'),
items=DocumentSerializers().Create.get_request_body_api()
)
}
)
class Edit(serializers.Serializer):
name = serializers.CharField(required=False, max_length=64, min_length=1,
error_messages=ErrMessage.char(_('dataset name')))
desc = serializers.CharField(required=False, max_length=256, min_length=1,
error_messages=ErrMessage.char(_('dataset description')))
meta = serializers.DictField(required=False)
application_id_list = serializers.ListSerializer(required=False, child=serializers.UUIDField(required=True,
error_messages=ErrMessage.char(
_('application id'))),
error_messages=ErrMessage.char(_('application id list')))
@staticmethod
def get_dataset_meta_valid_map():
dataset_meta_valid_map = {
Type.base: MetaSerializer.BaseMeta,
Type.web: MetaSerializer.WebMeta
}
return dataset_meta_valid_map
def is_valid(self, *, dataset: DataSet = None):
super().is_valid(raise_exception=True)
if 'meta' in self.data and self.data.get('meta') is not None:
dataset_meta_valid_map = self.get_dataset_meta_valid_map()
valid_class = dataset_meta_valid_map.get(dataset.type)
valid_class(data=self.data.get('meta')).is_valid(raise_exception=True)
class HitTest(ApiMixin, serializers.Serializer):
id = serializers.CharField(required=True, error_messages=ErrMessage.char("id"))
user_id = serializers.UUIDField(required=False, error_messages=ErrMessage.char(_('user id')))
query_text = serializers.CharField(required=True, error_messages=ErrMessage.char(_('query text')))
top_number = serializers.IntegerField(required=True, max_value=100, min_value=1,
error_messages=ErrMessage.char("top number"))
similarity = serializers.FloatField(required=True, max_value=2, min_value=0,
error_messages=ErrMessage.char(_('similarity')))
search_mode = serializers.CharField(required=True, validators=[
validators.RegexValidator(regex=re.compile("^embedding|keywords|blend$"),
                                      message=_('The search mode only supports embedding|keywords|blend'), code=500)
], error_messages=ErrMessage.char(_('search mode')))
def is_valid(self, *, raise_exception=True):
super().is_valid(raise_exception=True)
if not QuerySet(DataSet).filter(id=self.data.get("id")).exists():
raise AppApiException(300, _('id does not exist'))
def hit_test(self):
self.is_valid()
vector = VectorStore.get_embedding_vector()
exclude_document_id_list = [str(document.id) for document in
QuerySet(Document).filter(
dataset_id=self.data.get('id'),
is_active=False)]
model = get_embedding_model_by_dataset_id(self.data.get('id'))
            # Retrieve from the vector store
hit_list = vector.hit_test(self.data.get('query_text'), [self.data.get('id')], exclude_document_id_list,
self.data.get('top_number'),
self.data.get('similarity'),
SearchMode(self.data.get('search_mode')),
model)
hit_dict = reduce(lambda x, y: {**x, **y}, [{hit.get('paragraph_id'): hit} for hit in hit_list], {})
p_list = list_paragraph([h.get('paragraph_id') for h in hit_list])
return [{**p, 'similarity': hit_dict.get(p.get('id')).get('similarity'),
'comprehensive_score': hit_dict.get(p.get('id')).get('comprehensive_score')} for p in p_list]
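            # Illustrative result shape (the paragraph fields come from list_paragraph):
            #   [{'id': <paragraph id>, ..., 'similarity': 0.83, 'comprehensive_score': 0.79}, ...]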
class SyncWeb(ApiMixin, serializers.Serializer):
id = serializers.CharField(required=True, error_messages=ErrMessage.char(
_('dataset id')))
user_id = serializers.UUIDField(required=False, error_messages=ErrMessage.char(
_('user id')))
sync_type = serializers.CharField(required=True, error_messages=ErrMessage.char(
            _('sync type')), validators=[
validators.RegexValidator(regex=re.compile("^replace|complete$"),
message=_('The synchronization type only supports:replace|complete'), code=500)
])
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
first = QuerySet(DataSet).filter(id=self.data.get("id")).first()
if first is None:
raise AppApiException(300, _('id does not exist'))
if first.type != Type.web:
raise AppApiException(500, _('Synchronization is only supported for web site types'))
def sync(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
sync_type = self.data.get('sync_type')
dataset_id = self.data.get('id')
dataset = QuerySet(DataSet).get(id=dataset_id)
self.__getattribute__(sync_type + '_sync')(dataset)
return True
@staticmethod
def get_sync_handler(dataset):
def handler(child_link: ChildLink, response: Fork.Response):
if response.status == 200:
try:
document_name = child_link.tag.text if child_link.tag is not None and len(
child_link.tag.text.strip()) > 0 else child_link.url
paragraphs = get_split_model('web.md').parse(response.content)
print(child_link.url.strip())
first = QuerySet(Document).filter(meta__source_url=child_link.url.strip(),
dataset=dataset).first()
if first is not None:
                            # If it already exists, sync the existing document
DocumentSerializers.Sync(data={'document_id': first.id}).sync()
else:
                            # Otherwise insert a new document
DocumentSerializers.Create(data={'dataset_id': dataset.id}).save(
{'name': document_name, 'paragraphs': paragraphs,
'meta': {'source_url': child_link.url.strip(),
'selector': dataset.meta.get('selector')},
'type': Type.web}, with_valid=True)
except Exception as e:
logging.getLogger("max_kb_error").error(f'{str(e)}:{traceback.format_exc()}')
return handler
def replace_sync(self, dataset):
"""
            Replace sync
:return:
"""
url = dataset.meta.get('source_url')
selector = dataset.meta.get('selector') if 'selector' in dataset.meta else None
sync_replace_web_dataset.delay(str(dataset.id), url, selector)
def complete_sync(self, dataset):
"""
            Complete sync: delete all documents under the current dataset, then sync again
:return:
"""
            # Delete problem-paragraph mappings
QuerySet(ProblemParagraphMapping).filter(dataset=dataset).delete()
            # Delete documents
QuerySet(Document).filter(dataset=dataset).delete()
            # Delete paragraphs
QuerySet(Paragraph).filter(dataset=dataset).delete()
            # Delete embeddings
delete_embedding_by_dataset(self.data.get('id'))
            # Re-sync
self.replace_sync(dataset)
@staticmethod
def get_request_params_api():
return [openapi.Parameter(name='dataset_id',
in_=openapi.IN_PATH,
type=openapi.TYPE_STRING,
required=True,
description=_('dataset id')),
openapi.Parameter(name='sync_type',
in_=openapi.IN_QUERY,
type=openapi.TYPE_STRING,
required=True,
description=_(
'Synchronization type->replace: replacement synchronization, complete: complete synchronization'))
]
class Operate(ApiMixin, serializers.Serializer):
id = serializers.CharField(required=True, error_messages=ErrMessage.char(
_('dataset id')))
user_id = serializers.UUIDField(required=False, error_messages=ErrMessage.char(
_('user id')))
def is_valid(self, *, raise_exception=True):
super().is_valid(raise_exception=True)
if not QuerySet(DataSet).filter(id=self.data.get("id")).exists():
raise AppApiException(300, _('id does not exist'))
def export_excel(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
document_list = QuerySet(Document).filter(dataset_id=self.data.get('id'))
paragraph_list = native_search(QuerySet(Paragraph).filter(dataset_id=self.data.get("id")), get_file_content(
os.path.join(PROJECT_DIR, "apps", "dataset", 'sql', 'list_paragraph_document_name.sql')))
problem_mapping_list = native_search(
QuerySet(ProblemParagraphMapping).filter(dataset_id=self.data.get("id")), get_file_content(
os.path.join(PROJECT_DIR, "apps", "dataset", 'sql', 'list_problem_mapping.sql')),
with_table_name=True)
data_dict, document_dict = DocumentSerializers.Operate.merge_problem(paragraph_list, problem_mapping_list,
document_list)
workbook = DocumentSerializers.Operate.get_workbook(data_dict, document_dict)
response = HttpResponse(content_type='application/vnd.ms-excel')
response['Content-Disposition'] = 'attachment; filename="dataset.xlsx"'
workbook.save(response)
return response
def export_zip(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
document_list = QuerySet(Document).filter(dataset_id=self.data.get('id'))
paragraph_list = native_search(QuerySet(Paragraph).filter(dataset_id=self.data.get("id")), get_file_content(
os.path.join(PROJECT_DIR, "apps", "dataset", 'sql', 'list_paragraph_document_name.sql')))
problem_mapping_list = native_search(
QuerySet(ProblemParagraphMapping).filter(dataset_id=self.data.get("id")), get_file_content(
os.path.join(PROJECT_DIR, "apps", "dataset", 'sql', 'list_problem_mapping.sql')),
with_table_name=True)
data_dict, document_dict = DocumentSerializers.Operate.merge_problem(paragraph_list, problem_mapping_list,
document_list)
res = [parse_image(paragraph.get('content')) for paragraph in paragraph_list]
workbook = DocumentSerializers.Operate.get_workbook(data_dict, document_dict)
response = HttpResponse(content_type='application/zip')
response['Content-Disposition'] = 'attachment; filename="archive.zip"'
zip_buffer = io.BytesIO()
with TemporaryDirectory() as tempdir:
dataset_file = os.path.join(tempdir, 'dataset.xlsx')
workbook.save(dataset_file)
for r in res:
write_image(tempdir, r)
zip_dir(tempdir, zip_buffer)
response.write(zip_buffer.getvalue())
return response
@staticmethod
def merge_problem(paragraph_list: List[Dict], problem_mapping_list: List[Dict]):
result = {}
document_dict = {}
for paragraph in paragraph_list:
problem_list = [problem_mapping.get('content') for problem_mapping in problem_mapping_list if
problem_mapping.get('paragraph_id') == paragraph.get('id')]
document_sheet = result.get(paragraph.get('document_id'))
d = document_dict.get(paragraph.get('document_name'))
if d is None:
document_dict[paragraph.get('document_name')] = {paragraph.get('document_id')}
else:
d.add(paragraph.get('document_id'))
if document_sheet is None:
result[paragraph.get('document_id')] = [[paragraph.get('title'), paragraph.get('content'),
'\n'.join(problem_list)]]
else:
document_sheet.append([paragraph.get('title'), paragraph.get('content'), '\n'.join(problem_list)])
result_document_dict = {}
for d_name in document_dict:
for index, d_id in enumerate(document_dict.get(d_name)):
result_document_dict[d_id] = d_name if index == 0 else d_name + str(index)
return result, result_document_dict
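            # Illustrative example: two paragraphs of a document named 'faq' yield
            #   result               -> {<document_id>: [['title 1', 'content 1', 'q1\nq2'], ['title 2', 'content 2', '']]}
            #   result_document_dict -> {<document_id>: 'faq'}  (a second document with the same name would become 'faq1')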
@transaction.atomic
def delete(self):
self.is_valid()
dataset = QuerySet(DataSet).get(id=self.data.get("id"))
QuerySet(Document).filter(dataset=dataset).delete()
QuerySet(ProblemParagraphMapping).filter(dataset=dataset).delete()
QuerySet(Paragraph).filter(dataset=dataset).delete()
QuerySet(Problem).filter(dataset=dataset).delete()
dataset.delete()
delete_embedding_by_dataset(self.data.get('id'))
return True
@transaction.atomic
def re_embedding(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
dataset_id = self.data.get('id')
dataset = QuerySet(DataSet).filter(id=dataset_id).first()
embedding_model_id = dataset.embedding_mode_id
dataset_user_id = dataset.user_id
embedding_model = QuerySet(Model).filter(id=embedding_model_id).first()
if embedding_model is None:
raise AppApiException(500, _('Model does not exist'))
if embedding_model.permission_type == 'PRIVATE' and dataset_user_id != embedding_model.user_id:
raise AppApiException(500, _('No permission to use this model') + f"{embedding_model.name}")
ListenerManagement.update_status(QuerySet(Document).filter(dataset_id=self.data.get('id')),
TaskType.EMBEDDING,
State.PENDING)
ListenerManagement.update_status(QuerySet(Paragraph).filter(dataset_id=self.data.get('id')),
TaskType.EMBEDDING,
State.PENDING)
ListenerManagement.get_aggregation_document_status_by_dataset_id(self.data.get('id'))()
embedding_model_id = get_embedding_model_id_by_dataset_id(self.data.get('id'))
try:
embedding_by_dataset.delay(dataset_id, embedding_model_id)
except AlreadyQueued as e:
raise AppApiException(500, _('Failed to send the vectorization task, please try again later!'))
def list_application(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
dataset = QuerySet(DataSet).get(id=self.data.get("id"))
return select_list(get_file_content(
os.path.join(PROJECT_DIR, "apps", "dataset", 'sql', 'list_dataset_application.sql')),
[self.data.get('user_id') if self.data.get('user_id') == str(dataset.user_id) else None,
dataset.user_id, self.data.get('user_id')])
def one(self, user_id, with_valid=True):
if with_valid:
self.is_valid()
query_set_dict = {'default_sql': QuerySet(model=get_dynamics_model(
{'temp.id': models.UUIDField()})).filter(**{'temp.id': self.data.get("id")}),
'dataset_custom_sql': QuerySet(model=get_dynamics_model(
{'dataset.user_id': models.CharField()})).filter(
**{'dataset.user_id': user_id}
), 'team_member_permission_custom_sql': QuerySet(
model=get_dynamics_model({'user_id': models.CharField(),
'team_member_permission.operate': ArrayField(
verbose_name=_('permission'),
base_field=models.CharField(max_length=256,
blank=True,
choices=AuthOperate.choices,
default=AuthOperate.USE)
)})).filter(
**{'user_id': user_id, 'team_member_permission.operate__contains': ['USE']})}
all_application_list = [str(adm.get('id')) for adm in self.list_application(with_valid=False)]
return {**native_search(query_set_dict, select_string=get_file_content(
os.path.join(PROJECT_DIR, "apps", "dataset", 'sql', 'list_dataset.sql')), with_search_one=True),
'application_id_list': list(
filter(lambda application_id: all_application_list.__contains__(application_id),
[str(application_dataset_mapping.application_id) for
application_dataset_mapping in
QuerySet(ApplicationDatasetMapping).filter(
dataset_id=self.data.get('id'))]))}
@transaction.atomic
def edit(self, dataset: Dict, user_id: str):
"""
            Edit the knowledge base (dataset)
            :param user_id: user id
:param dataset: Dict name desc
:return:
"""
self.is_valid()
if QuerySet(DataSet).filter(user_id=user_id, name=dataset.get('name')).exclude(
id=self.data.get('id')).exists():
raise AppApiException(500, _('Knowledge base name duplicate!'))
_dataset = QuerySet(DataSet).get(id=self.data.get("id"))
DataSetSerializers.Edit(data=dataset).is_valid(dataset=_dataset)
if 'embedding_mode_id' in dataset:
_dataset.embedding_mode_id = dataset.get('embedding_mode_id')
if "name" in dataset:
_dataset.name = dataset.get("name")
if 'desc' in dataset:
_dataset.desc = dataset.get("desc")
if 'meta' in dataset:
_dataset.meta = dataset.get('meta')
if 'application_id_list' in dataset and dataset.get('application_id_list') is not None:
application_id_list = dataset.get('application_id_list')
                # Applications the current user is allowed to associate with this knowledge base
application_dataset_id_list = [str(dataset_dict.get('id')) for dataset_dict in
self.list_application(with_valid=False)]
for dataset_id in application_id_list:
if not application_dataset_id_list.__contains__(dataset_id):
raise AppApiException(500,
_('Unknown application id {dataset_id}, cannot be associated').format(
dataset_id=dataset_id))
                # Delete the ids that are already associated
QuerySet(ApplicationDatasetMapping).filter(application_id__in=application_dataset_id_list,
dataset_id=self.data.get("id")).delete()
                # Insert the new associations
QuerySet(ApplicationDatasetMapping).bulk_create(
[ApplicationDatasetMapping(application_id=application_id, dataset_id=self.data.get('id')) for
application_id in
application_id_list]) if len(application_id_list) > 0 else None
_dataset.save()
return self.one(with_valid=False, user_id=user_id)
@staticmethod
def get_request_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['name', 'desc'],
properties={
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset name'),
description=_('dataset name')),
'desc': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset description'),
description=_('dataset description')),
'meta': openapi.Schema(type=openapi.TYPE_OBJECT, title=_('meta'),
description=_(
'Knowledge base metadata->web:{source_url:xxx,selector:\'xxx\'},base:{}')),
'application_id_list': openapi.Schema(type=openapi.TYPE_ARRAY, title=_('application id list'),
description=_('application id list'),
items=openapi.Schema(type=openapi.TYPE_STRING))
}
)
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['id', 'name', 'desc', 'user_id', 'char_length', 'document_count',
'update_time', 'create_time'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title="id",
description="id", default="xx"),
'name': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset name'),
description=_('dataset name'), default=_('dataset name')),
'desc': openapi.Schema(type=openapi.TYPE_STRING, title=_('dataset description'),
description=_('dataset description'), default=_('dataset description')),
'user_id': openapi.Schema(type=openapi.TYPE_STRING, title=_('user id'),
description=_('user id'), default="user_xxxx"),
'char_length': openapi.Schema(type=openapi.TYPE_STRING, title=_('char length'),
description=_('char length'), default=10),
'document_count': openapi.Schema(type=openapi.TYPE_STRING, title=_('document count'),
description=_('document count'), default=1),
'update_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('update time'),
description=_('update time'),
default="1970-01-01 00:00:00"),
'create_time': openapi.Schema(type=openapi.TYPE_STRING, title=_('create time'),
description=_('create time'),
default="1970-01-01 00:00:00"
)
}
)
@staticmethod
def get_request_params_api():
return [openapi.Parameter(name='dataset_id',
in_=openapi.IN_PATH,
type=openapi.TYPE_STRING,
required=True,
description=_('dataset id')),
]

@@ -0,0 +1,866 @@
# coding=utf-8
"""
@project: qabot
@Author
@file team_serializers.py
@date 2023/9/5 16:32
@desc:
"""
import datetime
import os
import random
import re
import uuid
from django.conf import settings
from django.core import validators, signing, cache
from django.core.mail import send_mail
from django.core.mail.backends.smtp import EmailBackend
from django.db import transaction
from django.db.models import Q, QuerySet, Prefetch
from drf_yasg import openapi
from rest_framework import serializers
from application.models import Application
from common.constants.authentication_type import AuthenticationType
from common.constants.exception_code_constants import ExceptionCodeConstants
from common.constants.permission_constants import RoleConstants, get_permission_list_by_role
from common.db.search import page_search
from common.exception.app_exception import AppApiException
from common.mixins.api_mixin import ApiMixin
from common.models.db_model_manage import DBModelManage
from common.response.result import get_api_response
from common.util.common import valid_license
from common.util.field_message import ErrMessage
from common.util.lock import lock
from dataset.models import DataSet, Document, Paragraph, Problem, ProblemParagraphMapping
from embedding.task import delete_embedding_by_dataset_id_list
from function_lib.models.function import FunctionLib
from setting.models import Team, SystemSetting, SettingType, Model, TeamMember, TeamMemberPermission
from smartdoc.conf import PROJECT_DIR
from users.models.user import User, password_encrypt, get_user_dynamics_permission
from django.utils.translation import gettext_lazy as _, gettext, to_locale
from django.utils.translation import get_language
user_cache = cache.caches['user_cache']
class SystemSerializer(ApiMixin, serializers.Serializer):
@staticmethod
def get_profile():
version = os.environ.get('MAXKB_VERSION')
xpack_cache = DBModelManage.get_model('xpack_cache')
return {'version': version, 'IS_XPACK': hasattr(settings, 'IS_XPACK'),
'XPACK_LICENSE_IS_VALID': False if xpack_cache is None else xpack_cache.get('XPACK_LICENSE_IS_VALID',
False)}
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=[],
properties={
'version': openapi.Schema(type=openapi.TYPE_STRING, title=_("System version number"),
description=_("System version number")),
}
)
class LoginSerializer(ApiMixin, serializers.Serializer):
username = serializers.CharField(required=True,
error_messages=ErrMessage.char(_("Username")))
password = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Password")))
def is_valid(self, *, raise_exception=False):
"""
        Validate parameters
        :param raise_exception: whether to raise an exception (only True is supported)
:return: User information
"""
super().is_valid(raise_exception=True)
username = self.data.get("username")
password = password_encrypt(self.data.get("password"))
user = QuerySet(User).filter(Q(username=username,
password=password) | Q(email=username,
password=password)).first()
if user is None:
raise ExceptionCodeConstants.INCORRECT_USERNAME_AND_PASSWORD.value.to_app_api_exception()
if not user.is_active:
raise AppApiException(1005, _("The user has been disabled, please contact the administrator!"))
return user
def get_user_token(self):
"""
Get user token
:return: User Token (authentication information)
"""
user = self.is_valid()
token = signing.dumps({'username': user.username, 'id': str(user.id), 'email': user.email,
'type': AuthenticationType.USER.value})
return token
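        # The token is produced by django.core.signing.dumps and can later be verified and
        # decoded with signing.loads(token), optionally passing max_age to enforce expiry.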
class Meta:
model = User
fields = '__all__'
def get_request_body_api(self):
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['username', 'password'],
properties={
'username': openapi.Schema(type=openapi.TYPE_STRING, title=_("Username"), description=_("Username")),
'password': openapi.Schema(type=openapi.TYPE_STRING, title=_("Password"), description=_("Password"))
}
)
def get_response_body_api(self):
return get_api_response(openapi.Schema(
type=openapi.TYPE_STRING,
title="token",
default="xxxx",
            description="Authentication token"
))
class RegisterSerializer(ApiMixin, serializers.Serializer):
"""
Register request object
"""
email = serializers.EmailField(
required=True,
error_messages=ErrMessage.char(_("Email")),
validators=[validators.EmailValidator(message=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.message,
code=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.code)])
username = serializers.CharField(required=True,
error_messages=ErrMessage.char(_("Username")),
max_length=20,
min_length=6,
validators=[
validators.RegexValidator(regex=re.compile("^.{6,20}$"),
message=_("Username must be 6-20 characters long"))
])
password = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Password")),
validators=[validators.RegexValidator(regex=re.compile(
"^(?![a-zA-Z]+$)(?![A-Z0-9]+$)(?![A-Z_!@#$%^&*`~.()-+=]+$)(?![a-z0-9]+$)(?![a-z_!@#$%^&*`~()-+=]+$)"
"(?![0-9_!@#$%^&*`~()-+=]+$)[a-zA-Z0-9_!@#$%^&*`~.()-+=]{6,20}$")
, message=_(
"The password must be 6-20 characters long and must be a combination of letters, numbers, and special characters."))])
re_password = serializers.CharField(required=True,
error_messages=ErrMessage.char(_("Confirm Password")),
validators=[validators.RegexValidator(regex=re.compile(
"^(?![a-zA-Z]+$)(?![A-Z0-9]+$)(?![A-Z_!@#$%^&*`~.()-+=]+$)(?![a-z0-9]+$)(?![a-z_!@#$%^&*`~()-+=]+$)"
"(?![0-9_!@#$%^&*`~()-+=]+$)[a-zA-Z0-9_!@#$%^&*`~.()-+=]{6,20}$")
, message=_(
"The password must be 6-20 characters long and must be a combination of letters, numbers, and special characters."))])
code = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Verification code")))
class Meta:
model = User
fields = '__all__'
@lock(lock_key=lambda this, raise_exception: (
this.initial_data.get("email") + ":register"
))
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
if self.data.get('password') != self.data.get('re_password'):
raise ExceptionCodeConstants.PASSWORD_NOT_EQ_RE_PASSWORD.value.to_app_api_exception()
username = self.data.get("username")
email = self.data.get("email")
code = self.data.get("code")
code_cache_key = email + ":register"
cache_code = user_cache.get(code_cache_key)
if code != cache_code:
raise ExceptionCodeConstants.CODE_ERROR.value.to_app_api_exception()
u = QuerySet(User).filter(Q(username=username) | Q(email=email)).first()
if u is not None:
if u.email == email:
raise ExceptionCodeConstants.EMAIL_IS_EXIST.value.to_app_api_exception()
if u.username == username:
raise ExceptionCodeConstants.USERNAME_IS_EXIST.value.to_app_api_exception()
return True
@valid_license(model=User, count=100,
message=_(
"The community version supports up to 100 users. If you need more users, please contact us (https://fit2cloud.com/)."))
@transaction.atomic
def save(self, **kwargs):
m = User(
**{'id': uuid.uuid1(), 'email': self.data.get("email"), 'username': self.data.get("username"),
'role': RoleConstants.USER.name})
m.set_password(self.data.get("password"))
m.save()
Team(**{'user': m, 'name': m.username + _("team")}).save()
email = self.data.get("email")
code_cache_key = email + ":register"
user_cache.delete(code_cache_key)
@staticmethod
def get_request_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['username', 'email', 'password', 're_password', 'code'],
properties={
'username': openapi.Schema(type=openapi.TYPE_STRING, title=_("Username"), description=_("Username")),
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email")),
'password': openapi.Schema(type=openapi.TYPE_STRING, title=_("Password"), description=_("Password")),
're_password': openapi.Schema(type=openapi.TYPE_STRING, title=_("Confirm Password"),
description=_("Confirm Password")),
'code': openapi.Schema(type=openapi.TYPE_STRING, title=_("Verification code"),
description=_("Verification code"))
}
)
class CheckCodeSerializer(ApiMixin, serializers.Serializer):
"""
    Verify the email verification code
"""
email = serializers.EmailField(
required=True,
error_messages=ErrMessage.char(_("Email")),
validators=[validators.EmailValidator(message=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.message,
code=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.code)])
code = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Verification code")))
type = serializers.CharField(required=True,
error_messages=ErrMessage.char(_("Type")),
validators=[
validators.RegexValidator(regex=re.compile("^register|reset_password$"),
message=_(
"The type only supports register|reset_password"),
code=500)
])
def is_valid(self, *, raise_exception=False):
super().is_valid()
value = user_cache.get(self.data.get("email") + ":" + self.data.get("type"))
if value is None or value != self.data.get("code"):
raise ExceptionCodeConstants.CODE_ERROR.value.to_app_api_exception()
return True
class Meta:
model = User
fields = '__all__'
def get_request_body_api(self):
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['email', 'code', 'type'],
properties={
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email")),
'code': openapi.Schema(type=openapi.TYPE_STRING, title=_("Verification code"),
description=_("Verification code")),
'type': openapi.Schema(type=openapi.TYPE_STRING, title=_("Type"), description="register|reset_password")
}
)
def get_response_body_api(self):
return get_api_response(openapi.Schema(
type=openapi.TYPE_BOOLEAN,
title=_('Is it successful'),
default=True,
description=_('Error message')))
class SwitchLanguageSerializer(serializers.Serializer):
user_id = serializers.UUIDField(required=True, error_messages=ErrMessage.char(_('user id')), )
language = serializers.CharField(required=True, error_messages=ErrMessage.char(_('language')))
def switch(self):
self.is_valid(raise_exception=True)
language = self.data.get('language')
support_language_list = ['zh-CN', 'zh-Hant', 'en-US']
if not support_language_list.__contains__(language):
raise AppApiException(500, _('language only support:') + ','.join(support_language_list))
QuerySet(User).filter(id=self.data.get('user_id')).update(language=language)
class RePasswordSerializer(ApiMixin, serializers.Serializer):
email = serializers.EmailField(
required=True,
error_messages=ErrMessage.char(_("Email")),
validators=[validators.EmailValidator(message=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.message,
code=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.code)])
code = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Verification code")))
password = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Password")),
validators=[validators.RegexValidator(regex=re.compile(
"^(?![a-zA-Z]+$)(?![A-Z0-9]+$)(?![A-Z_!@#$%^&*`~.()-+=]+$)(?![a-z0-9]+$)(?![a-z_!@#$%^&*`~()-+=]+$)"
"(?![0-9_!@#$%^&*`~()-+=]+$)[a-zA-Z0-9_!@#$%^&*`~.()-+=]{6,20}$")
, message=_(
"The confirmation password must be 6-20 characters long and must be a combination of letters, numbers, and special characters."))])
re_password = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Confirm Password")),
validators=[validators.RegexValidator(regex=re.compile(
"^(?![a-zA-Z]+$)(?![A-Z0-9]+$)(?![A-Z_!@#$%^&*`~.()-+=]+$)(?![a-z0-9]+$)(?![a-z_!@#$%^&*`~()-+=]+$)"
"(?![0-9_!@#$%^&*`~()-+=]+$)[a-zA-Z0-9_!@#$%^&*`~.()-+=]{6,20}$")
, message=_(
"The confirmation password must be 6-20 characters long and must be a combination of letters, numbers, and special characters."))]
)
class Meta:
model = User
fields = '__all__'
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
email = self.data.get("email")
cache_code = user_cache.get(email + ':reset_password')
if self.data.get('password') != self.data.get('re_password'):
raise AppApiException(ExceptionCodeConstants.PASSWORD_NOT_EQ_RE_PASSWORD.value.code,
ExceptionCodeConstants.PASSWORD_NOT_EQ_RE_PASSWORD.value.message)
if cache_code != self.data.get('code'):
raise AppApiException(ExceptionCodeConstants.CODE_ERROR.value.code,
ExceptionCodeConstants.CODE_ERROR.value.message)
return True
def reset_password(self):
"""
        Reset the password
        :return: whether the reset succeeded
"""
if self.is_valid():
email = self.data.get("email")
QuerySet(User).filter(email=email).update(
password=password_encrypt(self.data.get('password')))
code_cache_key = email + ":reset_password"
            # Delete the cached verification code
user_cache.delete(code_cache_key)
return True
def get_request_body_api(self):
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['email', 'code', "password", 're_password'],
properties={
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email")),
'code': openapi.Schema(type=openapi.TYPE_STRING, title=_("Verification code"),
description=_("Verification code")),
'password': openapi.Schema(type=openapi.TYPE_STRING, title=_("Password"), description=_("Password")),
're_password': openapi.Schema(type=openapi.TYPE_STRING, title=_("Confirm Password"),
description=_("Confirm Password"))
}
)
class SendEmailSerializer(ApiMixin, serializers.Serializer):
email = serializers.EmailField(
required=True
, error_messages=ErrMessage.char(_("Email")),
validators=[validators.EmailValidator(message=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.message,
code=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.code)])
type = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Type")), validators=[
validators.RegexValidator(regex=re.compile("^register|reset_password$"),
message=_("The type only supports register|reset_password"), code=500)
])
class Meta:
model = User
fields = '__all__'
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=raise_exception)
user_exists = QuerySet(User).filter(email=self.data.get('email')).exists()
if not user_exists and self.data.get('type') == 'reset_password':
raise ExceptionCodeConstants.EMAIL_IS_NOT_EXIST.value.to_app_api_exception()
elif user_exists and self.data.get('type') == 'register':
raise ExceptionCodeConstants.EMAIL_IS_EXIST.value.to_app_api_exception()
code_cache_key = self.data.get('email') + ":" + self.data.get("type")
code_cache_key_lock = code_cache_key + "_lock"
ttl = user_cache.ttl(code_cache_key_lock)
if ttl is not None:
raise AppApiException(500, _("Do not send emails again within {seconds} seconds").format(
seconds=int(ttl.total_seconds())))
return True
def send(self):
"""
        Send the verification email
        :return: whether the email was sent successfully
        :exception: raised if sending fails
"""
email = self.data.get("email")
state = self.data.get("type")
        # Generate a random 6-digit verification code
code = "".join(list(map(lambda i: random.choice(['1', '2', '3', '4', '5', '6', '7', '8', '9', '0'
]), range(6))))
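        # A more concise equivalent (illustrative, not the original implementation):
        #   code = ''.join(random.choices('0123456789', k=6))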
        # Load the email template
language = get_language()
file = open(
os.path.join(PROJECT_DIR, "apps", "common", 'template', f'email_template_{to_locale(language)}.html'), "r",
encoding='utf-8')
content = file.read()
file.close()
code_cache_key = email + ":" + state
code_cache_key_lock = code_cache_key + "_lock"
        # Set the send-lock cache entry (rate limit)
user_cache.set(code_cache_key_lock, code, timeout=datetime.timedelta(minutes=1))
system_setting = QuerySet(SystemSetting).filter(type=SettingType.EMAIL.value).first()
if system_setting is None:
user_cache.delete(code_cache_key_lock)
raise AppApiException(1004,
_("The email service has not been set up. Please contact the administrator to set up the email service in [Email Settings]."))
try:
connection = EmailBackend(system_setting.meta.get("email_host"),
system_setting.meta.get('email_port'),
system_setting.meta.get('email_host_user'),
system_setting.meta.get('email_host_password'),
system_setting.meta.get('email_use_tls'),
False,
system_setting.meta.get('email_use_ssl')
)
            # Send the email
send_mail(_('【Intelligent knowledge base question and answer system-{action}').format(
action=_('User registration') if state == 'register' else _('Change password')),
'',
html_message=f'{content.replace("${code}", code)}',
from_email=system_setting.meta.get('from_email'),
recipient_list=[email], fail_silently=False, connection=connection)
except Exception as e:
user_cache.delete(code_cache_key_lock)
raise AppApiException(500, f"{str(e)}" + _("Email sending failed"))
user_cache.set(code_cache_key, code, timeout=datetime.timedelta(minutes=30))
return True
def get_request_body_api(self):
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['email', 'type'],
properties={
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_('Email')),
'type': openapi.Schema(type=openapi.TYPE_STRING, title=_('Type'), description="register|reset_password")
}
)
def get_response_body_api(self):
return get_api_response(openapi.Schema(type=openapi.TYPE_STRING, default=True))
class UserProfile(ApiMixin):
@staticmethod
def get_user_profile(user: User):
"""
        Get the user's profile details
        :param user: user object
:return:
"""
permission_list = get_user_dynamics_permission(str(user.id))
permission_list += [p.value for p in get_permission_list_by_role(RoleConstants[user.role])]
return {'id': user.id, 'username': user.username, 'email': user.email, 'role': user.role,
'permissions': [str(p) for p in permission_list],
'is_edit_password': user.password == 'd880e722c47a34d8e9fce789fc62389d' if user.role == 'ADMIN' else False,
'language': user.language}
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['id', 'username', 'email', 'role', 'is_active'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title="ID", description="ID"),
'username': openapi.Schema(type=openapi.TYPE_STRING, title=_("Username"), description=_("Username")),
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email")),
'role': openapi.Schema(type=openapi.TYPE_STRING, title=_("Role"), description=_("Role")),
'is_active': openapi.Schema(type=openapi.TYPE_STRING, title=_("Is active"), description=_("Is active")),
"permissions": openapi.Schema(type=openapi.TYPE_ARRAY, title=_("Permissions"),
description=_("Permissions"),
items=openapi.Schema(type=openapi.TYPE_STRING))
}
)
class UserSerializer(ApiMixin, serializers.ModelSerializer):
class Meta:
model = User
fields = ["email", "id",
"username", ]
def get_response_body_api(self):
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['id', 'username', 'email', 'role', 'is_active'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title="ID", description="ID"),
'username': openapi.Schema(type=openapi.TYPE_STRING, title=_("Username"), description=_("Username")),
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email")),
'role': openapi.Schema(type=openapi.TYPE_STRING, title=_("Role"), description=_("Role")),
'is_active': openapi.Schema(type=openapi.TYPE_STRING, title=_("Is active"), description=_("Is active"))
}
)
class Query(ApiMixin, serializers.Serializer):
email_or_username = serializers.CharField(required=True)
@staticmethod
def get_request_params_api():
return [openapi.Parameter(name='email_or_username',
in_=openapi.IN_QUERY,
type=openapi.TYPE_STRING,
required=True,
description=_("Email or username"))]
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['username', 'email', 'id'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title='ID', description="ID"),
'username': openapi.Schema(type=openapi.TYPE_STRING, title=_("Username"),
description=_("Username")),
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email"))
}
)
def list(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
email_or_username = self.data.get('email_or_username')
return [{'id': user_model.id, 'username': user_model.username, 'email': user_model.email} for user_model
in
QuerySet(User).filter(Q(username=email_or_username) | Q(email=email_or_username))]
def listByType(self, type, user_id):
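            # Return the owners of resources of the given type that the user may use (shared via team
            # permissions or owned directly), prefixed with the pseudo options 'All' and 'Me'.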
teamIds = TeamMember.objects.filter(user_id=user_id).values_list('id', flat=True)
targets = TeamMemberPermission.objects.filter(
member_id__in=teamIds,
auth_target_type=type,
operate__contains=['USE']
).values_list('target', flat=True)
prefetch_users = Prefetch('user', queryset=User.objects.only('id', 'username'))
user_list = []
if type == 'DATASET':
user_list = DataSet.objects.filter(
Q(id__in=targets) | Q(user_id=user_id)
).prefetch_related(prefetch_users).distinct('user_id')
elif type == 'APPLICATION':
user_list = Application.objects.filter(
Q(id__in=targets) | Q(user_id=user_id)
).prefetch_related(prefetch_users).distinct('user_id')
elif type == 'FUNCTION':
user_list = FunctionLib.objects.filter(
Q(permission_type='PUBLIC') | Q(user_id=user_id)
).prefetch_related(prefetch_users).distinct('user_id')
other_users = [
{'id': app.user.id, 'username': app.user.username}
for app in user_list if app.user.id != user_id
]
users = [
{'id': 'all', 'username': _('All')},
{'id': user_id, 'username': _('Me')}
]
users.extend(other_users)
return users
class UserInstanceSerializer(ApiMixin, serializers.ModelSerializer):
class Meta:
model = User
fields = ['id', 'username', 'email', 'phone', 'is_active', 'role', 'nick_name', 'create_time', 'update_time',
'source']
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['id', 'username', 'email', 'phone', 'is_active', 'role', 'nick_name', 'create_time',
'update_time'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title="ID", description="ID"),
'username': openapi.Schema(type=openapi.TYPE_STRING, title=_("Username"), description=_("Username")),
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email")),
'phone': openapi.Schema(type=openapi.TYPE_STRING, title=_("Phone"), description=_("Phone")),
'is_active': openapi.Schema(type=openapi.TYPE_BOOLEAN, title=_("Is active"),
description=_("Is active")),
'role': openapi.Schema(type=openapi.TYPE_STRING, title=_("Role"), description=_("Role")),
'source': openapi.Schema(type=openapi.TYPE_STRING, title=_("Source"), description=_("Source")),
'nick_name': openapi.Schema(type=openapi.TYPE_STRING, title=_("Name"), description=_("Name")),
'create_time': openapi.Schema(type=openapi.TYPE_STRING, title=_("Create time"),
description=_("Create time")),
'update_time': openapi.Schema(type=openapi.TYPE_STRING, title=_("Update time"),
description=_("Update time"))
}
)
@staticmethod
def get_request_params_api():
return [openapi.Parameter(name='user_id',
in_=openapi.IN_PATH,
type=openapi.TYPE_STRING,
required=True,
description='ID')
]
class UserManageSerializer(serializers.Serializer):
class Query(ApiMixin, serializers.Serializer):
email_or_username = serializers.CharField(required=False, allow_null=True,
error_messages=ErrMessage.char(_('Email or username')))
@staticmethod
def get_request_params_api():
return [openapi.Parameter(name='email_or_username',
in_=openapi.IN_QUERY,
type=openapi.TYPE_STRING,
required=False,
description=_("Email or username"))]
@staticmethod
def get_response_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['username', 'email', 'id'],
properties={
'id': openapi.Schema(type=openapi.TYPE_STRING, title='ID', description="ID"),
'username': openapi.Schema(type=openapi.TYPE_STRING, title=_("Username"),
description=_("Username")),
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email"))
}
)
def get_query_set(self):
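            # Fuzzy-match username or email when a keyword is given; newest accounts first.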
email_or_username = self.data.get('email_or_username')
query_set = QuerySet(User)
if email_or_username is not None:
query_set = query_set.filter(
Q(username__contains=email_or_username) | Q(email__contains=email_or_username))
query_set = query_set.order_by("-create_time")
return query_set
def list(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
return [{'id': user_model.id, 'username': user_model.username, 'email': user_model.email} for user_model in
self.get_query_set()]
def page(self, current_page: int, page_size: int, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
return page_search(current_page, page_size,
self.get_query_set(),
post_records_handler=lambda u: UserInstanceSerializer(u).data)
class UserInstance(ApiMixin, serializers.Serializer):
email = serializers.EmailField(
required=True,
error_messages=ErrMessage.char(_("Email")),
validators=[validators.EmailValidator(message=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.message,
code=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.code)])
username = serializers.CharField(required=True,
error_messages=ErrMessage.char(_("Username")),
max_length=20,
min_length=6,
validators=[
validators.RegexValidator(regex=re.compile("^.{6,20}$"),
message=_(
'Username must be 6-20 characters long'))
])
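        # Password policy: 6-20 characters from the allowed set; the chained negative look-aheads
        # reject passwords built from a single character class (e.g. all letters or all digits)
        # and from several weak two-class combinations.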
password = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Password")),
validators=[validators.RegexValidator(regex=re.compile(
"^(?![a-zA-Z]+$)(?![A-Z0-9]+$)(?![A-Z_!@#$%^&*`~.()-+=]+$)(?![a-z0-9]+$)(?![a-z_!@#$%^&*`~()-+=]+$)"
"(?![0-9_!@#$%^&*`~()-+=]+$)[a-zA-Z0-9_!@#$%^&*`~.()-+=]{6,20}$")
, message=_(
"The password must be 6-20 characters long and must be a combination of letters, numbers, and special characters."))])
nick_name = serializers.CharField(required=False, error_messages=ErrMessage.char(_("Name")), max_length=64,
allow_null=True, allow_blank=True)
phone = serializers.CharField(required=False, error_messages=ErrMessage.char(_("Phone")), max_length=20,
allow_null=True, allow_blank=True)
def is_valid(self, *, raise_exception=True):
super().is_valid(raise_exception=True)
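            # Reject the request if another account already uses the same email or username.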
username = self.data.get('username')
email = self.data.get('email')
u = QuerySet(User).filter(Q(username=username) | Q(email=email)).first()
if u is not None:
if u.email == email:
raise ExceptionCodeConstants.EMAIL_IS_EXIST.value.to_app_api_exception()
if u.username == username:
raise ExceptionCodeConstants.USERNAME_IS_EXIST.value.to_app_api_exception()
@staticmethod
def get_request_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['username', 'email', 'password'],
properties={
'username': openapi.Schema(type=openapi.TYPE_STRING, title=_("Username"),
description=_("Username")),
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email")),
'password': openapi.Schema(type=openapi.TYPE_STRING, title=_("Password"),
description=_("Password")),
'phone': openapi.Schema(type=openapi.TYPE_STRING, title=_("Phone"), description=_("Phone")),
'nick_name': openapi.Schema(type=openapi.TYPE_STRING, title=_("Name"), description=_("Name"))
}
)
class UserEditInstance(ApiMixin, serializers.Serializer):
email = serializers.EmailField(
required=False,
error_messages=ErrMessage.char(_("Email")),
validators=[validators.EmailValidator(message=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.message,
code=ExceptionCodeConstants.EMAIL_FORMAT_ERROR.value.code)])
nick_name = serializers.CharField(required=False, error_messages=ErrMessage.char(_("Name")), max_length=64,
allow_null=True, allow_blank=True)
phone = serializers.CharField(required=False, error_messages=ErrMessage.char(_("Phone")), max_length=20,
allow_null=True, allow_blank=True)
is_active = serializers.BooleanField(required=False, error_messages=ErrMessage.char(_("Is active")))
def is_valid(self, *, user_id=None, raise_exception=False):
super().is_valid(raise_exception=True)
if self.data.get('email') is not None and QuerySet(User).filter(email=self.data.get('email')).exclude(
id=user_id).exists():
raise AppApiException(1004, _('Email is already in use'))
@staticmethod
def get_request_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
properties={
'email': openapi.Schema(type=openapi.TYPE_STRING, title=_("Email"), description=_("Email")),
'nick_name': openapi.Schema(type=openapi.TYPE_STRING, title=_("Name"), description=_("Name")),
'phone': openapi.Schema(type=openapi.TYPE_STRING, title=_("Phone"), description=_("Phone")),
'is_active': openapi.Schema(type=openapi.TYPE_BOOLEAN, title=_("Is active"),
description=_("Is active")),
}
)
class RePasswordInstance(ApiMixin, serializers.Serializer):
password = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Password")),
validators=[validators.RegexValidator(regex=re.compile(
"^(?![a-zA-Z]+$)(?![A-Z0-9]+$)(?![A-Z_!@#$%^&*`~.()-+=]+$)(?![a-z0-9]+$)(?![a-z_!@#$%^&*`~()-+=]+$)"
"(?![0-9_!@#$%^&*`~()-+=]+$)[a-zA-Z0-9_!@#$%^&*`~.()-+=]{6,20}$")
, message=_(
"The password must be 6-20 characters long and must be a combination of letters, numbers, and special characters."))])
re_password = serializers.CharField(required=True, error_messages=ErrMessage.char(_("Confirm Password")),
validators=[validators.RegexValidator(regex=re.compile(
"^(?![a-zA-Z]+$)(?![A-Z0-9]+$)(?![A-Z_!@#$%^&*`~.()-+=]+$)(?![a-z0-9]+$)(?![a-z_!@#$%^&*`~()-+=]+$)"
"(?![0-9_!@#$%^&*`~()-+=]+$)[a-zA-Z0-9_!@#$%^&*`~.()-+=]{6,20}$")
, message=_(
"The confirmation password must be 6-20 characters long and must be a combination of letters, numbers, and special characters."))]
)
@staticmethod
def get_request_body_api():
return openapi.Schema(
type=openapi.TYPE_OBJECT,
required=['password', 're_password'],
properties={
'password': openapi.Schema(type=openapi.TYPE_STRING, title=_("Password"),
description=_("Password")),
're_password': openapi.Schema(type=openapi.TYPE_STRING, title=_("Confirm Password"),
description=_("Confirm Password")),
}
)
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
if self.data.get('password') != self.data.get('re_password'):
raise ExceptionCodeConstants.PASSWORD_NOT_EQ_RE_PASSWORD.value.to_app_api_exception()
@valid_license(model=User, count=100,
message=_(
'The community version supports up to 100 users. If you need more users, please contact us (https://fit2cloud.com/).'))
@transaction.atomic
def save(self, instance, with_valid=True):
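        # Create a local account with the USER role and bootstrap its personal team;
        # guarded by the community-edition limit of 100 users (see the decorator above).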
if with_valid:
UserManageSerializer.UserInstance(data=instance).is_valid(raise_exception=True)
user = User(id=uuid.uuid1(), email=instance.get('email'),
phone="" if instance.get('phone') is None else instance.get('phone'),
nick_name="" if instance.get('nick_name') is None else instance.get('nick_name')
, username=instance.get('username'), password=password_encrypt(instance.get('password')),
role=RoleConstants.USER.name, source="LOCAL",
is_active=True)
user.save()
            # Initialize the user's team
Team(**{'user': user, 'name': user.username + _('team')}).save()
return UserInstanceSerializer(user).data
class Operate(serializers.Serializer):
id = serializers.UUIDField(required=True, error_messages=ErrMessage.char("ID"))
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
if not QuerySet(User).filter(id=self.data.get('id')).exists():
raise AppApiException(1004, _('User does not exist'))
@transaction.atomic
def delete(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
user = QuerySet(User).filter(id=self.data.get('id')).first()
if user.role == RoleConstants.ADMIN.name:
raise AppApiException(1004, _('Unable to delete administrator'))
user_id = self.data.get('id')
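            # Manually clean up everything the user owns: team memberships and their permissions,
            # applications, datasets (documents, paragraphs, problems and embeddings), the personal
            # team, models and finally the user record itself.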
team_member_list = QuerySet(TeamMember).filter(Q(user_id=user_id) | Q(team_id=user_id))
            # Delete the team members' permissions
QuerySet(TeamMemberPermission).filter(
member_id__in=[team_member.id for team_member in team_member_list]).delete()
            # Delete the team members
team_member_list.delete()
            # Delete the applications (application-related data is cascade-deleted, so it needs no manual cleanup)
QuerySet(Application).filter(user_id=self.data.get('id')).delete()
            # Delete dataset-related data
dataset_list = QuerySet(DataSet).filter(user_id=self.data.get('id'))
dataset_id_list = [str(dataset.id) for dataset in dataset_list]
QuerySet(Document).filter(dataset_id__in=dataset_id_list).delete()
QuerySet(Paragraph).filter(dataset_id__in=dataset_id_list).delete()
QuerySet(ProblemParagraphMapping).filter(dataset_id__in=dataset_id_list).delete()
QuerySet(Problem).filter(dataset_id__in=dataset_id_list).delete()
delete_embedding_by_dataset_id_list(dataset_id_list)
dataset_list.delete()
            # Delete the team
QuerySet(Team).filter(user_id=self.data.get('id')).delete()
            # Delete the models
QuerySet(Model).filter(user_id=self.data.get('id')).delete()
            # Delete the user
QuerySet(User).filter(id=self.data.get('id')).delete()
return True
def edit(self, instance, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
UserManageSerializer.UserEditInstance(data=instance).is_valid(user_id=self.data.get('id'),
raise_exception=True)
user = QuerySet(User).filter(id=self.data.get('id')).first()
if user.role == RoleConstants.ADMIN.name and 'is_active' in instance and instance.get(
'is_active') is not None:
raise AppApiException(1004, _('Cannot modify administrator status'))
update_keys = ['email', 'nick_name', 'phone', 'is_active']
for update_key in update_keys:
if update_key in instance and instance.get(update_key) is not None:
user.__setattr__(update_key, instance.get(update_key))
user.save()
return UserInstanceSerializer(user).data
def one(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
user = QuerySet(User).filter(id=self.data.get('id')).first()
return UserInstanceSerializer(user).data
def re_password(self, instance, with_valid=True):
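            # Reset the selected user's password; the new value is stored via password_encrypt.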
if with_valid:
self.is_valid(raise_exception=True)
UserManageSerializer.RePasswordInstance(data=instance).is_valid(raise_exception=True)
user = QuerySet(User).filter(id=self.data.get('id')).first()
user.password = password_encrypt(instance.get('password'))
user.save()
return True

@ -0,0 +1,55 @@
# coding=utf-8
"""
@project: MaxKB
@Author
@file valid_serializers.py
@date2024/7/8 18:00
@desc:
"""
import re
from django.core import validators
from django.db.models import QuerySet
from rest_framework import serializers
from application.models import Application
from common.exception.app_exception import AppApiException
from common.models.db_model_manage import DBModelManage
from common.util.field_message import ErrMessage
from dataset.models import DataSet
from users.models import User
from django.utils.translation import gettext_lazy as _
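# Community-edition quota per resource type, used by ValidSerializer.valid() below.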
model_message_dict = {
'dataset': {'model': DataSet, 'count': 5000,
'message': _(
'The community version supports up to 5000 knowledge bases. If you need more knowledge bases, please contact us (https://fit2cloud.com/).')},
'application': {'model': Application, 'count': 5000,
'message': _(
'The community version supports up to 5000 applications. If you need more applications, please contact us (https://fit2cloud.com/).')},
'user': {'model': User, 'count': 100,
'message': _(
'The community version supports up to 100 users. If you need more users, please contact us (https://fit2cloud.com/).')}
}
class ValidSerializer(serializers.Serializer):
valid_type = serializers.CharField(required=True, error_messages=ErrMessage.char(_('type')), validators=[
validators.RegexValidator(regex=re.compile("^application|dataset|user$"),
message="类型只支持:application|dataset|user", code=500)
])
valid_count = serializers.IntegerField(required=True, error_messages=ErrMessage.integer(_('check quantity')))
def valid(self, is_valid=True):
if is_valid:
self.is_valid(raise_exception=True)
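        # Without a valid xPack license, enforce the community-edition quota for the requested type:
        # the reported count must match the configured limit and the table must stay below it.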
model_value = model_message_dict.get(self.data.get('valid_type'))
xpack_cache = DBModelManage.get_model('xpack_cache')
is_license_valid = xpack_cache.get('XPACK_LICENSE_IS_VALID', False) if xpack_cache is not None else False
if not is_license_valid:
if self.data.get('valid_count') != model_value.get('count'):
raise AppApiException(400, model_value.get('message'))
if QuerySet(
model_value.get('model')).count() >= model_value.get('count'):
raise AppApiException(400, model_value.get('message'))
return True

Binary file not shown.


Binary file not shown.


@ -0,0 +1 @@
import{cr as c,cZ as t,cy as o,cz as p,cA as a,cI as e,dD as n,cC as r,dE as _,cU as i}from"./index-6b91f299.js";const u={class:"app-layout"},y=c({__name:"AppLayout",setup(d){const{user:s}=t();return(l,m)=>(o(),p("div",u,[a(e(n)),r("div",{class:i(["app-main",e(s).isExpire()?"isExpire":""])},[a(e(_))],2)]))}});export{y as default};

@ -0,0 +1 @@
.app-layout{background-color:var(--app-layout-bg-color);height:100%}.app-main{position:relative;height:100%;padding:var(--app-header-height) 0 0!important;box-sizing:border-box;overflow:auto}.app-main.isExpire{padding-top:calc(var(--app-header-height) + 40px)!important}.sidebar-container{box-sizing:border-box;transition:width .28s;width:var(--sidebar-width);min-width:var(--sidebar-width);background-color:var(--sidebar-bg-color)}.view-container{width:calc(100% - var(--sidebar-width))}

File diff suppressed because one or more lines are too long

@ -0,0 +1 @@
.p-16-24[data-v-009e60bc]{padding:16px 24px}.mb-16[data-v-009e60bc]{margin-bottom:16px}.flex-between[data-v-009e60bc]{display:flex;justify-content:space-between;align-items:center}.flex[data-v-009e60bc]{display:flex}.align-center[data-v-009e60bc]{align-items:center}.ml-8[data-v-009e60bc]{margin-left:8px}.mr-8[data-v-009e60bc]{margin-right:8px}.ml-12[data-v-009e60bc]{margin-left:12px}.mr-4[data-v-009e60bc]{margin-right:4px}.cursor[data-v-009e60bc]{cursor:pointer}.icon[data-v-009e60bc]{width:32px;height:32px}

@ -0,0 +1 @@
.create-application .relate-dataset-card[data-v-c66f423a]{color:var(--app-text-color)}.create-application .dialog-bg[data-v-c66f423a]{border-radius:8px;background:var(--dialog-bg-gradient-color);overflow:hidden;box-sizing:border-box}.create-application .scrollbar-height-left[data-v-c66f423a]{height:calc(var(--app-main-height) - 64px)}.create-application .scrollbar-height[data-v-c66f423a]{height:calc(var(--app-main-height) - 166px)}.prologue-md-editor[data-v-c66f423a]{height:150px}[data-v-c66f423a] .el-form-item__label{display:block}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -0,0 +1 @@
.dataset-setting[data-v-9c5a864f]{width:70%;margin:0 auto}

@ -0,0 +1 @@
import{cr as N,cZ as F,dk as P,cs as c,da as T,cv as p,cy as g,cG as w,cB as a,cC as l,cA as i,cF as j,cL as A,cK as u,cM as V,dq as k,cR as x,cX as y,cq as v,d$ as G,de as O,dP as K}from"./index-6b91f299.js";const X={class:"mt-8"},Z={class:"flex mt-8"},H=["src"],J={class:"el-upload__tip info mt-16"},Q={class:"dialog-footer"},ee=N({__name:"EditAvatarDialog",emits:["refresh"],setup(W,{expose:C,emit:$}){const{application:U}=F(),z=P(),{params:{id:h}}=z,D=$,d=c(null),r=c(null),t=c(!1),m=c(!1),f=c(null),n=c("default");T(t,e=>{e||(d.value=null,r.value=null)});const L=e=>{n.value=k(e.icon)?"custom":"default",r.value=k(e.icon)?e.icon:null,f.value=x.cloneDeep(e),t.value=!0},R=e=>{if((e==null?void 0:e.size)/1024/1024<10)d.value=e,r.value=URL.createObjectURL(e.raw);else return y(v("views.applicationOverview.appInfo.EditAvatarDialog.fileSizeExceeded")),!1};function S(){if(n.value==="default")U.asyncPutApplication(h,{icon:G},m).then(e=>{D("refresh"),O(v("views.applicationOverview.appInfo.EditAvatarDialog.setSuccess")),t.value=!1});else if(n.value==="custom"&&d.value){let e=new FormData;e.append("file",d.value.raw),K.putAppIcon(h,e,m).then(o=>{D("refresh"),O(v("views.applicationOverview.appInfo.EditAvatarDialog.setSuccess")),t.value=!1})}else y(v("views.applicationOverview.appInfo.EditAvatarDialog.uploadImagePrompt"))}return C({open:L}),(e,o)=>{const E=p("AppAvatar"),I=p("el-radio"),_=p("el-button"),q=p("el-upload"),B=p("el-radio-group"),M=p("el-dialog");return g(),w(M,{title:e.$t("views.applicationOverview.appInfo.EditAvatarDialog.title"),modelValue:t.value,"onUpdate:modelValue":o[2]||(o[2]=s=>t.value=s),"close-on-click-modal":!1,"close-on-press-escape":!1},{footer:a(()=>[l("span",Q,[i(_,{onClick:o[1]||(o[1]=j(s=>t.value=!1,["prevent"]))},{default:a(()=>[A(u(e.$t("common.cancel")),1)]),_:1}),i(_,{type:"primary",onClick:S,loading:m.value},{default:a(()=>[A(u(e.$t("common.save")),1)]),_:1},8,["loading"])])]),default:a(()=>[i(B,{modelValue:n.value,"onUpdate:modelValue":o[0]||(o[0]=s=>n.value=s),class:"radio-block mb-16"},{default:a(()=>[l("div",null,[i(I,{value:"default"},{default:a(()=>{var s,b;return[l("p",null,u(e.$t("views.applicationOverview.appInfo.EditAvatarDialog.default")),1),(s=f.value)!=null&&s.name?(g(),w(E,{key:0,name:(b=f.value)==null?void 0:b.name,pinyinColor:"",class:"mt-8 mb-8",shape:"square",size:32},null,8,["name"])):V("",!0)]}),_:1})]),l("div",X,[i(I,{value:"custom"},{default:a(()=>[l("p",null,u(e.$t("views.applicationOverview.appInfo.EditAvatarDialog.customizeUpload")),1),l("div",Z,[r.value?(g(),w(E,{key:0,shape:"square",size:32,style:{background:"none"},class:"mr-16"},{default:a(()=>[l("img",{src:r.value,alt:""},null,8,H)]),_:1})):V("",!0),i(q,{ref:"uploadRef",action:"#","auto-upload":!1,"show-file-list":!1,accept:"image/jpeg, image/png, image/gif","on-change":R},{default:a(()=>[i(_,{icon:"Upload",disabled:n.value!=="custom"},{default:a(()=>[A(u(e.$t("views.applicationOverview.appInfo.EditAvatarDialog.upload")),1)]),_:1},8,["disabled"])]),_:1},512)]),l("div",J,u(e.$t("views.applicationOverview.appInfo.EditAvatarDialog.sizeTip")),1)]),_:1})])]),_:1},8,["modelValue"])]),_:1},8,["title","modelValue"])}}});export{ee as _};

@ -0,0 +1 @@
import{cr as D,cs as i,c_ as E,cq as R,da as z,cv as r,cy as b,cG as P,cB as n,cC as T,cA as t,cF as _,cL as V,cK as w,cz as A,cD as G,cE as K,cR as M}from"./index-6b91f299.js";const O={class:"dialog-footer"},H=D({__name:"FieldFormDialog",emits:["refresh"],setup(S,{expose:g,emit:y}){const d=["string","int","dict","array","float"],F=y,c=i(),$=i(!1),u=i(!1),a=i({name:"",type:d[0],source:"reference",is_required:!0}),k=E({name:[{required:!0,message:R("views.functionLib.functionForm.form.paramName.placeholder"),trigger:"blur"}]}),s=i(!1);z(s,e=>{e||(a.value={name:"",type:d[0],source:"reference",is_required:!0},u.value=!1)});const L=e=>{e&&(a.value=M.cloneDeep(e),u.value=!0),s.value=!0},q=async e=>{e&&await e.validate(l=>{l&&(F("refresh",a.value),s.value=!1)})};return g({open:L}),(e,l)=>{const C=r("el-input"),m=r("el-form-item"),f=r("el-option"),p=r("el-select"),B=r("el-switch"),N=r("el-form"),v=r("el-button"),U=r("el-dialog");return b(),P(U,{title:u.value?e.$t("views.template.templateForm.title.editParam"):e.$t("views.template.templateForm.title.addParam"),modelValue:s.value,"onUpdate:modelValue":l[8]||(l[8]=o=>s.value=o),"close-on-click-modal":!1,"close-on-press-escape":!1,"destroy-on-close":!0,"append-to-body":""},{footer:n(()=>[T("span",O,[t(v,{onClick:l[6]||(l[6]=_(o=>s.value=!1,["prevent"]))},{default:n(()=>[V(w(e.$t("common.cancel")),1)]),_:1}),t(v,{type:"primary",onClick:l[7]||(l[7]=o=>q(c.value)),loading:$.value},{default:n(()=>[V(w(u.value?e.$t("common.save"):e.$t("common.add")),1)]),_:1},8,["loading"])])]),default:n(()=>[t(N,{"label-position":"top",ref_key:"fieldFormRef",ref:c,rules:k,model:a.value,"require-asterisk-position":"right"},{default:n(()=>[t(m,{label:e.$t("views.functionLib.functionForm.form.paramName.label"),prop:"name"},{default:n(()=>[t(C,{modelValue:a.value.name,"onUpdate:modelValue":l[0]||(l[0]=o=>a.value.name=o),placeholder:e.$t("views.functionLib.functionForm.form.paramName.placeholder"),maxlength:"64","show-word-limit":"",onBlur:l[1]||(l[1]=o=>a.value.name=a.value.name.trim())},null,8,["modelValue","placeholder"])]),_:1},8,["label"]),t(m,{label:e.$t("views.functionLib.functionForm.form.dataType.label")},{default:n(()=>[t(p,{modelValue:a.value.type,"onUpdate:modelValue":l[2]||(l[2]=o=>a.value.type=o)},{default:n(()=>[(b(),A(G,null,K(d,o=>t(f,{key:o,label:o,value:o},null,8,["label","value"])),64))]),_:1},8,["modelValue"])]),_:1},8,["label"]),t(m,{label:e.$t("views.functionLib.functionForm.form.source.label")},{default:n(()=>[t(p,{modelValue:a.value.source,"onUpdate:modelValue":l[3]||(l[3]=o=>a.value.source=o)},{default:n(()=>[t(f,{label:e.$t("views.functionLib.functionForm.form.source.reference"),value:"reference"},null,8,["label"]),t(f,{label:e.$t("views.functionLib.functionForm.form.source.custom"),value:"custom"},null,8,["label"])]),_:1},8,["modelValue"])]),_:1},8,["label"]),t(m,{label:e.$t("views.functionLib.functionForm.form.required.label"),onClick:l[5]||(l[5]=_(()=>{},["prevent"]))},{default:n(()=>[t(B,{size:"small",modelValue:a.value.is_required,"onUpdate:modelValue":l[4]||(l[4]=o=>a.value.is_required=o)},null,8,["modelValue"])]),_:1},8,["label"])]),_:1},8,["rules","model"])]),_:1},8,["title","modelValue"])}}});export{H as _};

@ -0,0 +1,64 @@
<svg width="122" height="36" viewBox="0 0 122 36" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_5734_851)">
<path d="M75.3094 19.0802V27.0498H71.8274L71.8109 26.2433C70.5933 26.876 69.4033 27.1922 68.2412 27.1921H67.8972C66.7033 27.1921 65.7546 26.7335 65.051 25.8164C64.5855 25.1005 64.342 24.2629 64.3513 23.409V23.314C64.3513 21.7486 64.9008 20.7089 65.9997 20.195C66.4505 19.8947 67.5929 19.7445 69.4271 19.7444H71.3008V19.3056C71.3008 18.4043 71.1703 17.8865 70.9094 17.752C70.6249 17.5385 70.1228 17.4318 69.4033 17.4318H65.6678L65.7312 14.2393L70.032 14.123C72.5857 14.123 74.1669 14.7555 74.7758 16.0206C75.1315 16.7558 75.3094 17.7757 75.3094 19.0802ZM68.6032 22.3899C68.4844 22.5313 68.3597 22.99 68.3597 23.314C68.3597 24.0256 68.7234 24.3814 69.4508 24.3814C69.8855 24.3814 70.5022 24.2035 71.3008 23.8478V22.111C71.3008 22.111 69.0361 21.8747 68.6032 22.3899Z" fill="url(#paint0_linear_5734_851)"/>
<path d="M56.6308 27.3319L55.4162 15.1817L52.0028 27.3319H48.028L44.6928 15.1817L43.4348 27.3319L37.9399 27.2851L40.6207 9.35059H47.6212L50.0211 17.8847L52.4444 9.35059H59.281L62.0087 27.3319H56.6308Z" fill="url(#paint1_linear_5734_851)"/>
<path d="M85.3943 26.9654L83.5118 23.1105L81.6291 26.9654H77.2017L80.831 20.5778L77.3644 14.6084H81.6525L83.5118 18.2543L85.313 14.6084H89.6009L86.1576 20.5778L89.7637 26.9654H85.3943Z" fill="url(#paint2_linear_5734_851)"/>
<path d="M101.114 26.9656C101.029 26.8419 96.8966 20.1336 96.8966 20.1336L95.5396 22.2226V26.9656H90.9727V9.11621H95.5396V16.3526L99.7128 9.17639H104.477L99.514 17.1079L105.855 26.9656H101.114Z" fill="url(#paint3_linear_5734_851)"/>
<path d="M121.036 22.1453C121.036 24.7452 119.74 27.1284 115.097 27.1284H107.032V9.02713L113.203 8.90893C113.203 8.90893 116.858 8.74776 118.706 10.1533C120.068 11.1888 120.515 13.0023 120.384 14.5199C120.254 16.0375 119.553 17.0132 118.405 17.7272C119.979 18.3542 121.036 19.5453 121.036 22.1453ZM114.228 16.3327C115.766 16.3327 116.62 15.5561 116.62 14.4502C116.62 13.2738 115.833 12.5839 114.228 12.5839L111.576 12.5909V16.3327H114.228ZM114.365 23.5376C116.497 23.5376 117.022 22.3933 117.022 21.6318C117.022 20.4311 116.17 19.5633 114.752 19.5633H111.576V23.5376H114.365Z" fill="url(#paint4_linear_5734_851)"/>
<path d="M17.4213 26.7354H12.8296L11.1277 28.4372C11.028 28.5369 10.9601 28.6639 10.9326 28.8022C10.9051 28.9405 10.9193 29.0838 10.9732 29.2141C11.0272 29.3443 11.1185 29.4557 11.2358 29.534C11.353 29.6123 11.4908 29.6541 11.6318 29.6541H18.6192C18.7602 29.6541 18.898 29.6123 19.0153 29.534C19.1325 29.4557 19.2239 29.3443 19.2778 29.2141C19.3318 29.0838 19.3459 28.9405 19.3184 28.8022C19.2909 28.6639 19.223 28.5369 19.1233 28.4372L17.4213 26.7354Z" fill="url(#paint5_linear_5734_851)"/>
<path d="M30.04 13.3823H29.1348V19.7499H30.04C30.1305 19.7499 30.2201 19.732 30.3037 19.6974C30.3873 19.6628 30.4633 19.612 30.5273 19.548C30.5913 19.484 30.642 19.4081 30.6767 19.3244C30.7113 19.2408 30.7291 19.1512 30.7291 19.0607V14.0715C30.7291 13.8887 30.6565 13.7134 30.5273 13.5842C30.398 13.4549 30.2227 13.3823 30.04 13.3823Z" fill="url(#paint6_linear_5734_851)"/>
<path d="M1.92296 13.3823H1.01776C0.834985 13.3823 0.659698 13.4549 0.530458 13.5842C0.401219 13.7134 0.328613 13.8887 0.328613 14.0715V19.0607C0.328611 19.1512 0.346435 19.2408 0.381067 19.3244C0.415699 19.4081 0.466461 19.484 0.530455 19.548C0.594448 19.612 0.670419 19.6628 0.754031 19.6974C0.837643 19.732 0.927258 19.7499 1.01776 19.7499H1.92296V13.3823Z" fill="url(#paint7_linear_5734_851)"/>
<path d="M19.0238 14.2251C18.682 14.2251 18.3541 14.3609 18.1124 14.6026C17.8707 14.8443 17.7349 15.1722 17.7349 15.514V16.4382C17.7349 16.7801 17.8707 17.108 18.1124 17.3497C18.3541 17.5914 18.682 17.7272 19.0239 17.7272C19.3657 17.7272 19.6936 17.5914 19.9353 17.3497C20.1771 17.108 20.3129 16.7801 20.3129 16.4382V15.5141C20.3129 15.3448 20.2796 15.1772 20.2148 15.0208C20.15 14.8644 20.055 14.7223 19.9353 14.6026C19.8156 14.4829 19.6735 14.388 19.5171 14.3232C19.3607 14.2584 19.1931 14.2251 19.0238 14.2251Z" fill="url(#paint8_linear_5734_851)"/>
<path d="M12.3012 14.2251C11.9593 14.2251 11.6315 14.3609 11.3897 14.6026C11.148 14.8443 11.0122 15.1722 11.0122 15.514V16.4382C11.0122 16.7801 11.148 17.108 11.3897 17.3497C11.6315 17.5914 11.9593 17.7272 12.3012 17.7272C12.6431 17.7272 12.9709 17.5914 13.2127 17.3497C13.4544 17.108 13.5902 16.7801 13.5902 16.4382V15.5141C13.5902 15.3448 13.5569 15.1772 13.4921 15.0208C13.4273 14.8644 13.3324 14.7223 13.2127 14.6026C13.093 14.4829 12.9509 14.388 12.7945 14.3232C12.6381 14.2584 12.4704 14.2251 12.3012 14.2251Z" fill="url(#paint9_linear_5734_851)"/>
<path d="M23.3607 6.91309H7.69709C6.3139 6.91465 4.98782 7.46481 4.00976 8.44287C3.0317 9.42092 2.48154 10.747 2.47998 12.1302V20.9609C2.48154 22.3441 3.03169 23.6702 4.00975 24.6483C4.98781 25.6264 6.3139 26.1766 7.69709 26.1781H23.3607C24.7439 26.1766 26.07 25.6264 27.0481 24.6483C28.0262 23.6703 28.5764 22.3442 28.5779 20.9609V12.1302C28.5763 10.747 28.0262 9.4209 27.0481 8.44284C26.07 7.46478 24.7439 6.91463 23.3607 6.91309ZM23.7988 20.9082C23.7988 21.1575 23.6998 21.3965 23.5235 21.5727C23.3473 21.749 23.1083 21.848 22.859 21.848H15.2189C14.0629 21.848 12.9263 22.1451 11.9181 22.7108L9.355 24.149V21.848H8.19882C7.94958 21.848 7.71055 21.749 7.53432 21.5728C7.35808 21.3966 7.25907 21.1575 7.25906 20.9083V11.5468C7.25907 11.2976 7.35808 11.0585 7.53432 10.8823C7.71056 10.7061 7.94958 10.6071 8.19882 10.6071H22.859C23.1082 10.6071 23.3472 10.7061 23.5235 10.8823C23.6997 11.0585 23.7987 11.2976 23.7987 11.5468L23.7988 20.9082Z" fill="url(#paint10_linear_5734_851)"/>
</g>
<defs>
<linearGradient id="paint0_linear_5734_851" x1="69.8304" y1="10.0003" x2="69.8304" y2="27.3566" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint1_linear_5734_851" x1="49.9743" y1="10.5855" x2="49.9743" y2="27.4838" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint2_linear_5734_851" x1="83.4827" y1="9.95634" x2="83.4827" y2="26.918" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint3_linear_5734_851" x1="98.4139" y1="9.57325" x2="98.4139" y2="26.5967" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint4_linear_5734_851" x1="114.034" y1="9.05828" x2="114.034" y2="26.3934" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint5_linear_5734_851" x1="15.1255" y1="26.7354" x2="15.1255" y2="29.6542" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint6_linear_5734_851" x1="29.9319" y1="13.3823" x2="29.9319" y2="19.7499" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint7_linear_5734_851" x1="1.12576" y1="13.3823" x2="1.12576" y2="19.7499" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint8_linear_5734_851" x1="19.0238" y1="14.2251" x2="19.0238" y2="17.7273" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint9_linear_5734_851" x1="12.3012" y1="14.2251" x2="12.3012" y2="17.7273" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<linearGradient id="paint10_linear_5734_851" x1="15.5289" y1="6.91309" x2="15.5289" y2="26.1782" gradientUnits="userSpaceOnUse">
<stop stop-color="#3370FF"/>
<stop offset="1" stop-color="#7F3BF5"/>
</linearGradient>
<clipPath id="clip0_5734_851">
<rect width="121" height="36" fill="white"/>
</clipPath>
</defs>
</svg>


@ -0,0 +1 @@
import{cr as M,cs as g,c_ as q,cq as h,da as w,e5 as B,cv as l,cy as n,cG as d,cB as p,cA as k,cz as P,cK as R,cC as U,cF as C,e7 as I}from"./index-6b91f299.js";const S={key:1,class:"lighter"},$=M({__name:"ParagraphForm",props:{data:{type:Object,default:()=>{}},isEdit:Boolean},setup(m,{expose:b}){const v=m,y=["bold","underline","italic","-","title","strikeThrough","sub","sup","quote","unorderedList","orderedList","task","-","codeRow","code","link","image","table","mermaid","katex","-","revoke","next","=","pageFullscreen","preview","htmlPreview"],_=["markdownTotal",0,"=",1,"scrollSwitch"],V=g(),a=g({title:"",content:""}),x=q({content:[{required:!0,message:h("views.paragraph.form.content.requiredMessage1"),trigger:"blur"},{max:1e5,message:h("views.paragraph.form.content.requiredMessage2"),trigger:"blur"}]}),i=g();w(()=>v.data,e=>{e&&JSON.stringify(e)!=="{}"&&(a.value.title=e.title,a.value.content=e.content)},{immediate:!0}),w(()=>v.isEdit,e=>{var t;e||(t=i.value)==null||t.clearValidate()},{immediate:!0});function E(){if(i.value)return i.value.validate(e=>e)}const F=async(e,t)=>{const c=await Promise.all(e.map(r=>new Promise((u,f)=>{const s=new FormData;s.append("file",r),I.postImage(s).then(o=>{u(o)}).catch(o=>f(o))})));t(c.map(r=>r.data))};return B(()=>{a.value={title:"",content:""}}),b({validate:E,form:a}),(e,t)=>{const c=l("el-input"),r=l("el-form-item"),u=l("MdEditor"),f=l("MdPreview"),s=l("el-form");return n(),d(s,{ref_key:"paragraphFormRef",ref:i,model:a.value,"label-position":"top","require-asterisk-position":"right",rules:x,onSubmit:t[2]||(t[2]=C(()=>{},["prevent"]))},{default:p(()=>[k(r,{label:e.$t("views.paragraph.form.paragraphTitle.label")},{default:p(()=>[m.isEdit?(n(),d(c,{key:0,modelValue:a.value.title,"onUpdate:modelValue":t[0]||(t[0]=o=>a.value.title=o),placeholder:e.$t("views.paragraph.form.paragraphTitle.placeholder"),maxlength:"256","show-word-limit":""},null,8,["modelValue","placeholder"])):(n(),P("span",S,R(a.value.title||"-"),1))]),_:1},8,["label"]),k(r,{label:e.$t("views.paragraph.form.content.label"),prop:"content"},{default:p(()=>[m.isEdit?(n(),d(u,{key:0,modelValue:a.value.content,"onUpdate:modelValue":t[1]||(t[1]=o=>a.value.content=o),placeholder:e.$t("views.paragraph.form.content.placeholder"),maxLength:1e5,preview:!1,toolbars:y,style:{height:"300px"},onOnUploadImg:F,footers:_},{defFooters:p(()=>t[3]||(t[3]=[U("span",{style:{"margin-left":"-6px"}},"/ 100000",-1)])),_:1},8,["modelValue","placeholder"])):(n(),d(f,{key:1,ref_key:"editorRef",ref:V,editorId:"preview-only",modelValue:a.value.content,class:"maxkb-md"},null,8,["modelValue"]))]),_:1},8,["label"])]),_:1},8,["model","rules"])}}});export{$ as _};

@ -0,0 +1 @@
.aiMode-param-dialog[data-v-ff770558]{padding:8px 8px 24px}.aiMode-param-dialog .el-dialog__header[data-v-ff770558]{padding:16px 16px 0}.aiMode-param-dialog .el-dialog__body[data-v-ff770558]{padding:16px!important}.aiMode-param-dialog .dialog-max-height[data-v-ff770558]{height:550px}.aiMode-param-dialog .custom-slider .el-input-number.is-without-controls .el-input__wrapper[data-v-ff770558]{padding:0!important}.addDataset-dialog{padding:0}.addDataset-dialog .el-dialog__header{padding:24px 24px 8px}.addDataset-dialog .el-dialog__body{padding:8px!important}.addDataset-dialog .el-dialog__footer{padding:8px 24px 24px}.addDataset-dialog .el-dialog__headerbtn{top:13px}.addDataset-dialog .max-height{max-height:calc(100vh - 260px);padding:0 16px}.aiMode-param-dialog[data-v-73da63d7]{padding:8px 8px 24px}.aiMode-param-dialog .el-dialog__header[data-v-73da63d7]{padding:16px 16px 0}.aiMode-param-dialog .el-dialog__body[data-v-73da63d7]{padding:16px!important}.aiMode-param-dialog .dialog-max-height[data-v-73da63d7]{height:550px}.aiMode-param-dialog .custom-slider .el-input-number.is-without-controls .el-input__wrapper[data-v-73da63d7]{padding:0!important}.param-dialog{padding:8px 8px 24px}.param-dialog .el-dialog__header{padding:16px 16px 0}.param-dialog .el-dialog__body{padding:0!important}.param-dialog .dialog-max-height{height:560px}.param-dialog .custom-slider .el-input-number.is-without-controls .el-input__wrapper{padding:0!important}

@ -0,0 +1 @@
import{cr as k,cZ as D,cs as r,da as B,cv as c,cy as x,cG as z,cB as a,cC as d,cA as t,cF as N,cL as p,cK as s,cU as b,cP as T}from"./index-6b91f299.js";const U={class:"mb-8"},I={class:"mb-4"},M={class:"mb-4"},A={class:"danger"},F={class:"dialog-footer"},G=k({__name:"SyncWebDialog",emits:["refresh"],setup(K,{expose:g,emit:w}){const{dataset:h}=D(),V=w,_=r(!1),o=r("replace"),u=r(""),l=r(!1);B(l,e=>{e||(o.value="replace")});const W=e=>{u.value=e,l.value=!0},$=()=>{h.asyncSyncDataset(u.value,o.value,_).then(e=>{V("refresh",e.data),l.value=!1})};return g({open:W}),(e,n)=>{const m=c("el-text"),v=c("el-radio"),f=c("el-card"),C=c("el-radio-group"),y=c("el-button"),S=c("el-dialog");return x(),z(S,{title:e.$t("views.dataset.syncWeb.title"),modelValue:l.value,"onUpdate:modelValue":n[2]||(n[2]=i=>l.value=i),width:"600px","close-on-click-modal":!1,"close-on-press-escape":!1,"destroy-on-close":!0},{footer:a(()=>[d("span",F,[t(y,{onClick:n[1]||(n[1]=N(i=>l.value=!1,["prevent"]))},{default:a(()=>[p(s(e.$t("common.cancel")),1)]),_:1}),t(y,{type:"primary",onClick:$,loading:_.value},{default:a(()=>[p(s(e.$t("common.confirm")),1)]),_:1},8,["loading"])])]),default:a(()=>[d("p",U,s(e.$t("views.dataset.syncWeb.syncMethod")),1),t(C,{modelValue:o.value,"onUpdate:modelValue":n[0]||(n[0]=i=>o.value=i),class:"card__radio"},{default:a(()=>[t(f,{shadow:"never",class:b(["mb-16",o.value==="replace"?"active":""])},{default:a(()=>[t(v,{value:"replace",size:"large"},{default:a(()=>[d("p",I,s(e.$t("views.dataset.syncWeb.replace")),1),t(m,{type:"info"},{default:a(()=>[p(s(e.$t("views.dataset.syncWeb.replaceText")),1)]),_:1})]),_:1})]),_:1},8,["class"]),t(f,{shadow:"never",class:b(["mb-16",o.value==="complete"?"active":""])},{default:a(()=>[t(v,{value:"complete",size:"large"},{default:a(()=>[d("p",M,s(e.$t("views.dataset.syncWeb.complete")),1),t(m,{type:"info"},{default:a(()=>[p(s(e.$t("views.dataset.syncWeb.completeText")),1)]),_:1})]),_:1})]),_:1},8,["class"])]),_:1},8,["modelValue"]),d("p",A,s(e.$t("views.dataset.syncWeb.tip")),1)]),_:1},8,["title","modelValue"])}}});const P=T(G,[["__scopeId","data-v-959f578d"]]);export{P as S};

@ -0,0 +1 @@
.select-provider[data-v-959f578d]{font-size:16px;color:#646a73;font-weight:400;line-height:24px;cursor:pointer}.select-provider[data-v-959f578d]:hover{color:var(--el-color-primary)}.active-breadcrumb[data-v-959f578d]{font-size:16px;color:#1f2329;font-weight:500;line-height:24px}

@ -0,0 +1 @@
function o(e){for(var c=e.length/6|0,n=new Array(c),a=0;a<c;)n[a]="#"+e.slice(a*6,++a*6);return n}const r=o("4e79a7f28e2ce1575976b7b259a14fedc949af7aa1ff9da79c755fbab0ab");export{r as s};

@ -0,0 +1 @@
.paragraph-tabs[data-v-666a6383] .el-tabs__item{background:var(--app-text-color-light-1);margin:4px;border-radius:4px;padding:5px 10px 5px 8px!important;height:auto}.paragraph-tabs[data-v-666a6383] .el-tabs__item:nth-child(2){margin-left:0}.paragraph-tabs[data-v-666a6383] .el-tabs__item:last-child{margin-right:0}.paragraph-tabs[data-v-666a6383] .el-tabs__item.is-active{border:1px solid var(--el-color-primary);background:var(--el-color-primary-light-9);color:var(--el-text-color-primary)}.paragraph-tabs[data-v-666a6383] .el-tabs__nav-wrap:after{display:none}.paragraph-tabs[data-v-666a6383] .el-tabs__active-bar{display:none}.paragraph-list[data-v-666a6383]{height:calc(var(--create-dataset-height) - 101px)}.set-rules[data-v-e792cb86]{width:100%}.set-rules .left-height[data-v-e792cb86]{max-height:calc(var(--create-dataset-height) - 110px);overflow-x:hidden}.set-rules__radio[data-v-e792cb86]{width:100%;display:block}.set-rules__radio .el-radio[data-v-e792cb86]{white-space:break-spaces;width:100%;height:100%;line-height:22px;color:var(--app-text-color)}.set-rules__radio[data-v-e792cb86] .el-radio__label{padding-left:30px;width:100%}.set-rules__radio[data-v-e792cb86] .el-radio__input{position:absolute;top:16px}.set-rules__radio .active[data-v-e792cb86]{border:1px solid var(--el-color-primary)}.set-rules__form .title[data-v-e792cb86]{font-size:14px;font-weight:400}.result-success[data-v-714bf56b]{width:70%;margin:0 auto 30px}.upload__decoration[data-v-5bcf672e]{font-size:12px;line-height:20px;color:var(--el-text-color-secondary)}.el-upload__text .hover[data-v-5bcf672e]:hover{color:var(--el-color-primary-light-5)}.update-info[data-v-5bcf672e]{background:#d6e2ff;line-height:25px}.create-dataset__steps[data-v-8da16b66]{min-width:450px;max-width:800px;width:80%;margin:0 auto;padding-right:60px}.create-dataset__steps[data-v-8da16b66] .el-step__line{left:64%!important;right:-33%!important}.create-dataset__component[data-v-8da16b66]{width:100%;margin:0 auto;overflow:hidden}.create-dataset__footer[data-v-8da16b66]{padding:16px 24px;position:fixed;bottom:0;left:0;background:#ffffff;width:100%;box-sizing:border-box}.create-dataset .upload-document[data-v-8da16b66]{width:70%;margin:0 auto 20px}

File diff suppressed because one or more lines are too long

@ -0,0 +1 @@
var l={"+":["conjugate","add"],"":["negate","subtract"],"×":["signOf","multiply"],"÷":["reciprocal","divide"],"⌈":["ceiling","greaterOf"],"⌊":["floor","lesserOf"],"":["absolute","residue"],"":["indexGenerate","indexOf"],"?":["roll","deal"],"⋆":["exponentiate","toThePowerOf"],"⍟":["naturalLog","logToTheBase"],"○":["piTimes","circularFuncs"],"!":["factorial","binomial"],"⌹":["matrixInverse","matrixDivide"],"<":[null,"lessThan"],"≤":[null,"lessThanOrEqual"],"=":[null,"equals"],">":[null,"greaterThan"],"≥":[null,"greaterThanOrEqual"],"≠":[null,"notEqual"],"≡":["depth","match"],"≢":[null,"notMatch"],"∈":["enlist","membership"],"⍷":[null,"find"],"":["unique","union"],"∩":[null,"intersection"],"":["not","without"],"":[null,"or"],"∧":[null,"and"],"⍱":[null,"nor"],"⍲":[null,"nand"],"":["shapeOf","reshape"],",":["ravel","catenate"],"⍪":[null,"firstAxisCatenate"],"⌽":["reverse","rotate"],"⊖":["axis1Reverse","axis1Rotate"],"⍉":["transpose",null],"↑":["first","take"],"↓":[null,"drop"],"⊂":["enclose","partitionWithAxis"],"⊃":["diclose","pick"],"⌷":[null,"index"],"⍋":["gradeUp",null],"⍒":["gradeDown",null],"":["encode",null],"⊥":["decode",null],"⍕":["format","formatByExample"],"⍎":["execute",null],"⊣":["stop","left"],"⊢":["pass","right"]},t=/[\.\/⌿⍀¨⍣]/,a=/⍬/,i=/[\+−×÷⌈⌊∣⍳\?⋆⍟○!⌹<≤=>≥≠≡≢∈⍷∪∩∼∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢]/,u=/←/,o=/[⍝#].*$/,s=function(r){var n;return n=!1,function(e){return n=e,e===r?n==="\\":!0}};const f={name:"apl",startState:function(){return{prev:!1,func:!1,op:!1,string:!1,escape:!1}},token:function(r,n){var e;return r.eatSpace()?null:(e=r.next(),e==='"'||e==="'"?(r.eatWhile(s(e)),r.next(),n.prev=!0,"string"):/[\[{\(]/.test(e)?(n.prev=!1,null):/[\]}\)]/.test(e)?(n.prev=!0,null):a.test(e)?(n.prev=!1,"atom"):/[¯\d]/.test(e)?(n.func?(n.func=!1,n.prev=!1):n.prev=!0,r.eatWhile(/[\w\.]/),"number"):t.test(e)||u.test(e)?"operator":i.test(e)?(n.func=!0,n.prev=!1,l[e]?"variableName.function.standard":"variableName.function"):o.test(e)?(r.skipToEnd(),"comment"):e==="∘"&&r.peek()==="."?(r.next(),"variableName.function"):(r.eatWhile(/[\w\$_]/),n.prev=!0,"keyword"))}};export{f as apl};

@ -0,0 +1 @@
import{w as ln,c as S}from"./path-53f90ab3.js";import{ar as an,as as G,at as q,au as rn,av as y,Y as tn,aw as K,ax as _,ay as un,az as o,aA as sn,aB as on,aC as fn}from"./index-6b91f299.js";function cn(l){return l.innerRadius}function yn(l){return l.outerRadius}function gn(l){return l.startAngle}function mn(l){return l.endAngle}function pn(l){return l&&l.padAngle}function xn(l,h,C,z,v,A,Y,a){var D=C-l,i=z-h,n=Y-v,m=a-A,r=m*D-n*i;if(!(r*r<y))return r=(n*(h-A)-m*(l-v))/r,[l+r*D,h+r*i]}function V(l,h,C,z,v,A,Y){var a=l-C,D=h-z,i=(Y?A:-A)/K(a*a+D*D),n=i*D,m=-i*a,r=l+n,s=h+m,f=C+n,c=z+m,j=(r+f)/2,t=(s+c)/2,p=f-r,g=c-s,R=p*p+g*g,T=v-A,w=r*c-f*s,E=(g<0?-1:1)*K(fn(0,T*T*R-w*w)),I=(w*g-p*E)/R,O=(-w*p-g*E)/R,P=(w*g+p*E)/R,x=(-w*p+g*E)/R,d=I-j,e=O-t,u=P-j,F=x-t;return d*d+e*e>u*u+F*F&&(I=P,O=x),{cx:I,cy:O,x01:-n,y01:-m,x11:I*(v/T-1),y11:O*(v/T-1)}}function vn(){var l=cn,h=yn,C=S(0),z=null,v=gn,A=mn,Y=pn,a=null,D=ln(i);function i(){var n,m,r=+l.apply(this,arguments),s=+h.apply(this,arguments),f=v.apply(this,arguments)-rn,c=A.apply(this,arguments)-rn,j=un(c-f),t=c>f;if(a||(a=n=D()),s<r&&(m=s,s=r,r=m),!(s>y))a.moveTo(0,0);else if(j>tn-y)a.moveTo(s*G(f),s*q(f)),a.arc(0,0,s,f,c,!t),r>y&&(a.moveTo(r*G(c),r*q(c)),a.arc(0,0,r,c,f,t));else{var p=f,g=c,R=f,T=c,w=j,E=j,I=Y.apply(this,arguments)/2,O=I>y&&(z?+z.apply(this,arguments):K(r*r+s*s)),P=_(un(s-r)/2,+C.apply(this,arguments)),x=P,d=P,e,u;if(O>y){var F=sn(O/r*q(I)),L=sn(O/s*q(I));(w-=F*2)>y?(F*=t?1:-1,R+=F,T-=F):(w=0,R=T=(f+c)/2),(E-=L*2)>y?(L*=t?1:-1,p+=L,g-=L):(E=0,p=g=(f+c)/2)}var H=s*G(p),J=s*q(p),M=r*G(T),N=r*q(T);if(P>y){var Q=s*G(g),U=s*q(g),W=r*G(R),X=r*q(R),B;if(j<an)if(B=xn(H,J,W,X,Q,U,M,N)){var Z=H-B[0],$=J-B[1],k=Q-B[0],b=U-B[1],nn=1/q(on((Z*k+$*b)/(K(Z*Z+$*$)*K(k*k+b*b)))/2),en=K(B[0]*B[0]+B[1]*B[1]);x=_(P,(r-en)/(nn-1)),d=_(P,(s-en)/(nn+1))}else x=d=0}E>y?d>y?(e=V(W,X,H,J,s,d,t),u=V(Q,U,M,N,s,d,t),a.moveTo(e.cx+e.x01,e.cy+e.y01),d<P?a.arc(e.cx,e.cy,d,o(e.y01,e.x01),o(u.y01,u.x01),!t):(a.arc(e.cx,e.cy,d,o(e.y01,e.x01),o(e.y11,e.x11),!t),a.arc(0,0,s,o(e.cy+e.y11,e.cx+e.x11),o(u.cy+u.y11,u.cx+u.x11),!t),a.arc(u.cx,u.cy,d,o(u.y11,u.x11),o(u.y01,u.x01),!t))):(a.moveTo(H,J),a.arc(0,0,s,p,g,!t)):a.moveTo(H,J),!(r>y)||!(w>y)?a.lineTo(M,N):x>y?(e=V(M,N,Q,U,r,-x,t),u=V(H,J,W,X,r,-x,t),a.lineTo(e.cx+e.x01,e.cy+e.y01),x<P?a.arc(e.cx,e.cy,x,o(e.y01,e.x01),o(u.y01,u.x01),!t):(a.arc(e.cx,e.cy,x,o(e.y01,e.x01),o(e.y11,e.x11),!t),a.arc(0,0,r,o(e.cy+e.y11,e.cx+e.x11),o(u.cy+u.y11,u.cx+u.x11),t),a.arc(u.cx,u.cy,x,o(u.y11,u.x11),o(u.y01,u.x01),!t))):a.arc(0,0,r,T,R,t)}if(a.closePath(),n)return a=null,n+""||null}return i.centroid=function(){var n=(+l.apply(this,arguments)+ +h.apply(this,arguments))/2,m=(+v.apply(this,arguments)+ +A.apply(this,arguments))/2-an/2;return[G(m)*n,q(m)*n]},i.innerRadius=function(n){return arguments.length?(l=typeof n=="function"?n:S(+n),i):l},i.outerRadius=function(n){return arguments.length?(h=typeof n=="function"?n:S(+n),i):h},i.cornerRadius=function(n){return arguments.length?(C=typeof n=="function"?n:S(+n),i):C},i.padRadius=function(n){return arguments.length?(z=n==null?null:typeof n=="function"?n:S(+n),i):z},i.startAngle=function(n){return arguments.length?(v=typeof n=="function"?n:S(+n),i):v},i.endAngle=function(n){return arguments.length?(A=typeof n=="function"?n:S(+n),i):A},i.padAngle=function(n){return arguments.length?(Y=typeof n=="function"?n:S(+n),i):Y},i.context=function(n){return arguments.length?(a=n??null,i):a},i}export{vn as a};

@ -0,0 +1 @@
function t(r){return typeof r=="object"&&"length"in r?r:Array.from(r)}export{t as a};

@ -0,0 +1 @@
function t(e){var r=e.match(/^\s*\S/);return e.skipToEnd(),r?"error":null}const i={name:"asciiarmor",token:function(e,r){var n;if(r.state=="top")return e.sol()&&(n=e.match(/^-----BEGIN (.*)?-----\s*$/))?(r.state="headers",r.type=n[1],"tag"):t(e);if(r.state=="headers"){if(e.sol()&&e.match(/^\w+:/))return r.state="header","atom";var o=t(e);return o&&(r.state="body"),o}else{if(r.state=="header")return e.skipToEnd(),r.state="headers","string";if(r.state=="body")return e.sol()&&(n=e.match(/^-----END (.*)?-----\s*$/))?n[1]!=r.type?"error":(r.state="end","tag"):e.eatWhile(/[A-Za-z0-9+\/=]/)?null:(e.next(),"error");if(r.state=="end")return t(e)}},blankLine:function(e){e.state=="headers"&&(e.state="body")},startState:function(){return{state:"top",type:null}}};export{i as asciiArmor};

@ -0,0 +1 @@
function u(i){for(var s={},c=i.split(" "),T=0;T<c.length;++T)s[c[T]]=!0;return s}const o={keywords:u("DEFINITIONS OBJECTS IF DERIVED INFORMATION ACTION REPLY ANY NAMED CHARACTERIZED BEHAVIOUR REGISTERED WITH AS IDENTIFIED CONSTRAINED BY PRESENT BEGIN IMPORTS FROM UNITS SYNTAX MIN-ACCESS MAX-ACCESS MINACCESS MAXACCESS REVISION STATUS DESCRIPTION SEQUENCE SET COMPONENTS OF CHOICE DistinguishedName ENUMERATED SIZE MODULE END INDEX AUGMENTS EXTENSIBILITY IMPLIED EXPORTS"),cmipVerbs:u("ACTIONS ADD GET NOTIFICATIONS REPLACE REMOVE"),compareTypes:u("OPTIONAL DEFAULT MANAGED MODULE-TYPE MODULE_IDENTITY MODULE-COMPLIANCE OBJECT-TYPE OBJECT-IDENTITY OBJECT-COMPLIANCE MODE CONFIRMED CONDITIONAL SUBORDINATE SUPERIOR CLASS TRUE FALSE NULL TEXTUAL-CONVENTION"),status:u("current deprecated mandatory obsolete"),tags:u("APPLICATION AUTOMATIC EXPLICIT IMPLICIT PRIVATE TAGS UNIVERSAL"),storage:u("BOOLEAN INTEGER OBJECT IDENTIFIER BIT OCTET STRING UTCTime InterfaceIndex IANAifType CMIP-Attribute REAL PACKAGE PACKAGES IpAddress PhysAddress NetworkAddress BITS BMPString TimeStamp TimeTicks TruthValue RowStatus DisplayString GeneralString GraphicString IA5String NumericString PrintableString SnmpAdminString TeletexString UTF8String VideotexString VisibleString StringStore ISO646String T61String UniversalString Unsigned32 Integer32 Gauge Gauge32 Counter Counter32 Counter64"),modifier:u("ATTRIBUTE ATTRIBUTES MANDATORY-GROUP MANDATORY-GROUPS GROUP GROUPS ELEMENTS EQUALITY ORDERING SUBSTRINGS DEFINED"),accessTypes:u("not-accessible accessible-for-notify read-only read-create read-write"),multiLineStrings:!0};function L(i){var s=i.keywords||o.keywords,c=i.cmipVerbs||o.cmipVerbs,T=i.compareTypes||o.compareTypes,N=i.status||o.status,d=i.tags||o.tags,f=i.storage||o.storage,m=i.modifier||o.modifier,C=i.accessTypes||o.accessTypes,R=i.multiLineStrings||o.multiLineStrings,y=i.indentStatements!==!1,A=/[\|\^]/,E;function D(e,n){var t=e.next();if(t=='"'||t=="'")return n.tokenize=g(t),n.tokenize(e,n);if(/[\[\]\(\){}:=,;]/.test(t))return E=t,"punctuation";if(t=="-"&&e.eat("-"))return e.skipToEnd(),"comment";if(/\d/.test(t))return e.eatWhile(/[\w\.]/),"number";if(A.test(t))return e.eatWhile(A),"operator";e.eatWhile(/[\w\-]/);var r=e.current();return s.propertyIsEnumerable(r)?"keyword":c.propertyIsEnumerable(r)?"variableName":T.propertyIsEnumerable(r)?"atom":N.propertyIsEnumerable(r)?"comment":d.propertyIsEnumerable(r)?"typeName":f.propertyIsEnumerable(r)||m.propertyIsEnumerable(r)||C.propertyIsEnumerable(r)?"modifier":"variableName"}function g(e){return function(n,t){for(var r=!1,l,O=!1;(l=n.next())!=null;){if(l==e&&!r){var I=n.peek();I&&(I=I.toLowerCase(),(I=="b"||I=="h"||I=="o")&&n.next()),O=!0;break}r=!r&&l=="\\"}return(O||!(r||R))&&(t.tokenize=null),"string"}}function p(e,n,t,r,l){this.indented=e,this.column=n,this.type=t,this.align=r,this.prev=l}function a(e,n,t){var r=e.indented;return e.context&&e.context.type=="statement"&&(r=e.context.indented),e.context=new p(r,n,t,null,e.context)}function S(e){var n=e.context.type;return(n==")"||n=="]"||n=="}")&&(e.indented=e.context.indented),e.context=e.context.prev}return{name:"asn1",startState:function(){return{tokenize:null,context:new p(-2,0,"top",!1),indented:0,startOfLine:!0}},token:function(e,n){var t=n.context;if(e.sol()&&(t.align==null&&(t.align=!1),n.indented=e.indentation(),n.startOfLine=!0),e.eatSpace())return null;E=null;var r=(n.tokenize||D)(e,n);if(r=="comment")return r;if(t.align==null&&(t.align=!0),(E==";"||E==":"||E==",")&&t.type=="statement")S(n);else 
if(E=="{")a(n,e.column(),"}");else if(E=="[")a(n,e.column(),"]");else if(E=="(")a(n,e.column(),")");else if(E=="}"){for(;t.type=="statement";)t=S(n);for(t.type=="}"&&(t=S(n));t.type=="statement";)t=S(n)}else E==t.type?S(n):y&&((t.type=="}"||t.type=="top")&&E!=";"||t.type=="statement"&&E=="newstatement")&&a(n,e.column(),"statement");return n.startOfLine=!1,r},languageData:{indentOnInput:/^\s*[{}]$/,commentTokens:{line:"--"}}}}export{L as asn1};

@ -0,0 +1 @@
var t=["exten","same","include","ignorepat","switch"],o=["#include","#exec"],c=["addqueuemember","adsiprog","aelsub","agentlogin","agentmonitoroutgoing","agi","alarmreceiver","amd","answer","authenticate","background","backgrounddetect","bridge","busy","callcompletioncancel","callcompletionrequest","celgenuserevent","changemonitor","chanisavail","channelredirect","chanspy","clearhash","confbridge","congestion","continuewhile","controlplayback","dahdiacceptr2call","dahdibarge","dahdiras","dahdiscan","dahdisendcallreroutingfacility","dahdisendkeypadfacility","datetime","dbdel","dbdeltree","deadagi","dial","dictate","directory","disa","dumpchan","eagi","echo","endwhile","exec","execif","execiftime","exitwhile","extenspy","externalivr","festival","flash","followme","forkcdr","getcpeid","gosub","gosubif","goto","gotoif","gotoiftime","hangup","iax2provision","ices","importvar","incomplete","ivrdemo","jabberjoin","jabberleave","jabbersend","jabbersendgroup","jabberstatus","jack","log","macro","macroexclusive","macroexit","macroif","mailboxexists","meetme","meetmeadmin","meetmechanneladmin","meetmecount","milliwatt","minivmaccmess","minivmdelete","minivmgreet","minivmmwi","minivmnotify","minivmrecord","mixmonitor","monitor","morsecode","mp3player","mset","musiconhold","nbscat","nocdr","noop","odbc","odbc","odbcfinish","originate","ospauth","ospfinish","osplookup","ospnext","page","park","parkandannounce","parkedcall","pausemonitor","pausequeuemember","pickup","pickupchan","playback","playtones","privacymanager","proceeding","progress","queue","queuelog","raiseexception","read","readexten","readfile","receivefax","receivefax","receivefax","record","removequeuemember","resetcdr","retrydial","return","ringing","sayalpha","saycountedadj","saycountednoun","saycountpl","saydigits","saynumber","sayphonetic","sayunixtime","senddtmf","sendfax","sendfax","sendfax","sendimage","sendtext","sendurl","set","setamaflags","setcallerpres","setmusiconhold","sipaddheader","sipdtmfmode","sipremoveheader","skel","slastation","slatrunk","sms","softhangup","speechactivategrammar","speechbackground","speechcreate","speechdeactivategrammar","speechdestroy","speechloadgrammar","speechprocessingsound","speechstart","speechunloadgrammar","stackpop","startmusiconhold","stopmixmonitor","stopmonitor","stopmusiconhold","stopplaytones","system","testclient","testserver","transfer","tryexec","trysystem","unpausemonitor","unpausequeuemember","userevent","verbose","vmauthenticate","vmsayname","voicemail","voicemailmain","wait","waitexten","waitfornoise","waitforring","waitforsilence","waitmusiconhold","waituntil","while","zapateller"];function l(e,n){var i="",a=e.next();if(n.blockComment)return a=="-"&&e.match("-;",!0)?n.blockComment=!1:e.skipTo("--;")?(e.next(),e.next(),e.next(),n.blockComment=!1):e.skipToEnd(),"comment";if(a==";")return e.match("--",!0)&&!e.match("-",!1)?(n.blockComment=!0,"comment"):(e.skipToEnd(),"comment");if(a=="[")return e.skipTo("]"),e.eat("]"),"header";if(a=='"')return e.skipTo('"'),"string";if(a=="'")return e.skipTo("'"),"string.special";if(a=="#"&&(e.eatWhile(/\w/),i=e.current(),o.indexOf(i)!==-1))return e.skipToEnd(),"strong";if(a=="$"){var r=e.peek();if(r=="{")return e.skipTo("}"),e.eat("}"),"variableName.special"}if(e.eatWhile(/\w/),i=e.current(),t.indexOf(i)!==-1){switch(n.extenStart=!0,i){case"same":n.extenSame=!0;break;case"include":case"switch":case"ignorepat":n.extenInclude=!0;break}return"atom"}}const 
s={name:"asterisk",startState:function(){return{blockComment:!1,extenStart:!1,extenSame:!1,extenInclude:!1,extenExten:!1,extenPriority:!1,extenApplication:!1}},token:function(e,n){var i="";if(e.eatSpace())return null;if(n.extenStart)return e.eatWhile(/[^\s]/),i=e.current(),/^=>?$/.test(i)?(n.extenExten=!0,n.extenStart=!1,"strong"):(n.extenStart=!1,e.skipToEnd(),"error");if(n.extenExten)return n.extenExten=!1,n.extenPriority=!0,e.eatWhile(/[^,]/),n.extenInclude&&(e.skipToEnd(),n.extenPriority=!1,n.extenInclude=!1),n.extenSame&&(n.extenPriority=!1,n.extenSame=!1,n.extenApplication=!0),"tag";if(n.extenPriority)return n.extenPriority=!1,n.extenApplication=!0,e.next(),n.extenSame?null:(e.eatWhile(/[^,]/),"number");if(n.extenApplication){if(e.eatWhile(/,/),i=e.current(),i===",")return null;if(e.eatWhile(/\w/),i=e.current().toLowerCase(),n.extenApplication=!1,c.indexOf(i)!==-1)return"def"}else return l(e,n);return null},languageData:{commentTokens:{line:";",block:{open:";--",close:"--;"}}}};export{s as asterisk};

File diff suppressed because one or more lines are too long

@ -0,0 +1 @@
var f="><+-.,[]".split("");const r={name:"brainfuck",startState:function(){return{commentLine:!1,left:0,right:0,commentLoop:!1}},token:function(i,n){if(i.eatSpace())return null;i.sol()&&(n.commentLine=!1);var e=i.next().toString();if(f.indexOf(e)!==-1){if(n.commentLine===!0)return i.eol()&&(n.commentLine=!1),"comment";if(e==="]"||e==="[")return e==="["?n.left++:n.right++,"bracket";if(e==="+"||e==="-")return"keyword";if(e==="<"||e===">")return"atom";if(e==="."||e===",")return"def"}else return n.commentLine=!0,i.eol()&&(n.commentLine=!1),"comment";i.eol()&&(n.commentLine=!1)}};export{r as brainfuck};

Some files were not shown because too many files have changed in this diff.
