sample_id stringlengths 21 196 | text stringlengths 105 936k | metadata dict | category stringclasses 6
values |
|---|---|---|---|
1Panel-dev/MaxKB:apps/models_provider/impl/regolo_model_provider/credential/llm.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎
@file: llm.py
@date:2024/7/11 18:32
@desc:
"""
from typing import Dict
from django.utils.translation import gettext_lazy as _, gettext
from langchain_core.messages import HumanMessage
from common import forms
from common.exception.app_exception import AppApiException
from common.forms import BaseForm, TooltipLabel
from models_provider.base_model_provider import BaseModelCredential, ValidCode
from common.utils.logger import maxkb_logger
class RegoloLLMModelParams(BaseForm):
    """Runtime tuning parameters shown in the Regolo LLM settings dialog."""
    # Sampling temperature: 0.1–1.0 slider, default 0.7, two-decimal precision.
    temperature = forms.SliderField(TooltipLabel(_('Temperature'),
                                                 _('Higher values make the output more random, while lower values make it more focused and deterministic')),
                                    required=True, default_value=0.7,
                                    _min=0.1,
                                    _max=1.0,
                                    _step=0.01,
                                    precision=2)
    # Upper bound on generated tokens: 1–100000 slider, default 800.
    max_tokens = forms.SliderField(
        TooltipLabel(_('Output the maximum Tokens'),
                     _('Specify the maximum number of tokens that the model can generate')),
        required=True, default_value=800,
        _min=1,
        _max=100000,
        _step=1,
        precision=0)
class RegoloLLMModelCredential(BaseForm, BaseModelCredential):
    """Credential form (API key + base URL) for Regolo LLM models."""

    def is_valid(self, model_type: str, model_name, model_credential: Dict[str, object], model_params, provider,
                 raise_exception=False):
        """Validate the credential by round-tripping a trivial prompt.

        Returns True on success; returns False (or raises AppApiException when
        raise_exception is set) on missing fields or a failed invocation.
        """
        # Reject model types this provider does not expose.
        supported = [mt for mt in provider.get_model_type_list() if mt.get('value') == model_type]
        if not supported:
            raise AppApiException(ValidCode.valid_error.value,
                                  gettext('{model_type} Model type is not supported').format(model_type=model_type))
        # Both credential fields are mandatory.
        for required_key in ['api_key', 'api_base']:
            if required_key not in model_credential:
                if raise_exception:
                    raise AppApiException(ValidCode.valid_error.value,
                                          gettext('{key} is required').format(key=required_key))
                return False
        try:
            candidate = provider.get_model(model_type, model_name, model_credential, **model_params)
            candidate.invoke([HumanMessage(content=gettext('Hello'))])
        except Exception as e:
            maxkb_logger.error(f'Exception: {e}', exc_info=True)
            if isinstance(e, AppApiException):
                raise e
            if not raise_exception:
                return False
            raise AppApiException(ValidCode.valid_error.value,
                                  gettext(
                                      'Verification failed, please check whether the parameters are correct: {error}').format(
                                      error=str(e)))
        return True

    def encryption_dict(self, model: Dict[str, object]):
        # Mask the API key before the credential is echoed back to the client.
        masked_key = super().encryption(model.get('api_key', ''))
        return {**model, 'api_key': masked_key}

    api_base = forms.TextInputField('API URL', required=True, default_value='https://api.regolo.ai/v1')
    api_key = forms.PasswordInputField('API Key', required=True)

    def get_model_params_setting_form(self, model_name):
        # Tuning parameters rendered in the model settings dialog.
        return RegoloLLMModelParams()
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/models_provider/impl/regolo_model_provider/credential/llm.py",
"license": "GNU General Public License v3.0",
"lines": 66,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/models_provider/impl/regolo_model_provider/credential/tti.py | # coding=utf-8
from typing import Dict
from django.utils.translation import gettext_lazy as _, gettext
from common import forms
from common.exception.app_exception import AppApiException
from common.forms import BaseForm, TooltipLabel
from models_provider.base_model_provider import BaseModelCredential, ValidCode
from common.utils.logger import maxkb_logger
class RegoloTTIModelParams(BaseForm):
    """Generation parameters shown in the Regolo text-to-image settings dialog."""
    # Output resolution; the three options mirror the OpenAI images API sizes.
    size = forms.SingleSelect(
        TooltipLabel(_('Image size'),
                     _('The image generation endpoint allows you to create raw images based on text prompts. ')),
        required=True,
        default_value='1024x1024',
        option_list=[
            {'value': '1024x1024', 'label': '1024x1024'},
            {'value': '1024x1792', 'label': '1024x1792'},
            {'value': '1792x1024', 'label': '1792x1024'},
        ],
        text_field='label',
        value_field='value'
    )
    # Rendering quality: 'standard' (default) or 'hd'.
    quality = forms.SingleSelect(
        TooltipLabel(_('Picture quality'), _('''
                                By default, images are produced in standard quality.
                                ''')),
        required=True,
        default_value='standard',
        option_list=[
            {'value': 'standard', 'label': 'standard'},
            {'value': 'hd', 'label': 'hd'},
        ],
        text_field='label',
        value_field='value'
    )
    # Number of images generated per request (1–10).
    n = forms.SliderField(
        TooltipLabel(_('Number of pictures'),
                     _('1 as default')),
        required=True, default_value=1,
        _min=1,
        _max=10,
        _step=1,
        precision=0)
class RegoloTextToImageModelCredential(BaseForm, BaseModelCredential):
    """Credential form (API key + base URL) for Regolo text-to-image models."""
    api_base = forms.TextInputField('API URL', required=True, default_value='https://api.regolo.ai/v1')
    api_key = forms.PasswordInputField('API Key', required=True)

    def is_valid(self, model_type: str, model_name, model_credential: Dict[str, object], model_params, provider,
                 raise_exception=False):
        """Validate the credential by authenticating against the endpoint.

        Returns True on success; returns False (or raises AppApiException when
        raise_exception is set) on missing fields or a failed auth check.
        """
        # Reject model types this provider does not expose.
        matching = [mt for mt in provider.get_model_type_list() if mt.get('value') == model_type]
        if not matching:
            raise AppApiException(ValidCode.valid_error.value,
                                  gettext('{model_type} Model type is not supported').format(model_type=model_type))
        # Both credential fields are mandatory.
        for required_key in ['api_key', 'api_base']:
            if required_key not in model_credential:
                if raise_exception:
                    raise AppApiException(ValidCode.valid_error.value,
                                          gettext('{key} is required').format(key=required_key))
                return False
        try:
            candidate = provider.get_model(model_type, model_name, model_credential, **model_params)
            candidate.check_auth()
        except Exception as e:
            maxkb_logger.error(f'Exception: {e}', exc_info=True)
            if isinstance(e, AppApiException):
                raise e
            if not raise_exception:
                return False
            raise AppApiException(ValidCode.valid_error.value,
                                  gettext(
                                      'Verification failed, please check whether the parameters are correct: {error}').format(
                                      error=str(e)))
        return True

    def encryption_dict(self, model: Dict[str, object]):
        # Mask the API key before the credential is echoed back to the client.
        return {**model, 'api_key': super().encryption(model.get('api_key', ''))}

    def get_model_params_setting_form(self, model_name):
        # Generation parameters rendered in the model settings dialog.
        return RegoloTTIModelParams()
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/models_provider/impl/regolo_model_provider/credential/tti.py",
"license": "GNU General Public License v3.0",
"lines": 77,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/models_provider/impl/regolo_model_provider/model/embedding.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎
@file: embedding.py
@date:2024/7/12 17:44
@desc:
"""
from typing import Dict
from langchain_community.embeddings import OpenAIEmbeddings
from models_provider.base_model_provider import MaxKBBaseModel
class RegoloEmbeddingModel(MaxKBBaseModel, OpenAIEmbeddings):
    """Embedding model backed by Regolo's OpenAI-compatible endpoint."""

    @staticmethod
    def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
        """Build an embedding client from stored credentials."""
        # Fall back to the public Regolo endpoint when no base URL was saved.
        base_url = model_credential.get('api_base') or "https://api.regolo.ai/v1"
        return RegoloEmbeddingModel(
            model=model_name,
            api_key=model_credential.get('api_key'),
            openai_api_base=base_url,
        )
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/models_provider/impl/regolo_model_provider/model/embedding.py",
"license": "GNU General Public License v3.0",
"lines": 19,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/models_provider/impl/regolo_model_provider/model/image.py | from typing import Dict
from models_provider.base_model_provider import MaxKBBaseModel
from models_provider.impl.base_chat_open_ai import BaseChatOpenAI
class RegoloImage(MaxKBBaseModel, BaseChatOpenAI):
    """Vision (image-understanding) chat model for the Regolo endpoint."""

    @staticmethod
    def is_cache_model():
        # A fresh client is built per request; MaxKB must not cache instances.
        return False

    @staticmethod
    def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
        """Build a streaming vision-chat client from stored credentials."""
        extra = MaxKBBaseModel.filter_optional_params(model_kwargs)
        endpoint = model_credential.get('api_base') or "https://api.regolo.ai/v1"
        return RegoloImage(
            model_name=model_name,
            openai_api_key=model_credential.get('api_key'),
            openai_api_base=endpoint,
            streaming=True,
            stream_usage=True,
            extra_body=extra
        )
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/models_provider/impl/regolo_model_provider/model/image.py",
"license": "GNU General Public License v3.0",
"lines": 18,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/models_provider/impl/regolo_model_provider/model/llm.py | # coding=utf-8
"""
@project: maxkb
@Author:虎
@file: llm.py
@date:2024/4/18 15:28
@desc:
"""
from typing import List, Dict
from langchain_core.messages import BaseMessage, get_buffer_string
from langchain_openai.chat_models import ChatOpenAI
from common.config.tokenizer_manage_config import TokenizerManage
from models_provider.base_model_provider import MaxKBBaseModel
from models_provider.impl.base_chat_open_ai import BaseChatOpenAI
def custom_get_token_ids(text: str):
    """Tokenize *text* with MaxKB's shared tokenizer and return the token ids."""
    return TokenizerManage.get_tokenizer().encode(text)
class RegoloChatModel(MaxKBBaseModel, BaseChatOpenAI):
    """Chat model wrapper for Regolo's OpenAI-compatible chat endpoint."""

    @staticmethod
    def is_cache_model():
        # A fresh client is built per request; MaxKB must not cache instances.
        return False

    @staticmethod
    def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
        """Build a chat client from stored credentials and optional params."""
        extra = MaxKBBaseModel.filter_optional_params(model_kwargs)
        endpoint = model_credential.get('api_base') or "https://api.regolo.ai/v1"
        return RegoloChatModel(
            model=model_name,
            openai_api_key=model_credential.get('api_key'),
            openai_api_base=endpoint,
            extra_body=extra
        )
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/models_provider/impl/regolo_model_provider/model/llm.py",
"license": "GNU General Public License v3.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/models_provider/impl/regolo_model_provider/model/tti.py | from typing import Dict
from openai import OpenAI
from common.config.tokenizer_manage_config import TokenizerManage
from models_provider.base_model_provider import MaxKBBaseModel
from models_provider.impl.base_tti import BaseTextToImage
def custom_get_token_ids(text: str):
    """Tokenize *text* with MaxKB's shared tokenizer and return the token ids."""
    return TokenizerManage.get_tokenizer().encode(text)
class RegoloTextToImage(MaxKBBaseModel, BaseTextToImage):
    """Text-to-image client for Regolo's OpenAI-compatible images endpoint."""
    api_base: str
    api_key: str
    model: str
    params: dict

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.api_key = kwargs.get('api_key')
        self.api_base = kwargs.get('api_base')
        self.model = kwargs.get('model')
        self.params = kwargs.get('params')

    @staticmethod
    def is_cache_model():
        # Instances are cheap to build, so MaxKB should not cache them.
        return False

    @staticmethod
    def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
        """Create a client, merging user params over sensible defaults."""
        optional_params = {'params': {'size': '1024x1024', 'quality': 'standard', 'n': 1}}
        for key, value in model_kwargs.items():
            # model_id / use_local / streaming are MaxKB bookkeeping keys,
            # not parameters of the images API.
            if key not in ['model_id', 'use_local', 'streaming']:
                optional_params['params'][key] = value
        return RegoloTextToImage(
            model=model_name,
            api_base=model_credential.get('api_base') or "https://api.regolo.ai/v1",
            api_key=model_credential.get('api_key'),
            **optional_params,
        )

    def check_auth(self):
        """Validate the credential by listing models; raises on auth failure."""
        client = OpenAI(api_key=self.api_key, base_url=self.api_base)
        client.models.with_raw_response.list()

    def generate_image(self, prompt: str, negative_prompt: str = None):
        """Generate image(s) for *prompt* and return their URLs.

        :param prompt: text prompt forwarded to the images endpoint.
        :param negative_prompt: accepted for interface compatibility; not
            forwarded (the OpenAI-style images API has no such field).
        :return: list of image URLs.
        :raises RuntimeError: if the response cannot be read.
        """
        client = OpenAI(api_key=self.api_key, base_url=self.api_base)
        res = client.images.generate(model=self.model, prompt=prompt, **self.params)
        try:
            return [content.url for content in res.data]
        except Exception as e:
            # BUG FIX: the original did `raise f"..."`, which raises a plain
            # string — itself a TypeError ("exceptions must derive from
            # BaseException"). Raise a real exception and chain the cause.
            raise RuntimeError(f"RegoloTextToImage generate_image error: {e}") from e
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/models_provider/impl/regolo_model_provider/model/tti.py",
"license": "GNU General Public License v3.0",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/models_provider/impl/regolo_model_provider/regolo_model_provider.py | # coding=utf-8
"""
@project: maxkb
@Author:虎
@file: openai_model_provider.py
@date:2024/3/28 16:26
@desc:
"""
import os
from django.utils.translation import gettext as _
from common.utils.common import get_file_content
from maxkb.conf import PROJECT_DIR
from models_provider.base_model_provider import ModelInfo, ModelTypeConst, ModelInfoManage, IModelProvider, \
ModelProvideInfo
from models_provider.impl.regolo_model_provider.credential.embedding import RegoloEmbeddingCredential
from models_provider.impl.regolo_model_provider.credential.llm import RegoloLLMModelCredential
from models_provider.impl.regolo_model_provider.credential.tti import RegoloTextToImageModelCredential
from models_provider.impl.regolo_model_provider.model.embedding import RegoloEmbeddingModel
from models_provider.impl.regolo_model_provider.model.llm import RegoloChatModel
from models_provider.impl.regolo_model_provider.model.tti import RegoloTextToImage
# Shared credential-form singletons reused by every registered model entry.
# (The "openai_*" names reflect the OpenAI-compatible wire protocol; these
# are Regolo credentials.)
openai_llm_model_credential = RegoloLLMModelCredential()
openai_tti_model_credential = RegoloTextToImageModelCredential()
# Chat/completion models served by Regolo.
model_info_list = [
    ModelInfo('Phi-4', '', ModelTypeConst.LLM,
              openai_llm_model_credential, RegoloChatModel
              ),
    ModelInfo('DeepSeek-R1-Distill-Qwen-32B', '', ModelTypeConst.LLM,
              openai_llm_model_credential,
              RegoloChatModel),
    ModelInfo('maestrale-chat-v0.4-beta', '',
              ModelTypeConst.LLM, openai_llm_model_credential,
              RegoloChatModel),
    ModelInfo('Llama-3.3-70B-Instruct',
              '',
              ModelTypeConst.LLM, openai_llm_model_credential,
              RegoloChatModel),
    ModelInfo('Llama-3.1-8B-Instruct',
              '',
              ModelTypeConst.LLM, openai_llm_model_credential,
              RegoloChatModel),
    ModelInfo('DeepSeek-Coder-6.7B-Instruct', '',
              ModelTypeConst.LLM, openai_llm_model_credential,
              RegoloChatModel)
]
# Embedding models.
open_ai_embedding_credential = RegoloEmbeddingCredential()
model_info_embedding_list = [
    ModelInfo('gte-Qwen2', '',
              ModelTypeConst.EMBEDDING, open_ai_embedding_credential,
              RegoloEmbeddingModel),
]
# Text-to-image models.
model_info_tti_list = [
    ModelInfo('FLUX.1-dev', '',
              ModelTypeConst.TTI, openai_tti_model_credential,
              RegoloTextToImage),
    ModelInfo('sdxl-turbo', '',
              ModelTypeConst.TTI, openai_tti_model_credential,
              RegoloTextToImage),
]
# Registry consumed by RegoloModelProvider.get_model_info_manage().
model_info_manage = (
    ModelInfoManage.builder()
    .append_model_info_list(model_info_list)
    # NOTE(review): the default LLM is 'gpt-3.5-turbo' with an OpenAI-specific
    # description — this looks copied from the OpenAI provider template;
    # confirm Regolo actually serves a model by that name.
    .append_default_model_info(
        ModelInfo('gpt-3.5-turbo', _('The latest gpt-3.5-turbo, updated with OpenAI adjustments'), ModelTypeConst.LLM,
                  openai_llm_model_credential, RegoloChatModel
                  ))
    .append_model_info_list(model_info_embedding_list)
    .append_default_model_info(model_info_embedding_list[0])
    .append_model_info_list(model_info_tti_list)
    .append_default_model_info(model_info_tti_list[0])
    .build()
)
class RegoloModelProvider(IModelProvider):
    """Registers Regolo as a model provider in MaxKB."""

    def get_model_info_manage(self):
        # Module-level registry built once at import time.
        return model_info_manage

    def get_model_provide_info(self):
        """Return provider identity plus the inline SVG icon."""
        icon_path = os.path.join(PROJECT_DIR, "apps", 'models_provider', 'impl',
                                 'regolo_model_provider', 'icon', 'regolo_icon_svg')
        return ModelProvideInfo(provider='model_regolo_provider', name='Regolo',
                                icon=get_file_content(icon_path))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/models_provider/impl/regolo_model_provider/regolo_model_provider.py",
"license": "GNU General Public License v3.0",
"lines": 79,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/serializers/valid_serializers.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎
@file: valid_serializers.py
@date:2024/7/8 18:00
@desc:
"""
import re
from django.core import validators
from django.core.cache import cache
from django.db.models import QuerySet
from rest_framework import serializers
from application.models import Application
from common.constants.cache_version import Cache_Version
from common.exception.app_exception import AppApiException
from knowledge.models import Knowledge
from users.models import User
from django.utils.translation import gettext_lazy as _
# Community-edition quota table keyed by the requested valid_type:
# each entry maps to the Django model to count, the quota, and the
# user-facing over-quota message.
# NOTE(review): keys are 'dataset'/'application'/'user', while ValidSerializer
# accepts 'application'/'knowledge'/'user' — a 'knowledge' request finds no
# entry here. Confirm whether 'dataset' should be renamed to 'knowledge'.
model_message_dict = {
    'dataset': {'model': Knowledge, 'count': 50,
                'message': _(
                    'The community version supports up to 50 knowledge bases. If you need more knowledge bases, please contact us (https://fit2cloud.com/).')},
    'application': {'model': Application, 'count': 5,
                    'message': _(
                        'The community version supports up to 5 applications. If you need more applications, please contact us (https://fit2cloud.com/).')},
    'user': {'model': User, 'count': 2,
             'message': _(
                 'The community version supports up to 2 users. If you need more users, please contact us (https://fit2cloud.com/).')}
}
class ValidSerializer(serializers.Serializer):
    """Validates a community-edition resource quota for a given object type."""
    # BUG FIX: the original pattern "^application|knowledge|user$" is an
    # unanchored alternation — RegexValidator uses re.search, so any value
    # that starts with "application", contains "knowledge", or ends with
    # "user" (e.g. "applicationX", "my_user") passed validation. Grouping the
    # alternatives forces a whole-string match.
    valid_type = serializers.CharField(required=True, label=_('type'), validators=[
        validators.RegexValidator(regex=re.compile("^(application|knowledge|user)$"),
                                  message="类型只支持:application|knowledge|user", code=500)
    ])
    valid_count = serializers.IntegerField(required=True, label=_('check quantity'))

    def valid(self, is_valid=True):
        """Check the quota for the requested type.

        :param is_valid: when True, run field validation first.
        :return: True when the quota check passes.
        :raises AppApiException: 400 when the quota is exceeded or the
            expected quota does not match; 500 for an unknown type.
        """
        if is_valid:
            self.is_valid(raise_exception=True)
        model_value = model_message_dict.get(self.data.get('valid_type'))
        if model_value is None:
            # Robustness: 'knowledge' is accepted by the regex but has no
            # entry in model_message_dict (its key is 'dataset'); fail with a
            # clear error instead of an AttributeError on None.
            raise AppApiException(500, f"Unsupported valid_type: {self.data.get('valid_type')}")
        license_is_valid = cache.get(Cache_Version.SYSTEM.get_key(key='license_is_valid'),
                                     version=Cache_Version.SYSTEM.get_version())
        # Treat an absent cache entry as "no valid license".
        is_license_valid = license_is_valid if license_is_valid is not None else False
        if not is_license_valid:
            # Community edition: the client's expected quota must match ours,
            # and the current object count must stay below the quota.
            if self.data.get('valid_count') != model_value.get('count'):
                raise AppApiException(400, model_value.get('message'))
            if QuerySet(
                    model_value.get('model')).count() >= model_value.get('count'):
                raise AppApiException(400, model_value.get('message'))
        return True
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/serializers/valid_serializers.py",
"license": "GNU General Public License v3.0",
"lines": 50,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/views/valid.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎
@file: valid.py
@date:2024/7/8 17:50
@desc:
"""
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from common.auth import TokenAuth
from django.utils.translation import gettext_lazy as _
from common.result import result
from system_manage.serializers.valid_serializers import ValidSerializer
class Valid(APIView):
    """GET endpoint exposing the community-edition quota check."""
    authentication_classes = [TokenAuth]
    @extend_schema(
        methods=['GET'],
        description=_('Get verification results'),
        summary=_('Get verification results'),
        operation_id=_('Get verification results'),  # type: ignore
        tags=[_('Validation')]  # type: ignore
    )
    def get(self, request: Request, valid_type: str, valid_count: int):
        # Both path params are handed to ValidSerializer, which performs
        # field validation and the quota check and raises on failure.
        return result.success(ValidSerializer(data={'valid_type': valid_type, 'valid_count': valid_count}).valid())
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/views/valid.py",
"license": "GNU General Public License v3.0",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/chat/api/vote_api.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: vote_api.py
@date:2025/6/23 17:35
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from chat.serializers.chat_record import VoteRequest
from common.mixins.api_mixin import APIMixin
from common.result import DefaultResultSerializer
class VoteAPI(APIMixin):
    """OpenAPI schema pieces for the chat-record vote endpoint."""

    @staticmethod
    def get_request():
        # Request body: vote status / reason / free-form content.
        return VoteRequest

    @staticmethod
    def get_parameters():
        """Required string path parameters for the vote endpoint."""
        def path_param(name, description):
            # All vote-endpoint parameters share the same shape.
            return OpenApiParameter(
                name=name,
                description=description,
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            )

        return [
            path_param("chat_id", _("Chat ID")),
            path_param("chat_record_id", _("Chat Record ID")),
        ]

    @staticmethod
    def get_response():
        # Generic success envelope shared across MaxKB endpoints.
        return DefaultResultSerializer
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/api/vote_api.py",
"license": "GNU General Public License v3.0",
"lines": 38,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/chat/serializers/chat_record.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat_record.py
@date:2025/6/23 11:16
@desc:
"""
from typing import Dict
from django.db import transaction
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _, gettext
from rest_framework import serializers
from application.models import VoteChoices, ChatRecord, Chat, ApplicationAccessToken, VoteReasonChoices
from application.serializers.application_chat import ChatCountSerializer
from application.serializers.application_chat_record import ChatRecordSerializerModel, \
ApplicationChatRecordQuerySerializers
from common.db.search import page_search
from common.exception.app_exception import AppApiException
from common.utils.lock import RedisLock
class VoteRequest(serializers.Serializer):
    """Request body for voting on a single chat record."""
    # Target state drawn from VoteChoices (star / trample / un-vote).
    vote_status = serializers.ChoiceField(choices=VoteChoices.choices,
                                          label=_("Bidding Status"))
    # Optional predefined reason; may be omitted or explicitly null.
    vote_reason = serializers.ChoiceField(choices=VoteReasonChoices.choices, label=_("Vote Reason"), required=False,
                                          allow_null=True)
    # Optional free-text elaboration on the vote.
    vote_other_content = serializers.CharField(required=False, allow_blank=True, label=_("Vote other content"))
class HistoryChatModel(serializers.ModelSerializer):
    """Read-only projection of a Chat row used in history listings."""
    class Meta:
        model = Chat
        fields = ['id',
                  'application_id',
                  'abstract',
                  'create_time',
                  'update_time']
class VoteSerializer(serializers.Serializer):
    """Applies a like/dislike/un-vote to a chat record, serialized by a Redis lock."""
    chat_id = serializers.UUIDField(required=True, label=_("Conversation ID"))
    chat_record_id = serializers.UUIDField(required=True,
                                           label=_("Conversation record id"))
    @transaction.atomic
    def vote(self, instance: Dict, with_valid=True):
        """Apply the vote described by *instance* to the target record.

        :param instance: payload matching VoteRequest (vote_status, optional
            vote_reason / vote_other_content).
        :param with_valid: validate both this serializer and the payload first.
        :return: True on success.
        :raises AppApiException: 500 on concurrent vote, missing record, or
            an invalid state transition.
        """
        if with_valid:
            self.is_valid(raise_exception=True)
            VoteRequest(data=instance).is_valid(raise_exception=True)
        # Per-record Redis lock prevents concurrent votes on the same record.
        rlock = RedisLock()
        if not rlock.try_lock(self.data.get('chat_record_id')):
            raise AppApiException(500,
                                  gettext(
                                      "Voting on the current session minutes, please do not send repeated requests"))
        try:
            chat_record_details_model = QuerySet(ChatRecord).get(id=self.data.get('chat_record_id'),
                                                                 chat_id=self.data.get('chat_id'))
            if chat_record_details_model is None:
                raise AppApiException(500, gettext("Non-existent conversation chat_record_id"))
            vote_status = instance.get("vote_status")
            # Not voted yet: a vote may be cast.
            if chat_record_details_model.vote_status == VoteChoices.UN_VOTE:
                # Fields captured at vote time.
                vote_reason = instance.get("vote_reason")
                vote_other_content = instance.get("vote_other_content") or ''
                if vote_status == VoteChoices.STAR:
                    # Like.
                    chat_record_details_model.vote_status = VoteChoices.STAR
                    chat_record_details_model.vote_reason = vote_reason
                    chat_record_details_model.vote_other_content = vote_other_content
                if vote_status == VoteChoices.TRAMPLE:
                    # Dislike.
                    chat_record_details_model.vote_status = VoteChoices.TRAMPLE
                    chat_record_details_model.vote_reason = vote_reason
                    chat_record_details_model.vote_other_content = vote_other_content
                chat_record_details_model.save()
            # Already voted.
            else:
                if vote_status == VoteChoices.UN_VOTE:
                    # Cancel the existing vote and clear its metadata.
                    chat_record_details_model.vote_status = VoteChoices.UN_VOTE
                    chat_record_details_model.vote_reason = None
                    chat_record_details_model.vote_other_content = ''
                    chat_record_details_model.save()
                else:
                    raise AppApiException(500, gettext("Already voted, please cancel first and then vote again"))
        finally:
            # Always release the per-record lock, even on failure.
            rlock.un_lock(self.data.get('chat_record_id'))
        # Refresh the chat's aggregate counters after the vote change.
        ChatCountSerializer(data={'chat_id': self.data.get('chat_id')}).update_chat()
        return True
class HistoricalConversationSerializer(serializers.Serializer):
    """Lists a chat user's non-deleted conversations for one application."""
    application_id = serializers.UUIDField(required=True, label=_('Application ID'))
    chat_user_id = serializers.UUIDField(required=True, label=_('Chat User ID'))

    def get_queryset(self):
        # Most recently updated first; id as a tiebreaker for equal timestamps.
        return QuerySet(Chat).filter(
            application_id=self.data.get("application_id"),
            chat_user_id=self.data.get('chat_user_id'),
            is_deleted=False,
        ).order_by('-update_time', 'id')

    def list(self):
        """Return every matching conversation, serialized."""
        self.is_valid(raise_exception=True)
        return [HistoryChatModel(chat).data for chat in self.get_queryset()]

    def page(self, current_page, page_size):
        """Return one page of matching conversations, serialized."""
        self.is_valid(raise_exception=True)
        return page_search(current_page, page_size, self.get_queryset(),
                           lambda chat: HistoryChatModel(chat).data)
class EditAbstractSerializer(serializers.Serializer):
    """Request body for renaming a conversation (its abstract/title)."""
    abstract = serializers.CharField(required=True, label=_('Abstract'))
class HistoricalConversationOperateSerializer(serializers.Serializer):
    """Operations (rename / soft-delete) on one conversation owned by a chat user."""
    application_id = serializers.UUIDField(required=True, label=_('Application ID'))
    chat_user_id = serializers.UUIDField(required=True, label=_('Chat User ID'))
    chat_id = serializers.UUIDField(required=True, label=_('Chat ID'))

    def _owned_chat(self):
        # Queryset scoped to the single chat identified by (chat, application, user).
        return QuerySet(Chat).filter(id=self.data.get('chat_id'),
                                     application_id=self.data.get('application_id'),
                                     chat_user_id=self.data.get('chat_user_id'))

    def is_valid(self, *, raise_exception=False):
        """Field validation plus an ownership/existence check on the chat."""
        super().is_valid(raise_exception=True)
        if not self._owned_chat().exists():
            raise AppApiException(500, _('Chat is not exist'))

    def edit_abstract(self, instance, with_valid=True):
        """Update the conversation's abstract from *instance*."""
        if with_valid:
            self.is_valid(raise_exception=True)
            EditAbstractSerializer(data=instance).is_valid(raise_exception=True)
        self._owned_chat().update(abstract=instance.get('abstract'))
        return True

    def logic_delete(self, with_valid=True):
        """Soft-delete: flag the conversation instead of removing the row."""
        if with_valid:
            self.is_valid(raise_exception=True)
        self._owned_chat().update(is_deleted=True)
        return True
class Clear(serializers.Serializer):
    """Soft-deletes every conversation of one chat user in one application."""
    # NOTE(review): the views module accesses this as
    # HistoricalConversationOperateSerializer.Clear — confirm whether this
    # class is meant to be nested inside that serializer (indentation may
    # have been lost here); as a top-level class that attribute lookup fails.
    application_id = serializers.UUIDField(required=True, label=_('Application ID'))
    chat_user_id = serializers.UUIDField(required=True, label=_('Chat User ID'))
    def batch_logic_delete(self, with_valid=True):
        # Bulk soft delete: flags rows rather than removing them.
        if with_valid:
            self.is_valid(raise_exception=True)
        QuerySet(Chat).filter(application_id=self.data.get('application_id'),
                              chat_user_id=self.data.get('chat_user_id')).update(is_deleted=True)
        return True
class HistoricalConversationRecordSerializer(serializers.Serializer):
    """Read access to the records of one conversation."""
    application_id = serializers.UUIDField(required=True, label=_('Application ID'))
    chat_id = serializers.UUIDField(required=True, label=_('Chat ID'))
    chat_user_id = serializers.UUIDField(required=True, label=_('Chat User ID'))

    def is_valid(self, *, raise_exception=False):
        """Field validation plus an existence check on the chat."""
        super().is_valid(raise_exception=True)
        chat_exist = QuerySet(Chat).filter(
            application_id=self.data.get("application_id"),
            chat_user_id=self.data.get('chat_user_id'),
            id=self.data.get('chat_id')).exists()
        if not chat_exist:
            raise AppApiException(500, _('Non-existent chatID'))

    def get_queryset(self):
        # Newest records first.
        return QuerySet(ChatRecord).filter(
            chat_id=self.data.get('chat_id')).order_by('-create_time')

    def list(self):
        """Return every record of the conversation, serialized."""
        self.is_valid(raise_exception=True)
        return [ChatRecordSerializerModel(record).data for record in self.get_queryset()]

    def page(self, current_page, page_size):
        """Return one page of records, honoring the app's visibility flags."""
        self.is_valid(raise_exception=True)
        access_token = QuerySet(ApplicationAccessToken).filter(
            application_id=self.data.get('application_id')).first()
        # Visibility flags default to hidden when no access token exists.
        show_source = access_token.show_source if access_token is not None else False
        show_exec = access_token.show_exec if access_token is not None else False
        return ApplicationChatRecordQuerySerializers(
            data={'application_id': self.data.get('application_id'),
                  'chat_id': self.data.get('chat_id')}).page(
            current_page, page_size, show_source=show_source, show_exec=show_exec)
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/serializers/chat_record.py",
"license": "GNU General Public License v3.0",
"lines": 165,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/chat/views/chat_record.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat_record.py
@date:2025/6/23 10:42
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from application.serializers.application_chat_record import ChatRecordOperateSerializer
from chat.api.chat_api import HistoricalConversationAPI, PageHistoricalConversationAPI, \
PageHistoricalConversationRecordAPI, HistoricalConversationRecordAPI, HistoricalConversationOperateAPI
from chat.api.vote_api import VoteAPI
from chat.serializers.chat_record import VoteSerializer, HistoricalConversationSerializer, \
HistoricalConversationRecordSerializer, HistoricalConversationOperateSerializer
from common import result
from common.auth import ChatTokenAuth
class VoteView(APIView):
    """PUT endpoint: like/dislike one chat record."""
    authentication_classes = [ChatTokenAuth]
    @extend_schema(
        methods=['PUT'],
        description=_("Like, Dislike"),
        summary=_("Like, Dislike"),
        operation_id=_("Like, Dislike"),  # type: ignore
        parameters=VoteAPI.get_parameters(),
        request=VoteAPI.get_request(),
        responses=VoteAPI.get_response(),
        tags=[_('Chat')]  # type: ignore
    )
    def put(self, request: Request, chat_id: str, chat_record_id: str):
        # The request body carries the VoteRequest fields; path params
        # identify the record being voted on.
        return result.success(VoteSerializer(
            data={'chat_id': chat_id,
                  'chat_record_id': chat_record_id
                  }).vote(request.data))
class HistoricalConversationView(APIView):
    """GET endpoint: list the current chat user's conversation history."""
    authentication_classes = [ChatTokenAuth]
    @extend_schema(
        methods=['GET'],
        description=_("Get historical conversation"),
        summary=_("Get historical conversation"),
        operation_id=_("Get historical conversation"),  # type: ignore
        parameters=HistoricalConversationAPI.get_parameters(),
        responses=HistoricalConversationAPI.get_response(),
        tags=[_('Chat')]  # type: ignore
    )
    def get(self, request: Request):
        # Identity comes from the chat token (request.auth), not from the URL.
        return result.success(HistoricalConversationSerializer(
            data={
                'application_id': request.auth.application_id,
                'chat_user_id': request.auth.chat_user_id,
            }).list())
class Operate(APIView):
    """PUT/DELETE endpoints for a single conversation (rename / soft-delete)."""
    authentication_classes = [ChatTokenAuth]
    @extend_schema(
        methods=['PUT'],
        description=_("Modify conversation about"),
        summary=_("Modify conversation about"),
        operation_id=_("Modify conversation about"),  # type: ignore
        parameters=HistoricalConversationOperateAPI.get_parameters(),
        request=HistoricalConversationOperateAPI.get_request(),
        responses=HistoricalConversationOperateAPI.get_response(),
        tags=[_('Chat')]  # type: ignore
    )
    def put(self, request: Request, chat_id: str):
        # Body carries the new abstract; ownership is derived from the token.
        return result.success(HistoricalConversationOperateSerializer(
            data={
                'application_id': request.auth.application_id,
                'chat_user_id': request.auth.chat_user_id,
                'chat_id': chat_id,
            }).edit_abstract(request.data)
        )
    @extend_schema(
        methods=['DELETE'],
        description=_("Delete history conversation"),
        summary=_("Delete history conversation"),
        operation_id=_("Delete history conversation"),  # type: ignore
        parameters=HistoricalConversationOperateAPI.get_parameters(),
        responses=HistoricalConversationOperateAPI.get_response(),
        tags=[_('Chat')]  # type: ignore
    )
    def delete(self, request: Request, chat_id: str):
        # Soft delete — the serializer flags the row rather than removing it.
        return result.success(HistoricalConversationOperateSerializer(
            data={
                'application_id': request.auth.application_id,
                'chat_user_id': request.auth.chat_user_id,
                'chat_id': chat_id,
            }).logic_delete())
class BatchDelete(APIView):
    """DELETE endpoint: soft-delete every conversation of the current chat user."""
    authentication_classes = [ChatTokenAuth]
    @extend_schema(
        methods=['DELETE'],
        description=_("Batch delete history conversation"),
        summary=_("Batch delete history conversation"),
        operation_id=_("Batch delete history conversation"),  # type: ignore
        parameters=HistoricalConversationOperateAPI.get_parameters(),
        responses=HistoricalConversationOperateAPI.get_response(),
        tags=[_('Chat')]  # type: ignore
    )
    def delete(self, request: Request):
        # NOTE(review): this reads HistoricalConversationOperateSerializer.Clear,
        # but in the serializers module shown Clear appears as a top-level class —
        # confirm Clear is actually nested, otherwise this lookup raises
        # AttributeError.
        return result.success(HistoricalConversationOperateSerializer.Clear(data={
            'application_id': request.auth.application_id,
            'chat_user_id': request.auth.chat_user_id,
        }).batch_logic_delete())
class PageView(APIView):
    """GET endpoint: paged listing of the current chat user's conversations."""
    # NOTE(review): a second module-level class named PageView is defined later
    # in this file and shadows this one at import time — confirm whether these
    # were meant to be nested under their respective parent views.
    authentication_classes = [ChatTokenAuth]
    @extend_schema(
        methods=['GET'],
        description=_("Get historical conversation by page"),
        summary=_("Get historical conversation by page"),
        operation_id=_("Get historical conversation by page"),  # type: ignore
        parameters=PageHistoricalConversationAPI.get_parameters(),
        responses=PageHistoricalConversationAPI.get_response(),
        tags=[_('Chat')]  # type: ignore
    )
    def get(self, request: Request, current_page: int, page_size: int):
        # Identity comes from the chat token; pagination from the URL.
        return result.success(HistoricalConversationSerializer(
            data={
                'application_id': request.auth.application_id,
                'chat_user_id': request.auth.chat_user_id,
            }).page(current_page, page_size))
class HistoricalConversationRecordView(APIView):
    """Records (question/answer turns) of one historical conversation."""
    authentication_classes = [ChatTokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_("Get historical conversation records"),
        summary=_("Get historical conversation records"),
        operation_id=_("Get historical conversation records"),  # type: ignore
        parameters=HistoricalConversationRecordAPI.get_parameters(),
        responses=HistoricalConversationRecordAPI.get_response(),
        tags=[_('Chat')]  # type: ignore
    )
    def get(self, request: Request, chat_id: str):
        # Full (un-paged) record list for the conversation.
        return result.success(HistoricalConversationRecordSerializer(
            data={
                'chat_id': chat_id,
                'application_id': request.auth.application_id,
                'chat_user_id': request.auth.chat_user_id,
            }).list())

    class PageView(APIView):
        """Paged variant of the conversation-record listing."""
        authentication_classes = [ChatTokenAuth]

        @extend_schema(
            methods=['GET'],
            description=_("Get historical conversation records by page "),
            summary=_("Get historical conversation records by page"),
            operation_id=_("Get historical conversation records by page"),  # type: ignore
            parameters=PageHistoricalConversationRecordAPI.get_parameters(),
            responses=PageHistoricalConversationRecordAPI.get_response(),
            tags=[_('Chat')]  # type: ignore
        )
        def get(self, request: Request, chat_id: str, current_page: int, page_size: int):
            return result.success(HistoricalConversationRecordSerializer(
                data={
                    'chat_id': chat_id,
                    'application_id': request.auth.application_id,
                    'chat_user_id': request.auth.chat_user_id,
                }).page(current_page, page_size))
class ChatRecordView(APIView):
    """Detail view for a single record within a conversation."""
    authentication_classes = [ChatTokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_("Get conversation details"),
        summary=_("Get conversation details"),
        operation_id=_("Get conversation details"),  # type: ignore
        parameters=PageHistoricalConversationRecordAPI.get_parameters(),
        responses=PageHistoricalConversationRecordAPI.get_response(),
        tags=[_('Chat')]  # type: ignore
    )
    def get(self, request: Request, chat_id: str, chat_record_id: str):
        # one(False): False = not debug mode, so source/exec visibility follows
        # the application's access-token settings.
        return result.success(ChatRecordOperateSerializer(
            data={
                'chat_id': chat_id,
                'chat_record_id': chat_record_id,
                'application_id': request.auth.application_id,
                'chat_user_id': request.auth.chat_user_id,
            }).one(False))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/views/chat_record.py",
"license": "GNU General Public License v3.0",
"lines": 178,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/api/application_chat.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_chat.py
@date:2025/6/10 13:54
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from application.serializers.application_chat import ApplicationChatQuerySerializers, \
ApplicationChatResponseSerializers, ApplicationChatRecordExportRequest
from common.mixins.api_mixin import APIMixin
from common.result import ResultSerializer, ResultPageSerializer
class ApplicationChatListResponseSerializers(ResultSerializer):
    """Result wrapper whose `data` payload is a list of conversation rows."""
    def get_data(self):
        return ApplicationChatResponseSerializers(many=True)
class ApplicationChatPageResponseSerializers(ResultPageSerializer):
    """Paged result wrapper over the same conversation-row shape."""
    def get_data(self):
        return ApplicationChatResponseSerializers(many=True)
class ApplicationChatQueryAPI(APIMixin):
    """OpenAPI schema for querying an application's conversation log."""

    @staticmethod
    def get_request():
        return ApplicationChatQuerySerializers

    @staticmethod
    def get_parameters():
        # Path params identify workspace/application; the rest are filters.
        return [
            OpenApiParameter(
                name="workspace_id",
                # NOTE(review): raw Chinese ("workspace id") while sibling
                # descriptions use _() — confirm whether it should be wrapped.
                description="工作空间id",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="application_id",
                description="application ID",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ), OpenApiParameter(
                name="start_time",
                description="start Time",
                type=OpenApiTypes.STR,
                required=True,
            ),
            OpenApiParameter(
                name="end_time",
                description="end Time",
                type=OpenApiTypes.STR,
                required=True,
            ),
            OpenApiParameter(
                name="abstract",
                description="summary",
                type=OpenApiTypes.STR,
                required=False,
            ),
            OpenApiParameter(
                name="username",
                description="username",
                type=OpenApiTypes.STR,
                required=False,
            ),
            OpenApiParameter(
                name="min_star",
                description=_("Minimum number of likes"),
                type=OpenApiTypes.INT,
                required=False,
            ),
            OpenApiParameter(
                name="min_trample",
                description=_("Minimum number of clicks"),
                type=OpenApiTypes.INT,
                required=False,
            ),
            OpenApiParameter(
                name="comparer",
                # 'and'/'or' — how min_star and min_trample filters combine.
                description=_("Comparator"),
                type=OpenApiTypes.STR,
                required=False,
            ),
        ]

    @staticmethod
    def get_response():
        return ApplicationChatListResponseSerializers
class ApplicationChatQueryPageAPI(APIMixin):
    """Paged variant of ApplicationChatQueryAPI: same filters plus path paging."""

    @staticmethod
    def get_request():
        return ApplicationChatQueryAPI.get_request()

    @staticmethod
    def get_parameters():
        # Reuse the base filter set and append the path pagination params.
        return [
            *ApplicationChatQueryAPI.get_parameters(),
            OpenApiParameter(
                name="current_page",
                description=_("Current page"),
                type=OpenApiTypes.INT,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="page_size",
                description=_("Page size"),
                type=OpenApiTypes.INT,
                location='path',
                required=True,
            ),
        ]

    @staticmethod
    def get_response():
        return ApplicationChatPageResponseSerializers
class ApplicationChatExportAPI(APIMixin):
    """Schema for exporting selected conversations (response is a file stream,
    hence no response serializer)."""

    @staticmethod
    def get_request():
        return ApplicationChatRecordExportRequest

    @staticmethod
    def get_parameters():
        return ApplicationChatQueryAPI.get_parameters()

    @staticmethod
    def get_response():
        return None
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/api/application_chat.py",
"license": "GNU General Public License v3.0",
"lines": 123,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/api/application_chat_record.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_chat_record.py
@date:2025/6/10 15:19
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from application.serializers.application_chat_record import ApplicationChatRecordAddKnowledgeSerializer, \
ApplicationChatRecordImproveInstanceSerializer
from common.mixins.api_mixin import APIMixin
class ApplicationChatRecordQueryAPI(APIMixin):
    """Schema for listing the records of one conversation."""

    @staticmethod
    def get_response():
        pass

    @staticmethod
    def get_request():
        pass

    @staticmethod
    def get_parameters():
        return [
            OpenApiParameter(
                name="workspace_id",
                # NOTE(review): raw Chinese ("workspace id"); siblings use _().
                description="工作空间id",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="application_id",
                description="Application ID",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="chat_id",
                description=_("Chat ID"),
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="order_asc",
                description=_("Is it in order"),
                type=OpenApiTypes.BOOL,
                required=True,
            )
        ]
class ApplicationChatRecordPageQueryAPI(APIMixin):
    """Paged variant: base record-query params plus path pagination."""

    @staticmethod
    def get_response():
        pass

    @staticmethod
    def get_request():
        pass

    @staticmethod
    def get_parameters():
        return [*ApplicationChatRecordQueryAPI.get_parameters(),
                OpenApiParameter(
                    name="current_page",
                    description=_("Current page"),
                    type=OpenApiTypes.INT,
                    location='path',
                    required=True,
                ),
                OpenApiParameter(
                    name="page_size",
                    description=_("Page size"),
                    type=OpenApiTypes.INT,
                    location='path',
                    required=True,
                )]
class ApplicationChatRecordImproveParagraphAPI(APIMixin):
    """Schema for turning a chat record into a knowledge-base paragraph
    ("improve"): identifies the record plus the target knowledge/document."""

    @staticmethod
    def get_response():
        pass

    @staticmethod
    def get_request():
        return ApplicationChatRecordImproveInstanceSerializer

    @staticmethod
    def get_parameters():
        return [OpenApiParameter(
            name="workspace_id",
            # NOTE(review): raw Chinese ("workspace id"); siblings use _().
            description="工作空间id",
            type=OpenApiTypes.STR,
            location='path',
            required=True,
        ),
            OpenApiParameter(
                name="application_id",
                description="Application ID",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="chat_id",
                description=_("Chat ID"),
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="chat_record_id",
                description=_("Chat Record ID"),
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="knowledge_id",
                description=_("Knowledge ID"),
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="document_id",
                description=_("Document ID"),
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            )
        ]

    class Operate(APIMixin):
        """Same params narrowed to one existing paragraph (adds paragraph_id)."""

        @staticmethod
        def get_parameters():
            return [*ApplicationChatRecordImproveParagraphAPI.get_parameters(), OpenApiParameter(
                name="paragraph_id",
                description=_("Paragraph ID"),
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            )]
class ApplicationChatRecordAddKnowledgeAPI(APIMixin):
    """Schema for bulk-adding conversations into a knowledge base."""

    @staticmethod
    def get_request():
        return ApplicationChatRecordAddKnowledgeSerializer

    @staticmethod
    def get_response():
        return None

    @staticmethod
    def get_parameters():
        return [
            OpenApiParameter(
                name="workspace_id",
                # NOTE(review): raw Chinese ("workspace id"); siblings use _().
                description="工作空间id",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="application_id",
                description="Application ID",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            )]
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/api/application_chat_record.py",
"license": "GNU General Public License v3.0",
"lines": 162,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/serializers/application_chat.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_chat.py
@date:2025/6/10 11:06
@desc:
"""
import datetime
import os
import re
from io import BytesIO
from typing import Dict
import openpyxl
import pytz
from django.core import validators
from django.db import models
from django.db.models import QuerySet, Q
from django.http import StreamingHttpResponse
from django.utils import timezone
from django.utils.translation import gettext_lazy as _, gettext
from openpyxl.cell.cell import ILLEGAL_CHARACTERS_RE
from rest_framework import serializers
from application.models import Chat, Application, ChatRecord, ChatSourceChoices
from common.db.search import get_dynamics_model, native_search, native_page_search, native_page_handler
from common.exception.app_exception import AppApiException
from common.utils.common import get_file_content
from maxkb.conf import PROJECT_DIR
from maxkb.settings import TIME_ZONE, edition
class ApplicationChatResponseSerializers(serializers.Serializer):
    """Shape of one conversation row returned by chat-log queries: identity,
    summary, user info, and aggregated counters (records, likes, dislikes, tags)."""
    id = serializers.UUIDField(required=True, label=_("chat id"))
    abstract = serializers.CharField(required=True, label=_("summary"))
    chat_user_id = serializers.UUIDField(required=True, label=_("Chat User ID"))
    chat_user_type = serializers.CharField(required=True, label=_("Chat User Type"))
    is_deleted = serializers.BooleanField(required=True, label=_("Is delete"))
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    chat_record_count = serializers.IntegerField(required=True, label=_("Number of conversations"))
    star_num = serializers.IntegerField(required=True, label=_("Number of Likes"))
    trample_num = serializers.IntegerField(required=True, label=_("Number of thumbs-downs"))
    mark_sum = serializers.IntegerField(required=True, label=_("Number of tags"))
class ApplicationChatRecordExportRequest(serializers.Serializer):
    """Export request body: the conversation IDs to include in the export."""
    select_ids = serializers.ListField(required=True, label=_("Chat ID List"),
                                       child=serializers.UUIDField(required=True, label=_("Chat ID")))
class ApplicationChatQuerySerializers(serializers.Serializer):
    """Filter parameters for querying an application's conversation log."""
    workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
    abstract = serializers.CharField(required=False, allow_blank=True, allow_null=True, label=_("summary"))
    username = serializers.CharField(required=False, allow_blank=True, allow_null=True, label=_("username"))
    start_time = serializers.DateField(format='%Y-%m-%d', label=_("Start time"))
    end_time = serializers.DateField(format='%Y-%m-%d', label=_("End time"))
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    min_star = serializers.IntegerField(required=False, min_value=0,
                                        label=_("Minimum number of likes"))
    min_trample = serializers.IntegerField(required=False, min_value=0,
                                           label=_("Minimum number of clicks"))
    # BUG FIX: the original pattern "^and|or$" parses as (^and)|(or$) because
    # '|' has the lowest precedence in regex, so values such as "android" or
    # "xor" were accepted. Group the alternation so only the exact words
    # "and" / "or" validate.
    comparer = serializers.CharField(required=False, label=_("Comparator"), validators=[
        validators.RegexValidator(regex=re.compile("^(and|or)$"),
                                  message=_("Only supports and|or"), code=500)
    ])
    def is_valid(self, *, raise_exception=False):
        """Field validation plus an existence check of the application
        (optionally scoped to the workspace). Note: always raises on invalid
        fields regardless of the raise_exception argument."""
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Application id does not exist'))
def get_end_time(self):
d = datetime.datetime.strptime(self.data.get('end_time'), '%Y-%m-%d').date()
naive = datetime.datetime.combine(d, datetime.time.max)
return timezone.make_aware(naive, timezone.get_default_timezone())
def get_start_time(self):
d = datetime.datetime.strptime(self.data.get('start_time'), '%Y-%m-%d').date()
naive = datetime.datetime.combine(d, datetime.time.min)
return timezone.make_aware(naive, timezone.get_default_timezone())
    def get_query_set(self, select_ids=None):
        """Assemble the filtered queryset over the dynamic `application_chat`
        model used by the raw-SQL search helpers.

        :param select_ids: optional list of chat IDs to restrict the result to
                           (used by export).
        :return: dict with a single 'default_queryset' entry, ordered newest first.
        """
        end_time = self.get_end_time()
        start_time = self.get_start_time()
        # Dynamic model: fields are named after the SQL columns/aliases the
        # list/export .sql files project, not a real Django model.
        query_set = QuerySet(model=get_dynamics_model(
            {'application_chat.application_id': models.CharField(),
             'application_chat.abstract': models.CharField(),
             'application_chat.asker': models.JSONField(),
             "star_num": models.IntegerField(),
             'trample_num': models.IntegerField(),
             'comparer': models.CharField(),
             'application_chat.update_time': models.DateTimeField(),
             'application_chat.id': models.UUIDField(),
             'application_chat_record_temp.id': models.UUIDField()}))
        # Mandatory filters: application + update-time window.
        base_query_dict = {'application_chat.application_id': self.data.get("application_id"),
                           'application_chat.update_time__gte': start_time,
                           'application_chat.update_time__lte': end_time,
                           }
        # Optional substring filters.
        if 'abstract' in self.data and self.data.get('abstract') is not None:
            base_query_dict['application_chat.abstract__icontains'] = self.data.get('abstract')
        if 'username' in self.data and self.data.get('username') is not None:
            base_query_dict['application_chat.asker__username__icontains'] = self.data.get('username')
        if select_ids is not None and len(select_ids) > 0:
            base_query_dict['application_chat.id__in'] = select_ids
        base_condition = Q(**base_query_dict)
        min_star_query = None
        min_trample_query = None
        if 'min_star' in self.data and self.data.get('min_star') is not None:
            min_star_query = Q(star_num__gte=self.data.get('min_star'))
        if 'min_trample' in self.data and self.data.get('min_trample') is not None:
            min_trample_query = Q(trample_num__gte=self.data.get('min_trample'))
        # 'comparer' decides how the two counter thresholds combine when both
        # are present; default (anything but 'or') is AND.
        if min_star_query is not None and min_trample_query is not None:
            if self.data.get(
                    'comparer') is not None and self.data.get('comparer') == 'or':
                condition = base_condition & (min_star_query | min_trample_query)
            else:
                condition = base_condition & (min_star_query & min_trample_query)
        elif min_star_query is not None:
            condition = base_condition & min_star_query
        elif min_trample_query is not None:
            condition = base_condition & min_trample_query
        else:
            condition = base_condition
        return {
            'default_queryset': query_set.filter(condition).order_by("-application_chat.update_time")
        }
    def list(self, with_valid=True):
        """Run the chat-log query and return all matching rows.

        PE/EE editions use a different SQL file than the community edition.
        """
        if with_valid:
            self.is_valid(raise_exception=True)
        return native_search(self.get_query_set(), select_string=get_file_content(
            os.path.join(PROJECT_DIR, "apps", "application", 'sql',
                         ('list_application_chat_ee.sql' if ['PE', 'EE'].__contains__(
                             edition) else 'list_application_chat.sql'))),
                             with_table_name=False)
@staticmethod
def paragraph_list_to_string(paragraph_list):
return "\n**********\n".join(
[f"{paragraph.get('title')}:\n{paragraph.get('content')}" for paragraph in
paragraph_list] if paragraph_list is not None else '')
    @staticmethod
    def to_row(row: Dict):
        """Flatten one exported chat-record dict into the list of cell values
        for a spreadsheet row (order must match the export headers)."""
        details = row.get('details') or {}
        # Rewritten question produced by any question-node steps.
        padding_problem_text = ' '.join((node.get("answer", "") or "") for key, node in details.items() if
                                        node.get("type") == 'question-node')
        # Retrieval steps across node-type generations (dataset/knowledge/search_step).
        search_dataset_node_list = [(key, node) for key, node in details.items() if
                                    node.get("type") == 'search-dataset-node' or node.get(
                                        "step_type") == 'search_step' or node.get("type") == 'search-knowledge-node']
        # Per-step count of referenced paragraphs ("name:count", bare count for search_step).
        reference_paragraph_len = '\n'.join([str(len(node.get('paragraph_list',
                                                              []))) if key == 'search_step' else node.get(
            'name') + ':' + str(
            len(node.get('paragraph_list', [])) if node.get('paragraph_list', []) is not None else '0') for
            key, node in search_dataset_node_list])
        # Full referenced-paragraph text, steps separated by a dashed line.
        reference_paragraph = '\n----------\n'.join(
            [ApplicationChatQuerySerializers.paragraph_list_to_string(node.get('paragraph_list',
                                                                               [])) if key == 'search_step' else node.get(
                'name') + ':\n' + ApplicationChatQuerySerializers.paragraph_list_to_string(node.get('paragraph_list',
                                                                                                   [])) for
             key, node in search_dataset_node_list])
        improve_paragraph_list = row.get('improve_paragraph_list') or []
        # NOTE(review): vote-status labels are hard-coded Chinese runtime
        # strings ("not voted"/"agree"/"oppose") while reasons use gettext —
        # confirm whether they should be translatable too.
        vote_status_map = {'-1': '未投票', '0': '赞同', '1': '反对'}
        vote_reason_map = {'accurate': gettext('accurate'), 'complete': gettext('complete'),
                           'inaccurate': gettext('inaccurate'), 'incomplete': gettext('incomplete'),
                           'other': gettext('Other'), }
        return [str(row.get('chat_id')), row.get('abstract'), row.get('problem_text'), padding_problem_text,
                row.get('answer_text'), vote_status_map.get(row.get('vote_status')),
                vote_reason_map.get(row.get('vote_reason')),
                row.get('vote_other_content'),
                reference_paragraph_len,
                reference_paragraph,
                "\n".join([
                    f"{improve_paragraph_list[index].get('title')}\n{improve_paragraph_list[index].get('content')}"
                    for index in range(len(improve_paragraph_list))]),
                row.get('asker').get('username'),
                (row.get('message_tokens') or 0) + (row.get('answer_tokens') or 0),
                row.get('ip_address') or '-',
                get_source_display(row.get('source')),
                row.get('run_time'),
                str(row.get('create_time').astimezone(pytz.timezone(TIME_ZONE)).strftime('%Y-%m-%d %H:%M:%S')
                    if row.get('create_time') is not None else None)]
    @staticmethod
    def reset_value(value):
        """Sanitize a cell value before writing it with openpyxl."""
        if isinstance(value, str):
            # Strip control characters Excel cannot store in a cell.
            value = re.sub(ILLEGAL_CHARACTERS_RE, '', value)
        if isinstance(value, datetime.datetime):
            # NOTE(review): uses pytz's private _utcoffset (the zone's base UTC
            # offset, ignoring DST) to build a fixed-offset timezone — confirm
            # this is intentional rather than value.astimezone(pytz.timezone(...)).
            eastern = pytz.timezone(TIME_ZONE)
            c = datetime.timezone(eastern._utcoffset)
            value = value.astimezone(c)
        return value
    def export(self, data, with_valid=True):
        """Stream the selected conversations as an .xlsx download.

        :param data: request body containing 'select_ids' (validated below).
        :return: StreamingHttpResponse with an attachment named data.xlsx.
        """
        if with_valid:
            self.is_valid(raise_exception=True)
            ApplicationChatRecordExportRequest(data=data).is_valid(raise_exception=True)

        def stream_response():
            # write_only workbook keeps memory bounded while appending rows.
            workbook = openpyxl.Workbook(write_only=True)
            worksheet = workbook.create_sheet(title='Sheet1')
            current_page = 1
            page_size = 500
            headers = [gettext('Conversation ID'), gettext('summary'), gettext('User Questions'),
                       gettext('Problem after optimization'),
                       gettext('answer'), gettext('User feedback'), gettext('Feedback reason'),
                       gettext('Other reason content'),
                       gettext('Reference segment number'),
                       gettext('Section title + content'),
                       gettext('Annotation'), gettext('USER'), gettext('Consuming tokens'),
                       gettext('Ip Address'), gettext('source'),
                       gettext('Time consumed (s)'),
                       gettext('Question Time')]
            worksheet.append(headers)
            # Fetch in pages of 500 records; EE editions use a different SQL file.
            for data_list in native_page_handler(page_size, self.get_query_set(data.get('select_ids')),
                                                 primary_key='application_chat_record_temp.id',
                                                 primary_queryset='default_queryset',
                                                 get_primary_value=lambda item: item.get('id'),
                                                 select_string=get_file_content(
                                                     os.path.join(PROJECT_DIR, "apps", "application", 'sql',
                                                                  ('export_application_chat_ee.sql' if ['PE',
                                                                                                        'EE'].__contains__(
                                                                      edition) else 'export_application_chat.sql'))),
                                                 with_table_name=False):
                for item in data_list:
                    row = [self.reset_value(v) for v in self.to_row(item)]
                    worksheet.append(row)
                # NOTE(review): current_page is incremented but never read —
                # looks like dead bookkeeping left over from manual paging.
                current_page = current_page + 1
            # Single yield: the whole workbook is serialized once at the end.
            output = BytesIO()
            workbook.save(output)
            output.seek(0)
            yield output.getvalue()
            output.close()
            workbook.close()

        response = StreamingHttpResponse(stream_response(),
                                         content_type='application/vnd.open.xmlformats-officedocument.spreadsheetml.sheet')
        response['Content-Disposition'] = 'attachment; filename="data.xlsx"'
        return response
    def page(self, current_page: int, page_size: int, with_valid=True):
        """Paged variant of list(): same SQL, wrapped in native pagination."""
        if with_valid:
            self.is_valid(raise_exception=True)
        return native_page_search(current_page, page_size, self.get_query_set(), select_string=get_file_content(
            os.path.join(PROJECT_DIR, "apps", "application", 'sql',
                         ('list_application_chat_ee.sql' if ['PE', 'EE'].__contains__(
                             edition) else 'list_application_chat.sql'))),
                             with_table_name=False)
class ChatCountSerializer(serializers.Serializer):
    """Recomputes and persists a conversation's aggregate counters
    (likes, dislikes, record count, mark sum) from its chat records."""
    chat_id = serializers.UUIDField(required=True, label=_("Conversation ID"))

    def get_query_set(self):
        return QuerySet(ChatRecord).filter(chat_id=self.data.get('chat_id'))

    def update_chat(self):
        """Aggregate via count_chat_record.sql and write the counters back
        onto the Chat row; missing aggregates default to 0."""
        self.is_valid(raise_exception=True)
        count_chat_record = native_search(self.get_query_set(), get_file_content(
            os.path.join(PROJECT_DIR, "apps", "application", 'sql', 'count_chat_record.sql')), with_search_one=True)
        QuerySet(Chat).filter(id=self.data.get('chat_id')).update(star_num=count_chat_record.get('star_num', 0) or 0,
                                                                  trample_num=count_chat_record.get('trample_num',
                                                                                                    0) or 0,
                                                                  chat_record_count=count_chat_record.get(
                                                                      'chat_record_count', 0) or 0,
                                                                  mark_sum=count_chat_record.get('mark_sum', 0) or 0)
        return True
def get_source_display(source):
    """Human-readable label for a chat's source descriptor.

    `source` must be a non-empty dict carrying a 'type' key; anything else
    (None, non-dict, missing key) renders as '-'. Unknown types fall back to
    their raw string form.
    """
    if not isinstance(source, dict) or not source or 'type' not in source:
        return '-'
    kind = source.get('type')
    # Map each known channel to its translated display name.
    labels = {
        ChatSourceChoices.ONLINE.value: gettext('Online Usage'),
        ChatSourceChoices.API_CALL.value: gettext('API Call'),
        ChatSourceChoices.ENTERPRISE_WECHAT.value: gettext('Enterprise WeChat'),
        ChatSourceChoices.WECHAT_PUBLIC_ACCOUNT.value: gettext('WeChat Public Account'),
        ChatSourceChoices.LARK.value: gettext('Lark'),
        ChatSourceChoices.DINGTALK.value: gettext('DingTalk'),
        ChatSourceChoices.ENTERPRISE_WECHAT_ROBOT.value: gettext('Enterprise WeChat Robot'),
        ChatSourceChoices.TRIGGER.value: gettext('Trigger'),
        ChatSourceChoices.SLACK.value: gettext('Slack'),
    }
    return labels.get(kind, str(kind))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/serializers/application_chat.py",
"license": "GNU General Public License v3.0",
"lines": 265,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/application/serializers/application_chat_record.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_chat_record.py
@date:2025/6/10 15:10
@desc:
"""
from functools import reduce
from typing import Dict
import uuid_utils.compat as uuid
from django.db import transaction
from django.db.models import QuerySet
from django.db.models.aggregates import Max, Min
from django.utils.translation import gettext_lazy as _, gettext
from rest_framework import serializers
from rest_framework.utils.formatting import lazy_format
from application.models import ChatRecord, ApplicationAccessToken, Application
from application.serializers.application_chat import ChatCountSerializer
from application.serializers.common import ChatInfo
from common.auth.authentication import get_is_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
from common.db.search import page_search
from common.exception.app_exception import AppApiException, AppUnauthorizedFailed
from common.utils.common import post
from knowledge.models import Paragraph, Document, Problem, ProblemParagraphMapping, Knowledge
from knowledge.serializers.common import get_embedding_model_id_by_knowledge_id, update_document_char_length
from knowledge.serializers.paragraph import ParagraphSerializers
from knowledge.task.embedding import embedding_by_paragraph, embedding_by_paragraph_list
class ChatRecordSerializerModel(serializers.ModelSerializer):
    """Plain ModelSerializer exposing the persisted fields of one chat record."""
    class Meta:
        model = ChatRecord
        fields = ['id', 'chat_id', 'vote_status', 'vote_reason', 'vote_other_content', 'problem_text', 'answer_text',
                  'message_tokens', 'answer_tokens', 'const', 'improve_paragraph_id_list', 'run_time', 'index',
                  'answer_text_list',
                  'create_time', 'update_time']
class ChatRecordOperateSerializer(serializers.Serializer):
    """Look up a single chat record for an application's chat-token user."""
    chat_id = serializers.UUIDField(required=True, label=_("Conversation ID"))
    workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    chat_record_id = serializers.UUIDField(required=True, label=_("Conversation record id"))

    def is_valid(self, *, debug=False, raise_exception=False):
        """Field validation plus existence checks of the application (optionally
        workspace-scoped) and its access-token record. Always raises on failure."""
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Application id does not exist'))
        application_access_token = QuerySet(ApplicationAccessToken).filter(
            application_id=self.data.get('application_id')).first()
        if application_access_token is None:
            raise AppApiException(500, gettext('Application authentication information does not exist'))
def get_chat_record(self):
chat_record_id = self.data.get('chat_record_id')
chat_id = self.data.get('chat_id')
chat_info: ChatInfo = ChatInfo.get_cache(chat_id)
if chat_info is not None:
chat_record_list = [chat_record for chat_record in chat_info.chat_record_list if
str(chat_record.id) == str(chat_record_id)]
if chat_record_list is not None and len(chat_record_list):
return chat_record_list[-1]
return QuerySet(ChatRecord).filter(id=chat_record_id, chat_id=chat_id).first()
    def one(self, debug):
        """Return the record as a dict; in debug mode sources and execution
        details are always visible, otherwise visibility follows the
        application's access-token flags."""
        self.is_valid(debug=debug, raise_exception=True)
        chat_record = self.get_chat_record()
        if chat_record is None:
            raise AppApiException(500, gettext("Conversation does not exist"))
        application_access_token = QuerySet(ApplicationAccessToken).filter(
            application_id=self.data.get('application_id')).first()
        show_source = False
        show_exec = False
        if application_access_token is not None:
            show_exec = application_access_token.show_exec
            show_source = application_access_token.show_source
        return ApplicationChatRecordQuerySerializers.reset_chat_record(
            chat_record, True if debug else show_source, True if debug else show_exec)
class ApplicationChatRecordQuerySerializers(serializers.Serializer):
    """Query/list the records of one conversation, with optional ordering."""
    workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    chat_id = serializers.UUIDField(required=True, label=_("Chat ID"))
    order_asc = serializers.BooleanField(required=False, allow_null=True, label=_("Is it in order"))

    def is_valid(self, *, raise_exception=False):
        """Field validation plus workspace-scoped application existence check.
        Always raises on failure."""
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Application id does not exist'))
def list(self, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
QuerySet(ChatRecord).filter(chat_id=self.data.get('chat_id'))
order_by = 'create_time' if self.data.get('order_asc') is None or self.data.get(
'order_asc') else '-create_time'
return [ChatRecordSerializerModel(chat_record).data for chat_record in
QuerySet(ChatRecord).filter(chat_id=self.data.get('chat_id')).order_by(order_by)]
@staticmethod
def get_loop_workflow_node(details):
result = []
for item in details.values():
if item.get('type') == 'loop-node':
for loop_item in item.get('loop_node_data') or []:
for inner_item in loop_item.values():
result.append(inner_item)
return result
@staticmethod
def reset_chat_record(chat_record, show_source, show_exec):
knowledge_list = []
paragraph_list = []
if 'search_step' in chat_record.details and chat_record.details.get('search_step').get(
'paragraph_list') is not None:
paragraph_list = chat_record.details.get('search_step').get(
'paragraph_list')
for item in [*chat_record.details.values(),
*ApplicationChatRecordQuerySerializers.get_loop_workflow_node(chat_record.details)]:
if item.get('type') == 'search-knowledge-node' and item.get('show_knowledge', False):
paragraph_list = paragraph_list + (item.get(
'paragraph_list') or [])
if item.get('type') == 'reranker-node' and item.get('show_knowledge', False):
paragraph_list = paragraph_list + [rl.get('metadata') for rl in (item.get('result_list') or []) if
'document_id' in (rl.get('metadata') or {}) and 'knowledge_id' in (
rl.get(
'metadata') or {})]
paragraph_list = list({p.get('id'): p for p in paragraph_list}.values())
knowledge_list = knowledge_list + [{'id': knowledge_id, **knowledge} for knowledge_id, knowledge in
reduce(lambda x, y: {**x, **y},
[{row.get(
'knowledge_id'): {'knowledge_name': row.get(
"knowledge_name"),
'knowledge_type': row.get('knowledge_type')}} for
row in
paragraph_list],
{}).items()]
if len(chat_record.improve_paragraph_id_list) > 0:
paragraph_model_list = QuerySet(Paragraph).filter(id__in=chat_record.improve_paragraph_id_list)
if len(paragraph_model_list) < len(chat_record.improve_paragraph_id_list):
paragraph_model_id_list = [str(p.id) for p in paragraph_model_list]
chat_record.improve_paragraph_id_list = list(
filter(lambda p_id: paragraph_model_id_list.__contains__(p_id),
chat_record.improve_paragraph_id_list))
chat_record.save()
show_source_dict = {'knowledge_list': knowledge_list,
'paragraph_list': paragraph_list, }
show_exec_dict = {'execution_details': [chat_record.details[key] for key in chat_record.details if
(True if show_exec else chat_record.details[key].get(
'type') == 'start-node')]}
return {
**ChatRecordSerializerModel(chat_record).data,
'padding_problem_text': chat_record.details.get('problem_padding').get(
'padding_problem_text') if 'problem_padding' in chat_record.details else None,
**(show_source_dict if show_source else {}),
**(show_exec_dict if show_exec else show_exec_dict)
}
    def page(self, current_page: int, page_size: int, with_valid=True, show_source=None, show_exec=None):
        """Paged record listing; each record is post-processed through
        reset_chat_record. show_source/show_exec default to fully visible.

        NOTE(review): the ordering default here is DESCENDING create_time,
        the opposite of list() above — confirm the asymmetry is intentional.
        """
        if with_valid:
            self.is_valid(raise_exception=True)
        order_by = '-create_time' if self.data.get('order_asc') is None or self.data.get(
            'order_asc') else 'create_time'
        if show_source is None:
            show_source = True
        if show_exec is None:
            show_exec = True
        page = page_search(current_page, page_size,
                           QuerySet(ChatRecord).filter(chat_id=self.data.get('chat_id')).order_by(order_by),
                           post_records_handler=lambda chat_record: self.reset_chat_record(chat_record, show_source,
                                                                                           show_exec))
        return page
class ParagraphModel(serializers.ModelSerializer):
    """Full-field ModelSerializer for knowledge-base paragraphs."""
    class Meta:
        model = Paragraph
        fields = "__all__"
class ChatRecordImproveSerializer(serializers.Serializer):
    """Fetch the 'improve' paragraphs previously generated from a chat record."""
    workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    chat_id = serializers.UUIDField(required=True, label=_("Conversation ID"))
    chat_record_id = serializers.UUIDField(required=True,
                                           label=_("Conversation record id"))

    def is_valid(self, *, raise_exception=False):
        """Field validation plus workspace-scoped application existence check.
        Always raises on failure."""
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Application id does not exist'))

    def get(self, with_valid=True):
        """Return the record's improve paragraphs as serialized dicts,
        pruning (and persisting) any IDs whose paragraphs were deleted."""
        if with_valid:
            self.is_valid(raise_exception=True)
        chat_record_id = self.data.get('chat_record_id')
        chat_id = self.data.get('chat_id')
        chat_record = QuerySet(ChatRecord).filter(id=chat_record_id, chat_id=chat_id).first()
        if chat_record is None:
            raise AppApiException(500, gettext('Conversation record does not exist'))
        if chat_record.improve_paragraph_id_list is None or len(chat_record.improve_paragraph_id_list) == 0:
            return []
        paragraph_model_list = QuerySet(Paragraph).filter(id__in=chat_record.improve_paragraph_id_list)
        # Fewer rows than IDs means some paragraphs were deleted; drop the
        # stale references and save the record.
        if len(paragraph_model_list) < len(chat_record.improve_paragraph_id_list):
            paragraph_model_id_list = [str(p.id) for p in paragraph_model_list]
            chat_record.improve_paragraph_id_list = list(
                filter(lambda p_id: paragraph_model_id_list.__contains__(p_id),
                       chat_record.improve_paragraph_id_list))
            chat_record.save()
        return [ParagraphModel(p).data for p in paragraph_model_list]
class ApplicationChatRecordImproveInstanceSerializer(serializers.Serializer):
    """Payload validator for annotating a chat record into a paragraph."""
    # Optional paragraph title; the improve flow substitutes '' when omitted.
    title = serializers.CharField(required=False, max_length=256, allow_null=True, allow_blank=True,
                                  label=_("Section title"))
    # Paragraph body text; required.
    content = serializers.CharField(required=True, label=_("Paragraph content"))
    # Optional question text; the improve flow falls back to the record's own
    # problem_text when omitted.
    problem_text = serializers.CharField(required=False, max_length=256, allow_null=True, allow_blank=True,
                                         label=_("question"))
class ApplicationChatRecordAddKnowledgeSerializer(serializers.Serializer):
    """Bulk 'add to knowledge base': turn every record of the selected
    conversations into a paragraph of an existing document."""
    workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    knowledge_id = serializers.UUIDField(required=True, label=_("Knowledge base id"))
    document_id = serializers.UUIDField(required=True, label=_("Document id"))
    chat_ids = serializers.ListSerializer(child=serializers.UUIDField(), required=True,
                                          label=_("Conversation ID"))

    def is_valid(self, *, raise_exception=False):
        """Field validation plus existence checks for the application and the
        (document, knowledge) pair.

        Note: always raises on failure, regardless of ``raise_exception``.
        """
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Application id does not exist'))
        if not Document.objects.filter(id=self.data['document_id'], knowledge_id=self.data['knowledge_id']).exists():
            raise AppApiException(500, gettext("The document id is incorrect"))

    @staticmethod
    def post_embedding_paragraph(paragraph_ids, knowledge_id):
        # Runs after post_improve (via @post): vectorize the newly created
        # paragraphs with the knowledge base's embedding model.
        model_id = get_embedding_model_id_by_knowledge_id(knowledge_id)
        embedding_by_paragraph_list(paragraph_ids, model_id)

    @post(post_function=post_embedding_paragraph)
    @transaction.atomic
    def post_improve(self, instance: Dict, request=None, scope='WORKSPACE', with_valid=True):
        """Create one paragraph per chat record of every conversation in
        ``instance['chat_ids']``, each linked to a Problem built from the
        record's question, prepended to the target document.

        Returns:
            tuple: (paragraph_ids, knowledge_id), forwarded by @post to
            post_embedding_paragraph.

        Raises:
            AppUnauthorizedFailed(403): caller lacks document-edit permission.
            AppApiException(500): some chat records do not exist.
        """
        if with_valid:
            ApplicationChatRecordAddKnowledgeSerializer(data=instance).is_valid(raise_exception=True)
            self.is_valid(raise_exception=True)
        if scope == 'WORKSPACE':
            is_permission = get_is_permissions(request=request, workspace_id=self.data.get('workspace_id'),
                                               knowledge_id=self.data.get("knowledge_id"))(
                PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
                PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
                RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
                ViewPermission([RoleConstants.USER.get_workspace_role()],
                               [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()],
                               CompareConstants.AND),
            )
        else:
            is_permission = get_is_permissions(request=request, workspace_id=self.data.get('workspace_id'),
                                               knowledge_id=self.data.get("knowledge_id"))(
                PermissionConstants.RESOURCE_KNOWLEDGE_DOCUMENT_EDIT, RoleConstants.ADMIN
            )
        if not is_permission:
            raise AppUnauthorizedFailed(403, gettext('No permission to access'))
        chat_ids = instance['chat_ids']
        document_id = instance['document_id']
        knowledge_id = instance['knowledge_id']
        # Fetch all chat records of the selected conversations.
        chat_record_list = list(ChatRecord.objects.filter(chat_id__in=chat_ids))
        if len(chat_record_list) < len(chat_ids):
            raise AppApiException(500, gettext("Conversation records that do not exist"))
        # Batch-build paragraphs and problem/paragraph mappings in memory.
        paragraphs = []
        paragraph_ids = []
        problem_paragraph_mappings = []
        for chat_record in chat_record_list:
            paragraph = Paragraph(
                id=uuid.uuid7(),
                document_id=document_id,
                content=chat_record.answer_text,
                knowledge_id=knowledge_id,
                title=chat_record.problem_text
            )
            # NOTE(review): '_' rebinding here shadows the gettext_lazy alias
            # for the rest of this method body.
            problem, _ = Problem.objects.get_or_create(content=chat_record.problem_text, knowledge_id=knowledge_id)
            problem_paragraph_mapping = ProblemParagraphMapping(
                id=uuid.uuid7(),
                knowledge_id=knowledge_id,
                document_id=document_id,
                problem_id=problem.id,
                paragraph_id=paragraph.id
            )
            paragraphs.append(paragraph)
            paragraph_ids.append(paragraph.id)
            problem_paragraph_mappings.append(problem_paragraph_mapping)
            chat_record.improve_paragraph_id_list.append(paragraph.id)
        # Reposition so the new paragraphs come first in the document.
        self.prepend_paragraphs(document_id, paragraphs)
        # Bulk-insert the new paragraphs and problem mappings.
        Paragraph.objects.bulk_create(paragraphs)
        ProblemParagraphMapping.objects.bulk_create(problem_paragraph_mappings)
        # Bulk-save the updated chat records.
        ChatRecord.objects.bulk_update(chat_record_list, ['improve_paragraph_id_list'])
        update_document_char_length(document_id)
        for chat_id in chat_ids:
            ChatCountSerializer(data={'chat_id': chat_id}).update_chat()
        return paragraph_ids, knowledge_id

    @staticmethod
    def prepend_paragraphs(document_id, paragraphs):
        """Assign positions 1..n to ``paragraphs`` and shift the document's
        existing paragraphs after them (in-memory for the new ones, persisted
        for the existing ones)."""
        # Fetch all existing paragraphs in position order.
        existing_paragraphs = list(Paragraph.objects.filter(
            document_id=document_id
        ).order_by('position'))
        # Number of new paragraphs being prepended.
        new_count = len(paragraphs)
        # If the document already has paragraphs, shift them past the new ones.
        if existing_paragraphs:
            # Reassign existing positions starting at new_count + 1.
            for i, existing_paragraph in enumerate(existing_paragraphs):
                existing_paragraph.position = new_count + i + 1
        # Bulk-update the shifted positions.
        if existing_paragraphs:
            Paragraph.objects.bulk_update(existing_paragraphs, ['position'])
        # Assign positions to the new paragraphs starting at 1.
        for i, paragraph in enumerate(paragraphs):
            paragraph.position = i + 1
class ApplicationChatRecordImproveSerializer(serializers.Serializer):
    """Annotate one conversation record: store its (question, answer) pair as
    a paragraph of a knowledge-base document."""
    chat_id = serializers.UUIDField(required=True, label=_("Conversation ID"))
    chat_record_id = serializers.UUIDField(required=True,
                                           label=_("Conversation record id"))
    knowledge_id = serializers.UUIDField(required=True, label=_("Knowledge base id"))
    document_id = serializers.UUIDField(required=True, label=_("Document id"))
    application_id = serializers.UUIDField(required=True, label=_("Application id"))
    workspace_id = serializers.CharField(required=True, label=_("Workspace ID"))

    def is_valid(self, *, raise_exception=False):
        """Field validation plus existence checks for the application, the
        knowledge base, and the document within that knowledge base.

        Note: always raises AppApiException(500) on failure, regardless of
        ``raise_exception``.
        """
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Application id does not exist'))
        query_set = QuerySet(Knowledge).filter(id=self.data.get('knowledge_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Knowledge id does not exist'))
        if not QuerySet(Document).filter(id=self.data.get('document_id'),
                                         knowledge_id=self.data.get('knowledge_id')).exists():
            raise AppApiException(500, gettext("The document id is incorrect"))

    @staticmethod
    def post_embedding_paragraph(chat_record, paragraph_id, knowledge_id):
        # Runs after improve (via @post): embed the new paragraph with the
        # knowledge base's embedding model.
        model_id = get_embedding_model_id_by_knowledge_id(knowledge_id)
        # Fire the vectorization event.
        embedding_by_paragraph(paragraph_id, model_id)
        return chat_record

    @post(post_function=post_embedding_paragraph)
    @transaction.atomic
    def improve(self, instance: Dict, request=None, scope='WORKSPACE', with_valid=True):
        """Create the paragraph and problem mapping for one chat record and
        append the new paragraph id to the record's annotation list.

        Returns:
            tuple: (serialized chat record, paragraph id, knowledge id),
            forwarded by @post to post_embedding_paragraph.

        Raises:
            AppUnauthorizedFailed(403): caller lacks document-edit permission.
            AppApiException(500): conversation record does not exist.
        """
        if with_valid:
            self.is_valid(raise_exception=True)
        if scope == 'WORKSPACE':
            is_permission = get_is_permissions(request, workspace_id=self.data.get('workspace_id'),
                                               knowledge_id=self.data.get("knowledge_id"))(
                PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
                PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
                RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
                ViewPermission([RoleConstants.USER.get_workspace_role()],
                               [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()],
                               CompareConstants.AND),
            )
        else:
            is_permission = get_is_permissions(request, workspace_id=self.data.get('workspace_id'),
                                               knowledge_id=self.data.get("knowledge_id"))(
                PermissionConstants.RESOURCE_KNOWLEDGE_DOCUMENT_EDIT, RoleConstants.ADMIN
            )
        if not is_permission:
            raise AppUnauthorizedFailed(403, gettext('No permission to access'))
        ApplicationChatRecordImproveInstanceSerializer(data=instance).is_valid(raise_exception=True)
        chat_record_id = self.data.get('chat_record_id')
        chat_id = self.data.get('chat_id')
        chat_record = QuerySet(ChatRecord).filter(id=chat_record_id, chat_id=chat_id).first()
        if chat_record is None:
            raise AppApiException(500, gettext('Conversation record does not exist'))
        document_id = self.data.get("document_id")
        knowledge_id = self.data.get("knowledge_id")
        # Append after the document's current last paragraph (0 when empty).
        max_position = Paragraph.objects.filter(document_id=document_id).aggregate(
            max_position=Max('position')
        )['max_position'] or 0
        paragraph = Paragraph(
            id=uuid.uuid7(),
            document_id=document_id,
            content=instance.get("content"),
            knowledge_id=knowledge_id,
            title=instance.get("title") if 'title' in instance else '',
            position=max_position + 1
        )
        # Use the supplied question text, else fall back to the record's own.
        problem_text = instance.get('problem_text') if instance.get(
            'problem_text') is not None else chat_record.problem_text
        # NOTE(review): '_' rebinding here shadows the gettext_lazy alias
        # locally.
        problem, _ = QuerySet(Problem).get_or_create(content=problem_text, knowledge_id=knowledge_id)
        problem_paragraph_mapping = ProblemParagraphMapping(id=uuid.uuid7(), knowledge_id=knowledge_id,
                                                            document_id=document_id,
                                                            problem_id=problem.id,
                                                            paragraph_id=paragraph.id)
        # Insert the paragraph.
        paragraph.save()
        # Insert the linked problem mapping.
        problem_paragraph_mapping.save()
        chat_record.improve_paragraph_id_list.append(paragraph.id)
        update_document_char_length(document_id)
        # Persist the annotation on the chat record.
        chat_record.save()
        ChatCountSerializer(data={'chat_id': chat_id}).update_chat()
        return ChatRecordSerializerModel(chat_record).data, paragraph.id, knowledge_id
class Operate(serializers.Serializer):
    """Delete one annotation: detach a paragraph from a conversation record
    and delete the paragraph itself."""
    chat_id = serializers.UUIDField(required=True, label=_("Conversation ID"))
    chat_record_id = serializers.UUIDField(required=True,
                                           label=_("Conversation record id"))
    knowledge_id = serializers.UUIDField(required=True, label=_("Knowledge base id"))
    document_id = serializers.UUIDField(required=True, label=_("Document id"))
    paragraph_id = serializers.UUIDField(required=True, label=_("Paragraph id"))
    workspace_id = serializers.CharField(required=True, label=_("Workspace ID"))

    def delete(self, request=None, scope='WORKSPACE', with_valid=True):
        """Remove ``paragraph_id`` from the record's annotation list, then
        delete the paragraph via ParagraphSerializers.Operate.

        Returns:
            bool: True on success.

        Raises:
            AppUnauthorizedFailed(403): caller lacks document-edit permission.
            AppApiException(500): record missing or paragraph not linked to it.
        """
        if with_valid:
            self.is_valid(raise_exception=True)
        if scope == 'WORKSPACE':
            is_permission = get_is_permissions(request=request, workspace_id=self.data.get('workspace_id'),
                                               knowledge_id=self.data.get("knowledge_id"))(
                PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
                PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
                RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
                ViewPermission([RoleConstants.USER.get_workspace_role()],
                               [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()],
                               CompareConstants.AND),
            )
        else:
            is_permission = get_is_permissions(request=request, workspace_id=self.data.get('workspace_id'),
                                               knowledge_id=self.data.get("knowledge_id"))(
                PermissionConstants.RESOURCE_KNOWLEDGE_DOCUMENT_EDIT, RoleConstants.ADMIN
            )
        if not is_permission:
            raise AppUnauthorizedFailed(403, gettext('No permission to access'))
        workspace_id = self.data.get('workspace_id')
        chat_record_id = self.data.get('chat_record_id')
        chat_id = self.data.get('chat_id')
        knowledge_id = self.data.get('knowledge_id')
        document_id = self.data.get('document_id')
        paragraph_id = self.data.get('paragraph_id')
        chat_record = QuerySet(ChatRecord).filter(id=chat_record_id, chat_id=chat_id).first()
        if chat_record is None:
            raise AppApiException(500, gettext('Conversation record does not exist'))
        # Idiomatic membership test instead of calling list.__contains__().
        if uuid.UUID(paragraph_id) not in chat_record.improve_paragraph_id_list:
            message = lazy_format(
                gettext(
                    'The paragraph id is wrong. The current conversation record does not exist. [{paragraph_id}] paragraph id'),
                paragraph_id=paragraph_id)
            # str() instead of calling the __str__ dunder directly.
            raise AppApiException(500, str(message))
        chat_record.improve_paragraph_id_list = [row for row in chat_record.improve_paragraph_id_list if
                                                 str(row) != paragraph_id]
        chat_record.save()
        o = ParagraphSerializers.Operate(
            data={"workspace_id": workspace_id, "knowledge_id": knowledge_id, 'document_id': document_id,
                  "paragraph_id": paragraph_id})
        o.is_valid(raise_exception=True)
        o.delete()
        return True
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/serializers/application_chat_record.py",
"license": "GNU General Public License v3.0",
"lines": 456,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/application/views/application_chat.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_chat.py
@date:2025/6/10 11:00
@desc:
"""
import uuid_utils.compat as uuid
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from application.api.application_chat import ApplicationChatQueryAPI, ApplicationChatQueryPageAPI, \
ApplicationChatExportAPI
from application.models import ChatUserType, Application
from application.serializers.application_chat import ApplicationChatQuerySerializers
from chat.api.chat_api import ChatAPI, PromptGenerateAPI
from chat.api.chat_authentication_api import ChatOpenAPI
from chat.serializers.chat import OpenChatSerializers, ChatSerializers, DebugChatSerializers, PromptGenerateSerializer
from common.auth import TokenAuth
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
from common.log.log import log
from common.result import result
from common.utils.common import query_params_to_single_dict
def get_application_operation_object(application_id):
    """Resolve the audit-log operation object for an application id.

    Returns ``{'name': <application name>}`` when the application exists,
    otherwise an empty dict.
    """
    application = QuerySet(model=Application).filter(id=application_id).first()
    if application is None:
        return {}
    return {'name': application.name}
class ApplicationChat(APIView):
    """GET: list an application's conversations, filtered by query params."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_("Get the conversation list"),
        summary=_("Get the conversation list"),
        operation_id=_("Get the conversation list"),  # type: ignore
        request=ApplicationChatQueryAPI.get_request(),
        parameters=ApplicationChatQueryAPI.get_parameters(),
        responses=ApplicationChatQueryAPI.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str):
        # Merge query params with the path ids and delegate to the serializer.
        return result.success(ApplicationChatQuerySerializers(
            data={**query_params_to_single_dict(request.query_params), 'workspace_id': workspace_id,
                  'application_id': application_id,
                  }).list())
class Page(APIView):
    """GET: paged variant of the conversation list."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_("Get the conversation list by page"),
        summary=_("Get the conversation list by page"),
        operation_id=_("Get the conversation list by page"),  # type: ignore
        request=ApplicationChatQueryPageAPI.get_request(),
        parameters=ApplicationChatQueryPageAPI.get_parameters(),
        responses=ApplicationChatQueryPageAPI.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str, current_page: int, page_size: int):
        # current_page / page_size come from the URL path.
        return result.success(ApplicationChatQuerySerializers(
            data={**query_params_to_single_dict(request.query_params), 'workspace_id': workspace_id,
                  'application_id': application_id,
                  }).page(current_page=current_page,
                          page_size=page_size))
class Export(APIView):
    """POST: export conversation logs; returns the serializer's file response
    directly (not wrapped in result.success)."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['POST'],
        description=_("Export conversation"),
        summary=_("Export conversation"),
        operation_id=_("Export conversation"),  # type: ignore
        request=ApplicationChatExportAPI.get_request(),
        parameters=ApplicationChatExportAPI.get_parameters(),
        responses=ApplicationChatExportAPI.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_EXPORT.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_EXPORT.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def post(self, request: Request, workspace_id: str, application_id: str):
        # Filters come from query params; export options from the body.
        return ApplicationChatQuerySerializers(
            data={**query_params_to_single_dict(request.query_params), 'workspace_id': workspace_id,
                  'application_id': application_id,
                  }).export(request.data)
class OpenView(APIView):
    """GET: open a temporary (debug, anonymous-user) chat session for an
    application and return its session id."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_("Get a temporary session id based on the application id"),
        summary=_("Get a temporary session id based on the application id"),
        operation_id=_("Get a temporary session id based on the application id"),  # type: ignore
        parameters=ChatOpenAPI.get_parameters(),
        responses=None,
        tags=[_('Application')]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str):
        # A fresh uuid7 identifies the anonymous chat user; debug=True marks
        # the session as a debug conversation.
        return result.success(OpenChatSerializers(
            data={'workspace_id': workspace_id, 'application_id': application_id,
                  'chat_user_id': str(uuid.uuid7()), 'chat_user_type': ChatUserType.ANONYMOUS_USER,
                  'debug': True}).open())
class ChatView(APIView):
    """POST: send a message in a debug conversation identified by chat_id."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['POST'],
        description=_("dialogue"),
        summary=_("dialogue"),
        operation_id=_("dialogue"),  # type: ignore
        request=ChatAPI.get_request(),
        parameters=ChatAPI.get_parameters(),
        responses=None,
        tags=[_('Application')]  # type: ignore
    )
    def post(self, request: Request, chat_id: str):
        # No @has_permissions decorator here; access control is left to the
        # debug-chat serializer flow.
        return DebugChatSerializers(data={'chat_id': chat_id}).chat(request.data)
class PromptGenerateView(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['POST'],
description=_("generate prompt"),
summary=_("generate prompt"),
operation_id=_("generate prompt"), # type: ignore
request=PromptGenerateAPI.get_request(),
parameters=PromptGenerateAPI.get_parameters(),
responses=None,
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_READ.get_workspace_application_permission(),
PermissionConstants.APPLICATION_READ.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate='Generate prompt',
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')))
def post(self, request: Request, workspace_id: str, model_id:str, application_id: str):
return PromptGenerateSerializer(data={'workspace_id': workspace_id, 'model_id': model_id, 'application_id': application_id}).generate_prompt(instance=request.data) | {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/views/application_chat.py",
"license": "GNU General Public License v3.0",
"lines": 162,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/views/application_chat_record.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_chat_record.py
@date:2025/6/10 15:08
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from application.api.application_chat_record import ApplicationChatRecordQueryAPI, \
ApplicationChatRecordImproveParagraphAPI, ApplicationChatRecordAddKnowledgeAPI
from application.serializers.application_chat_record import ApplicationChatRecordQuerySerializers, \
ApplicationChatRecordImproveSerializer, ChatRecordImproveSerializer, ApplicationChatRecordAddKnowledgeSerializer, \
ChatRecordOperateSerializer
from common import result
from common.auth import TokenAuth
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
from common.utils.common import query_params_to_single_dict
class ApplicationChatRecord(APIView):
    """GET: list the records of one conversation."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_("Get the conversation record list"),
        summary=_("Get the conversation record list"),
        operation_id=_("Get the conversation record list"),  # type: ignore
        request=ApplicationChatRecordQueryAPI.get_request(),
        parameters=ApplicationChatRecordQueryAPI.get_parameters(),
        responses=ApplicationChatRecordQueryAPI.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str, chat_id: str):
        # Merge query params with the path ids and delegate to the serializer.
        return result.success(ApplicationChatRecordQuerySerializers(
            data={**query_params_to_single_dict(request.query_params), 'workspace_id': workspace_id,
                  'application_id': application_id,
                  'chat_id': chat_id
                  }).list())
class Page(APIView):
    """GET: paged variant of the conversation record list."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_("Get the conversation record list by page"),
        summary=_("Get the conversation record list by page"),
        operation_id=_("Get the conversation record list by page"),  # type: ignore
        request=ApplicationChatRecordQueryAPI.get_request(),
        parameters=ApplicationChatRecordQueryAPI.get_parameters(),
        responses=ApplicationChatRecordQueryAPI.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str, chat_id: str, current_page: int,
            page_size: int):
        # current_page / page_size come from the URL path.
        return result.success(ApplicationChatRecordQuerySerializers(
            data={**query_params_to_single_dict(request.query_params), 'workspace_id': workspace_id,
                  'application_id': application_id,
                  'chat_id': chat_id}).page(
            current_page=current_page,
            page_size=page_size))
class ApplicationChatRecordOperateAPI(APIView):
    """GET: fetch the details of one conversation record."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_("Get conversation record details"),
        summary=_("Get conversation record details"),
        operation_id=_("Get conversation record details"),  # type: ignore
        request=ApplicationChatRecordQueryAPI.get_request(),
        parameters=ApplicationChatRecordQueryAPI.get_parameters(),
        responses=ApplicationChatRecordQueryAPI.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_READ.get_workspace_permission_workspace_manage_role(),
                     PermissionConstants.APPLICATION_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str, chat_id: str, chat_record_id: str):
        return result.success(ChatRecordOperateSerializer(
            data={
                'workspace_id': workspace_id,
                'application_id': application_id,
                'chat_id': chat_id,
                'chat_record_id': chat_record_id}).one(True))
class ApplicationChatRecordAddKnowledge(APIView):
    """POST: bulk-add whole conversations to a knowledge-base document."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['POST'],
        description=_("Add to Knowledge Base"),
        summary=_("Add to Knowledge Base"),
        operation_id=_("Add to Knowledge Base"),  # type: ignore
        request=ApplicationChatRecordAddKnowledgeAPI.get_request(),
        parameters=ApplicationChatRecordAddKnowledgeAPI.get_parameters(),
        responses=ApplicationChatRecordAddKnowledgeAPI.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_ADD_KNOWLEDGE.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_ADD_KNOWLEDGE.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def post(self, request: Request, workspace_id: str, application_id: str):
        # Build the payload once: it serves both as serializer input and as
        # the post_improve instance (the original built the same dict twice,
        # with non-PEP8 'data = ' keyword spacing).
        payload = {'workspace_id': workspace_id, 'application_id': application_id, **request.data}
        return result.success(
            ApplicationChatRecordAddKnowledgeSerializer(data=payload).post_improve(payload, request=request))
class ApplicationChatRecordImprove(APIView):
    """GET: list the paragraphs created by annotating one conversation
    record."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_("Get the list of marked paragraphs"),
        summary=_("Get the list of marked paragraphs"),
        operation_id=_("Get the list of marked paragraphs"),  # type: ignore
        request=ApplicationChatRecordQueryAPI.get_request(),
        parameters=ApplicationChatRecordQueryAPI.get_parameters(),
        responses=ApplicationChatRecordQueryAPI.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_ANNOTATION.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_ANNOTATION.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str, chat_id: str, chat_record_id: str):
        return result.success(ChatRecordImproveSerializer(
            data={'workspace_id': workspace_id, 'application_id': application_id, 'chat_id': chat_id,
                  'chat_record_id': chat_record_id}).get())
class ApplicationChatRecordImproveParagraph(APIView):
    """PUT: annotate a conversation record into a knowledge-base paragraph."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['PUT'],
        description=_("Annotation"),
        summary=_("Annotation"),
        operation_id=_("Annotation"),  # type: ignore
        request=ApplicationChatRecordImproveParagraphAPI.get_request(),
        parameters=ApplicationChatRecordImproveParagraphAPI.get_parameters(),
        responses=ApplicationChatRecordImproveParagraphAPI.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_ANNOTATION.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_ANNOTATION.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def put(self, request: Request,
            workspace_id: str,
            application_id: str,
            chat_id: str,
            chat_record_id: str,
            knowledge_id: str,
            document_id: str):
        # Path ids locate the record and target document; the request body is
        # the annotation payload (title/content/problem_text).
        return result.success(ApplicationChatRecordImproveSerializer(
            data={'workspace_id': workspace_id, 'application_id': application_id, 'chat_id': chat_id,
                  'chat_record_id': chat_record_id,
                  'knowledge_id': knowledge_id, 'document_id': document_id}).improve(request.data, request=request))
class Operate(APIView):
    """DELETE: remove one annotation (record/paragraph link and paragraph)."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['DELETE'],
        description=_("Delete a Annotation"),
        summary=_("Delete a Annotation"),
        operation_id=_("Delete a Annotation"),  # type: ignore
        request=ApplicationChatRecordImproveParagraphAPI.Operate.get_request(),
        parameters=ApplicationChatRecordImproveParagraphAPI.Operate.get_parameters(),
        responses=ApplicationChatRecordImproveParagraphAPI.Operate.get_response(),
        tags=[_("Application/Conversation Log")]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_CHAT_LOG_ANNOTATION.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_CHAT_LOG_ANNOTATION.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def delete(self, request: Request, workspace_id: str, application_id: str, chat_id: str, chat_record_id: str,
               knowledge_id: str,
               document_id: str, paragraph_id: str):
        return result.success(ApplicationChatRecordImproveSerializer.Operate(
            data={'chat_id': chat_id, 'chat_record_id': chat_record_id, 'workspace_id': workspace_id,
                  'application_id': application_id,
                  'knowledge_id': knowledge_id, 'document_id': document_id,
                  'paragraph_id': paragraph_id}).delete(request=request))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/views/application_chat_record.py",
"license": "GNU General Public License v3.0",
"lines": 197,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/api/application_stats.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_stats.py
@date:2025/6/9 20:45
@desc:
"""
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from application.serializers.application_stats import ApplicationStatsSerializer
from common.mixins.api_mixin import APIMixin
from common.result import ResultSerializer
class ApplicationStatsResult(ResultSerializer):
    # Result wrapper whose 'data' payload is a list of per-day stats rows.
    def get_data(self):
        return ApplicationStatsSerializer(many=True)
class ApplicationStatsAPI(APIMixin):
    """OpenAPI schema pieces for the application statistics endpoint."""

    @staticmethod
    def get_parameters():
        """Path params (workspace_id, application_id) and required query
        params (start_time, end_time) for the stats endpoint."""
        return [OpenApiParameter(
            name="workspace_id",
            # Was the Chinese "工作空间id"; translated to match the English
            # descriptions of the other parameters in this module.
            description="workspace ID",
            type=OpenApiTypes.STR,
            location='path',
            required=True,
        ),
            OpenApiParameter(
                name="application_id",
                description="application ID",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="start_time",
                description="start Time",
                type=OpenApiTypes.STR,
                required=True,
            ),
            OpenApiParameter(
                name="end_time",
                description="end Time",
                type=OpenApiTypes.STR,
                required=True,
            ),
        ]

    @staticmethod
    def get_response():
        return ApplicationStatsResult
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/api/application_stats.py",
"license": "GNU General Public License v3.0",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/serializers/application_stats.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_stats.py
@date:2025/6/9 20:34
@desc:
"""
import datetime
import os
from typing import Dict, List
from django.db import models
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from django.utils import timezone
from rest_framework import serializers
from application.models import ApplicationChatUserStats, Application
from common.db.search import native_search, get_dynamics_model
from common.exception.app_exception import AppApiException
from common.utils.common import get_file_content
from maxkb.conf import PROJECT_DIR
from maxkb.settings import edition
class ApplicationStatsSerializer(serializers.Serializer):
    """Read-only row shape for the per-day application statistics trend.

    One instance describes a single day's aggregated counters, matching the
    merged dicts produced by
    ApplicationStatisticsSerializer.get_chat_record_aggregate_trend().
    """
    chat_record_count = serializers.IntegerField(required=True, label=_("Number of conversations"))
    customer_added_count = serializers.IntegerField(required=True, label=_("Number of new users"))
    customer_num = serializers.IntegerField(required=True, label=_("Total number of users"))
    # Day formatted as 'YYYY-MM-DD' (see get_days_between_dates in this module).
    day = serializers.CharField(required=True, label=_("date"))
    star_num = serializers.IntegerField(required=True, label=_("Number of Likes"))
    tokens_num = serializers.IntegerField(required=True, label=_("Tokens consumption"))
    trample_num = serializers.IntegerField(required=True, label=_("Number of thumbs-downs"))
class ApplicationStatisticsSerializer(serializers.Serializer):
    """Query serializer computing per-application usage statistics.

    Validates that the target application exists (optionally scoped to a
    workspace) and exposes aggregation helpers backed by the raw SQL files
    under ``apps/application/sql``.
    """
    workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    start_time = serializers.DateField(format='%Y-%m-%d', label=_("Start time"))
    end_time = serializers.DateField(format='%Y-%m-%d', label=_("End time"))

    def is_valid(self, *, raise_exception=False):
        """Validate fields and ensure the application id exists.

        Always raises on invalid input regardless of ``raise_exception``
        (shared convention across this app's serializers).
        """
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Application id does not exist'))

    def get_end_time(self):
        """Inclusive range end: end-of-day, timezone-aware."""
        d = datetime.datetime.strptime(self.data.get('end_time'), '%Y-%m-%d').date()
        naive = datetime.datetime.combine(d, datetime.time.max)
        return timezone.make_aware(naive, timezone.get_default_timezone())

    def get_start_time(self):
        """Inclusive range start: start-of-day, timezone-aware."""
        d = datetime.datetime.strptime(self.data.get('start_time'), '%Y-%m-%d').date()
        naive = datetime.datetime.combine(d, datetime.time.min)
        return timezone.make_aware(naive, timezone.get_default_timezone())

    def get_customer_count_trend(self, with_valid=True):
        """Per-day count of newly added chat users in the queried range."""
        if with_valid:
            self.is_valid(raise_exception=True)
        start_time = self.get_start_time()
        end_time = self.get_end_time()
        return native_search(
            {'default_sql': QuerySet(ApplicationChatUserStats).filter(
                application_id=self.data.get('application_id'),
                create_time__gte=start_time,
                create_time__lte=end_time)},
            select_string=get_file_content(
                os.path.join(PROJECT_DIR, "apps", "application", 'sql', 'customer_count_trend.sql')))

    def get_chat_record_aggregate_trend(self, with_valid=True):
        """Per-day conversation statistics merged with the new-user trend."""
        if with_valid:
            self.is_valid(raise_exception=True)
        start_time = self.get_start_time()
        end_time = self.get_end_time()
        chat_record_aggregate_trend = native_search(
            {'default_sql': QuerySet(model=get_dynamics_model(
                {'application_chat.application_id': models.UUIDField(),
                 'application_chat_record.create_time': models.DateTimeField()})).filter(
                **{'application_chat.application_id': self.data.get('application_id'),
                   'application_chat_record.create_time__gte': start_time,
                   'application_chat_record.create_time__lte': end_time}
            )},
            select_string=get_file_content(
                os.path.join(PROJECT_DIR, "apps", "application", 'sql', 'chat_record_count_trend.sql')))
        customer_count_trend = self.get_customer_count_trend(with_valid=False)
        return self.merge_customer_chat_record(chat_record_aggregate_trend, customer_count_trend)

    def merge_customer_chat_record(self, chat_record_aggregate_trend: List[Dict], customer_count_trend: List[Dict]):
        """Join both trends on day, filling days without data with zero rows."""
        return [{**self.find(chat_record_aggregate_trend, lambda c: c.get('day').strftime('%Y-%m-%d') == day,
                             {'star_num': 0, 'trample_num': 0, 'tokens_num': 0, 'chat_record_count': 0,
                              'customer_num': 0,
                              'day': day}),
                 **self.find(customer_count_trend, lambda c: c.get('day').strftime('%Y-%m-%d') == day,
                             {'customer_added_count': 0})}
                for day in
                self.get_days_between_dates(self.data.get('start_time'), self.data.get('end_time'))]

    @staticmethod
    def find(source_list, condition, default):
        """Return the first row satisfying ``condition``, else ``default``."""
        # next() with a default avoids materializing the full filtered list.
        return next((row for row in source_list if condition(row)), default)

    @staticmethod
    def get_days_between_dates(start_date, end_date):
        """Return every day in [start_date, end_date] as 'YYYY-MM-DD' strings."""
        start_date = datetime.datetime.strptime(start_date, '%Y-%m-%d')
        end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d')
        days = []
        current_date = start_date
        while current_date <= end_date:
            days.append(current_date.strftime('%Y-%m-%d'))
            current_date += datetime.timedelta(days=1)
        return days

    def get_token_usage_statistics(self, with_valid=True):
        """Token usage aggregated per chat user; PE/EE editions use a richer SQL."""
        if with_valid:
            self.is_valid(raise_exception=True)
        start_time = self.get_start_time()
        end_time = self.get_end_time()
        # `edition in (...)` replaces the unidiomatic list.__contains__ call.
        sql_name = 'get_token_usage_ee.sql' if edition in ('PE', 'EE') else 'get_token_usage.sql'
        return native_search(
            {'default_sql': QuerySet(model=get_dynamics_model(
                {'application_chat.application_id': models.UUIDField(),
                 'application_chat_record.create_time': models.DateTimeField()})).filter(
                **{'application_chat.application_id': self.data.get('application_id'),
                   'application_chat_record.create_time__gte': start_time,
                   'application_chat_record.create_time__lte': end_time}
            )},
            select_string=get_file_content(
                os.path.join(PROJECT_DIR, "apps", "application", 'sql', sql_name)))

    def get_top_questions_statistics(self, with_valid=True):
        """Most frequently asked questions; PE/EE editions use a richer SQL."""
        if with_valid:
            self.is_valid(raise_exception=True)
        start_time = self.get_start_time()
        end_time = self.get_end_time()
        sql_name = 'top_questions_ee.sql' if edition in ('PE', 'EE') else 'top_questions.sql'
        return native_search(
            {'default_sql': QuerySet(model=get_dynamics_model(
                {'application_chat.application_id': models.UUIDField(),
                 'application_chat_record.create_time': models.DateTimeField()})).filter(
                **{'application_chat.application_id': self.data.get('application_id'),
                   'application_chat_record.create_time__gte': start_time,
                   'application_chat_record.create_time__lte': end_time}
            )},
            select_string=get_file_content(
                os.path.join(PROJECT_DIR, "apps", "application", 'sql', sql_name)))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/serializers/application_stats.py",
"license": "GNU General Public License v3.0",
"lines": 141,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/application/views/application_stats.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_stats.py
@date:2025/6/9 20:30
@desc:
"""
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from application.api.application_stats import ApplicationStatsAPI
from application.serializers.application_stats import ApplicationStatisticsSerializer
from common import result
from common.auth import TokenAuth
from django.utils.translation import gettext_lazy as _
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
class ApplicationStats(APIView):
    """Per-day conversation trend for one application."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_('Dialogue-related statistical trends'),
        summary=_('Dialogue-related statistical trends'),
        operation_id=_('Dialogue-related statistical trends'),  # type: ignore
        parameters=ApplicationStatsAPI.get_parameters(),
        responses=ApplicationStatsAPI.get_response(),
        tags=[_('Application')]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_OVERVIEW_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_OVERVIEW_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str):
        query = request.query_params
        payload = {
            'application_id': application_id,
            'workspace_id': workspace_id,
            'start_time': query.get('start_time'),
            'end_time': query.get('end_time'),
        }
        serializer = ApplicationStatisticsSerializer(data=payload)
        return result.success(serializer.get_chat_record_aggregate_trend())
class TokenUsageStatistics(APIView):
    """Token usage statistics for an application, aggregated per chat user."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_('Application token usage statistics'),
        summary=_('Application token usage statistics'),
        operation_id=_('Application token usage statistics'),  # type: ignore
        parameters=ApplicationStatsAPI.get_parameters(),
        responses=ApplicationStatsAPI.get_response(),
        tags=[_('Application')]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_OVERVIEW_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_OVERVIEW_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str):
        query = request.query_params
        payload = {
            'application_id': application_id,
            'workspace_id': workspace_id,
            'start_time': query.get('start_time'),
            'end_time': query.get('end_time'),
        }
        serializer = ApplicationStatisticsSerializer(data=payload)
        return result.success(serializer.get_token_usage_statistics())
class TopQuestionsStatistics(APIView):
    """Most frequently asked questions for an application."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_('Application top question statistics'),
        summary=_('Application top question statistics'),
        operation_id=_('Application top question statistics'),  # type: ignore
        parameters=ApplicationStatsAPI.get_parameters(),
        responses=ApplicationStatsAPI.get_response(),
        tags=[_('Application')]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_OVERVIEW_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_OVERVIEW_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str):
        query = request.query_params
        payload = {
            'application_id': application_id,
            'workspace_id': workspace_id,
            'start_time': query.get('start_time'),
            'end_time': query.get('end_time'),
        }
        serializer = ApplicationStatisticsSerializer(data=payload)
        return result.success(serializer.get_top_questions_statistics())
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/views/application_stats.py",
"license": "GNU General Public License v3.0",
"lines": 95,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/serializers/application_access_token.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_access_token.py
@date:2025/6/9 17:49
@desc:
"""
import hashlib
import uuid_utils.compat as uuid
from django.core.cache import cache
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from application.models import ApplicationAccessToken, Application
from common.constants.cache_version import Cache_Version
from common.database_model_manage.database_model_manage import DatabaseModelManage
from common.exception.app_exception import AppApiException
class AccessTokenEditSerializer(serializers.Serializer):
    """Payload shape for editing an application's public-access settings.

    All fields are optional; ``AccessTokenSerializer.edit`` applies only the
    keys present in the payload.
    """
    access_token_reset = serializers.BooleanField(required=False,
                                                  label=_("Reset Token"))
    is_active = serializers.BooleanField(required=False, label=_("Is it enabled"))
    access_num = serializers.IntegerField(required=False, max_value=10000000,
                                          min_value=0,
                                          label=_("Number of visits"))
    white_active = serializers.BooleanField(required=False,
                                            label=_("Whether to enable whitelist"))
    # Bug fix: a stray trailing comma previously made this attribute a
    # one-element tuple, so DRF never registered `white_list` as a field and
    # whitelist payloads were silently dropped from validation.
    white_list = serializers.ListSerializer(required=False,
                                            child=serializers.CharField(required=True,
                                                                        label=_("Whitelist")),
                                            label=_("Whitelist"))
    show_source = serializers.BooleanField(required=False,
                                           label=_("Whether to display knowledge sources"))
    show_exec = serializers.BooleanField(required=False,
                                         label=_("Display execution details"))
    language = serializers.CharField(required=False, allow_blank=True, allow_null=True,
                                     label=_("language"))
    # NOTE(review): the two labels below are plain strings rather than _()
    # wrapped like the rest — confirm whether they should be translatable.
    authentication = serializers.BooleanField(default=False, label="Do you need authentication")
    authentication_value = serializers.JSONField(required=False, label="Certified value", default=dict)
class AccessTokenSerializer(serializers.Serializer):
    """Read and edit the public access-token settings of one application."""
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))

    def is_valid(self, *, raise_exception=False):
        """Validate fields and ensure the application exists.

        Scopes the existence check to the workspace when one is supplied.
        Always raises on field errors regardless of ``raise_exception``
        (project convention).
        """
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Application id does not exist'))

    def edit(self, instance):
        """Apply the partial update ``instance`` to the access-token row.

        Only keys present (and non-None, for most fields) are applied.
        Returns the refreshed settings dict from ``one``.
        """
        self.is_valid(raise_exception=True)
        AccessTokenEditSerializer(data=instance).is_valid(raise_exception=True)
        application_access_token = QuerySet(ApplicationAccessToken).get(
            application_id=self.data.get('application_id'))
        if 'is_active' in instance:
            application_access_token.is_active = instance.get("is_active")
        if 'access_token_reset' in instance and instance.get('access_token_reset'):
            # 16 hex chars sliced out of md5(uuid7): an identifier, not a security hash.
            application_access_token.access_token = hashlib.md5(str(uuid.uuid7()).encode()).hexdigest()[8:24]
        if 'access_num' in instance and instance.get('access_num') is not None:
            application_access_token.access_num = instance.get("access_num")
        if 'white_active' in instance and instance.get('white_active') is not None:
            application_access_token.white_active = instance.get("white_active")
        if 'white_list' in instance and instance.get('white_list') is not None:
            application_access_token.white_list = instance.get('white_list')
        if 'show_source' in instance and instance.get('show_source') is not None:
            application_access_token.show_source = instance.get('show_source')
        if 'show_exec' in instance and instance.get('show_exec') is not None:
            application_access_token.show_exec = instance.get('show_exec')
        if 'language' in instance and instance.get('language') is not None:
            application_access_token.language = instance.get('language')
        if 'language' not in instance or instance.get('language') is None:
            # NOTE(review): unlike the other fields, omitting `language` resets it
            # to None — confirm this reset-on-omit behaviour is intended.
            application_access_token.language = None
        application_access_token.save()
        # Authentication settings are only persisted when a valid license is cached.
        license_is_valid = cache.get(Cache_Version.SYSTEM.get_key(key='license_is_valid'),
                                     version=Cache_Version.SYSTEM.get_version())
        if license_is_valid:
            if instance.get('authentication') is not None and instance.get(
                    'authentication_value') is not None:
                application_access_token.authentication = instance.get('authentication')
                application_access_token.authentication_value = instance.get('authentication_value')
                application_access_token.save()
        return self.one(with_valid=False)

    def one(self, with_valid=True):
        """Return the settings dict, lazily creating a token row on first access."""
        if with_valid:
            self.is_valid(raise_exception=True)
        application_id = self.data.get("application_id")
        application_access_token = QuerySet(ApplicationAccessToken).filter(
            application_id=application_id).first()
        if application_access_token is None:
            application_access_token = ApplicationAccessToken(application_id=application_id,
                                                              access_token=hashlib.md5(
                                                                  str(uuid.uuid7()).encode()).hexdigest()[
                                                                           8:24], is_active=True)
            application_access_token.save()
        other = {}
        # Authentication fields are only exposed when a valid license is cached.
        license_is_valid = cache.get(Cache_Version.SYSTEM.get_key(key='license_is_valid'),
                                     version=Cache_Version.SYSTEM.get_version())
        if license_is_valid:
            other = {'authentication': application_access_token.authentication,
                     'authentication_value': application_access_token.authentication_value}
        return {'application_id': application_access_token.application_id,
                'access_token': application_access_token.access_token,
                "is_active": application_access_token.is_active,
                'access_num': application_access_token.access_num,
                'white_active': application_access_token.white_active,
                'white_list': application_access_token.white_list,
                'show_source': application_access_token.show_source,
                'show_exec': application_access_token.show_exec,
                'language': application_access_token.language,
                **other,
                }
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/serializers/application_access_token.py",
"license": "GNU General Public License v3.0",
"lines": 111,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/application/views/application_access_token.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_token.py
@date:2025/6/9 17:42
@desc:
"""
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from application.api.application_access_token import ApplicationAccessTokenAPI
from application.models import Application
from application.serializers.application_access_token import AccessTokenSerializer
from common import result
from common.auth import TokenAuth
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
from common.log.log import log
def get_application_operation_object(application_id):
    """Build the audit-log operation object ({'name': ...}) for an application.

    Returns an empty dict when the application no longer exists.
    """
    application = QuerySet(model=Application).filter(id=application_id).first()
    if application is None:
        return {}
    return {"name": application.name}
class AccessToken(APIView):
    """Read and modify an application's public access restriction settings."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['PUT'],
        description=_("Modify application access restriction information"),
        summary=_("Modify application access restriction information"),
        operation_id=_("Modify application access restriction information"),  # type: ignore
        parameters=ApplicationAccessTokenAPI.get_parameters(),
        request=ApplicationAccessTokenAPI.get_request(),
        tags=[_('Application')]  # type: ignore
    )
    @log(menu='Application', operate="Modify application access token",
         get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')))
    @has_permissions(PermissionConstants.APPLICATION_OVERVIEW_ACCESS.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_OVERVIEW_ACCESS.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def put(self, request: Request, workspace_id: str, application_id: str):
        serializer = AccessTokenSerializer(
            data={'workspace_id': workspace_id, 'application_id': application_id})
        return result.success(serializer.edit(request.data))

    @extend_schema(
        methods=['GET'],
        description=_("Get application access restriction information"),
        summary=_("Get application access restriction information"),
        operation_id=_("Get application access restriction information"),  # type: ignore
        parameters=ApplicationAccessTokenAPI.get_parameters(),
        tags=[_('Application')]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role()
                     )
    def get(self, request: Request, workspace_id: str, application_id: str):
        serializer = AccessTokenSerializer(
            data={'workspace_id': workspace_id, 'application_id': application_id})
        return result.success(serializer.one())
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/views/application_access_token.py",
"license": "GNU General Public License v3.0",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/serializers/common.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: common.py
@date:2025/6/9 13:42
@desc:
"""
from typing import List
from django.core.cache import cache
from django.db.models import QuerySet
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from application.models import Application, ChatRecord, Chat, ApplicationVersion, ChatUserType, ApplicationTypeChoices
from application.serializers.application_chat import ChatCountSerializer
from common.constants.cache_version import Cache_Version
from common.database_model_manage.database_model_manage import DatabaseModelManage
from common.exception.app_exception import ChatException
from knowledge.models import Document
from models_provider.models import Model
from models_provider.tools import get_model_credential
from system_manage.models.resource_mapping import ResourceMapping
class ChatInfo:
    def __init__(self,
                 chat_id: str,
                 chat_user_id: str,
                 chat_user_type: str,
                 ip_address: str,
                 source: dict,
                 knowledge_id_list: List[str],
                 exclude_document_id_list: list[str],
                 application_id: str,
                 debug=False):
        """
        :param chat_id: conversation id
        :param chat_user_id: chat user id
        :param chat_user_type: chat user type
        :param knowledge_id_list: knowledge base id list
        :param exclude_document_id_list: documents to exclude from retrieval
        :param application_id: application id
        :param debug: whether this is a debug (unpersisted) session
        :param ip_address: user ip address
        :param source: user source
        """
        self.chat_id = chat_id
        self.chat_user_id = chat_user_id
        self.chat_user_type = chat_user_type
        self.knowledge_id_list = knowledge_id_list
        self.exclude_document_id_list = exclude_document_id_list
        self.application_id = application_id
        # In-memory history of this conversation's records (see append_chat_record).
        self.chat_record_list: List[ChatRecord] = []
        # Lazily populated caches (see get_application / get_chat_user).
        self.application = None
        self.chat_user = None
        self.ip_address = ip_address
        self.source = source
        self.debug = debug
@staticmethod
def get_no_references_setting(knowledge_setting, model_setting):
no_references_setting = knowledge_setting.get(
'no_references_setting', {
'status': 'ai_questioning',
'value': '{question}'})
if no_references_setting.get('status') == 'ai_questioning':
no_references_prompt = model_setting.get('no_references_prompt', '{question}')
no_references_setting['value'] = no_references_prompt if len(no_references_prompt) > 0 else "{question}"
return no_references_setting
    def get_application(self):
        """Load and cache the application definition used for this chat.

        Debug sessions read the live Application row; normal sessions use the
        most recently created ApplicationVersion and fail if the application
        was never published. SIMPLE applications additionally resolve their
        mapped knowledge bases and the inactive documents to exclude.
        """
        if self.debug:
            application = QuerySet(Application).filter(id=self.application_id).first()
            if not application:
                raise ChatException(500, _('The application does not exist'))
        else:
            application = QuerySet(ApplicationVersion).filter(application_id=self.application_id).order_by(
                '-create_time')[0:1].first()
            if not application:
                raise ChatException(500, _("The application has not been published. Please use it after publishing."))
        if application.type == ApplicationTypeChoices.SIMPLE.value:
            # Knowledge base id list mapped to this application
            knowledge_id_list = [str(row.target_id) for row in
                                 QuerySet(ResourceMapping).filter(source_id=self.application_id,
                                                                  source_type='APPLICATION',
                                                                  target_type='KNOWLEDGE')]
            # Inactive documents that must be excluded from retrieval
            exclude_document_id_list = [str(document.id) for document in
                                        QuerySet(Document).filter(
                                            knowledge_id__in=knowledge_id_list,
                                            is_active=False)]
            self.knowledge_id_list = knowledge_id_list
            self.exclude_document_id_list = exclude_document_id_list
        self.application = application
        return application
def get_chat_user(self, asker=None):
if self.chat_user:
return self.chat_user
chat_user_model = DatabaseModelManage.get_model("chat_user")
if self.chat_user_type == ChatUserType.CHAT_USER.value and chat_user_model:
chat_user = QuerySet(chat_user_model).filter(id=self.chat_user_id).first()
return {
'id': str(chat_user.id),
'email': chat_user.email,
'phone': chat_user.phone,
'nick_name': chat_user.nick_name,
'username': chat_user.username,
'source': chat_user.source
}
else:
if asker:
if isinstance(asker, dict):
self.chat_user = asker
else:
self.chat_user = {'username': asker}
else:
self.chat_user = {'username': '游客'}
return self.chat_user
def get_chat_user_group(self, asker=None):
chat_user = self.get_chat_user(asker=asker)
chat_user_id = chat_user.get('id')
if not chat_user_id:
return []
user_group_relation_model = DatabaseModelManage.get_model("user_group_relation")
if user_group_relation_model:
return [{
'id': user_group_relation.group_id,
'name': user_group_relation.group.name
} for user_group_relation in
QuerySet(user_group_relation_model).select_related('group').filter(user_id=chat_user_id)]
return []
    def to_base_pipeline_manage_params(self):
        """Build the base parameter dict consumed by the chat pipeline manager.

        Loads the application (or its published version) and the chat user,
        resolves default model parameters when a model is configured, and
        applies fallback values for every knowledge/model setting.
        """
        self.get_application()
        self.get_chat_user()
        knowledge_setting = self.application.knowledge_setting
        model_setting = self.application.model_setting
        model_id = self.application.model_id
        model_params_setting = None
        if model_id is not None:
            # Default model parameters come from the provider's credential form.
            model = QuerySet(Model).filter(id=model_id).first()
            credential = get_model_credential(model.provider, model.model_type, model.model_name)
            model_params_setting = credential.get_model_params_setting_form(model.model_name).get_default_form_data()
        return {
            'knowledge_id_list': self.knowledge_id_list,
            'exclude_document_id_list': self.exclude_document_id_list,
            'exclude_paragraph_id_list': [],
            # Retrieval fallbacks: top 3 hits at >= 0.6 similarity, 5000 chars max.
            'top_n': 3 if knowledge_setting.get('top_n') is None else knowledge_setting.get('top_n'),
            'similarity': 0.6 if knowledge_setting.get('similarity') is None else knowledge_setting.get('similarity'),
            'max_paragraph_char_number': knowledge_setting.get('max_paragraph_char_number') or 5000,
            'history_chat_record': self.chat_record_list,
            'chat_id': self.chat_id,
            'dialogue_number': self.application.dialogue_number,
            'problem_optimization_prompt': self.application.problem_optimization_prompt if self.application.problem_optimization_prompt is not None and len(
                self.application.problem_optimization_prompt) > 0 else _(
                "() contains the user's question. Answer the guessed user's question based on the context ({question}) Requirement: Output a complete question and put it in the <data></data> tag"),
            'prompt': model_setting.get(
                'prompt') if 'prompt' in model_setting and len(model_setting.get(
                'prompt')) > 0 else Application.get_default_model_prompt(),
            'system': model_setting.get(
                'system', None),
            'model_id': model_id,
            'problem_optimization': self.application.problem_optimization,
            'stream': True,
            'model_setting': model_setting,
            # Application-level params override the provider defaults when set.
            'model_params_setting': model_params_setting if self.application.model_params_setting is None or len(
                self.application.model_params_setting.keys()) == 0 else self.application.model_params_setting,
            'search_mode': self.application.knowledge_setting.get('search_mode') or 'embedding',
            'no_references_setting': self.get_no_references_setting(self.application.knowledge_setting, model_setting),
            'workspace_id': self.application.workspace_id,
            'application_id': self.application_id,
            'mcp_enable': self.application.mcp_enable,
            'mcp_tool_ids': self.application.mcp_tool_ids,
            'mcp_servers': self.application.mcp_servers,
            'mcp_source': self.application.mcp_source,
            'tool_enable': self.application.tool_enable,
            'tool_ids': self.application.tool_ids,
            'application_enable': self.application.application_enable,
            'application_ids': self.application.application_ids,
            'skill_tool_ids': self.application.skill_tool_ids,
            'mcp_output_enable': self.application.mcp_output_enable,
        }
def to_pipeline_manage_params(self, problem_text: str, post_response_handler,
exclude_paragraph_id_list, chat_user_id: str, chat_user_type, ip_address, source,
stream=True,
form_data=None):
if form_data is None:
form_data = {}
params = self.to_base_pipeline_manage_params()
return {**params, 'problem_text': problem_text, 'post_response_handler': post_response_handler,
'exclude_paragraph_id_list': exclude_paragraph_id_list, 'stream': stream, 'chat_user_id': chat_user_id,
'chat_user_type': chat_user_type, 'ip_address': ip_address, 'source': source, 'form_data': form_data}
def set_chat(self, question):
if not self.debug:
if not QuerySet(Chat).filter(id=self.chat_id).exists():
Chat(id=self.chat_id, application_id=self.application_id, abstract=question[0:1024],
chat_user_id=self.chat_user_id, chat_user_type=self.chat_user_type,
ip_address=self.ip_address, source=self.source,
asker=self.get_chat_user()).save()
def set_chat_variable(self, chat_context):
if not self.debug:
chat = QuerySet(Chat).filter(id=self.chat_id).first()
if chat:
chat.meta = {**(chat.meta if isinstance(chat.meta, dict) else {}), **chat_context}
chat.save()
else:
cache.set(Cache_Version.CHAT_VARIABLE.get_key(key=self.chat_id), chat_context,
version=Cache_Version.CHAT_VARIABLE.get_version(),
timeout=60 * 30)
def get_chat_variable(self):
if not self.debug:
chat = QuerySet(Chat).filter(id=self.chat_id).first()
if chat:
return chat.meta
return {}
else:
return cache.get(Cache_Version.CHAT_VARIABLE.get_key(key=self.chat_id),
version=Cache_Version.CHAT_VARIABLE.get_version()) or {}
def append_chat_record(self, chat_record: ChatRecord):
chat_record.problem_text = chat_record.problem_text[0:10240] if chat_record.problem_text is not None else ""
chat_record.answer_text = chat_record.answer_text[0:40960] if chat_record.problem_text is not None else ""
is_save = True
# 存入缓存中
for index in range(len(self.chat_record_list)):
record = self.chat_record_list[index]
if record.id == chat_record.id:
self.chat_record_list[index] = chat_record
is_save = False
break
if is_save:
self.chat_record_list.append(chat_record)
if not self.debug:
if not QuerySet(Chat).filter(id=self.chat_id).exists():
Chat(id=self.chat_id, application_id=self.application_id, abstract=chat_record.problem_text[0:1024],
chat_user_id=self.chat_user_id, chat_user_type=self.chat_user_type,
ip_address=self.ip_address, source=self.source,
asker=self.get_chat_user()).save()
else:
QuerySet(Chat).filter(id=self.chat_id).update(update_time=timezone.now())
# 插入会话记录
QuerySet(ChatRecord).update_or_create(id=chat_record.id,
create_defaults={'id': chat_record.id,
'chat_id': chat_record.chat_id,
"vote_status": chat_record.vote_status,
'problem_text': chat_record.problem_text,
'answer_text': chat_record.answer_text,
'answer_text_list': chat_record.answer_text_list,
'message_tokens': chat_record.message_tokens,
'answer_tokens': chat_record.answer_tokens,
'const': chat_record.const,
'details': chat_record.details,
'improve_paragraph_id_list': chat_record.improve_paragraph_id_list,
'run_time': chat_record.run_time,
'source': chat_record.source,
'ip_address': chat_record.ip_address or '',
'index': chat_record.index},
defaults={
"vote_status": chat_record.vote_status,
'problem_text': chat_record.problem_text,
'answer_text': chat_record.answer_text,
'answer_text_list': chat_record.answer_text_list,
'message_tokens': chat_record.message_tokens,
'answer_tokens': chat_record.answer_tokens,
'const': chat_record.const,
'details': chat_record.details,
'improve_paragraph_id_list': chat_record.improve_paragraph_id_list,
'run_time': chat_record.run_time,
'index': chat_record.index,
'source': chat_record.source,
'ip_address': chat_record.ip_address or '',
})
ChatCountSerializer(data={'chat_id': self.chat_id}).update_chat()
def to_dict(self):
return {
'chat_id': self.chat_id,
'chat_user_id': self.chat_user_id,
'chat_user_type': self.chat_user_type,
'ip_address': self.ip_address,
'source': self.source,
'knowledge_id_list': self.knowledge_id_list,
'exclude_document_id_list': self.exclude_document_id_list,
'application_id': self.application_id,
'chat_record_list': [self.chat_record_to_map(c) for c in self.chat_record_list][-20:],
'debug': self.debug
}
def chat_record_to_map(self, chat_record):
return {'id': chat_record.id,
'chat_id': chat_record.chat_id,
'vote_status': chat_record.vote_status,
'problem_text': chat_record.problem_text,
'answer_text': chat_record.answer_text,
'answer_text_list': chat_record.answer_text_list,
'message_tokens': chat_record.message_tokens,
'answer_tokens': chat_record.answer_tokens,
'const': chat_record.const,
'details': chat_record.details,
'improve_paragraph_id_list': chat_record.improve_paragraph_id_list,
'run_time': chat_record.run_time,
'source': chat_record.source,
'ip_address': chat_record.ip_address,
'index': chat_record.index}
@staticmethod
def map_to_chat_record(chat_record_dict):
return ChatRecord(id=chat_record_dict.get('id'),
chat_id=chat_record_dict.get('chat_id'),
vote_status=chat_record_dict.get('vote_status'),
problem_text=chat_record_dict.get('problem_text'),
answer_text=chat_record_dict.get('answer_text'),
answer_text_list=chat_record_dict.get('answer_text_list'),
message_tokens=chat_record_dict.get('message_tokens'),
answer_tokens=chat_record_dict.get('answer_tokens'),
const=chat_record_dict.get('const'),
details=chat_record_dict.get('details'),
improve_paragraph_id_list=chat_record_dict.get('improve_paragraph_id_list'),
run_time=chat_record_dict.get('run_time'),
index=chat_record_dict.get('index'),
source=chat_record_dict.get('source'),
ip_address=chat_record_dict.get('ip_address'))
def set_cache(self):
cache.set(Cache_Version.CHAT.get_key(key=self.chat_id), self.to_dict(),
version=Cache_Version.CHAT_INFO.get_version(),
timeout=60 * 30)
@staticmethod
def map_to_chat_info(chat_info_dict):
c = ChatInfo(chat_info_dict.get('chat_id'), chat_info_dict.get('chat_user_id'),
chat_info_dict.get('chat_user_type'), chat_info_dict.get('ip_address'),
chat_info_dict.get('source'),
chat_info_dict.get('knowledge_id_list'),
chat_info_dict.get('exclude_document_id_list'),
chat_info_dict.get('application_id'),
debug=chat_info_dict.get('debug'))
c.chat_record_list = [ChatInfo.map_to_chat_record(c_r) for c_r in chat_info_dict.get('chat_record_list')]
return c
@staticmethod
def get_cache(chat_id):
chat_info_dict = cache.get(Cache_Version.CHAT.get_key(key=chat_id),
version=Cache_Version.CHAT_INFO.get_version())
if chat_info_dict:
return ChatInfo.map_to_chat_info(chat_info_dict)
return None
def update_resource_mapping_by_application(application_id: str, other_resource_mapping=None):
    """Recompute and persist the resource mapping for one application.

    Collects the resources referenced by the application's own fields and, for
    workflow applications, by its workflow definition, then saves the merged
    mapping. Silently returns when the application no longer exists (the
    original code raised AttributeError on ``application.id`` in that case).

    :param application_id: primary key of the application to remap
    :param other_resource_mapping: extra mapping entries to merge in (optional)
    """
    from application.flow.tools import get_instance_resource, save_workflow_mapping, \
        application_instance_field_call_dict
    from system_manage.models.resource_mapping import ResourceType
    if other_resource_mapping is None:
        other_resource_mapping = []
    application = QuerySet(Application).filter(id=application_id).first()
    if application is None:
        # Nothing to map when the application row is gone.
        return
    instance_mapping = get_instance_resource(application, ResourceType.APPLICATION, str(application.id),
                                             application_instance_field_call_dict)
    # Workflow apps contribute their workflow graph; simple apps contribute none.
    work_flow = application.work_flow if application.type == 'WORK_FLOW' else {}
    save_workflow_mapping(work_flow, ResourceType.APPLICATION, str(application_id),
                          instance_mapping + other_resource_mapping)
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/serializers/common.py",
"license": "GNU General Public License v3.0",
"lines": 351,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/chat/api/chat_api.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat_api.py
@date:2025/6/9 15:23
@desc:
"""
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from application.serializers.application_chat_record import ChatRecordSerializerModel
from chat.serializers.chat import ChatMessageSerializers, GeneratePromptSerializers
from chat.serializers.chat_record import HistoryChatModel, EditAbstractSerializer
from common.mixins.api_mixin import APIMixin
from common.result import ResultSerializer, ResultPageSerializer, DefaultResultSerializer
class PromptGenerateAPI(APIMixin):
    """OpenAPI schema for the prompt-generation endpoint."""

    @staticmethod
    def get_parameters():
        """Declare the three required path parameters."""
        path_params = [
            ("workspace_id", "工作空间id"),
            ("model_id", "模型id"),
            ("application_id", "应用id"),
        ]
        return [OpenApiParameter(name=name,
                                 description=description,
                                 type=OpenApiTypes.STR,
                                 location='path',
                                 required=True)
                for name, description in path_params]

    @staticmethod
    def get_request():
        """Request body is validated by GeneratePromptSerializers."""
        return GeneratePromptSerializers
class ChatAPI(APIMixin):
    """OpenAPI schema for posting a message to an existing conversation."""

    @staticmethod
    def get_parameters():
        chat_id_param = OpenApiParameter(name="chat_id",
                                         description="对话id",
                                         type=OpenApiTypes.STR,
                                         location='path',
                                         required=True)
        return [chat_id_param]

    @staticmethod
    def get_request():
        return ChatMessageSerializers
class ApplicationCreateResponse(ResultSerializer):
    """Result payload: a list of historical chats."""

    def get_data(self):
        data_serializer = HistoryChatModel(many=True)
        return data_serializer
class PageApplicationCreateResponse(ResultPageSerializer):
    """Paged result payload: a list of historical chats."""

    def get_data(self):
        data_serializer = HistoryChatModel(many=True)
        return data_serializer
class ApplicationRecordResponse(ResultSerializer):
    """Result payload: a list of chat records."""

    def get_data(self):
        data_serializer = ChatRecordSerializerModel(many=True)
        return data_serializer
class PageApplicationRecordResponse(ResultPageSerializer):
    """Paged result payload: a list of chat records."""

    def get_data(self):
        data_serializer = ChatRecordSerializerModel(many=True)
        return data_serializer
class HistoricalConversationAPI(APIMixin):
    """OpenAPI schema for listing historical conversations (no parameters)."""

    @staticmethod
    def get_parameters():
        return list()

    @staticmethod
    def get_response():
        return ApplicationCreateResponse
class PageHistoricalConversationAPI(APIMixin):
    """OpenAPI schema for paging historical conversations (no parameters)."""

    @staticmethod
    def get_parameters():
        return list()

    @staticmethod
    def get_response():
        return PageApplicationCreateResponse
class HistoricalConversationOperateAPI(APIMixin):
    """OpenAPI schema for editing a historical conversation (e.g. its abstract)."""

    @staticmethod
    def get_parameters():
        chat_id_param = OpenApiParameter(name="chat_id",
                                         description="对话id",
                                         type=OpenApiTypes.STR,
                                         location='path',
                                         required=True)
        return [chat_id_param]

    @staticmethod
    def get_request():
        return EditAbstractSerializer

    @staticmethod
    def get_response():
        return DefaultResultSerializer
class HistoricalConversationRecordAPI(APIMixin):
    """OpenAPI schema for listing the records of one historical conversation."""

    @staticmethod
    def get_parameters():
        chat_id_param = OpenApiParameter(name="chat_id",
                                         description="对话id",
                                         type=OpenApiTypes.STR,
                                         location='path',
                                         required=True)
        return [chat_id_param]

    @staticmethod
    def get_response():
        return ApplicationRecordResponse
class PageHistoricalConversationRecordAPI(APIMixin):
    """OpenAPI schema for paging the records of one historical conversation."""

    @staticmethod
    def get_parameters():
        chat_id_param = OpenApiParameter(name="chat_id",
                                         description="对话id",
                                         type=OpenApiTypes.STR,
                                         location='path',
                                         required=True)
        return [chat_id_param]

    @staticmethod
    def get_response():
        return PageApplicationRecordResponse
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/api/chat_api.py",
"license": "GNU General Public License v3.0",
"lines": 124,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/chat/serializers/chat.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat.py
@date:2025/6/9 11:23
@desc:
"""
import json
import os
from gettext import gettext
from typing import List, Dict
import uuid_utils.compat as uuid
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from langchain_core.messages import HumanMessage, AIMessage, SystemMessage
from rest_framework import serializers
from application.chat_pipeline.pipeline_manage import PipelineManage
from application.chat_pipeline.step.chat_step.i_chat_step import PostResponseHandler
from application.chat_pipeline.step.chat_step.impl.base_chat_step import BaseChatStep
from application.chat_pipeline.step.generate_human_message_step.impl.base_generate_human_message_step import \
BaseGenerateHumanMessageStep
from application.chat_pipeline.step.reset_problem_step.impl.base_reset_problem_step import BaseResetProblemStep
from application.chat_pipeline.step.search_dataset_step.impl.base_search_dataset_step import BaseSearchDatasetStep
from application.flow.common import Answer, Workflow
from application.flow.i_step_node import WorkFlowPostHandler
from application.flow.tools import to_stream_response_simple
from application.flow.workflow_manage import WorkflowManage
from application.models import Application, ApplicationTypeChoices, \
ChatUserType, ApplicationChatUserStats, ApplicationAccessToken, ChatRecord, Chat, ApplicationVersion
from application.serializers.application import ApplicationOperateSerializer
from application.serializers.common import ChatInfo
from common.database_model_manage.database_model_manage import DatabaseModelManage
from common.exception.app_exception import AppApiException, AppChatNumOutOfBoundsFailed, ChatException
from common.handle.base_to_response import BaseToResponse
from common.handle.impl.response.openai_to_response import OpenaiToResponse
from common.handle.impl.response.system_to_response import SystemToResponse
from common.utils.common import flat_map, get_file_content, is_valid_uuid
from knowledge.models import Document, Paragraph
from maxkb.conf import PROJECT_DIR
from models_provider.models import Model, Status
from models_provider.tools import get_model_instance_by_model_workspace_id
from system_manage.models.resource_mapping import ResourceMapping
class ChatMessagesSerializers(serializers.Serializer):
    """One message of the chat context: who said it ('user'/'ai') and the text."""
    role = serializers.CharField(required=True, label=_("Role"))
    content = serializers.CharField(required=True, label=_("Content"))
class GeneratePromptSerializers(serializers.Serializer):
    """Validates the prompt-generation payload.

    Enforces at most 30 messages and a strictly alternating user/ai history
    (user at even positions, ai at odd positions).
    """
    prompt = serializers.CharField(required=True, label=_("Prompt template"))
    messages = serializers.ListSerializer(child=ChatMessagesSerializers(), required=True, label=_("Chat context"))

    def is_valid(self, *, raise_exception=False):
        super().is_valid(raise_exception=True)
        messages = self.data.get("messages")
        if len(messages) > 30:
            raise AppApiException(400, _("Too many messages"))
        for position, message in enumerate(messages):
            role = message.get('role')
            # Even positions must be 'user', odd positions must be 'ai';
            # any other role is rejected outright.
            expected_role = 'user' if position % 2 == 0 else 'ai'
            if role not in ('user', 'ai') or role != expected_role:
                raise AppApiException(400, _("Authentication failed. Please verify that the parameters are correct."))
class ChatMessageSerializers(serializers.Serializer):
    """Payload of one chat request: the question, streaming/re-chat flags,
    optional node-level debug targeting, attachments and global form variables."""
    message = serializers.CharField(required=True, label=_("User Questions"))
    stream = serializers.BooleanField(required=True,
                                      label=_("Is the answer in streaming mode"))
    re_chat = serializers.BooleanField(required=True, label=_("Do you want to reply again"))
    chat_record_id = serializers.UUIDField(required=False, allow_null=True,
                                           label=_("Conversation record id"))
    node_id = serializers.CharField(required=False, allow_null=True, allow_blank=True,
                                    label=_("Node id"))
    runtime_node_id = serializers.CharField(required=False, allow_null=True, allow_blank=True,
                                            label=_("Runtime node id"))
    node_data = serializers.DictField(required=False, allow_null=True,
                                      label=_("Node parameters"))
    form_data = serializers.DictField(required=False, label=_("Global variables"))
    image_list = serializers.ListField(required=False, label=_("picture"))
    document_list = serializers.ListField(required=False, label=_("document"))
    audio_list = serializers.ListField(required=False, label=_("Audio"))
    other_list = serializers.ListField(required=False, label=_("Other"))
    child_node = serializers.DictField(required=False, allow_null=True,
                                       label=_("Child Nodes"))
def get_post_handler(chat_info: ChatInfo):
    """Build a PostResponseHandler that records each finished simple-app answer
    into *chat_info* and refreshes the session cache.

    :param chat_info: the in-memory chat session the new record is appended to
    :return: a PostHandler instance suitable for the simple chat pipeline
    """

    class PostHandler(PostResponseHandler):
        def handler(self,
                    chat_id,
                    chat_record_id,
                    paragraph_list: List[Paragraph],
                    problem_text: str,
                    answer_text,
                    manage: PipelineManage,
                    step: BaseChatStep,
                    padding_problem_text: str = None,
                    **kwargs):
            # Simple apps produce a single answer attributed to the fixed
            # 'ai-chat-node' pseudo-node; reasoning content rides along if present.
            answer_list = [[Answer(answer_text, 'ai-chat-node', 'ai-chat-node', 'ai-chat-node', {}, 'ai-chat-node',
                                   kwargs.get('reasoning_content', '')).to_dict()]]
            chat_record = ChatRecord(id=chat_record_id,
                                     chat_id=chat_id,
                                     problem_text=problem_text,
                                     answer_text=answer_text,
                                     details=manage.get_details(),
                                     message_tokens=manage.context['message_tokens'],
                                     answer_tokens=manage.context['answer_tokens'],
                                     answer_text_list=answer_list,
                                     run_time=manage.context['run_time'],
                                     index=len(chat_info.chat_record_list) + 1,
                                     ip_address=chat_info.ip_address,
                                     source=chat_info.source
                                     )
            chat_info.append_chat_record(chat_record)
            # Reset the cache so subsequent requests see the new record
            chat_info.set_cache()

    return PostHandler()
class DebugChatSerializers(serializers.Serializer):
    """Replays a debug conversation identified only by its chat id."""
    chat_id = serializers.UUIDField(required=True, label=_("Conversation ID"))

    def chat(self, instance: dict, base_to_response: BaseToResponse = SystemToResponse()):
        """Look up the cached session and delegate to ChatSerializers in debug mode."""
        self.is_valid(raise_exception=True)
        chat_id = self.data.get('chat_id')
        chat_info: ChatInfo = ChatInfo.get_cache(chat_id)
        # Re-attach the live application object before delegating.
        chat_info.application = QuerySet(Application).filter(id=chat_info.application_id).first()
        delegate = ChatSerializers(data={
            'chat_id': chat_id, "chat_user_id": chat_info.chat_user_id,
            "chat_user_type": chat_info.chat_user_type,
            "application_id": chat_info.application.id, "debug": True
        })
        return delegate.chat(instance, base_to_response)
# System prompt template consumed by PromptGenerateSerializer.generate_prompt.
SYSTEM_ROLE = get_file_content(os.path.join(PROJECT_DIR, "apps", "chat", 'template', 'generate_prompt_system'))
class PromptGenerateSerializer(serializers.Serializer):
    """Streams an LLM-generated prompt based on a template and chat history."""
    workspace_id = serializers.CharField(required=False, label=_('Workspace ID'))
    model_id = serializers.CharField(required=False, allow_blank=True, allow_null=True, label=_("Model"))
    application_id = serializers.CharField(required=False, allow_blank=True, allow_null=True, label=_("Application"))

    def is_valid(self, *, raise_exception=False):
        """Validate fields and resolve the target application.

        :return: the Application instance (unusual for is_valid, but callers rely on it)
        :raises AppApiException: when the application does not exist in the workspace
        """
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        application = query_set.first()
        if application is None:
            raise AppApiException(500, _('Application id does not exist'))
        return application

    def generate_prompt(self, instance: dict):
        """Substitute the last user message into the template and stream the
        model's output as SSE 'data:' lines (errors are streamed too)."""
        application = self.is_valid(raise_exception=True)
        GeneratePromptSerializers(data=instance).is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        model_id = self.data.get('model_id')
        prompt = instance.get('prompt')
        messages = instance.get('messages')
        message = messages[-1]['content']
        # Inject the latest user input into the prompt template in place.
        q = prompt.replace("{userInput}", message)
        messages[-1]['content'] = q
        SUPPORTED_MODEL_TYPES = ["LLM", "IMAGE"]
        model_exist = QuerySet(Model).filter(
            id=model_id,
            model_type__in=SUPPORTED_MODEL_TYPES
        ).exists()
        if not model_exist:
            raise Exception(_("Model does not exists or is not an LLM model"))

        def process():
            # Lazily instantiate the model inside the generator so errors
            # surface through the SSE stream rather than the HTTP status.
            model = get_model_instance_by_model_workspace_id(model_id=model_id, workspace_id=workspace_id,
                                                             **application.model_params_setting)
            try:
                for r in model.stream([SystemMessage(content=SYSTEM_ROLE),
                                       *[HumanMessage(content=m.get('content')) if m.get(
                                           'role') == 'user' else AIMessage(
                                           content=m.get('content')) for m in messages]]):
                    yield 'data: ' + json.dumps({'content': r.content}) + '\n\n'
            except Exception as e:
                yield 'data: ' + json.dumps({'error': str(e)}) + '\n\n'

        return to_stream_response_simple(process())
class OpenAIMessage(serializers.Serializer):
    """One message in the OpenAI-compatible request format."""
    content = serializers.CharField(required=True, label=_('content'))
    role = serializers.CharField(required=True, label=_('Role'))
class OpenAIInstanceSerializer(serializers.Serializer):
    """Body of an OpenAI-compatible chat completion request."""
    messages = serializers.ListField(child=OpenAIMessage())
    chat_id = serializers.UUIDField(required=False, label=_("Conversation ID"))
    re_chat = serializers.BooleanField(required=False, label=_("Regenerate"))
    stream = serializers.BooleanField(required=False, label=_("Streaming Output"))
class OpenAIChatSerializer(serializers.Serializer):
    """Adapter that serves MaxKB chats through an OpenAI-compatible interface."""
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    chat_user_id = serializers.CharField(required=True, label=_("Client id"))
    chat_user_type = serializers.CharField(required=True, label=_("Client Type"))
    ip_address = serializers.CharField(required=False, label=_("IP Address"))
    source = serializers.JSONField(required=False, label=_("Source"))

    @staticmethod
    def get_message(instance):
        # The OpenAI format carries history; only the last message is the new question.
        return instance.get('messages')[-1].get('content')

    @staticmethod
    def generate_chat(chat_id, application_id, message, chat_user_id, chat_user_type, ip_address, source):
        """Ensure a chat session exists for *chat_id* (creating or re-opening as
        needed) and return the effective chat id."""
        if chat_id is None:
            # No id supplied: start a brand-new session.
            chat_id = str(uuid.uuid1())
            chat_info = ChatInfo(chat_id, chat_user_id, chat_user_type, ip_address, source, [], [],
                                 application_id)
            chat_info.set_cache()
        else:
            chat_info = ChatInfo.get_cache(chat_id)
            if chat_info is None:
                # Cache miss: rebuild the session from the database.
                open_chat = ChatSerializers(data={
                    'chat_id': chat_id,
                    'chat_user_id': chat_user_id,
                    'chat_user_type': chat_user_type,
                    'application_id': application_id,
                    'ip_address': ip_address,
                    'source': source,
                })
                open_chat.is_valid(raise_exception=True)
                chat_info = open_chat.re_open_chat(chat_id)
                chat_info.set_cache()
        return chat_id

    def chat(self, instance: Dict, with_valid=True):
        """Validate the OpenAI-format request and delegate to ChatSerializers,
        formatting the reply as an OpenAI response."""
        if with_valid:
            self.is_valid(raise_exception=True)
            OpenAIInstanceSerializer(data=instance).is_valid(raise_exception=True)
        chat_id = instance.get('chat_id')
        message = self.get_message(instance)
        re_chat = instance.get('re_chat', False)
        stream = instance.get('stream', False)
        application_id = self.data.get('application_id')
        chat_user_id = self.data.get('chat_user_id')
        chat_user_type = self.data.get('chat_user_type')
        ip_address = self.data.get('ip_address')
        source = self.data.get('source')
        chat_id = self.generate_chat(chat_id, application_id, message, chat_user_id, chat_user_type, ip_address, source)
        return ChatSerializers(
            data={
                'chat_id': chat_id,
                'chat_user_id': chat_user_id,
                'chat_user_type': chat_user_type,
                'application_id': application_id,
                'ip_address': ip_address,
                'source': source,
            }
        ).chat({'message': message,
                're_chat': re_chat,
                'stream': stream,
                'form_data': instance.get('form_data', {}),
                'image_list': instance.get('image_list', []),
                'document_list': instance.get('document_list', []),
                'audio_list': instance.get('audio_list', []),
                'other_list': instance.get('other_list', [])},
               base_to_response=OpenaiToResponse())
class ChatSerializers(serializers.Serializer):
    """Core chat entry point.

    Validates the session, enforces the daily access quota and chat-user
    authentication, then routes the message either to the simple Q&A pipeline
    or to the workflow engine depending on the application type.
    """
    chat_id = serializers.UUIDField(required=True, label=_("Conversation ID"))
    chat_user_id = serializers.CharField(required=True, label=_("Client id"))
    chat_user_type = serializers.CharField(required=True, label=_("Client Type"))
    application_id = serializers.UUIDField(required=True, allow_null=True,
                                           label=_("Application ID"))
    debug = serializers.BooleanField(required=False, label=_("Debug"))
    ip_address = serializers.CharField(required=False, label=_("IP Address"), allow_null=True, allow_blank=True)
    source = serializers.JSONField(required=False, label=_("Source"))

    def is_valid_application_workflow(self, *, raise_exception=False):
        # Workflow applications only need the daily quota check.
        self.is_valid_intraday_access_num()

    def is_valid_chat_id(self, chat_info: ChatInfo):
        """Reject a chat id that belongs to a different application."""
        if self.data.get('application_id') is not None and self.data.get('application_id') != str(
                chat_info.application_id):
            raise ChatException(500, _("Conversation does not exist"))

    def is_valid_intraday_access_num(self):
        """Enforce the per-user daily visit quota (skipped in debug mode and
        for non anonymous/chat user types). Creates the stats row on first visit."""
        if not self.data.get('debug') and [ChatUserType.ANONYMOUS_USER.value,
                                           ChatUserType.CHAT_USER.value].__contains__(
            self.data.get('chat_user_type')):
            access_client = QuerySet(ApplicationChatUserStats).filter(chat_user_id=self.data.get('chat_user_id'),
                                                                      application_id=self.data.get(
                                                                          'application_id')).first()
            if access_client is None:
                access_client = ApplicationChatUserStats(chat_user_id=self.data.get('chat_user_id'),
                                                         chat_user_type=self.data.get('chat_user_type'),
                                                         application_id=self.data.get('application_id'),
                                                         access_num=0,
                                                         intraday_access_num=0)
                access_client.save()
            application_access_token = QuerySet(ApplicationAccessToken).filter(
                application_id=self.data.get('application_id')).first()
            if application_access_token.access_num <= access_client.intraday_access_num:
                raise AppChatNumOutOfBoundsFailed(1002, _("The number of visits exceeds today's visits"))

    def is_valid_application_simple(self, *, chat_info: ChatInfo, raise_exception=False):
        """Quota check plus model-availability check for simple applications.

        :raises ChatException: when the bound model errored or is still downloading
        """
        self.is_valid_intraday_access_num()
        model_id = chat_info.application.model_id
        if model_id is None:
            return chat_info
        model = QuerySet(Model).filter(id=model_id).first()
        if model is None:
            return chat_info
        if model.status == Status.ERROR:
            raise ChatException(500, _("The current model is not available"))
        if model.status == Status.DOWNLOAD:
            raise ChatException(500, _("The model is downloading, please try again later"))
        return chat_info

    def chat_simple(self, chat_info: ChatInfo, instance, base_to_response):
        """Run the simple (non-workflow) Q&A pipeline and return its result."""
        message = instance.get('message')
        re_chat = instance.get('re_chat')
        stream = instance.get('stream')
        chat_user_id = self.data.get('chat_user_id')
        chat_user_type = self.data.get('chat_user_type')
        ip_address = self.data.get('ip_address')
        source = self.data.get('source')
        form_data = instance.get("form_data")
        chat_record_id = instance.get('chat_record_id')
        pipeline_manage_builder = PipelineManage.builder()
        # If question optimization is enabled, add the question optimization step
        if chat_info.application.problem_optimization:
            pipeline_manage_builder.append_step(BaseResetProblemStep)
        # Build the pipeline manager
        pipeline_message = (pipeline_manage_builder.append_step(BaseSearchDatasetStep)
                            .append_step(BaseGenerateHumanMessageStep)
                            .append_step(BaseChatStep)
                            .add_base_to_response(base_to_response)
                            .add_debug(self.data.get('debug', False))
                            .build())
        exclude_paragraph_id_list = []
        # For a repeated question, exclude paragraphs already retrieved last time
        if re_chat:
            paragraph_id_list = flat_map(
                [[paragraph.get('id') for paragraph in chat_record.details['search_step']['paragraph_list']] for
                 chat_record in chat_info.chat_record_list if
                 chat_record.problem_text == message and 'search_step' in chat_record.details and 'paragraph_list' in
                 chat_record.details['search_step']])
            exclude_paragraph_id_list = list(set(paragraph_id_list))
        # Build the run parameters
        params = chat_info.to_pipeline_manage_params(message, get_post_handler(chat_info), exclude_paragraph_id_list,
                                                     chat_user_id, chat_user_type, ip_address, source, stream,
                                                     form_data)
        if chat_record_id:
            params['chat_record_id'] = chat_record_id
        chat_info.set_chat(message)
        # Run the pipeline job
        pipeline_message.run(params)
        return pipeline_message.context['chat_result']

    @staticmethod
    def get_chat_record(chat_record_id):
        """Find a chat record by id, first in the cached session, then the DB.

        NOTE(review): if chat_info is None the first guard is skipped but
        chat_info.chat_id below still dereferences it — confirm callers never
        pass None here.
        """
        if chat_info is not None:
            chat_record_list = [chat_record for chat_record in chat_info.chat_record_list if
                                str(chat_record.id) == str(chat_record_id)]
            if chat_record_list is not None and len(chat_record_list):
                return chat_record_list[-1]
        chat_record = QuerySet(ChatRecord).filter(id=chat_record_id, chat_id=chat_info.chat_id).first()
        if chat_record is None:
            if not is_valid_uuid(chat_record_id):
                raise ChatException(500, _("Conversation record does not exist"))
            chat_record = QuerySet(ChatRecord).filter(id=chat_record_id).first()
        return chat_record

    def chat_work_flow(self, chat_info: ChatInfo, instance: dict, base_to_response):
        """Run the workflow engine for a workflow-type application and return its result."""
        message = instance.get('message')
        re_chat = instance.get('re_chat')
        stream = instance.get('stream')
        chat_user_id = self.data.get("chat_user_id")
        chat_user_type = self.data.get('chat_user_type')
        ip_address = self.data.get('ip_address')
        source = self.data.get('source')
        form_data = instance.get('form_data')
        image_list = instance.get('image_list')
        video_list = instance.get('video_list')
        document_list = instance.get('document_list')
        audio_list = instance.get('audio_list')
        other_list = instance.get('other_list')
        workspace_id = chat_info.application.workspace_id
        chat_record_id = instance.get('chat_record_id')
        debug = self.data.get('debug', False)
        chat_record = None
        history_chat_record = chat_info.chat_record_list
        if chat_record_id is not None:
            # Re-running a specific record: exclude it from the history fed to the engine.
            chat_record = self.get_chat_record(chat_info, chat_record_id)
            if chat_record:
                history_chat_record = [r for r in chat_info.chat_record_list if str(r.id) != chat_record_id]
        work_flow = chat_info.application.work_flow
        work_flow_manage = WorkflowManage(Workflow.new_instance(work_flow),
                                          {'history_chat_record': history_chat_record, 'question': message,
                                           'chat_id': chat_info.chat_id, 'chat_record_id': str(
                                              uuid.uuid7()) if chat_record_id is None else str(chat_record_id),
                                           'stream': stream,
                                           're_chat': re_chat,
                                           'chat_user_id': chat_user_id,
                                           'chat_user_type': chat_user_type,
                                           'ip_address': ip_address,
                                           'source': source,
                                           'workspace_id': workspace_id,
                                           'debug': debug,
                                           'chat_user': chat_info.get_chat_user(),
                                           'chat_user_group': chat_info.get_chat_user_group(),
                                           'application_id': str(chat_info.application_id)},
                                          WorkFlowPostHandler(chat_info),
                                          base_to_response, form_data, image_list, document_list, audio_list,
                                          video_list,
                                          other_list,
                                          instance.get('runtime_node_id'),
                                          instance.get('node_data'), chat_record, instance.get('child_node'))
        chat_info.set_chat(message)
        r = work_flow_manage.run()
        return r

    def is_valid_chat_user(self):
        """When the application requires login-type authentication, verify the
        chat user is authorized (via the optional is_auth_chat_user hook)."""
        chat_user_id = self.data.get('chat_user_id')
        application_id = self.data.get('application_id')
        chat_user_type = self.data.get('chat_user_type')
        is_auth_chat_user = DatabaseModelManage.get_model("is_auth_chat_user")
        application_access_token = QuerySet(ApplicationAccessToken).filter(application_id=application_id).first()
        if application_access_token and application_access_token.authentication and application_access_token.authentication_value.get(
                'type') == 'login':
            if chat_user_type == ChatUserType.CHAT_USER.value and is_auth_chat_user:
                is_auth = is_auth_chat_user(chat_user_id, application_id)
                if not is_auth:
                    raise ChatException(500, _("The chat user is not authorized."))

    def chat(self, instance: dict, base_to_response: BaseToResponse = SystemToResponse()):
        """Validate everything, then dispatch to chat_simple or chat_work_flow."""
        super().is_valid(raise_exception=True)
        ChatMessageSerializers(data=instance).is_valid(raise_exception=True)
        chat_info = self.get_chat_info()
        chat_info.get_application()
        chat_info.get_chat_user(asker=(instance.get('form_data') or {}).get('asker'))
        self.is_valid_chat_id(chat_info)
        self.is_valid_chat_user()
        if chat_info.application.type == ApplicationTypeChoices.SIMPLE:
            self.is_valid_application_simple(raise_exception=True, chat_info=chat_info)
            return self.chat_simple(chat_info, instance, base_to_response)
        else:
            self.is_valid_application_workflow(raise_exception=True)
            return self.chat_work_flow(chat_info, instance, base_to_response)

    def get_chat_info(self):
        """Fetch the session from cache, rebuilding (and re-caching) it from the DB on a miss."""
        self.is_valid(raise_exception=True)
        chat_id = self.data.get('chat_id')
        chat_info: ChatInfo = ChatInfo.get_cache(chat_id)
        if chat_info is None:
            chat_info: ChatInfo = self.re_open_chat(chat_id)
            chat_info.set_cache()
        return chat_info

    def re_open_chat(self, chat_id: str):
        """Rebuild a ChatInfo from the database for an existing, published application.

        :raises ChatException: chat/application missing, or application never published
        """
        chat = QuerySet(Chat).filter(id=chat_id).first()
        if chat is None:
            raise ChatException(500, _("Conversation does not exist"))
        application = QuerySet(Application).filter(id=chat.application_id).first()
        if application is None:
            raise ChatException(500, _("Application does not exist"))
        application_version = QuerySet(ApplicationVersion).filter(application_id=application.id).order_by(
            '-create_time')[0:1].first()
        if application_version is None:
            raise ChatException(500, _("The application has not been published. Please use it after publishing."))
        if application.type == ApplicationTypeChoices.SIMPLE:
            return self.re_open_chat_simple(chat_id, application)
        else:
            return self.re_open_chat_work_flow(chat_id, application)

    def re_open_chat_simple(self, chat_id, application):
        """Rebuild a simple-app session: resolve its knowledge bases, excluded
        documents and the last five chat records (oldest first)."""
        # Knowledge base id list
        knowledge_id_list = [str(row.target_id) for row in
                             QuerySet(ResourceMapping).filter(source_id=str(application.id),
                                                              source_type='APPLICATION',
                                                              target_type='KNOWLEDGE')]
        # Documents to exclude
        exclude_document_id_list = [str(document.id) for document in
                                    QuerySet(Document).filter(
                                        knowledge_id__in=knowledge_id_list,
                                        is_active=False)]
        chat_info = ChatInfo(chat_id, self.data.get('chat_user_id'), self.data.get('chat_user_type'),
                             self.data.get('ip_address'),
                             self.data.get('source'), knowledge_id_list,
                             exclude_document_id_list, application.id)
        chat_record_list = list(QuerySet(ChatRecord).filter(chat_id=chat_id).order_by('-create_time')[0:5])
        chat_record_list.sort(key=lambda r: r.create_time)
        for chat_record in chat_record_list:
            chat_info.chat_record_list.append(chat_record)
        return chat_info

    def re_open_chat_work_flow(self, chat_id, application):
        """Rebuild a workflow-app session with its last five chat records (oldest first)."""
        chat_info = ChatInfo(chat_id, self.data.get('chat_user_id'), self.data.get('chat_user_type'),
                             self.data.get('ip_address'),
                             self.data.get('source'), [], [],
                             application.id)
        chat_record_list = list(QuerySet(ChatRecord).filter(chat_id=chat_id).order_by('-create_time')[0:5])
        chat_record_list.sort(key=lambda r: r.create_time)
        for chat_record in chat_record_list:
            chat_info.chat_record_list.append(chat_record)
        return chat_info
class OpenChatSerializers(serializers.Serializer):
    """Opens a brand-new chat session for an application and returns its chat id."""
    workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
    application_id = serializers.UUIDField(required=True)
    chat_user_id = serializers.CharField(required=True, label=_("Client id"))
    chat_user_type = serializers.CharField(required=True, label=_("Client Type"))
    debug = serializers.BooleanField(required=True, label=_("Debug"))
    ip_address = serializers.CharField(required=False, label=_("IP Address"))
    source = serializers.JSONField(required=False, label=_("Source"))

    def is_valid(self, *, raise_exception=False):
        """Validate fields and confirm the application exists (optionally
        scoped to the workspace)."""
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        application_id = self.data.get('application_id')
        query_set = QuerySet(Application).filter(id=application_id)
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, gettext('Application does not exist'))

    def open(self):
        """Create and cache a new session; non-debug sessions require a
        published application version.

        :return: the new chat id (str)
        """
        self.is_valid(raise_exception=True)
        application_id = self.data.get('application_id')
        application = QuerySet(Application).get(id=application_id)
        debug = self.data.get("debug")
        if not debug:
            application_version = QuerySet(ApplicationVersion).filter(application_id=application_id).order_by(
                '-create_time')[0:1].first()
            if application_version is None:
                raise AppApiException(500,
                                      _("The application has not been published. Please use it after publishing."))
        if application.type == ApplicationTypeChoices.SIMPLE:
            return self.open_simple(application)
        else:
            return self.open_work_flow(application)

    def open_work_flow(self, application):
        """Seed an empty workflow-app session in the cache and return its id."""
        self.is_valid(raise_exception=True)
        application_id = self.data.get('application_id')
        chat_user_id = self.data.get("chat_user_id")
        chat_user_type = self.data.get("chat_user_type")
        ip_address = self.data.get("ip_address")
        source = self.data.get("source")
        debug = self.data.get("debug")
        chat_id = str(uuid.uuid7())
        ChatInfo(chat_id, chat_user_id, chat_user_type, ip_address, source, [],
                 [],
                 application_id, debug).set_cache()
        return chat_id

    def open_simple(self, application):
        """Seed a simple-app session (with its knowledge bases and inactive
        documents excluded) in the cache and return its id."""
        application_id = self.data.get('application_id')
        chat_user_id = self.data.get("chat_user_id")
        chat_user_type = self.data.get("chat_user_type")
        ip_address = self.data.get("ip_address")
        source = self.data.get("source")
        debug = self.data.get("debug")
        knowledge_id_list = [str(row.target_id) for row in
                             QuerySet(ResourceMapping).filter(source_id=str(application_id),
                                                              source_type='APPLICATION',
                                                              target_type='KNOWLEDGE')]
        chat_id = str(uuid.uuid7())
        ChatInfo(chat_id, chat_user_id, chat_user_type, ip_address, source, knowledge_id_list,
                 [str(document.id) for document in
                  QuerySet(Document).filter(
                      knowledge_id__in=knowledge_id_list,
                      is_active=False)],
                 application_id,
                 debug=debug).set_cache()
        return chat_id
class TextToSpeechSerializers(serializers.Serializer):
    """Delegates text-to-speech synthesis to the application's configured TTS model."""
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))

    def text_to_speech(self, instance):
        """Resolve the application owner and forward the request."""
        self.is_valid(raise_exception=True)
        application_id = self.data.get('application_id')
        application = QuerySet(Application).filter(id=application_id).first()
        operate = ApplicationOperateSerializer(
            data={'application_id': application_id,
                  'user_id': application.user_id})
        return operate.text_to_speech(instance, False)
class SpeechToTextSerializers(serializers.Serializer):
application_id = serializers.UUIDField(required=True, label=_("Application ID"))
def speech_to_text(self, instance):
self.is_valid(raise_exception=True)
application_id = self.data.get('application_id')
application = QuerySet(Application).filter(id=application_id).first()
return ApplicationOperateSerializer(
data={'application_id': application_id,
'user_id': application.user_id}).speech_to_text(instance, False)
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/serializers/chat.py",
"license": "GNU General Public License v3.0",
"lines": 550,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/chat/api/chat_authentication_api.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat_authentication_api.py
@date:2025/6/6 19:59
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from chat.serializers.chat import OpenAIInstanceSerializer
from chat.serializers.chat_authentication import AnonymousAuthenticationSerializer
from common.mixins.api_mixin import APIMixin
class OpenAIAPI(APIMixin):
@staticmethod
def get_request():
return OpenAIInstanceSerializer
class ChatAuthenticationAPI(APIMixin):
@staticmethod
def get_request():
return AnonymousAuthenticationSerializer
@staticmethod
def get_parameters():
pass
@staticmethod
def get_response():
pass
class ChatAuthenticationProfileAPI(APIMixin):
@staticmethod
def get_parameters():
return [OpenApiParameter(
name="access_token",
description=_("access_token"),
type=OpenApiTypes.STR,
location='query',
required=True,
)]
class ChatOpenAPI(APIMixin):
@staticmethod
def get_parameters():
return []
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/api/chat_authentication_api.py",
"license": "GNU General Public License v3.0",
"lines": 42,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/chat/serializers/chat_authentication.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: ChatAuthentication.py
@date:2025/6/6 13:48
@desc:
"""
import uuid_utils.compat as uuid
from django.core import signing
from django.core.cache import cache
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from application.models import ApplicationAccessToken, ChatUserType, Application, ApplicationVersion
from application.serializers.application import ApplicationSerializerModel
from common.auth.common import ChatUserToken, ChatAuthentication
from common.constants.authentication_type import AuthenticationType
from common.constants.cache_version import Cache_Version
from common.database_model_manage.database_model_manage import DatabaseModelManage
from common.exception.app_exception import NotFound404, AppUnauthorizedFailed
from common.utils.rsa_util import get_key_pair_by_sql
class AnonymousAuthenticationSerializer(serializers.Serializer):
access_token = serializers.CharField(required=True, label=_("access_token"))
def auth(self, request, with_valid=True):
token = request.META.get('HTTP_AUTHORIZATION')
token_details = {}
try:
# 校验token
if token is not None:
token_details = signing.loads(token[7:])
except Exception as e:
pass
if with_valid:
self.is_valid(raise_exception=True)
access_token = self.data.get("access_token")
application_access_token = QuerySet(ApplicationAccessToken).filter(access_token=access_token).first()
if application_access_token is not None and application_access_token.is_active:
chat_user_id = token_details.get('chat_user_id') or str(uuid.uuid7())
_type = AuthenticationType.CHAT_ANONYMOUS_USER
return ChatUserToken(application_access_token.application_id, None, access_token, _type,
ChatUserType.ANONYMOUS_USER,
chat_user_id, ChatAuthentication(None)).to_token()
else:
raise NotFound404(404, _("Invalid access_token"))
class AuthProfileSerializer(serializers.Serializer):
access_token = serializers.CharField(required=True, label=_("access_token"))
def profile(self):
self.is_valid(raise_exception=True)
access_token = self.data.get("access_token")
application_access_token = QuerySet(ApplicationAccessToken).filter(access_token=access_token).first()
if application_access_token is None:
raise NotFound404(404, _("Invalid access_token"))
if not application_access_token.is_active:
raise NotFound404(404, _("Invalid access_token"))
application_id = application_access_token.application_id
profile = {
'authentication': False
}
application_setting_model = DatabaseModelManage.get_model('application_setting')
chat_platform = DatabaseModelManage.get_model('chat_platform')
if application_setting_model and chat_platform:
application_setting = QuerySet(application_setting_model).filter(application_id=application_id).first()
types = QuerySet(chat_platform).filter(is_active=True, is_valid=True).values_list('auth_type', flat=True)
login_value = application_access_token.authentication_value.get('login_value', [])
max_attempts = application_access_token.authentication_value.get('max_attempts', 1)
final_login_value = list(set(login_value) & set(types))
if 'LOCAL' in login_value:
final_login_value.insert(0, 'LOCAL')
if application_setting is not None:
profile = {
'icon': application_setting.application.icon,
'application_name': application_setting.application.name,
'bg_icon': application_setting.chat_background,
'authentication': application_access_token.authentication,
'authentication_type': application_access_token.authentication_value.get(
'type', 'password'),
'max_attempts': max_attempts,
'login_value': final_login_value,
'rasKey' : get_key_pair_by_sql().get('key')
}
return profile
class ApplicationProfileSerializer(serializers.Serializer):
application_id = serializers.UUIDField(required=True, label=_("Application ID"))
@staticmethod
def reset_application(application, application_version):
update_field_dict = {
'application_name': 'name', 'desc': 'desc', 'prologue': 'prologue', 'dialogue_number': 'dialogue_number',
'user_id': 'user_id', 'model_id': 'model_id', 'knowledge_setting': 'knowledge_setting',
'model_setting': 'model_setting', 'model_params_setting': 'model_params_setting',
'tts_model_params_setting': 'tts_model_params_setting',
'problem_optimization': 'problem_optimization', 'work_flow': 'work_flow',
'problem_optimization_prompt': 'problem_optimization_prompt', 'tts_model_id': 'tts_model_id',
'stt_model_id': 'stt_model_id', 'tts_model_enable': 'tts_model_enable',
'stt_model_enable': 'stt_model_enable', 'tts_type': 'tts_type',
'tts_autoplay': 'tts_autoplay', 'stt_autosend': 'stt_autosend', 'file_upload_enable': 'file_upload_enable',
'file_upload_setting': 'file_upload_setting'
}
for (version_field, app_field) in update_field_dict.items():
_v = getattr(application_version, version_field)
setattr(application, app_field, _v)
def profile(self, with_valid=True):
if with_valid:
self.is_valid()
application_id = self.data.get("application_id")
application = QuerySet(Application).get(id=application_id)
application_access_token = QuerySet(ApplicationAccessToken).filter(application_id=application.id).first()
if application_access_token is None:
raise AppUnauthorizedFailed(500, _("Illegal User"))
application_setting_model = DatabaseModelManage.get_model('application_setting')
application_version = QuerySet(ApplicationVersion).filter(application_id=application.id).order_by(
'-create_time').first()
if application_version is not None:
self.reset_application(application, application_version)
license_is_valid = cache.get(Cache_Version.SYSTEM.get_key(key='license_is_valid'),
version=Cache_Version.SYSTEM.get_version())
application_setting_dict = {}
if application_setting_model is not None and license_is_valid:
application_setting = QuerySet(application_setting_model).filter(
application_id=application_access_token.application_id).first()
if application_setting is not None:
custom_theme = getattr(application_setting, 'custom_theme', {})
float_location = getattr(application_setting, 'float_location', {})
if not custom_theme:
application_setting.custom_theme = {
'theme_color': '',
'header_font_color': ''
}
if not float_location:
application_setting.float_location = {
'x': {'type': '', 'value': ''},
'y': {'type': '', 'value': ''}
}
application_setting_dict = {'show_source': application_access_token.show_source,
'show_history': application_setting.show_history,
'draggable': application_setting.draggable,
'show_guide': application_setting.show_guide,
'avatar': application_setting.avatar,
'show_avatar': application_setting.show_avatar,
'float_icon': application_setting.float_icon,
'disclaimer': application_setting.disclaimer,
'disclaimer_value': application_setting.disclaimer_value,
'custom_theme': application_setting.custom_theme,
'user_avatar': application_setting.user_avatar,
'show_user_avatar': application_setting.show_user_avatar,
'float_location': application_setting.float_location,
'chat_background': application_setting.chat_background}
base_node = [node for node in ((application.work_flow or {}).get('nodes', []) or []) if
node.get('id') == 'base-node']
return {**ApplicationSerializerModel(application).data,
'stt_model_id': application.stt_model_id,
'tts_model_id': application.tts_model_id,
'stt_model_enable': application.stt_model_enable,
'tts_model_enable': application.tts_model_enable,
'tts_type': application.tts_type,
'tts_autoplay': application.tts_autoplay,
'stt_autosend': application.stt_autosend,
'file_upload_enable': application.file_upload_enable,
'file_upload_setting': application.file_upload_setting,
'work_flow': {'nodes': base_node} if base_node else None,
'show_source': application_access_token.show_source,
'show_exec': application_access_token.show_exec,
'language': application_access_token.language,
**application_setting_dict}
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/serializers/chat_authentication.py",
"license": "GNU General Public License v3.0",
"lines": 164,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/chat/views/chat.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat.py
@date:2025/6/6 11:18
@desc:
"""
import requests
from django.http import HttpResponse, StreamingHttpResponse
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.views import APIView
from application.api.application_api import SpeechToTextAPI, TextToSpeechAPI
from application.models import ChatUserType, ChatSourceChoices
from chat.api.chat_api import ChatAPI
from chat.api.chat_authentication_api import ChatAuthenticationAPI, ChatAuthenticationProfileAPI, ChatOpenAPI, OpenAIAPI
from chat.serializers.chat import OpenChatSerializers, ChatSerializers, SpeechToTextSerializers, \
TextToSpeechSerializers, OpenAIChatSerializer
from chat.serializers.chat_authentication import AnonymousAuthenticationSerializer, ApplicationProfileSerializer, \
AuthProfileSerializer
from common.auth import ChatTokenAuth
from common.constants.permission_constants import ChatAuth
from common.exception.app_exception import AppAuthenticationFailed
from common.log.log import _get_ip_address
from common.result import result
from knowledge.models import FileSourceType
from oss.serializers.file import FileSerializer
from users.api import CaptchaAPI
from users.serializers.login import CaptchaSerializer
def stream_image(response):
"""生成器函数,用于流式传输图片数据"""
for chunk in response.iter_content(chunk_size=4096):
if chunk: # 过滤掉保持连接的空块
yield chunk
class ResourceProxy(APIView):
def get(self, request: Request):
image_url = request.query_params.get("url")
if not image_url:
return result.error("Missing 'url' parameter")
try:
# 发送GET请求,流式获取图片内容
response = requests.get(
image_url,
stream=True, # 启用流式响应
allow_redirects=True,
timeout=10
)
content_type = response.headers.get('Content-Type', '').split(';')[0]
# 创建Django流式响应
django_response = StreamingHttpResponse(
stream_image(response), # 使用生成器
content_type=content_type
)
return django_response
except Exception as e:
return result.error(f"Image request failed: {str(e)}")
class OpenAIView(APIView):
authentication_classes = [ChatTokenAuth]
@extend_schema(
methods=['POST'],
description=_('OpenAI Interface Dialogue'),
summary=_('OpenAI Interface Dialogue'),
operation_id=_('OpenAI Interface Dialogue'), # type: ignore
request=OpenAIAPI.get_request(),
responses=None,
tags=[_('Chat')] # type: ignore
)
def post(self, request: Request, application_id: str):
ip_address = _get_ip_address(request)
return OpenAIChatSerializer(data={'application_id': application_id, 'chat_user_id': request.auth.chat_user_id,
'chat_user_type': request.auth.chat_user_type,
'ip_address': ip_address,
'source': {"type": ChatSourceChoices.API_CALL.value}}).chat(request.data)
class AnonymousAuthentication(APIView):
def options(self, request, *args, **kwargs):
return HttpResponse(
headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Credentials": "true",
"Access-Control-Allow-Methods": "POST",
"Access-Control-Allow-Headers": "Origin,Content-Type,Cookie,Accept,Token"}, )
@extend_schema(
methods=['POST'],
description=_('Application Anonymous Certification'),
summary=_('Application Anonymous Certification'),
operation_id=_('Application Anonymous Certification'), # type: ignore
request=ChatAuthenticationAPI.get_request(),
responses=None,
tags=[_('Chat')] # type: ignore
)
def post(self, request: Request):
return result.success(
AnonymousAuthenticationSerializer(data={'access_token': request.data.get("access_token")}).auth(
request),
headers={"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Credentials": "true",
"Access-Control-Allow-Methods": "POST",
"Access-Control-Allow-Headers": "Origin,Content-Type,Cookie,Accept,Token"}
)
class ApplicationProfile(APIView):
authentication_classes = [ChatTokenAuth]
@extend_schema(
methods=['GET'],
description=_("Get application related information"),
summary=_("Get application related information"),
operation_id=_("Get application related information"), # type: ignore
request=None,
responses=None,
tags=[_('Chat')] # type: ignore
)
def get(self, request: Request):
if isinstance(request.auth, ChatAuth):
return result.success(ApplicationProfileSerializer(
data={'application_id': request.auth.application_id}).profile())
raise AppAuthenticationFailed(401, "身份异常")
class AuthProfile(APIView):
@extend_schema(
methods=['GET'],
description=_("Get application authentication information"),
summary=_("Get application authentication information"),
operation_id=_("Get application authentication information"), # type: ignore
parameters=ChatAuthenticationProfileAPI.get_parameters(),
responses=None,
tags=[_('Chat')] # type: ignore
)
def get(self, request: Request):
return result.success(
AuthProfileSerializer(data={'access_token': request.query_params.get("access_token")}).profile())
class ChatView(APIView):
authentication_classes = [ChatTokenAuth]
@extend_schema(
methods=['POST'],
description=_("dialogue"),
summary=_("dialogue"),
operation_id=_("dialogue"), # type: ignore
request=ChatAPI.get_request(),
parameters=ChatAPI.get_parameters(),
responses=None,
tags=[_('Chat')] # type: ignore
)
def post(self, request: Request, chat_id: str):
ip_address = _get_ip_address(request)
return ChatSerializers(data={'chat_id': chat_id,
'chat_user_id': request.auth.chat_user_id,
'chat_user_type': request.auth.chat_user_type,
'application_id': request.auth.application_id,
'debug': False,
'ip_address': ip_address,
'source': {
'type': ChatSourceChoices.API_CALL.value if request.auth.chat_user_type == ChatUserType.APPLICATION_API_KEY.value else ChatSourceChoices.ONLINE.value}
}
).chat(request.data)
class OpenView(APIView):
authentication_classes = [ChatTokenAuth]
@extend_schema(
methods=['GET'],
description=_("Get the session id according to the application id"),
summary=_("Get the session id according to the application id"),
operation_id=_("Get the session id according to the application id"), # type: ignore
parameters=ChatOpenAPI.get_parameters(),
responses=None,
tags=[_('Chat')] # type: ignore
)
def get(self, request: Request):
ip_address = _get_ip_address(request)
return result.success(OpenChatSerializers(
data={'application_id': request.auth.application_id,
'chat_user_id': request.auth.chat_user_id, 'chat_user_type': request.auth.chat_user_type,
'ip_address': ip_address,
'source': {
'type': ChatSourceChoices.API_CALL.value if request.auth.chat_user_type == ChatUserType.APPLICATION_API_KEY.value else ChatSourceChoices.ONLINE.value},
'debug': False}).open())
class CaptchaView(APIView):
@extend_schema(methods=['GET'],
summary=_("Get Chat captcha"),
description=_("Get Chat captcha"),
operation_id=_("Get Chat captcha"), # type: ignore
tags=[_("Chat")], # type: ignore
responses=CaptchaAPI.get_response())
def get(self, request: Request):
username = request.query_params.get('username', None)
accessToken = request.query_params.get('accessToken', None)
return result.success(CaptchaSerializer().chat_generate(username, 'chat', accessToken))
class SpeechToText(APIView):
authentication_classes = [ChatTokenAuth]
@extend_schema(
methods=['POST'],
description=_("speech to text"),
summary=_("speech to text"),
operation_id=_("speech to text"), # type: ignore
request=SpeechToTextAPI.get_request(),
responses=SpeechToTextAPI.get_response(),
tags=[_('Chat')] # type: ignore
)
def post(self, request: Request):
return result.success(
SpeechToTextSerializers(
data={'application_id': request.auth.application_id})
.speech_to_text({'file': request.FILES.get('file')}))
class TextToSpeech(APIView):
authentication_classes = [ChatTokenAuth]
@extend_schema(
methods=['POST'],
description=_("text to speech"),
summary=_("text to speech"),
operation_id=_("text to speech"), # type: ignore
request=TextToSpeechAPI.get_request(),
responses=TextToSpeechAPI.get_response(),
tags=[_('Chat')] # type: ignore
)
def post(self, request: Request):
byte_data = TextToSpeechSerializers(
data={'application_id': request.auth.application_id}).text_to_speech(request.data)
return HttpResponse(byte_data, status=200, headers={'Content-Type': 'audio/mp3',
'Content-Disposition': 'attachment; filename="abc.mp3"'})
class UploadFile(APIView):
authentication_classes = [ChatTokenAuth]
parser_classes = [MultiPartParser]
@extend_schema(
methods=['POST'],
description=_("Upload files"),
summary=_("Upload files"),
operation_id=_("Upload files"), # type: ignore
request=TextToSpeechAPI.get_request(),
responses=TextToSpeechAPI.get_response(),
tags=[_('Application')] # type: ignore
)
def post(self, request: Request, chat_id: str):
files = request.FILES.getlist('file')
file_ids = []
meta = {}
for file in files:
file_url = FileSerializer(
data={'file': file, 'meta': meta, 'source_id': chat_id, 'source_type': FileSourceType.CHAT, }).upload()
file_ids.append({'name': file.name, 'url': file_url, 'file_id': file_url.split('/')[-1]})
return result.success(file_ids)
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/views/chat.py",
"license": "GNU General Public License v3.0",
"lines": 236,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/common/auth/common.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: common.py
@date:2025/6/6 19:55
@desc:
"""
import hashlib
import json
import threading
from django.core import signing, cache
from common.constants.cache_version import Cache_Version
from common.utils.rsa_util import encrypt, decrypt
authentication_cache = cache.cache
lock = threading.Lock()
def _decrypt(authentication: str):
cache_key = hashlib.sha256(authentication.encode()).hexdigest()
result = authentication_cache.get(key=cache_key, version=Cache_Version.CHAT.value)
if result is None:
with lock:
result = authentication_cache.get(cache_key, version=Cache_Version.CHAT.value)
if result is None:
result = decrypt(authentication)
authentication_cache.set(cache_key, result, version=Cache_Version.CHAT.value, timeout=60 * 60 * 2)
return result
class ChatAuthentication:
def __init__(self, auth_type: str | None, **kwargs):
self.auth_type = auth_type
for k, v in kwargs.items():
self.__setattr__(k, v)
def to_dict(self):
return self.__dict__
def to_string(self):
value = json.dumps(self.to_dict())
authentication = encrypt(value)
cache_key = hashlib.sha256(authentication.encode()).hexdigest()
authentication_cache.set(cache_key, value, version=Cache_Version.CHAT.get_version(), timeout=60 * 60 * 2)
return authentication
@staticmethod
def new_instance(authentication: str):
auth = json.loads(_decrypt(authentication))
return ChatAuthentication(**auth)
class ChatUserToken:
def __init__(self, application_id, user_id, access_token, _type, chat_user_type, chat_user_id,
authentication: ChatAuthentication):
self.application_id = application_id
self.user_id = user_id
self.access_token = access_token
self.type = _type
self.chat_user_type = chat_user_type
self.chat_user_id = chat_user_id
self.authentication = authentication
def to_dict(self):
return {
'application_id': str(self.application_id),
'user_id': str(self.user_id),
'access_token': self.access_token,
'type': str(self.type.value),
'chat_user_type': str(self.chat_user_type),
'chat_user_id': str(self.chat_user_id),
'authentication': self.authentication.to_string()
}
def to_token(self):
return signing.dumps(self.to_dict())
@staticmethod
def new_instance(token_dict):
return ChatUserToken(token_dict.get('application_id'), token_dict.get('user_id'),
token_dict.get('access_token'), token_dict.get('type'), token_dict.get('chat_user_type'),
token_dict.get('chat_user_id'),
ChatAuthentication.new_instance(token_dict.get('authentication')))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/common/auth/common.py",
"license": "GNU General Public License v3.0",
"lines": 71,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/common/auth/handle/impl/chat_anonymous_user_token.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat_anonymous_user_token.py
@date:2025/6/6 15:08
@desc:
"""
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from application.models import ApplicationAccessToken
from common.auth.common import ChatUserToken
from common.auth.handle.auth_base_handle import AuthBaseHandle
from common.constants.authentication_type import AuthenticationType
from common.constants.permission_constants import RoleConstants, Permission, Group, Operate, ChatAuth
from common.database_model_manage.database_model_manage import DatabaseModelManage
from common.exception.app_exception import AppAuthenticationFailed
from maxkb.settings import edition
class ChatAnonymousUserToken(AuthBaseHandle):
def support(self, request, token: str, get_token_details):
token_details = get_token_details()
if token_details is None:
return False
return (
'application_id' in token_details and
'access_token' in token_details and
token_details.get('type') == AuthenticationType.CHAT_ANONYMOUS_USER.value)
def handle(self, request, token: str, get_token_details):
auth_details = get_token_details()
chat_user_token = ChatUserToken.new_instance(auth_details)
application_id = chat_user_token.application_id
access_token = chat_user_token.access_token
application_access_token = QuerySet(ApplicationAccessToken).filter(
application_id=application_id).first()
if application_access_token is None:
raise AppAuthenticationFailed(1002, _('Authentication information is incorrect'))
if not application_access_token.is_active:
raise AppAuthenticationFailed(1002, _('Authentication information is incorrect'))
if not application_access_token.access_token == access_token:
raise AppAuthenticationFailed(1002, _('Authentication information is incorrect'))
if application_access_token.authentication and ['PE', 'EE'].__contains__(edition):
if chat_user_token.authentication.auth_type != application_access_token.authentication_value.get('type',
''):
raise AppAuthenticationFailed(1002, _('Authentication information is incorrect'))
return None, ChatAuth(
current_role_list=[RoleConstants.CHAT_ANONYMOUS_USER],
permission_list=[
Permission(group=Group.APPLICATION,
operate=Operate.USE)],
application_id=application_access_token.application_id,
chat_user_id=chat_user_token.chat_user_id,
chat_user_type=chat_user_token.chat_user_type)
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/common/auth/handle/impl/chat_anonymous_user_token.py",
"license": "GNU General Public License v3.0",
"lines": 52,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/oss/urls.py | from django.urls import path
from . import views
app_name = 'oss'
urlpatterns = [
path('oss/file', views.FileView.as_view()),
path('oss/get_url/<str:application_id>', views.GetUrlView.as_view()),
]
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/oss/urls.py",
"license": "GNU General Public License v3.0",
"lines": 7,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/knowledge/views/common.py | from django.db.models import QuerySet
from knowledge.models import Document
def get_document_operation_object(document_id: str):
document_model = QuerySet(model=Document).filter(id=document_id).first()
if document_model is not None:
return {
"name": document_model.name,
"type": document_model.type,
}
return {}
def get_document_operation_object_batch(document_id_list: str):
document_model_list = QuerySet(model=Document).filter(id__in=document_id_list)
if document_model_list is not None:
return {
"name": f'[{",".join([document_model.name for document_model in document_model_list])}]',
'document_list': [{'name': document_model.name, 'type': document_model.type} for document_model in
document_model_list]
}
return {}
def get_knowledge_document_operation_object(knowledge_dict: dict, document_dict: dict):
return {
'name': f'{knowledge_dict.get("name", "")}/{document_dict.get("name", "")}',
'dataset_name': knowledge_dict.get("name", ""),
'dataset_desc': knowledge_dict.get("desc", ""),
'dataset_type': knowledge_dict.get("type", ""),
'document_name': document_dict.get("name", ""),
'document_type': document_dict.get("type", ""),
}
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/knowledge/views/common.py",
"license": "GNU General Public License v3.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/api/application_version.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_version.py
@date:2025/6/4 17:33
@desc:
"""
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from application.serializers.application_version import ApplicationVersionModelSerializer
from common.mixins.api_mixin import APIMixin
from common.result import ResultSerializer, PageDataResponse, ResultPageSerializer
class ApplicationListVersionResult(ResultSerializer):
def get_data(self):
return ApplicationVersionModelSerializer(many=True)
class ApplicationPageVersionResult(ResultPageSerializer):
def get_data(self):
return ApplicationVersionModelSerializer(many=True)
class ApplicationWorkflowVersionResult(ResultSerializer):
def get_data(self):
return ApplicationVersionModelSerializer()
class ApplicationVersionAPI(APIMixin):
@staticmethod
def get_parameters():
return [
OpenApiParameter(
name="workspace_id",
description="工作空间id",
type=OpenApiTypes.STR,
location='path',
required=True,
),
OpenApiParameter(
name="application_id",
description="application ID",
type=OpenApiTypes.STR,
location='path',
required=True,
)
]
class ApplicationVersionOperateAPI(APIMixin):
@staticmethod
def get_parameters():
return [
OpenApiParameter(
name="application_version_id",
description="工作流版本id",
type=OpenApiTypes.STR,
location='path',
required=True,
)
, *ApplicationVersionAPI.get_parameters()
]
@staticmethod
def get_response():
return ApplicationWorkflowVersionResult
class ApplicationVersionListAPI(APIMixin):
@staticmethod
def get_parameters():
return [
OpenApiParameter(
name="name",
description="Version Name",
type=OpenApiTypes.STR,
required=False,
)
, *ApplicationVersionAPI.get_parameters()]
@staticmethod
def get_response():
return ApplicationListVersionResult
class ApplicationVersionPageAPI(APIMixin):
@staticmethod
def get_parameters():
return ApplicationVersionListAPI.get_parameters()
@staticmethod
def get_response():
return ApplicationPageVersionResult
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/api/application_version.py",
"license": "GNU General Public License v3.0",
"lines": 78,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/api/system.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: system_setting.py
@date:2025/6/4 16:34
@desc:
"""
from common.mixins.api_mixin import APIMixin
from common.result import ResultSerializer
from system_manage.serializers.system import SystemProfileResponseSerializer
class SystemProfileResult(ResultSerializer):
def get_data(self):
return SystemProfileResponseSerializer()
class SystemProfileAPI(APIMixin):
@staticmethod
def get_response():
return SystemProfileResult
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/api/system.py",
"license": "GNU General Public License v3.0",
"lines": 18,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/serializers/system.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: system.py
@date:2025/6/4 16:01
@desc:
"""
import os
from django.db import models
from rest_framework import serializers
from django.core.cache import cache
from common.constants.cache_version import Cache_Version
from common.database_model_manage.database_model_manage import DatabaseModelManage
from common.utils.rsa_util import get_key_pair_by_sql
from maxkb import settings
from system_manage.models import SystemSetting
class SettingType(models.CharField):
# Community Edition
CE = "CE", "社区"
# Enterprise Edition
PE = "PE", "专业版"
# Professional Edition
EE = "EE", '企业版'
class SystemProfileResponseSerializer(serializers.Serializer):
version = serializers.CharField(required=True, label="version")
edition = serializers.CharField(required=True, label="edition")
license_is_valid = serializers.BooleanField(required=True, label="License is valid")
class SystemProfileSerializer(serializers.Serializer):
@staticmethod
def profile():
version = os.environ.get('MAXKB_VERSION')
license_is_valid = DatabaseModelManage.get_model('license_is_valid') or (lambda: False)
return {'version': version, 'edition': settings.edition,
'license_is_valid': license_is_valid() if license_is_valid() is not None else False,
'ras': get_key_pair_by_sql().get('key')}
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/serializers/system.py",
"license": "GNU General Public License v3.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/views/system_profile.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: system_profile.py
@date:2025/6/4 15:59
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from common import result
from system_manage.api.system import SystemProfileAPI
from system_manage.serializers.system import SystemProfileSerializer
class SystemProfile(APIView):
    """Endpoint exposing MaxKB version/edition/license information."""
    @extend_schema(
        methods=['GET'],
        description=_('Get MaxKB related information'),
        operation_id=_('Get MaxKB related information'),  # type: ignore
        responses=SystemProfileAPI.get_response(),
        tags=[_('System parameters')]  # type: ignore
    )
    def get(self, request: Request):
        # No authentication_classes are declared on this view, unlike most
        # views in the project — presumably a public probe; confirm intent.
        return result.success(SystemProfileSerializer.profile())
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/views/system_profile.py",
"license": "GNU General Public License v3.0",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/serializers/application_version.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_version.py
@date:2025/6/3 16:25
@desc:
"""
from typing import Dict
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from application.models import Application, ApplicationVersion
from common.db.search import page_search
from common.exception.app_exception import AppApiException
class ApplicationVersionQuerySerializer(serializers.Serializer):
    """Filter parameters for listing application workflow versions."""
    application_id = serializers.UUIDField(required=True, label=_("Application ID"))
    # Fuzzy-matched (contains) against the version name.
    name = serializers.CharField(required=False, allow_null=True, allow_blank=True,
                                 label=_("summary"))
class ApplicationVersionModelSerializer(serializers.ModelSerializer):
    """ORM-backed representation of one application workflow version."""
    class Meta:
        model = ApplicationVersion
        fields = ['id', 'name', 'workspace_id', 'application_id', 'work_flow', 'publish_user_id', 'publish_user_name',
                  'create_time',
                  'update_time']
class ApplicationVersionEditSerializer(serializers.Serializer):
    """Editable fields of a version — currently only the display name."""
    name = serializers.CharField(required=False, max_length=128, allow_null=True, allow_blank=True,
                                 label=_("Version Name"))
class ApplicationVersionSerializer(serializers.Serializer):
    """Serializers for listing, paging, reading and editing application
    workflow versions."""
    workspace_id = serializers.CharField(required=False, label=_("Workspace ID"))
    class Query(serializers.Serializer):
        """List/page versions of one application, optionally workspace-scoped."""
        workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
        def get_query_set(self, query):
            """Build the version queryset for *query* (requires
            'application_id'; optional fuzzy 'name' filter), newest first."""
            query_set = QuerySet(ApplicationVersion).filter(application_id=query.get('application_id'))
            if 'name' in query and query.get('name') is not None:
                query_set = query_set.filter(name__contains=query.get('name'))
            # Workspace scoping comes from the serializer's own data, not *query*.
            if 'workspace_id' in self.data and self.data.get('workspace_id') is not None:
                query_set = query_set.filter(workspace_id=self.data.get('workspace_id'))
            return query_set.order_by("-create_time")
        def list(self, query, with_valid=True):
            """Return every matching version, serialized."""
            if with_valid:
                self.is_valid(raise_exception=True)
                ApplicationVersionQuerySerializer(data=query).is_valid(raise_exception=True)
            query_set = self.get_query_set(query)
            return [ApplicationVersionModelSerializer(v).data for v in query_set]
        def page(self, query, current_page, page_size, with_valid=True):
            """Return one page of matching versions.

            NOTE(review): unlike list(), *query* itself is not re-validated
            here — presumably relying on the caller; confirm.
            """
            if with_valid:
                self.is_valid(raise_exception=True)
            return page_search(current_page, page_size,
                               self.get_query_set(query),
                               post_records_handler=lambda v: ApplicationVersionModelSerializer(v).data)
    class Operate(serializers.Serializer):
        """Read/edit a single version addressed by application + version id."""
        workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
        application_id = serializers.UUIDField(required=True, label=_("Application ID"))
        application_version_id = serializers.UUIDField(required=True,
                                                       label=_("Application version ID"))
        def is_valid(self, *, raise_exception=False):
            """Field validation plus existence check of the application
            (workspace-scoped when given). Always raises on failure,
            regardless of *raise_exception*."""
            super().is_valid(raise_exception=True)
            workspace_id = self.data.get('workspace_id')
            query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
            if workspace_id:
                query_set = query_set.filter(workspace_id=workspace_id)
            if not query_set.exists():
                raise AppApiException(500, _('Application id does not exist'))
        def one(self, with_valid=True):
            """Return the serialized version, or raise 500 if not found."""
            if with_valid:
                self.is_valid(raise_exception=True)
            application_version = QuerySet(ApplicationVersion).filter(application_id=self.data.get('application_id'),
                                                                      id=self.data.get(
                                                                          'application_version_id')).first()
            if application_version is not None:
                return ApplicationVersionModelSerializer(application_version).data
            else:
                raise AppApiException(500, _('Workflow version does not exist'))
        def edit(self, instance: Dict, with_valid=True):
            """Rename the version (empty/absent name is a no-op) and return it."""
            if with_valid:
                self.is_valid(raise_exception=True)
                ApplicationVersionEditSerializer(data=instance).is_valid(raise_exception=True)
            application_version = QuerySet(ApplicationVersion).filter(application_id=self.data.get('application_id'),
                                                                      id=self.data.get(
                                                                          'application_version_id')).first()
            if application_version is not None:
                name = instance.get('name', None)
                if name is not None and len(name) > 0:
                    application_version.name = name
                    application_version.save()
                return ApplicationVersionModelSerializer(application_version).data
            else:
                raise AppApiException(500, _('Workflow version does not exist'))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/serializers/application_version.py",
"license": "GNU General Public License v3.0",
"lines": 89,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/application/views/application_version.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application_version.py.py
@date:2025/6/3 15:46
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from application.api.application_version import ApplicationVersionListAPI, ApplicationVersionPageAPI, \
ApplicationVersionOperateAPI
from application.serializers.application_version import ApplicationVersionSerializer
from application.views import get_application_operation_object
from common import result
from common.auth import TokenAuth
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
from common.log.log import log
class ApplicationVersionView(APIView):
    """Endpoints for listing, paging, reading and renaming application
    workflow versions."""
    authentication_classes = [TokenAuth]
    @extend_schema(
        methods=['GET'],
        description=_("Get the application version list"),
        summary=_("Get the application version list"),
        operation_id=_("Get the application version list"),  # type: ignore
        parameters=ApplicationVersionListAPI.get_parameters(),
        responses=ApplicationVersionListAPI.get_response(),
        tags=[_('Application/Version')]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_READ.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id, application_id: str):
        """Unpaged version list, optionally filtered by ?name=."""
        return result.success(
            ApplicationVersionSerializer.Query(
                data={'workspace_id': workspace_id}).list(
                {'name': request.query_params.get("name"), 'application_id': application_id}))
    class Page(APIView):
        authentication_classes = [TokenAuth]
        @extend_schema(
            methods=['GET'],
            description=_("Get the list of application versions by page"),
            summary=_("Get the list of application versions by page"),
            operation_id=_("Get the list of application versions by page"),  # type: ignore
            parameters=ApplicationVersionPageAPI.get_parameters(),
            responses=ApplicationVersionPageAPI.get_response(),
            tags=[_('Application/Version')]  # type: ignore
        )
        @has_permissions(PermissionConstants.APPLICATION_READ.get_workspace_application_permission(),
                         PermissionConstants.APPLICATION_READ.get_workspace_permission_workspace_manage_role(),
                         ViewPermission([RoleConstants.USER.get_workspace_role()],
                                        [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                        CompareConstants.AND),
                         RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
        def get(self, request: Request, workspace_id: str, application_id: str, current_page: int, page_size: int):
            """One page of the version list, optionally filtered by ?name=."""
            return result.success(
                ApplicationVersionSerializer.Query(
                    data={'workspace_id': workspace_id}).page(
                    {'name': request.query_params.get("name"), 'application_id': application_id},
                    current_page, page_size))
    class Operate(APIView):
        authentication_classes = [TokenAuth]
        @extend_schema(
            methods=['GET'],
            description=_("Get application version details"),
            summary=_("Get application version details"),
            operation_id=_("Get application version details"),  # type: ignore
            parameters=ApplicationVersionOperateAPI.get_parameters(),
            responses=ApplicationVersionOperateAPI.get_response(),
            tags=[_('Application/Version')]  # type: ignore
        )
        @has_permissions(PermissionConstants.APPLICATION_EDIT.get_workspace_application_permission(),
                         PermissionConstants.APPLICATION_EDIT.get_workspace_permission_workspace_manage_role(),
                         ViewPermission([RoleConstants.USER.get_workspace_role()],
                                        [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                        CompareConstants.AND),
                         RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
        def get(self, request: Request, workspace_id: str, application_id: str, application_version_id: str):
            """Detail view of one version."""
            # Consistency fix: pass the user's id (as put() does) rather than
            # the User object; the serializer declares no user_id field, so
            # the value is ignored by validation either way.
            return result.success(
                ApplicationVersionSerializer.Operate(
                    data={'user_id': request.user.id, 'workspace_id': workspace_id,
                          'application_id': application_id, 'application_version_id': application_version_id}).one())
        @extend_schema(
            methods=['PUT'],
            description=_("Modify application version information"),
            summary=_("Modify application version information"),
            operation_id=_("Modify application version information"),  # type: ignore
            parameters=ApplicationVersionOperateAPI.get_parameters(),
            request=None,
            responses=ApplicationVersionOperateAPI.get_response(),
            tags=[_('Application/Version')]  # type: ignore
        )
        @has_permissions(PermissionConstants.APPLICATION_EDIT.get_workspace_application_permission(),
                         PermissionConstants.APPLICATION_EDIT.get_workspace_permission_workspace_manage_role(),
                         ViewPermission([RoleConstants.USER.get_workspace_role()],
                                        [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                        CompareConstants.AND),
                         RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
        @log(menu='Application', operate="Modify application version information",
             get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')),
             )
        def put(self, request: Request, workspace_id: str, application_id: str, application_version_id: str):
            """Rename one version (body handled by the Operate serializer)."""
            return result.success(
                ApplicationVersionSerializer.Operate(
                    data={'application_id': application_id, 'workspace_id': workspace_id,
                          'application_version_id': application_version_id,
                          'user_id': request.user.id}).edit(
                    request.data))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/views/application_version.py",
"license": "GNU General Public License v3.0",
"lines": 114,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/chat/api/chat_embed_api.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat_embed_api.py
@date:2025/5/30 15:25
@desc:
"""
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from common.mixins.api_mixin import APIMixin
from django.utils.translation import gettext_lazy as _
from common.result import DefaultResultSerializer
class ChatEmbedAPI(APIMixin):
    """OpenAPI declarations for the embedded-chat javascript endpoint."""

    @staticmethod
    def get_parameters():
        """Declare the optional string query parameters: host, protocol, token."""
        # Literal _() calls are kept per name so gettext extraction sees them;
        # the dict preserves declaration order.
        labels = {
            "host": _("host"),
            "protocol": _("protocol"),
            "token": _("token"),
        }
        return [
            OpenApiParameter(
                name=param_name,
                description=label,
                type=OpenApiTypes.STR,
                location='query',
                required=False,
            )
            for param_name, label in labels.items()
        ]

    @staticmethod
    def get_response():
        """Only the default result wrapper is declared (endpoint emits js)."""
        return DefaultResultSerializer
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/api/chat_embed_api.py",
"license": "GNU General Public License v3.0",
"lines": 42,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/chat/serializers/chat_embed_serializers.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat_embed_serializers.py
@date:2025/5/30 14:34
@desc:
"""
import os
import uuid_utils.compat as uuid
from django.db.models import QuerySet
from django.http import HttpResponse
from django.template import Template, Context
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from application.models import ApplicationAccessToken
from common.database_model_manage.database_model_manage import DatabaseModelManage
from maxkb.conf import PROJECT_DIR
from maxkb.const import CONFIG
class ChatEmbedSerializer(serializers.Serializer):
    """Render template/embed.js with per-application embed settings and
    return it as a javascript HTTP response."""
    host = serializers.CharField(required=True, label=_("Host"))
    protocol = serializers.CharField(required=True, label=_("protocol"))
    token = serializers.CharField(required=True, label=_("token"))
    def get_embed(self, with_valid=True, params=None):
        """Build the embed.js response.

        params: request query values forwarded into the widget URL for the
        application's declared api_input fields.
        """
        if params is None:
            params = {}
        if with_valid:
            self.is_valid(raise_exception=True)
        index_path = os.path.join(PROJECT_DIR, 'apps', "chat", 'template', 'embed.js')
        # Fix: use a context manager so the template handle is closed even if
        # read() raises (was open()/read()/close() with no try/finally).
        with open(index_path, "r", encoding='utf-8') as file:
            content = file.read()
        application_access_token = QuerySet(ApplicationAccessToken).filter(
            access_token=self.data.get('token')).first()
        is_draggable = 'false'
        show_guide = 'true'
        float_icon = f"{self.data.get('protocol')}://{self.data.get('host')}{CONFIG.get_chat_path()}/MaxKB.gif"
        is_license_valid = DatabaseModelManage.get_model('license_is_valid')
        X_PACK_LICENSE_IS_VALID = is_license_valid() if is_license_valid is not None else False
        # Collect the api_input query parameters declared by the application.
        # NOTE(review): application_access_token may be None for an unknown
        # token, which would raise here before the is_auth check below —
        # pre-existing behavior, left unchanged.
        query = self.get_query_api_input(application_access_token.application, params)
        # Defaults used when no application_setting row (or no valid license).
        float_location = {"x": {"type": "right", "value": 0}, "y": {"type": "bottom", "value": 30}}
        header_font_color = "rgb(100, 106, 115)"
        application_setting_model = DatabaseModelManage.get_model('application_setting')
        if application_setting_model is not None and X_PACK_LICENSE_IS_VALID:
            application_setting = QuerySet(application_setting_model).filter(
                application_id=application_access_token.application_id).first()
            if application_setting is not None:
                is_draggable = 'true' if application_setting.draggable else 'false'
                if application_setting.float_icon is not None and len(application_setting.float_icon) > 0:
                    # Strip a leading '.' from relative icon paths before prefixing.
                    float_icon = application_setting.float_icon[1:] if application_setting.float_icon.startswith(
                        '.') else application_setting.float_icon
                    float_icon = f"{self.data.get('protocol')}://{self.data.get('host')}{CONFIG.get_chat_path()}{float_icon}"
                show_guide = 'true' if application_setting.show_guide else 'false'
                if application_setting.float_location is not None:
                    float_location = application_setting.float_location
                if application_setting.custom_theme is not None and len(
                        application_setting.custom_theme.get('header_font_color', 'rgb(100, 106, 115)')) > 0:
                    header_font_color = application_setting.custom_theme.get('header_font_color',
                                                                             'rgb(100, 106, 115)')
        is_auth = 'true' if application_access_token is not None and application_access_token.is_active else 'false'
        t = Template(content)
        s = t.render(
            Context(
                {'is_auth': is_auth, 'protocol': self.data.get('protocol'), 'host': self.data.get('host'),
                 'token': self.data.get('token'),
                 'white_list_str': ",".join(
                     application_access_token.white_list if application_access_token.white_list is not None else []),
                 'white_active': 'true' if application_access_token.white_active else 'false',
                 'is_draggable': is_draggable,
                 'float_icon': float_icon,
                 'prefix': CONFIG.get_chat_path(),
                 'query': query,
                 'show_guide': show_guide,
                 'x_type': float_location.get('x', {}).get('type', 'right'),
                 'x_value': float_location.get('x', {}).get('value', 0),
                 'y_type': float_location.get('y', {}).get('type', 'bottom'),
                 'y_value': float_location.get('y', {}).get('value', 30),
                 'max_kb_id': str(uuid.uuid7()).replace('-', ''),
                 'header_font_color': header_font_color}))
        response = HttpResponse(s, status=200, headers={'Content-Type': 'text/javascript'})
        return response
    @staticmethod
    def get_query_api_input(application, params):
        """Build the '&var=value' query-string suffix from the application's
        base-node api_input fields that are present in *params*; 'asker' is
        always forwarded when present."""
        query = ''
        if application.work_flow is not None:
            work_flow = application.work_flow
            if work_flow is not None:
                for node in work_flow.get('nodes', []):
                    if node['id'] == 'base-node':
                        input_field_list = node.get('properties', {}).get('api_input_field_list',
                                                                          node.get('properties', {}).get(
                                                                              'input_field_list', []))
                        if input_field_list is not None:
                            for field in input_field_list:
                                if field['assignment_method'] == 'api_input' and field['variable'] in params:
                                    query += f"&{field['variable']}={params[field['variable']]}"
        if 'asker' in params:
            query += f"&asker={params.get('asker')}"
        return query
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/serializers/chat_embed_serializers.py",
"license": "GNU General Public License v3.0",
"lines": 100,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/chat/urls.py | from django.urls import path
from application.views import ChatRecordDetailView, ChatRecordLinkView
from chat.views.mcp import mcp_view
from . import views
app_name = 'chat'
# @formatter:off
# URL routes for the chat app; views are defined in chat.views.
urlpatterns = [
    # Widget embedding / MCP
    path('embed', views.ChatEmbedView.as_view()),
    path('mcp', mcp_view),
    # Authentication & profiles
    path('auth/anonymous', views.AnonymousAuthentication.as_view()),
    path('profile', views.AuthProfile.as_view()),
    path('application/profile', views.ApplicationProfile.as_view(), name='profile'),
    # Conversation
    path('chat_message/<str:chat_id>', views.ChatView.as_view(), name='chat'),
    path('open', views.OpenView.as_view(), name='open'),
    # Speech & captcha
    path('text_to_speech', views.TextToSpeech.as_view()),
    path('speech_to_text', views.SpeechToText.as_view()),
    path('captcha', views.CaptchaView.as_view(), name='captcha'),
    # OpenAI-compatible completion endpoint and voting
    path('<str:application_id>/chat/completions', views.OpenAIView.as_view(), name='application/chat_completions'),
    path('vote/chat/<str:chat_id>/chat_record/<str:chat_record_id>', views.VoteView.as_view(), name='vote'),
    # Conversation history
    path('historical_conversation', views.HistoricalConversationView.as_view(), name='historical_conversation'),
    path('historical_conversation/<str:chat_id>/record/<str:chat_record_id>',views.ChatRecordView.as_view(),name='conversation_details'),
    path('historical_conversation/<int:current_page>/<int:page_size>', views.HistoricalConversationView.PageView.as_view(), name='historical_conversation'),
    path('historical_conversation/clear',views.HistoricalConversationView.BatchDelete.as_view(), name='historical_conversation_clear'),
    path('historical_conversation/<str:chat_id>',views.HistoricalConversationView.Operate.as_view(), name='historical_conversation_operate'),
    path('historical_conversation_record/<str:chat_id>', views.HistoricalConversationRecordView.as_view(), name='historical_conversation_record'),
    path('historical_conversation_record/<str:chat_id>/<int:current_page>/<int:page_size>', views.HistoricalConversationRecordView.PageView.as_view(), name='historical_conversation_record'),
    # Sharing (views imported from the application app)
    path('share/<str:link>', ChatRecordDetailView.as_view()),
    path('<str:application_id>/chat/<str:chat_id>/share_chat', ChatRecordLinkView.as_view()),
]
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/urls.py",
"license": "GNU General Public License v3.0",
"lines": 29,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/chat/views/chat_embed.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: chat_embed.py
@date:2025/5/30 15:22
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from chat.api.chat_embed_api import ChatEmbedAPI
from chat.serializers.chat_embed_serializers import ChatEmbedSerializer
class ChatEmbedView(APIView):
    """Serve the embeddable chat widget javascript."""
    @extend_schema(
        methods=['GET'],
        description=_('Get embedded js'),
        summary=_('Get embedded js'),
        operation_id=_('Get embedded js'),  # type: ignore
        parameters=ChatEmbedAPI.get_parameters(),
        responses=ChatEmbedAPI.get_response(),
        tags=[_('Chat')]  # type: ignore
    )
    def get(self, request: Request):
        # Pull the three connection parameters out of the query string and
        # hand the full query dict through for api_input forwarding.
        query = request.query_params
        serializer_input = {
            'protocol': query.get('protocol'),
            'host': query.get('host'),
            'token': query.get('token'),
        }
        return ChatEmbedSerializer(data=serializer_input).get_embed(params=query)
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/chat/views/chat_embed.py",
"license": "GNU General Public License v3.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/serializers/application_folder.py | from rest_framework import serializers
from application.models import ApplicationFolder
class ApplicationFolderTreeSerializer(serializers.ModelSerializer):
    """Recursive folder representation: each folder embeds its children."""
    children = serializers.SerializerMethodField()
    class Meta:
        model = ApplicationFolder
        fields = ['id', 'name', 'desc', 'user_id', 'workspace_id', 'parent_id', 'children','create_time','update_time']
    def get_children(self, obj):
        # Recurses via the model's get_children() — presumably a tree helper
        # on ApplicationFolder; confirm it returns direct children only.
        return ApplicationFolderTreeSerializer(obj.get_children(), many=True).data
class ApplicationFolderFlatSerializer(serializers.ModelSerializer):
    """Non-recursive folder representation (no children, no timestamps)."""
    class Meta:
        model = ApplicationFolder
        fields = ['id', 'name', 'desc', 'user_id', 'workspace_id', 'parent_id']
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/serializers/application_folder.py",
"license": "GNU General Public License v3.0",
"lines": 13,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/api/application_api_key.py | from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from application.serializers.application_api_key import EditApplicationKeySerializer, ApplicationKeySerializerModel
from common.mixins.api_mixin import APIMixin
from common.result import ResultSerializer
class ApplicationKeyListResult(ResultSerializer):
    """Result wrapper whose data is a list of serialized API keys."""
    def get_data(self):
        return ApplicationKeySerializerModel(many=True)
class ApplicationKeyResult(ResultSerializer):
    """Result wrapper whose data is a single serialized API key."""
    def get_data(self):
        return ApplicationKeySerializerModel()
class ApplicationKeyAPI(APIMixin):
    """Shared OpenAPI declarations for the application API-key endpoints."""
    @staticmethod
    def get_parameters():
        # Path parameters common to every API-key route.
        return [
            OpenApiParameter(
                name="workspace_id",
                description="工作空间id",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            ),
            OpenApiParameter(
                name="application_id",
                description="application ID",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            )
        ]
    @staticmethod
    def get_response():
        return ApplicationKeyResult
    class List(APIMixin):
        # Listing endpoints return many keys.
        @staticmethod
        def get_response():
            return ApplicationKeyListResult
    class Operate(APIMixin):
        # Single-key endpoints additionally address the key by id in the path.
        @staticmethod
        def get_parameters():
            return [*ApplicationKeyAPI.get_parameters(), OpenApiParameter(
                name="api_key_id",
                description="ApiKeyId",
                type=OpenApiTypes.STR,
                location='path',
                required=True,
            )]
        @staticmethod
        def get_request():
            return EditApplicationKeySerializer
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/api/application_api_key.py",
"license": "GNU General Public License v3.0",
"lines": 50,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/serializers/application_api_key.py | import hashlib
import uuid_utils.compat as uuid
from django.db.models import QuerySet
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from application.models import Application
from application.models.application_api_key import ApplicationApiKey
from common.cache_data.application_api_key_cache import get_application_api_key, del_application_api_key
from common.db.search import page_search
from common.exception.app_exception import AppApiException
class ApplicationKeySerializerModel(serializers.ModelSerializer):
    """Full ORM-backed representation of an application API key."""
    class Meta:
        model = ApplicationApiKey
        fields = "__all__"
class EditApplicationKeySerializer(serializers.Serializer):
    """Editable attributes of an API key; all optional (partial update)."""
    is_active = serializers.BooleanField(required=False, label=_("Availability"))
    allow_cross_domain = serializers.BooleanField(required=False,
                                                  label=_("Is cross-domain allowed"))
    cross_domain_list = serializers.ListSerializer(required=False,
                                                   child=serializers.CharField(required=True,
                                                                               label=_("Cross-domain address")),
                                                   label=_("Cross-domain list"))
    is_permanent = serializers.BooleanField(required=False, label=_("Is permanent"))
    expire_time = serializers.DateTimeField(required=False, allow_null=True, label=_("Expiration time"))
class ApplicationKeySerializer(serializers.Serializer):
    """Create and page API keys for one application."""
    workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
    application_id = serializers.UUIDField(required=True, label=_('application id'))
    order_by = serializers.CharField(required=False, label=_('order by'), allow_null=True, allow_blank=True)
    def is_valid(self, *, raise_exception=False):
        """Field validation plus existence check of the application
        (workspace-scoped when given). Always raises on failure."""
        super().is_valid(raise_exception=True)
        workspace_id = self.data.get('workspace_id')
        query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
        if workspace_id:
            query_set = query_set.filter(workspace_id=workspace_id)
        if not query_set.exists():
            raise AppApiException(500, _('Application id does not exist'))
    def generate(self, with_valid=True):
        """Create and persist a new secret key owned by the application's
        user; returns the serialized key."""
        if with_valid:
            self.is_valid(raise_exception=True)
        application_id = self.data.get("application_id")
        application = QuerySet(Application).filter(id=application_id).first()
        # md5(uuid7) only needs uniqueness here, not cryptographic strength.
        secret_key = 'agent-' + hashlib.md5(str(uuid.uuid7()).encode()).hexdigest()
        application_api_key = ApplicationApiKey(id=uuid.uuid7(),
                                                secret_key=secret_key,
                                                user_id=application.user_id,
                                                application_id=application_id)
        application_api_key.save()
        return ApplicationKeySerializerModel(application_api_key).data
    def page(self, current_page: int, page_size: int, with_valid=True):
        """Page the application's keys, newest first unless order_by given."""
        if with_valid:
            self.is_valid(raise_exception=True)
        application_id = self.data.get("application_id")
        query_set = QuerySet(ApplicationApiKey).filter(application_id=application_id)
        order_by = '-create_time' if self.data.get('order_by') is None or self.data.get(
            'order_by') == '' else self.data.get('order_by')
        query_set = query_set.order_by(order_by)
        return page_search(current_page, page_size,
                           query_set,
                           post_records_handler=lambda u: ApplicationKeySerializerModel(u).data)
    class Operate(serializers.Serializer):
        """Delete/edit a single key addressed by application + key id."""
        workspace_id = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_("Workspace ID"))
        application_id = serializers.UUIDField(required=True, label=_('application id'))
        api_key_id = serializers.UUIDField(required=True, label=_('ApiKeyId'))
        def is_valid(self, *, raise_exception=False):
            """Field validation plus existence check of the application.
            Always raises on failure."""
            super().is_valid(raise_exception=True)
            workspace_id = self.data.get('workspace_id')
            query_set = QuerySet(Application).filter(id=self.data.get('application_id'))
            if workspace_id:
                query_set = query_set.filter(workspace_id=workspace_id)
            if not query_set.exists():
                raise AppApiException(500, _('Application id does not exist'))
        def delete(self, with_valid=True):
            """Delete the key and evict it from the auth cache."""
            if with_valid:
                self.is_valid(raise_exception=True)
            api_key_id = self.data.get("api_key_id")
            application_id = self.data.get('application_id')
            application_api_key = QuerySet(ApplicationApiKey).filter(id=api_key_id,
                                                                     application_id=application_id).first()
            # Fix: previously dereferenced a possibly-None lookup result;
            # fail the same way edit() does for an unknown key.
            if application_api_key is None:
                raise AppApiException(500, _('APIKey does not exist'))
            del_application_api_key(application_api_key.secret_key)
            application_api_key.delete()
        def edit(self, instance, with_valid=True):
            """Partially update the key's flags/expiry, then re-prime the
            auth cache entry. Returns True on success."""
            if with_valid:
                self.is_valid(raise_exception=True)
                EditApplicationKeySerializer(data=instance).is_valid(raise_exception=True)
            api_key_id = self.data.get("api_key_id")
            application_id = self.data.get('application_id')
            application_api_key = QuerySet(ApplicationApiKey).filter(id=api_key_id,
                                                                     application_id=application_id).first()
            if application_api_key is None:
                raise AppApiException(500, _('APIKey does not exist'))
            if 'is_active' in instance and instance.get('is_active') is not None:
                application_api_key.is_active = instance.get('is_active')
            if 'allow_cross_domain' in instance and instance.get('allow_cross_domain') is not None:
                application_api_key.allow_cross_domain = instance.get('allow_cross_domain')
            if 'cross_domain_list' in instance and instance.get('cross_domain_list') is not None:
                application_api_key.cross_domain_list = instance.get('cross_domain_list')
            if 'is_permanent' in instance and instance.get('is_permanent') is not None:
                application_api_key.is_permanent = instance.get('is_permanent')
                # Non-permanent keys take the supplied expiry; permanent keys
                # stamp "now" (expiry ignored while is_permanent holds).
                if not application_api_key.is_permanent:
                    application_api_key.expire_time = instance.get('expire_time')
                else:
                    application_api_key.expire_time = timezone.now()
            application_api_key.save()
            # Refresh the cached auth entry for this key.
            get_application_api_key('Bearer ' + application_api_key.secret_key, False)
            return True
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/serializers/application_api_key.py",
"license": "GNU General Public License v3.0",
"lines": 107,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/application/views/application_api_key.py | from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from application.api.application_api_key import ApplicationKeyAPI
from application.models import Application
from application.serializers.application_api_key import ApplicationKeySerializer
from common.auth import TokenAuth
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
from common.log.log import log
from common.result import result, DefaultResultSerializer
def get_application_operation_object(application_id):
    """Resolve the audit-log display object ({'name': ...}) for an application;
    empty dict when the application does not exist."""
    app = QuerySet(model=Application).filter(id=application_id).first()
    return {"name": app.name} if app is not None else {}
class ApplicationKey(APIView):
    """Create an API key for an application."""
    authentication_classes = [TokenAuth]
    @extend_schema(
        methods=['POST'],
        description=_('Create application ApiKey'),
        summary=_('Create application ApiKey'),
        operation_id=_('Create application ApiKey'),  # type: ignore
        parameters=ApplicationKeyAPI.get_parameters(),
        request=None,
        responses=ApplicationKeyAPI.get_response(),
        tags=[_('Application Api Key')]  # type: ignore
    )
    @log(menu='Application', operate="Add ApiKey",
         get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')),
         )
    @has_permissions(PermissionConstants.APPLICATION_OVERVIEW_API_KEY.get_workspace_application_permission(),
                     # NOTE(review): sibling views pair OVERVIEW_API_KEY with
                     # itself here; APPLICATION_READ may be intentional — confirm.
                     PermissionConstants.APPLICATION_READ.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role()
                     )
    def post(self, request: Request, workspace_id: str, application_id: str):
        # 'user_id' is passed but not declared on the serializer, so DRF
        # validation ignores it.
        return result.success(ApplicationKeySerializer(
            data={'application_id': application_id, 'user_id': request.user.id,
                  'workspace_id': workspace_id}).generate())
class Page(APIView):
    """Paged listing of an application's API keys."""
    authentication_classes = [TokenAuth]
    @extend_schema(
        methods=['GET'],
        description=_('GET application ApiKey List'),
        # Fix: summary/operation_id previously said "Create application
        # ApiKey List" on this GET endpoint (copy-paste from the POST view);
        # reuse the description's existing msgid.
        summary=_('GET application ApiKey List'),
        operation_id=_('GET application ApiKey List'),  # type: ignore
        parameters=ApplicationKeyAPI.get_parameters(),
        responses=ApplicationKeyAPI.List.get_response(),
        tags=[_('Application Api Key')]  # type: ignore
    )
    @has_permissions(PermissionConstants.APPLICATION_OVERVIEW_API_KEY.get_workspace_application_permission(),
                     PermissionConstants.APPLICATION_OVERVIEW_API_KEY.get_workspace_permission_workspace_manage_role(),
                     ViewPermission([RoleConstants.USER.get_workspace_role()],
                                    [PermissionConstants.APPLICATION.get_workspace_application_permission()],
                                    CompareConstants.AND),
                     RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, application_id: str, current_page: int, page_size: int):
        """One page of keys; ?order_by= overrides the default newest-first."""
        return result.success(ApplicationKeySerializer(
            data={'application_id': application_id,
                  'workspace_id': workspace_id,
                  'order_by': request.query_params.get('order_by')}).page(current_page, page_size))
class Operate(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['PUT'],
description=_('Modify application API_KEY'),
summary=_('Modify application API_KEY'),
operation_id=_('Modify application API_KEY'), # type: ignore
parameters=ApplicationKeyAPI.Operate.get_parameters(),
request=ApplicationKeyAPI.Operate.get_request(),
responses=DefaultResultSerializer,
tags=[_('Application Api Key')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_OVERVIEW_API_KEY.get_workspace_application_permission(),
PermissionConstants.APPLICATION_OVERVIEW_API_KEY.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate="Modify application API_KEY",
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')),
)
def put(self, request: Request, workspace_id: str, application_id: str, api_key_id: str):
return result.success(
ApplicationKeySerializer.Operate(
data={'workspace_id': workspace_id, 'application_id': application_id,
'api_key_id': api_key_id}).edit(
request.data))
@extend_schema(
methods=['DELETE'],
description=_('Delete Application API_KEY'),
summary=_('Delete Application API_KEY'),
operation_id=_('Delete Application API_KEY'), # type: ignore
parameters=ApplicationKeyAPI.Operate.get_parameters(),
request=ApplicationKeyAPI.Operate.get_request(),
responses=DefaultResultSerializer,
tags=[_('Application Api Key')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_OVERVIEW_API_KEY.get_workspace_application_permission(),
PermissionConstants.APPLICATION_OVERVIEW_API_KEY.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate="Delete application API_KEY",
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')),
)
def delete(self, request: Request, workspace_id: str, application_id: str, api_key_id: str):
return result.success(
ApplicationKeySerializer.Operate(
data={'workspace_id': workspace_id, 'application_id': application_id,
'api_key_id': api_key_id}).delete())
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/views/application_api_key.py",
"license": "GNU General Public License v3.0",
"lines": 119,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/api/application_api.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application.py
@date:2025/5/26 16:59
@desc:
"""
from django.utils.translation import gettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from rest_framework import serializers
from application.serializers.application import ApplicationCreateSerializer, ApplicationListResponse, \
ApplicationImportRequest, ApplicationEditSerializer, TextToSpeechRequest, SpeechToTextRequest, PlayDemoTextRequest
from common.mixins.api_mixin import APIMixin
from common.result import ResultSerializer, ResultPageSerializer, DefaultResultSerializer
class ApplicationCreateRequest(ApplicationCreateSerializer.SimplateRequest):
work_flow = serializers.DictField(required=True, label=_("Workflow Objects"))
class ApplicationCreateResponse(ResultSerializer):
def get_data(self):
return ApplicationCreateSerializer.ApplicationResponse()
class ApplicationListResult(ResultSerializer):
def get_data(self):
return ApplicationListResponse(many=True)
class ApplicationPageResult(ResultPageSerializer):
def get_data(self):
return ApplicationListResponse(many=True)
class ApplicationQueryAPI(APIMixin):
@staticmethod
def get_parameters():
return [
OpenApiParameter(
name="workspace_id",
description="工作空间id",
type=OpenApiTypes.STR,
location='path',
required=True,
),
OpenApiParameter(
name="current_page",
description=_("Current page"),
type=OpenApiTypes.INT,
location='path',
required=True,
),
OpenApiParameter(
name="page_size",
description=_("Page size"),
type=OpenApiTypes.INT,
location='path',
required=True,
),
OpenApiParameter(
name="folder_id",
description=_("folder id"),
type=OpenApiTypes.STR,
location='query',
required=False,
),
OpenApiParameter(
name="name",
description=_("Application Name"),
type=OpenApiTypes.STR,
location='query',
required=False,
),
OpenApiParameter(
name="desc",
description=_("Application Description"),
type=OpenApiTypes.STR,
location='query',
required=False,
),
OpenApiParameter(
name="user_id",
description=_("User ID"),
type=OpenApiTypes.STR,
location='query',
required=False,
),
OpenApiParameter(
name="publish_status",
description=_("Publish status") + '(published|unpublished)',
type=OpenApiTypes.STR,
location='query',
required=False,
)
]
@staticmethod
def get_response():
return ApplicationListResult
@staticmethod
def get_page_response():
return ApplicationPageResult
class ApplicationCreateAPI(APIMixin):
@staticmethod
def get_parameters():
return [
OpenApiParameter(
name="workspace_id",
description="工作空间id",
type=OpenApiTypes.STR,
location='path',
required=True,
)
]
@staticmethod
def get_request():
return ApplicationCreateRequest
@staticmethod
def get_response():
return ApplicationCreateResponse
class ApplicationImportAPI(APIMixin):
@staticmethod
def get_parameters():
ApplicationCreateAPI.get_parameters()
@staticmethod
def get_request():
return ApplicationImportRequest
class ApplicationOperateAPI(APIMixin):
@staticmethod
def get_parameters():
return [
OpenApiParameter(
name="workspace_id",
description="工作空间id",
type=OpenApiTypes.STR,
location='path',
required=True,
),
OpenApiParameter(
name="application_id",
description="应用id",
type=OpenApiTypes.STR,
location='path',
required=True,
)
]
class ApplicationExportAPI(APIMixin):
@staticmethod
def get_parameters():
return ApplicationOperateAPI.get_parameters()
@staticmethod
def get_response():
return DefaultResultSerializer
class ApplicationEditAPI(APIMixin):
@staticmethod
def get_request():
return ApplicationEditSerializer
class TextToSpeechAPI(APIMixin):
@staticmethod
def get_parameters():
return ApplicationOperateAPI.get_parameters()
@staticmethod
def get_request():
return TextToSpeechRequest
@staticmethod
def get_response():
return DefaultResultSerializer
class SpeechToTextAPI(APIMixin):
@staticmethod
def get_parameters():
return ApplicationOperateAPI.get_parameters()
@staticmethod
def get_request():
return SpeechToTextRequest
@staticmethod
def get_response():
return DefaultResultSerializer
class PlayDemoTextAPI(APIMixin):
@staticmethod
def get_parameters():
return ApplicationOperateAPI.get_parameters()
@staticmethod
def get_request():
return PlayDemoTextRequest
@staticmethod
def get_response():
return DefaultResultSerializer
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/api/application_api.py",
"license": "GNU General Public License v3.0",
"lines": 179,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/application/views/application.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: application.py
@date:2025/5/26 16:51
@desc:
"""
from django.db.models import QuerySet
from django.http import HttpResponse
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.parsers import MultiPartParser
from rest_framework.request import Request
from rest_framework.views import APIView
from application.api.application_api import ApplicationCreateAPI, ApplicationQueryAPI, ApplicationImportAPI, \
ApplicationExportAPI, ApplicationOperateAPI, ApplicationEditAPI, TextToSpeechAPI, SpeechToTextAPI, PlayDemoTextAPI
from application.models import Application
from application.serializers.application import ApplicationSerializer, Query, ApplicationOperateSerializer
from common import result
from common.auth import TokenAuth
from common.auth.authentication import has_permissions, get_is_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
from common.log.log import log
from tools.api.tool import GetInternalToolAPI
def get_application_operation_object(application_id):
application_model = QuerySet(model=Application).filter(id=application_id).first()
if application_model is not None:
return {
'name': application_model.name
}
return {}
class ApplicationAPI(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['POST'],
description=_('Create an application'),
summary=_('Create an application'),
operation_id=_('Create an application'), # type: ignore
parameters=ApplicationCreateAPI.get_parameters(),
request=ApplicationCreateAPI.get_request(),
responses=ApplicationCreateAPI.get_response(),
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_CREATE.get_workspace_permission(),
RoleConstants.USER.get_workspace_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate='Create an application',
get_operation_object=lambda r, k: {'name': r.data.get('name')},
)
def post(self, request: Request, workspace_id: str):
return result.success(
ApplicationSerializer(data={'workspace_id': workspace_id, 'user_id': request.user.id}).insert(request.data))
@extend_schema(
methods=['GET'],
description=_('Get the application list'),
summary=_('Get the application list'),
operation_id=_('Get the application list'), # type: ignore
parameters=ApplicationQueryAPI.get_parameters(),
responses=ApplicationQueryAPI.get_response(),
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_READ.get_workspace_permission(),
RoleConstants.USER.get_workspace_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
def get(self, request: Request, workspace_id: str):
return result.success(
Query(data={'workspace_id': workspace_id, 'user_id': request.user.id}).list(request.query_params))
class Page(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['GET'],
description=_('Get the application list by page'),
summary=_('Get the application list by page'),
operation_id=_('Get the application list by page'), # type: ignore
parameters=ApplicationQueryAPI.get_parameters(),
responses=ApplicationQueryAPI.get_page_response(),
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_READ.get_workspace_permission(),
RoleConstants.USER.get_workspace_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
def get(self, request: Request, workspace_id: str, current_page: int, page_size: int):
return result.success(
Query(data={'workspace_id': workspace_id, 'user_id': request.user.id}).page(current_page, page_size,
request.query_params))
class Import(APIView):
authentication_classes = [TokenAuth]
parser_classes = [MultiPartParser]
@extend_schema(
methods=['POST'],
description=_('Import Application'),
summary=_('Import Application'),
operation_id=_('Import Application'), # type: ignore
parameters=ApplicationImportAPI.get_parameters(),
request=ApplicationImportAPI.get_request(),
responses=result.DefaultResultSerializer,
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_IMPORT.get_workspace_permission(),
RoleConstants.USER.get_workspace_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate="Import Application", )
def post(self, request: Request, workspace_id: str, folder_id: str):
is_import_tool = get_is_permissions(request, workspace_id=workspace_id, folder_id=folder_id)(
PermissionConstants.TOOL_IMPORT.get_workspace_permission(),
PermissionConstants.TOOL_IMPORT.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(), RoleConstants.USER.get_workspace_role()
)
return result.success(ApplicationSerializer(
data={'user_id': request.user.id, 'workspace_id': workspace_id,
}).import_({'file': request.FILES.get('file'), 'folder_id': folder_id}, is_import_tool))
class Export(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['POST'],
description=_('Export application'),
summary=_('Export application'),
operation_id=_('Export application'), # type: ignore
parameters=ApplicationExportAPI.get_parameters(),
request=None,
responses=ApplicationExportAPI.get_response(),
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_EXPORT.get_workspace_application_permission(),
PermissionConstants.APPLICATION_EXPORT.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate="Export Application",
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')),
)
def get(self, request: Request, workspace_id: str, application_id: str):
return ApplicationOperateSerializer(
data={'application_id': application_id,
'workspace_id': workspace_id,
'user_id': request.user.id}).export()
class Operate(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['DELETE'],
description=_('Deleting application'),
summary=_('Deleting application'),
operation_id=_('Deleting application'), # type: ignore
parameters=ApplicationOperateAPI.get_parameters(),
responses=result.DefaultResultSerializer,
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_DELETE.get_workspace_application_permission(),
PermissionConstants.APPLICATION_DELETE.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate='Deleting application',
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')),
)
def delete(self, request: Request, workspace_id: str, application_id: str):
return result.success(ApplicationOperateSerializer(
data={'application_id': application_id, 'user_id': request.user.id,
'workspace_id': workspace_id, }).delete(
with_valid=True))
@extend_schema(
methods=['PUT'],
description=_('Modify the application'),
summary=_('Modify the application'),
operation_id=_('Modify the application'), # type: ignore
parameters=ApplicationOperateAPI.get_parameters(),
request=ApplicationEditAPI.get_request(),
responses=ApplicationCreateAPI.get_response(),
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_EDIT.get_workspace_application_permission(),
PermissionConstants.APPLICATION_EDIT.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate="Modify the application",
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')),
)
def put(self, request: Request, workspace_id: str, application_id: str):
return result.success(
ApplicationOperateSerializer(
data={'application_id': application_id, 'user_id': request.user.id,
'workspace_id': workspace_id, }).edit(
request.data))
@extend_schema(
methods=['GET'],
description=_('Get application details'),
summary=_('Get application details'),
operation_id=_('Get application details'), # type: ignore
parameters=ApplicationOperateAPI.get_parameters(),
request=ApplicationEditAPI.get_request(),
responses=result.DefaultResultSerializer,
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_READ.get_workspace_application_permission(),
PermissionConstants.APPLICATION_READ.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
def get(self, request: Request, workspace_id: str, application_id: str):
return result.success(ApplicationOperateSerializer(
data={'application_id': application_id, 'user_id': request.user.id,
'workspace_id': workspace_id, }).one())
class Publish(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['PUT'],
description=_("Publishing an application"),
summary=_("Publishing an application"),
operation_id=_("Publishing an application"), # type: ignore
parameters=ApplicationOperateAPI.get_parameters(),
request=None,
responses=result.DefaultResultSerializer,
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_EDIT.get_workspace_application_permission(),
PermissionConstants.APPLICATION_EDIT.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate='Publishing an application',
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')))
def put(self, request: Request, workspace_id: str, application_id: str):
return result.success(
ApplicationOperateSerializer(
data={'application_id': application_id, 'user_id': request.user.id,
'workspace_id': workspace_id, }).publish(request.data))
class StoreApplication(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['GET'],
description=_("Get Appstore apps"),
summary=_("Get Appstore apps"),
operation_id=_("Get Appstore apps"), # type: ignore
responses=GetInternalToolAPI.get_response(),
tags=[_("Application")] # type: ignore
)
def get(self, request: Request):
return result.success(ApplicationSerializer.StoreApplication(data={
'user_id': request.user.id,
'name': request.query_params.get('name', ''),
}).get_appstore_templates())
class McpServers(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['GET'],
description=_("speech to text"),
summary=_("speech to text"),
operation_id=_("speech to text"), # type: ignore
parameters=SpeechToTextAPI.get_parameters(),
request=SpeechToTextAPI.get_request(),
responses=SpeechToTextAPI.get_response(),
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_READ.get_workspace_application_permission(),
PermissionConstants.APPLICATION_READ.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
def post(self, request: Request, workspace_id, application_id: str):
return result.success(ApplicationOperateSerializer(
data={'mcp_servers': request.query_params.get('mcp_servers'), 'workspace_id': workspace_id,
'user_id': request.user.id,
'application_id': application_id}).get_mcp_servers(request.data))
class SpeechToText(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['POST'],
description=_("speech to text"),
summary=_("speech to text"),
operation_id=_("speech to text"), # type: ignore
parameters=SpeechToTextAPI.get_parameters(),
request=SpeechToTextAPI.get_request(),
responses=SpeechToTextAPI.get_response(),
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_EDIT.get_workspace_application_permission(),
PermissionConstants.APPLICATION_EDIT.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
def post(self, request: Request, workspace_id: str, application_id: str):
return result.success(
ApplicationOperateSerializer(
data={'application_id': application_id, 'workspace_id': workspace_id, 'user_id': request.user.id})
.speech_to_text({'file': request.FILES.get('file')}))
class TextToSpeech(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['POST'],
description=_("text to speech"),
summary=_("text to speech"),
operation_id=_("text to speech"), # type: ignore
parameters=TextToSpeechAPI.get_parameters(),
request=TextToSpeechAPI.get_request(),
responses=TextToSpeechAPI.get_response(),
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_EDIT.get_workspace_application_permission(),
PermissionConstants.APPLICATION_EDIT.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
def post(self, request: Request, workspace_id: str, application_id: str):
byte_data = ApplicationOperateSerializer(
data={'application_id': application_id, 'workspace_id': workspace_id,
'user_id': request.user.id}).text_to_speech(request.data)
return HttpResponse(byte_data, status=200, headers={'Content-Type': 'audio/mp3',
'Content-Disposition': 'attachment; filename="abc.mp3"'})
class PlayDemoText(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['POST'],
description=_("PlayDemo"),
summary=_("PlayDemo"),
operation_id=_("PlayDemo"), # type: ignore
parameters=PlayDemoTextAPI.get_parameters(),
request=PlayDemoTextAPI.get_request(),
responses=PlayDemoTextAPI.get_response(),
tags=[_('Application')] # type: ignore
)
@has_permissions(PermissionConstants.APPLICATION_EDIT.get_workspace_application_permission(),
PermissionConstants.APPLICATION_EDIT.get_workspace_permission_workspace_manage_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.APPLICATION.get_workspace_application_permission()],
CompareConstants.AND),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
@log(menu='Application', operate="trial listening",
get_operation_object=lambda r, k: get_application_operation_object(k.get('application_id')))
def post(self, request: Request, workspace_id: str, application_id: str):
byte_data = ApplicationOperateSerializer(
data={'application_id': application_id, 'workspace_id': workspace_id,
'user_id': request.user.id}).play_demo_text(request.data)
return HttpResponse(byte_data, status=200, headers={'Content-Type': 'audio/mp3',
'Content-Disposition': 'attachment; filename="abc.mp3"'})
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/application/views/application.py",
"license": "GNU General Public License v3.0",
"lines": 344,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/knowledge/task/generate.py | import traceback
from celery_once import QueueOnce
from django.db.models import QuerySet
from django.db.models.functions import Reverse, Substr
from django.utils.translation import gettext_lazy as _
from langchain_core.messages import HumanMessage
from common.config.embedding_config import ModelManage
from common.event.listener_manage import ListenerManagement
from common.utils.logger import maxkb_logger
from common.utils.page_utils import page, page_desc
from knowledge.models import Paragraph, Document, Status, TaskType, State
from knowledge.task.handler import save_problem
from models_provider.models import Model
from models_provider.tools import get_model
from ops import celery_app
def get_llm_model(model_id, model_params_setting=None):
model = QuerySet(Model).filter(id=model_id).first()
return ModelManage.get_model(model_id, lambda _id: get_model(model, **(model_params_setting or {})))
def generate_problem_by_paragraph(paragraph, llm_model, prompt):
try:
ListenerManagement.update_status(QuerySet(Paragraph).filter(id=paragraph.id), TaskType.GENERATE_PROBLEM,
State.STARTED)
res = llm_model.invoke(
[HumanMessage(content=prompt.replace('{data}', paragraph.content).replace('{title}', paragraph.title))])
if (res.content is None) or (len(res.content) == 0):
return
problems = res.content.split('\n')
for problem in problems:
save_problem(paragraph.knowledge_id, paragraph.document_id, paragraph.id, problem)
ListenerManagement.update_status(QuerySet(Paragraph).filter(id=paragraph.id), TaskType.GENERATE_PROBLEM,
State.SUCCESS)
except Exception as e:
ListenerManagement.update_status(QuerySet(Paragraph).filter(id=paragraph.id), TaskType.GENERATE_PROBLEM,
State.FAILURE)
def get_generate_problem(llm_model, prompt, post_apply=lambda: None, is_the_task_interrupted=lambda: False):
def generate_problem(paragraph_list):
for paragraph in paragraph_list:
if is_the_task_interrupted():
return
generate_problem_by_paragraph(paragraph, llm_model, prompt)
post_apply()
return generate_problem
def get_is_the_task_interrupted(document_id):
def is_the_task_interrupted():
document = QuerySet(Document).filter(id=document_id).first()
if document is None or Status(document.status)[TaskType.GENERATE_PROBLEM] == State.REVOKE:
return True
return False
return is_the_task_interrupted
@celery_app.task(base=QueueOnce, once={'keys': ['knowledge_id']},
name='celery:generate_related_by_knowledge')
def generate_related_by_knowledge_id(knowledge_id, model_id, model_params_setting, prompt, state_list=None):
document_list = QuerySet(Document).filter(knowledge_id=knowledge_id)
for document in document_list:
try:
generate_related_by_document_id.delay(document.id, model_id, model_params_setting, prompt, state_list)
except Exception as e:
pass
@celery_app.task(base=QueueOnce, once={'keys': ['document_id']},
name='celery:generate_related_by_document')
def generate_related_by_document_id(document_id, model_id, model_params_setting, prompt, state_list=None):
if state_list is None:
state_list = [State.PENDING.value, State.STARTED.value, State.SUCCESS.value, State.FAILURE.value,
State.REVOKE.value,
State.REVOKED.value, State.IGNORED.value]
try:
is_the_task_interrupted = get_is_the_task_interrupted(document_id)
if is_the_task_interrupted():
return
ListenerManagement.update_status(QuerySet(Document).filter(id=document_id),
TaskType.GENERATE_PROBLEM,
State.STARTED)
llm_model = get_llm_model(model_id, model_params_setting)
# 生成问题函数
generate_problem = get_generate_problem(llm_model, prompt,
ListenerManagement.get_aggregation_document_status(
document_id), is_the_task_interrupted)
query_set = QuerySet(Paragraph).annotate(
reversed_status=Reverse('status'),
task_type_status=Substr('reversed_status', TaskType.GENERATE_PROBLEM.value,
1),
).filter(task_type_status__in=state_list, document_id=document_id)
page_desc(query_set, 10, generate_problem, is_the_task_interrupted)
except Exception as e:
maxkb_logger.error(f'根据文档生成问题:{document_id}出现错误{str(e)}{traceback.format_exc()}')
maxkb_logger.error(_('Generate issue based on document: {document_id} error {error}{traceback}').format(
document_id=document_id, error=str(e), traceback=traceback.format_exc()))
finally:
ListenerManagement.post_update_document_status(document_id, TaskType.GENERATE_PROBLEM)
maxkb_logger.info(_('End--->Generate problem: {document_id}').format(document_id=document_id))
@celery_app.task(base=QueueOnce, once={'keys': ['paragraph_id_list']},
name='celery:generate_related_by_paragraph_list')
def generate_related_by_paragraph_id_list(document_id, paragraph_id_list, model_id, model_params_setting, prompt):
try:
is_the_task_interrupted = get_is_the_task_interrupted(document_id)
if is_the_task_interrupted():
ListenerManagement.update_status(QuerySet(Document).filter(id=document_id),
TaskType.GENERATE_PROBLEM,
State.REVOKED)
return
ListenerManagement.update_status(QuerySet(Document).filter(id=document_id),
TaskType.GENERATE_PROBLEM,
State.STARTED)
llm_model = get_llm_model(model_id, model_params_setting)
# 生成问题函数
generate_problem = get_generate_problem(llm_model, prompt, ListenerManagement.get_aggregation_document_status(
document_id))
def is_the_task_interrupted():
document = QuerySet(Document).filter(id=document_id).first()
if document is None or Status(document.status)[TaskType.GENERATE_PROBLEM] == State.REVOKE:
return True
return False
page(QuerySet(Paragraph).filter(id__in=paragraph_id_list), 10, generate_problem, is_the_task_interrupted)
finally:
ListenerManagement.post_update_document_status(document_id, TaskType.GENERATE_PROBLEM)
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/knowledge/task/generate.py",
"license": "GNU General Public License v3.0",
"lines": 115,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/models_provider/serializers/model_apply_serializers.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎
@file: model_apply_serializers.py
@date:2024/8/20 20:39
@desc:
"""
from django.db import connection
from django.db.models import QuerySet
from langchain_core.documents import Document
from rest_framework import serializers
from common.config.embedding_config import ModelManage
from django.utils.translation import gettext_lazy as _
from models_provider.models import Model
from models_provider.tools import get_model
def get_embedding_model(model_id):
model = QuerySet(Model).filter(id=model_id).first()
# 手动关闭数据库连接
connection.close()
embedding_model = ModelManage.get_model(model_id,
lambda _id: get_model(model, use_local=True))
return embedding_model
class EmbedDocuments(serializers.Serializer):
texts = serializers.ListField(required=True, child=serializers.CharField(required=True,
label=_('vector text')),
label=_('vector text list')),
class EmbedQuery(serializers.Serializer):
text = serializers.CharField(required=True, label=_('vector text'))
class CompressDocument(serializers.Serializer):
page_content = serializers.CharField(required=True, label=_('text'))
metadata = serializers.DictField(required=False, label=_('metadata'))
class CompressDocuments(serializers.Serializer):
documents = CompressDocument(required=True, many=True)
query = serializers.CharField(required=True, label=_('query'))
class ModelApplySerializers(serializers.Serializer):
model_id = serializers.UUIDField(required=True, label=_('model id'))
def embed_documents(self, instance, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
EmbedDocuments(data=instance).is_valid(raise_exception=True)
model = get_embedding_model(self.data.get('model_id'))
return model.embed_documents(instance.getlist('texts'))
def embed_query(self, instance, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
EmbedQuery(data=instance).is_valid(raise_exception=True)
model = get_embedding_model(self.data.get('model_id'))
return model.embed_query(instance.get('text'))
def compress_documents(self, instance, with_valid=True):
if with_valid:
self.is_valid(raise_exception=True)
CompressDocuments(data=instance).is_valid(raise_exception=True)
model = get_embedding_model(self.data.get('model_id'))
return [{'page_content': d.page_content, 'metadata': d.metadata} for d in model.compress_documents(
[Document(page_content=document.get('page_content'), metadata=document.get('metadata')) for document in
instance.get('documents')], instance.get('query'))]
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/models_provider/serializers/model_apply_serializers.py",
"license": "GNU General Public License v3.0",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/models_provider/views/model_apply.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎
@file: model_apply.py
@date:2024/8/20 20:38
@desc:
"""
from urllib.request import Request
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.views import APIView
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants
from common.result import result
from models_provider.api.model import DefaultModelResponse
from models_provider.serializers.model_apply_serializers import ModelApplySerializers
class ModelApply(APIView):
class EmbedDocuments(APIView):
@extend_schema(methods=['POST'],
summary=_('Vectorization documentation'),
description=_('Vectorization documentation'),
operation_id=_('Vectorization documentation'), # type: ignore
responses=DefaultModelResponse.get_response(),
tags=[_('Model')] # type: ignore
)
def post(self, request: Request, model_id):
return result.success(
ModelApplySerializers(data={'model_id': model_id}).embed_documents(request.data))
class EmbedQuery(APIView):
@extend_schema(methods=['POST'],
summary=_('Vectorization documentation'),
description=_('Vectorization documentation'),
operation_id=_('Vectorization documentation'), # type: ignore
responses=DefaultModelResponse.get_response(),
tags=[_('Model')] # type: ignore
)
def post(self, request: Request, model_id):
return result.success(
ModelApplySerializers(data={'model_id': model_id}).embed_query(request.data))
class CompressDocuments(APIView):
@extend_schema(methods=['POST'],
summary=_('Reorder documents'),
description=_('Reorder documents'),
operation_id=_('Reorder documents'), # type: ignore
responses=DefaultModelResponse.get_response(),
tags=[_('Model')] # type: ignore
)
def post(self, request: Request, model_id):
return result.success(
ModelApplySerializers(data={'model_id': model_id}).compress_documents(request.data))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/models_provider/views/model_apply.py",
"license": "GNU General Public License v3.0",
"lines": 51,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/api/email_setting.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: workspace_user_resource_permission.py
@date:2025/4/28 18:13
@desc:
"""
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import OpenApiParameter
from common.mixins.api_mixin import APIMixin
from common.result import ResultSerializer
from system_manage.serializers.email_setting import EmailSettingSerializer
from system_manage.serializers.user_resource_permission import UserResourcePermissionResponse, \
UpdateUserResourcePermissionRequest
class EmailResponse(ResultSerializer):
def get_data(self):
return EmailSettingSerializer.Create()
class EmailSettingAPI(APIMixin):
@staticmethod
def get_request():
return EmailSettingSerializer.Create()
@staticmethod
def get_response():
return EmailResponse
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/api/email_setting.py",
"license": "GNU General Public License v3.0",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/serializers/email_setting.py | # coding=utf-8
"""
@project: maxkb
@Author:虎
@file: system_setting.py
@date:2024/3/19 16:29
@desc:
"""
import logging
from django.core.mail.backends.smtp import EmailBackend
from django.db.models import QuerySet
from rest_framework import serializers
from common.exception.app_exception import AppApiException
from django.utils.translation import gettext_lazy as _
from common.utils.logger import maxkb_logger
from system_manage.models import SystemSetting, SettingType
class EmailSettingSerializer(serializers.Serializer):
@staticmethod
def one():
system_setting = QuerySet(SystemSetting).filter(type=SettingType.EMAIL.value).first()
if system_setting is None:
return {}
return system_setting.meta
class Create(serializers.Serializer):
email_host = serializers.CharField(required=True, label=_('SMTP host'))
email_port = serializers.IntegerField(required=True, label=_('SMTP port'))
email_host_user = serializers.CharField(required=True, label=_('Sender\'s email'))
email_host_password = serializers.CharField(required=True, label=_('Password'))
email_use_tls = serializers.BooleanField(required=True, label=_('Whether to enable TLS'))
email_use_ssl = serializers.BooleanField(required=True, label=_('Whether to enable SSL'))
from_email = serializers.EmailField(required=True, label=_('Sender\'s email'))
def is_valid(self, *, raise_exception=False):
super().is_valid(raise_exception=True)
try:
EmailBackend(self.data.get("email_host"),
self.data.get("email_port"),
self.data.get("email_host_user"),
self.data.get("email_host_password"),
self.data.get("email_use_tls"),
False,
self.data.get("email_use_ssl")
).open()
except Exception as e:
maxkb_logger.error(f'Exception: {e}')
raise AppApiException(1004, _('Email verification failed'))
def update_or_save(self):
self.is_valid(raise_exception=True)
system_setting = QuerySet(SystemSetting).filter(type=SettingType.EMAIL.value).first()
if system_setting is None:
system_setting = SystemSetting(type=SettingType.EMAIL.value)
system_setting.meta = self.to_email_meta()
system_setting.save()
return system_setting.meta
def to_email_meta(self):
return {'email_host': self.data.get('email_host'),
'email_port': self.data.get('email_port'),
'email_host_user': self.data.get('email_host_user'),
'email_host_password': self.data.get('email_host_password'),
'email_use_tls': self.data.get('email_use_tls'),
'email_use_ssl': self.data.get('email_use_ssl'),
'from_email': self.data.get('from_email')
}
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/serializers/email_setting.py",
"license": "GNU General Public License v3.0",
"lines": 62,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/views/email_setting.py | # coding=utf-8
"""
@project: maxkb
@Author:虎
@file: system_setting.py
@date:2024/3/19 16:01
@desc:
"""
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from common.auth import TokenAuth
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants
from django.utils.translation import gettext_lazy as _
from common.log.log import log
from common.result import result
from common.utils.common import encryption
from models_provider.api.model import DefaultModelResponse
from system_manage.api.email_setting import EmailSettingAPI
from system_manage.serializers.email_setting import EmailSettingSerializer
def encryption_str(_value):
if isinstance(_value, str):
return encryption(_value)
return _value
def get_email_details(request):
path = request.path
body = request.data
query = request.query_params
email_host_password = body.get('email_host_password', '')
return {
'path': path,
'body': {**body, 'email_host_password': encryption_str(email_host_password)},
'query': query
}
class SystemSetting(APIView):
class Email(APIView):
authentication_classes = [TokenAuth]
@extend_schema(methods=['PUT'],
summary=_('Create or update email settings'),
description=_('Create or update email settings'),
operation_id=_('Create or update email settings'), # type: ignore
request=EmailSettingAPI.get_request(),
responses=EmailSettingAPI.get_response(),
tags=[_('Email Settings')]) # type: ignore
@log(menu='Email settings', operate='Create or update email settings',
get_details=get_email_details)
@has_permissions(PermissionConstants.EMAIL_SETTING_EDIT, RoleConstants.ADMIN)
def put(self, request: Request):
return result.success(
EmailSettingSerializer.Create(
data=request.data).update_or_save())
@extend_schema(
methods=['POST'],
summary=_('Test email settings'),
operation_id=_('Test email settings'), # type: ignore
request=EmailSettingAPI.get_request(),
responses=DefaultModelResponse.get_response(),
tags=[_('Email Settings')] # type: ignore
)
@has_permissions(PermissionConstants.EMAIL_SETTING_EDIT, RoleConstants.ADMIN)
@log(menu='Email settings', operate='Test email settings',
get_details=get_email_details
)
def post(self, request: Request):
return result.success(
EmailSettingSerializer.Create(
data=request.data).is_valid())
@extend_schema(methods=['GET'],
summary=_('Get email settings'),
description=_('Get email settings'),
operation_id=_('Get email settings'), # type: ignore
responses=DefaultModelResponse.get_response(),
tags=[_('Email Settings')]) # type: ignore
@has_permissions(PermissionConstants.EMAIL_SETTING_READ, RoleConstants.ADMIN)
def get(self, request: Request):
return result.success(
EmailSettingSerializer.one())
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/views/email_setting.py",
"license": "GNU General Public License v3.0",
"lines": 78,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/knowledge/views/problem.py | from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.views import APIView
from rest_framework.views import Request
from common.auth import TokenAuth
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
from common.log.log import log
from common.result import result
from common.utils.common import query_params_to_single_dict
from knowledge.api.problem import ProblemReadAPI, ProblemBatchCreateAPI, BatchAssociationAPI, BatchDeleteAPI, \
ProblemPageAPI, ProblemDeleteAPI, ProblemEditAPI, ProblemParagraphAPI
from knowledge.serializers.common import get_knowledge_operation_object
from knowledge.serializers.problem import ProblemSerializers
class ProblemView(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['GET'],
summary=_('Question list'),
description=_('Question list'),
operation_id=_('Question list'), # type: ignore
parameters=ProblemReadAPI.get_parameters(),
responses=ProblemReadAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph/Question')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_PROBLEM_READ.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_PROBLEM_READ.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
def get(self, request: Request, workspace_id: str, knowledge_id: str):
q = ProblemSerializers.Query(
data={
**query_params_to_single_dict(request.query_params),
'workspace_id': workspace_id,
'knowledge_id': knowledge_id
}
)
q.is_valid(raise_exception=True)
return result.success(q.list())
@extend_schema(
methods=['POST'],
summary=_('Create question'),
description=_('Create question'),
operation_id=_('Create question'), # type: ignore
parameters=ProblemBatchCreateAPI.get_parameters(),
responses=ProblemBatchCreateAPI.get_response(),
request=ProblemBatchCreateAPI.get_request(),
tags=[_('Knowledge Base/Documentation/Paragraph/Question')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_PROBLEM_CREATE.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_PROBLEM_CREATE.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
menu='problem', operate='Create question',
get_operation_object=lambda r, keywords: get_knowledge_operation_object(keywords.get('knowledge_id'))
,
)
def post(self, request: Request, workspace_id: str, knowledge_id: str):
return result.success(ProblemSerializers.Create(
data={'workspace_id': workspace_id, 'knowledge_id': knowledge_id}
).batch(request.data))
class Paragraph(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
summary=_('Get a list of associated paragraphs'),
description=_('Get a list of associated paragraphs'),
operation_id=_('Get a list of associated paragraphs'), # type: ignore
parameters=ProblemParagraphAPI.get_parameters(),
responses=ProblemParagraphAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph/Question')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_PROBLEM_READ.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_PROBLEM_READ.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
def get(self, request: Request, workspace_id: str, knowledge_id: str, problem_id: str):
return result.success(ProblemSerializers.Operate(
data={
**query_params_to_single_dict(request.query_params),
'workspace_id': workspace_id,
'knowledge_id': knowledge_id,
'problem_id': problem_id
}
).list_paragraph())
class BatchAssociation(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
summary=_('Batch associated paragraphs'),
description=_('Batch associated paragraphs'),
operation_id=_('Batch associated paragraphs'), # type: ignore
request=BatchAssociationAPI.get_request(),
parameters=BatchAssociationAPI.get_parameters(),
responses=BatchAssociationAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph/Question')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_PROBLEM_EDIT.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_PROBLEM_EDIT.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
menu='problem', operate='Batch associated paragraphs',
get_operation_object=lambda r, keywords: get_knowledge_operation_object(keywords.get('knowledge_id')),
)
def put(self, request: Request, workspace_id: str, knowledge_id: str):
return result.success(ProblemSerializers.BatchOperate(
data={'knowledge_id': knowledge_id, 'workspace_id': workspace_id}
).association(request.data))
class BatchDelete(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['PUT'],
summary=_('Batch deletion issues'),
description=_('Batch deletion issues'),
operation_id=_('Batch deletion issues'), # type: ignore
request=BatchDeleteAPI.get_request(),
parameters=BatchDeleteAPI.get_parameters(),
responses=BatchDeleteAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph/Question')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_PROBLEM_EDIT.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_PROBLEM_EDIT.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
menu='problem', operate='Batch deletion issues',
get_operation_object=lambda r, keywords: get_knowledge_operation_object(keywords.get('knowledge_id')),
)
def put(self, request: Request, workspace_id: str, knowledge_id: str):
return result.success(ProblemSerializers.BatchOperate(
data={'knowledge_id': knowledge_id, 'workspace_id': workspace_id}
).delete(request.data))
class Operate(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['DELETE'],
summary=_('Delete question'),
description=_('Delete question'),
operation_id=_('Delete question'), # type: ignore
parameters=ProblemDeleteAPI.get_parameters(),
responses=ProblemDeleteAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph/Question')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_PROBLEM_DELETE.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_PROBLEM_DELETE.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
menu='problem', operate='Delete question',
get_operation_object=lambda r, keywords: get_knowledge_operation_object(keywords.get('knowledge_id')),
)
def delete(self, request: Request, workspace_id: str, knowledge_id: str, problem_id: str):
return result.success(ProblemSerializers.Operate(
data={
**query_params_to_single_dict(request.query_params),
'workspace_id': workspace_id,
'knowledge_id': knowledge_id,
'problem_id': problem_id
}
).delete())
@extend_schema(
methods=['PUT'],
summary=_('Modify question'),
description=_('Modify question'),
operation_id=_('Modify question'), # type: ignore
parameters=ProblemEditAPI.get_parameters(),
request=ProblemEditAPI.get_request(),
responses=ProblemEditAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph/Question')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_PROBLEM_EDIT.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_PROBLEM_EDIT.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
menu='problem', operate='Modify question',
get_operation_object=lambda r, keywords: get_knowledge_operation_object(keywords.get('knowledge_id')),
)
def put(self, request: Request, workspace_id: str, knowledge_id: str, problem_id: str):
return result.success(ProblemSerializers.Operate(
data={
**query_params_to_single_dict(request.query_params),
'workspace_id': workspace_id,
'knowledge_id': knowledge_id,
'problem_id': problem_id
}
).edit(request.data))
class Page(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
summary=_('Get the list of questions by page'),
description=_('Get the list of questions by page'),
operation_id=_('Get the list of questions by page'), # type: ignore
parameters=ProblemPageAPI.get_parameters(),
responses=ProblemPageAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph/Question')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_PROBLEM_READ.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_PROBLEM_READ.get_workspace_permission_workspace_manage_role(),
PermissionConstants.KNOWLEDGE_DOCUMENT_READ.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_DOCUMENT_READ.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
def get(self, request: Request, workspace_id: str, knowledge_id: str, current_page, page_size):
d = ProblemSerializers.Query(
data={
**query_params_to_single_dict(request.query_params),
'knowledge_id': knowledge_id,
'workspace_id': workspace_id
}
)
d.is_valid(raise_exception=True)
return result.success(d.page(current_page, page_size))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/knowledge/views/problem.py",
"license": "GNU General Public License v3.0",
"lines": 237,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/knowledge/views/paragraph.py | from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.views import APIView
from rest_framework.views import Request
from common.auth import TokenAuth
from common.auth.authentication import has_permissions
from common.constants.permission_constants import PermissionConstants, RoleConstants, ViewPermission, CompareConstants
from common.log.log import log
from common.result import result
from common.utils.common import query_params_to_single_dict
from knowledge.api.paragraph import ParagraphReadAPI, ParagraphCreateAPI, ParagraphBatchDeleteAPI, ParagraphEditAPI, \
ParagraphGetAPI, ProblemCreateAPI, UnAssociationAPI, AssociationAPI, ParagraphPageAPI, \
ParagraphBatchGenerateRelatedAPI, ParagraphMigrateAPI, ParagraphAdjustOrderAPI
from knowledge.serializers.common import get_knowledge_operation_object
from knowledge.serializers.paragraph import ParagraphSerializers
from knowledge.views import get_knowledge_document_operation_object, get_document_operation_object
class ParagraphView(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
summary=_('Paragraph list'),
description=_('Paragraph list'),
operation_id=_('Paragraph list'), # type: ignore
parameters=ParagraphReadAPI.get_parameters(),
responses=ParagraphReadAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_DOCUMENT_READ.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_DOCUMENT_READ.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
def get(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str):
q = ParagraphSerializers.Query(
data={
**query_params_to_single_dict(request.query_params),
'workspace_id': workspace_id,
'knowledge_id': knowledge_id,
'document_id': document_id
}
)
return result.success(q.list())
@extend_schema(
summary=_('Create Paragraph'),
operation_id=_('Create Paragraph'), # type: ignore
parameters=ParagraphCreateAPI.get_parameters(),
request=ParagraphCreateAPI.get_request(),
responses=ParagraphCreateAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
menu='Paragraph', operate='Create Paragraph',
get_operation_object=lambda r, keywords: get_knowledge_document_operation_object(
get_knowledge_operation_object(keywords.get('knowledge_id')),
get_knowledge_operation_object(keywords.get('knowledge_id')),
get_document_operation_object(keywords.get('document_id'))
),
)
def post(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str):
return result.success(ParagraphSerializers.Create(
data={'workspace_id': workspace_id, 'knowledge_id': knowledge_id, 'document_id': document_id}
).save(request.data))
class BatchDelete(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['PUT'],
summary=_('Batch Paragraph'),
description=_('Batch Paragraph'),
operation_id=_('Batch Paragraph'), # type: ignore
parameters=ParagraphBatchDeleteAPI.get_parameters(),
request=ParagraphBatchDeleteAPI.get_request(),
responses=ParagraphBatchDeleteAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
def put(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str):
return result.success(ParagraphSerializers.Batch(
data={'workspace_id': workspace_id, 'knowledge_id': knowledge_id, 'document_id': document_id}
).batch_delete(request.data))
class BatchMigrate(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
summary=_('Migrate paragraphs in batches'),
operation_id=_('Migrate paragraphs in batches'), # type: ignore
parameters=ParagraphMigrateAPI.get_parameters(),
request=ParagraphMigrateAPI.get_request(),
responses=ParagraphMigrateAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
menu='Paragraph', operate='Migrate paragraphs in batches',
get_operation_object=lambda r, keywords: get_knowledge_document_operation_object(
get_knowledge_operation_object(keywords.get('knowledge_id')),
get_document_operation_object(keywords.get('document_id'))
),
)
def put(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str,
target_knowledge_id: str, target_document_id):
return result.success(
ParagraphSerializers.Migrate(data={
'workspace_id': workspace_id,
'knowledge_id': knowledge_id,
'target_knowledge_id': target_knowledge_id,
'document_id': document_id,
'target_document_id': target_document_id,
'paragraph_id_list': request.data.get('id_list')
}).migrate())
class BatchGenerateRelated(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['PUT'],
summary=_('Batch Generate Related'),
description=_('Batch Generate Related'),
operation_id=_('Batch Generate Related'), # type: ignore
parameters=ParagraphBatchGenerateRelatedAPI.get_parameters(),
request=ParagraphBatchGenerateRelatedAPI.get_request(),
responses=ParagraphBatchGenerateRelatedAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_DOCUMENT_GENERATE.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_DOCUMENT_GENERATE.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
menu='Paragraph', operate='Batch generate related',
get_operation_object=lambda r, keywords: get_knowledge_document_operation_object(
get_knowledge_operation_object(keywords.get('knowledge_id')),
get_document_operation_object(keywords.get('document_id'))
),
)
def put(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str):
return result.success(ParagraphSerializers.Batch(
data={'workspace_id': workspace_id, 'knowledge_id': knowledge_id, 'document_id': document_id}
).batch_generate_related(request.data))
class Operate(APIView):
authentication_classes = [TokenAuth]
@extend_schema(
methods=['PUT'],
summary=_('Modify paragraph data'),
description=_('Modify paragraph data'),
operation_id=_('Modify paragraph data'), # type: ignore
parameters=ParagraphEditAPI.get_parameters(),
request=ParagraphEditAPI.get_request(),
responses=ParagraphEditAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
menu='Paragraph', operate='Modify paragraph data',
get_operation_object=lambda r, keywords: get_knowledge_document_operation_object(
get_knowledge_operation_object(keywords.get('knowledge_id')),
get_document_operation_object(keywords.get('document_id'))
),
)
def put(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str, paragraph_id: str):
o = ParagraphSerializers.Operate(
data={
'workspace_id': workspace_id,
"paragraph_id": paragraph_id,
'knowledge_id': knowledge_id,
'document_id': document_id
}
)
o.is_valid(raise_exception=True)
return result.success(o.edit(request.data))
@extend_schema(
methods=['GET'],
summary=_('Get paragraph details'),
description=_('Get paragraph details'),
operation_id=_('Get paragraph details'), # type: ignore
parameters=ParagraphGetAPI.get_parameters(),
responses=ParagraphGetAPI.get_response(),
tags=[_('Knowledge Base/Documentation/Paragraph')] # type: ignore
)
@has_permissions(
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
ViewPermission([RoleConstants.USER.get_workspace_role()],
[PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
def get(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str, paragraph_id: str):
o = ParagraphSerializers.Operate(
data={
'workspace_id': workspace_id,
"paragraph_id": paragraph_id,
'knowledge_id': knowledge_id,
'document_id': document_id
}
)
o.is_valid(raise_exception=True)
return result.success(o.one())
@extend_schema(
    methods=['DELETE'],
    summary=_('Delete paragraph'),
    description=_('Delete paragraph'),
    operation_id=_('Delete paragraph'),  # type: ignore
    parameters=ParagraphGetAPI.get_parameters(),
    responses=ParagraphGetAPI.get_response(),
    tags=[_('Knowledge Base/Documentation/Paragraph')]  # type: ignore
)
@has_permissions(
    PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
    PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
    RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
    ViewPermission([RoleConstants.USER.get_workspace_role()],
                   [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
)
@log(
    menu='Paragraph', operate='Delete paragraph',
    get_operation_object=lambda r, keywords: get_knowledge_document_operation_object(
        get_knowledge_operation_object(keywords.get('knowledge_id')),
        get_document_operation_object(keywords.get('document_id'))
    ),
)
def delete(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str, paragraph_id: str):
    """Delete a single paragraph after validating the full resource path."""
    payload = {
        'workspace_id': workspace_id,
        'paragraph_id': paragraph_id,
        'knowledge_id': knowledge_id,
        'document_id': document_id
    }
    serializer = ParagraphSerializers.Operate(data=payload)
    serializer.is_valid(raise_exception=True)
    return result.success(serializer.delete())
class Problem(APIView):
    """Endpoints for the questions associated with a single paragraph."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['POST'],
        summary=_('Add associated questions'),
        description=_('Add associated questions'),
        operation_id=_('Add associated questions'),  # type: ignore
        parameters=ProblemCreateAPI.get_parameters(),
        request=ProblemCreateAPI.get_request(),
        responses=ProblemCreateAPI.get_response(),
        tags=[_('Knowledge Base/Documentation/Paragraph')]  # type: ignore
    )
    @has_permissions(
        PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
        PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
        RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
        ViewPermission([RoleConstants.USER.get_workspace_role()],
                       [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
    )
    @log(
        menu='Paragraph', operate='Add associated questions',
        get_operation_object=lambda r, keywords: get_knowledge_document_operation_object(
            get_knowledge_operation_object(keywords.get('knowledge_id')),
            get_document_operation_object(keywords.get('document_id'))
        ),
    )
    def post(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str, paragraph_id: str):
        """Create a question and associate it with this paragraph."""
        payload = {
            'workspace_id': workspace_id,
            'knowledge_id': knowledge_id,
            'document_id': document_id,
            'paragraph_id': paragraph_id
        }
        serializer = ParagraphSerializers.Problem(data=payload)
        return result.success(serializer.save(request.data, with_valid=True))

    @extend_schema(
        methods=['GET'],
        summary=_('Get a list of paragraph questions'),
        description=_('Get a list of paragraph questions'),
        operation_id=_('Get a list of paragraph questions'),  # type: ignore
        parameters=ParagraphGetAPI.get_parameters(),
        responses=ParagraphGetAPI.get_response(),
        tags=[_('Knowledge Base/Documentation/Paragraph')]  # type: ignore
    )
    @has_permissions(
        PermissionConstants.KNOWLEDGE_PROBLEM_READ.get_workspace_knowledge_permission(),
        PermissionConstants.KNOWLEDGE_PROBLEM_READ.get_workspace_permission_workspace_manage_role(),
        RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
        ViewPermission([RoleConstants.USER.get_workspace_role()],
                       [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
    )
    def get(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str, paragraph_id: str):
        """List the questions currently associated with this paragraph."""
        payload = {
            'workspace_id': workspace_id,
            'knowledge_id': knowledge_id,
            'document_id': document_id,
            'paragraph_id': paragraph_id
        }
        serializer = ParagraphSerializers.Problem(data=payload)
        return result.success(serializer.list(with_valid=True))
class UnAssociation(APIView):
    """Remove the association between a paragraph and a question."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['PUT'],
        summary=_('Disassociation issue'),
        description=_('Disassociation issue'),
        operation_id=_('Disassociation issue'),  # type: ignore
        parameters=UnAssociationAPI.get_parameters(),
        request=UnAssociationAPI.get_request(),
        responses=UnAssociationAPI.get_response(),
        tags=[_('Knowledge Base/Documentation/Paragraph')]  # type: ignore
    )
    @has_permissions(
        PermissionConstants.KNOWLEDGE_PROBLEM_RELATE.get_workspace_knowledge_permission(),
        PermissionConstants.KNOWLEDGE_PROBLEM_RELATE.get_workspace_permission_workspace_manage_role(),
        RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
        ViewPermission([RoleConstants.USER.get_workspace_role()],
                       [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
    )
    @log(
        menu='Paragraph', operate='Disassociation issue',
        get_operation_object=lambda r, keywords: get_knowledge_document_operation_object(
            get_knowledge_operation_object(keywords.get('knowledge_id')),
            get_document_operation_object(keywords.get('document_id'))
        )
    )
    def put(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str):
        """Unlink a question from a paragraph; both ids come from query params."""
        params = request.query_params
        serializer = ParagraphSerializers.Association(
            data={
                'workspace_id': workspace_id,
                'knowledge_id': knowledge_id,
                'document_id': document_id,
                'paragraph_id': params.get('paragraph_id'),
                'problem_id': params.get('problem_id')
            }
        )
        return result.success(serializer.un_association())
class Association(APIView):
    """Associate an existing question with a paragraph."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['PUT'],
        summary=_('Related questions'),
        description=_('Related questions'),
        operation_id=_('Related questions'),  # type: ignore
        parameters=AssociationAPI.get_parameters(),
        request=AssociationAPI.get_request(),
        responses=AssociationAPI.get_response(),
        tags=[_('Knowledge Base/Documentation/Paragraph')]  # type: ignore
    )
    @has_permissions(
        PermissionConstants.KNOWLEDGE_PROBLEM_RELATE.get_workspace_knowledge_permission(),
        PermissionConstants.KNOWLEDGE_PROBLEM_RELATE.get_workspace_permission_workspace_manage_role(),
        RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
        ViewPermission([RoleConstants.USER.get_workspace_role()],
                       [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
    )
    @log(
        menu='Paragraph', operate='Related questions',
        get_operation_object=lambda r, keywords: get_knowledge_document_operation_object(
            get_knowledge_operation_object(keywords.get('knowledge_id')),
            get_document_operation_object(keywords.get('document_id'))
        ),
    )
    def put(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str):
        """Link a question to a paragraph; both ids come from query params."""
        params = request.query_params
        serializer = ParagraphSerializers.Association(
            data={
                'workspace_id': workspace_id,
                'knowledge_id': knowledge_id,
                'document_id': document_id,
                'paragraph_id': params.get('paragraph_id'),
                'problem_id': params.get('problem_id')
            }
        )
        return result.success(serializer.association())
class Page(APIView):
    """Paginated paragraph listing for a document."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        summary=_('Get paragraph list by pagination'),
        description=_('Get paragraph list by pagination'),
        operation_id=_('Get paragraph list by pagination'),  # type: ignore
        parameters=ParagraphPageAPI.get_parameters(),
        responses=ParagraphPageAPI.get_response(),
        tags=[_('Knowledge Base/Documentation/Paragraph')]  # type: ignore
    )
    @has_permissions(
        PermissionConstants.KNOWLEDGE_DOCUMENT_READ.get_workspace_knowledge_permission(),
        PermissionConstants.KNOWLEDGE_DOCUMENT_READ.get_workspace_permission_workspace_manage_role(),
        RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
        ViewPermission([RoleConstants.USER.get_workspace_role()],
                       [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
    )
    def get(self, request: Request,
            workspace_id: str, knowledge_id: str, document_id: str, current_page: int, page_size: int):
        """Return one page of paragraphs, merging extra query-string filters."""
        query_data = {
            **query_params_to_single_dict(request.query_params),
            'workspace_id': workspace_id,
            'knowledge_id': knowledge_id,
            'document_id': document_id
        }
        serializer = ParagraphSerializers.Query(data=query_data)
        return result.success(serializer.page(current_page, page_size))
class AdjustPosition(APIView):
    """Move a paragraph to a new position within its document."""
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['PUT'],
        summary=_('Adjust paragraph position'),
        description=_('Adjust paragraph position'),
        operation_id=_('Adjust paragraph position'),  # type: ignore
        parameters=ParagraphAdjustOrderAPI.get_parameters(),
        request=ParagraphAdjustOrderAPI.get_request(),
        responses=ParagraphAdjustOrderAPI.get_response(),
        tags=[_('Knowledge Base/Documentation/Paragraph')]  # type: ignore
    )
    @has_permissions(
        PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_knowledge_permission(),
        PermissionConstants.KNOWLEDGE_DOCUMENT_EDIT.get_workspace_permission_workspace_manage_role(),
        RoleConstants.WORKSPACE_MANAGE.get_workspace_role(),
        ViewPermission([RoleConstants.USER.get_workspace_role()],
                       [PermissionConstants.KNOWLEDGE.get_workspace_knowledge_permission()], CompareConstants.AND),
    )
    def put(self, request: Request, workspace_id: str, knowledge_id: str, document_id: str):
        """Move the paragraph given by 'paragraph_id' to 'new_position' (query params)."""
        params = request.query_params
        serializer = ParagraphSerializers.AdjustPosition(
            data={
                'workspace_id': workspace_id,
                'knowledge_id': knowledge_id,
                'document_id': document_id,
                'paragraph_id': params.get('paragraph_id'),
            }
        )
        return result.success(serializer.adjust_position(params.get('new_position')))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/knowledge/views/paragraph.py",
"license": "GNU General Public License v3.0",
"lines": 444,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/serializers/user_resource_permission.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: workspace_user_resource_permission.py
@date:2025/4/28 17:17
@desc:
"""
import json
import os
from django.contrib.postgres.fields import ArrayField
from django.core.cache import cache
from django.db import models
from django.db.models import QuerySet, Q, TextField
from django.db.models.functions import Cast
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from application.models import Application
from common.constants.cache_version import Cache_Version
from common.constants.permission_constants import get_default_workspace_user_role_mapping_list, RoleConstants, \
ResourcePermission, ResourcePermissionRole, ResourceAuthType
from common.database_model_manage.database_model_manage import DatabaseModelManage
from common.db.search import native_search, native_page_search, get_dynamics_model
from common.db.sql_execute import select_list
from common.exception.app_exception import AppApiException
from common.utils.common import get_file_content
from knowledge.models import Knowledge
from maxkb.conf import PROJECT_DIR
from maxkb.settings import edition
from models_provider.models import Model
from system_manage.models import WorkspaceUserResourcePermission
from tools.models import Tool
from users.serializers.user import is_workspace_manage
class PermissionSerializer(serializers.Serializer):
    """Permission flags for a single resource (labels are user-facing Chinese text)."""
    # Read access
    VIEW = serializers.BooleanField(required=True, label="可读")
    # Management access
    MANAGE = serializers.BooleanField(required=True, label="管理")
    # Permission follows the user's role instead of an explicit per-resource grant
    ROLE = serializers.BooleanField(required=True, label="跟随角色")
class UserResourcePermissionItemResponse(serializers.Serializer):
    """One row describing a user's authorization on a single resource."""
    id = serializers.UUIDField(required=True, label="主键id")
    name = serializers.CharField(required=True, label="资源名称")
    auth_target_type = serializers.CharField(required=True, label="授权资源")
    user_id = serializers.UUIDField(required=True, label="用户id")
    icon = serializers.CharField(required=True, label="资源图标")
    auth_type = serializers.CharField(required=True, label="授权类型")
    # Effective permission level; null/blank allowed when the user holds no explicit grant
    permission = serializers.ChoiceField(required=False, allow_null=True, allow_blank=True,
                                         choices=['NOT_AUTH', 'MANAGE', 'VIEW', 'ROLE'],
                                         label=_('permission'))
class UserResourcePermissionResponse(serializers.Serializer):
    """Response wrapper: authorization items grouped under the KNOWLEDGE resource type."""
    KNOWLEDGE = UserResourcePermissionItemResponse(many=True)
class UpdateTeamMemberItemPermissionSerializer(serializers.Serializer):
    """A single (target resource, permission level) pair in a batch permission update."""
    target_id = serializers.CharField(required=True, label=_('target id'))
    permission = serializers.ChoiceField(required=False, allow_null=True, allow_blank=True,
                                         choices=['NOT_AUTH', 'MANAGE', 'VIEW', 'ROLE'],
                                         label=_('permission'))
class UpdateUserResourcePermissionRequest(serializers.Serializer):
    """Request body for batch-updating one user's resource permissions."""
    user_resource_permission_list = UpdateTeamMemberItemPermissionSerializer(required=True, many=True)

    def is_valid(self, *, auth_target_type=None, workspace_id=None, raise_exception=False):
        """Validate the payload, then verify every submitted target id exists.

        Raises AppApiException(500) listing any target ids not found by the
        existence-check SQL. Note: field validation always raises regardless
        of the raise_exception argument.
        """
        super().is_valid(raise_exception=True)
        # Pair each submitted target with the resource type for the existence check
        user_resource_permission_list = [{'target_id': urp.get('target_id'), 'auth_target_type': auth_target_type} for
                                         urp in
                                         self.data.get("user_resource_permission_list")]
        # The SQL takes the JSON payload plus workspace_id repeated once per clause
        # (presumably one per resource table union in the .sql file — TODO confirm)
        illegal_target_id_list = select_list(
            get_file_content(
                os.path.join(PROJECT_DIR, "apps", "system_manage", 'sql', 'check_member_permission_target_exists.sql')),
            [json.dumps(user_resource_permission_list), workspace_id, workspace_id, workspace_id, workspace_id,
             workspace_id, workspace_id, workspace_id])
        if illegal_target_id_list is not None and len(illegal_target_id_list) > 0:
            raise AppApiException(500,
                                  _('Non-existent id')+'[' + str(illegal_target_id_list) + ']')
# Maps an auth_target_type value to its Django model class.
m_map = {
    "KNOWLEDGE": Knowledge,
    'TOOL': Tool,
    'MODEL': Model,
    'APPLICATION': Application,
}

# Maps an auth_target_type value to the SQL template used to list its permissions.
sql_map = {
    "KNOWLEDGE": 'get_knowledge_user_resource_permission.sql',
    'TOOL': 'get_tool_user_resource_permission.sql',
    'MODEL': 'get_model_user_resource_permission.sql',
    'APPLICATION': 'get_application_user_resource_permission.sql'
}
class UserResourcePermissionUserListRequest(serializers.Serializer):
    """Filter parameters when listing one user's resource permissions."""
    name = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_('resource name'))
    permission = serializers.MultipleChoiceField(required=False, allow_null=True, allow_blank=True,
                                                 choices=['NOT_AUTH', 'MANAGE', 'VIEW', 'ROLE'],
                                                 label=_('permission'))
class UserResourcePermissionSerializer(serializers.Serializer):
    """Per-user resource authorization management within a workspace."""
    workspace_id = serializers.CharField(required=True, label=_('workspace id'))
    user_id = serializers.CharField(required=True, label=_('user id'))
    auth_target_type = serializers.CharField(required=True, label=_('resource'))

    def get_queryset(self, instance):
        """Build the named query sets consumed by the native SQL templates.

        instance: filter dict with optional 'name' and 'permission' (list) keys.
        """
        resource_query_set = QuerySet(
            model=get_dynamics_model({
                'name': models.CharField(),
                "permission": models.CharField(),
            }))
        name = instance.get('name')
        permission = instance.get('permission')
        # NOTE(review): 'permission' is iterated before the falsy check below —
        # assumes callers always pass a list (possibly empty), never None. Confirm.
        query_p_list = [None if p == "NOT_AUTH" else p for p in permission]
        if name:
            resource_query_set = resource_query_set.filter(name__contains=name)
        if permission:
            # NOT_AUTH is represented as NULL in the SQL result, hence the None handling
            if all([p is None for p in query_p_list]):
                resource_query_set = resource_query_set.filter(permission=None)
            else:
                if any([p is None for p in query_p_list]):
                    resource_query_set = resource_query_set.filter(
                        Q(permission__in=query_p_list) | Q(permission=None))
                else:
                    resource_query_set = resource_query_set.filter(
                        permission__in=query_p_list)
        return {
            'query_set': QuerySet(m_map.get(self.data.get('auth_target_type'))).filter(
                workspace_id=self.data.get('workspace_id')),
            'folder_query_set': QuerySet(m_map.get(self.data.get('auth_target_type'))).filter(
                workspace_id=self.data.get('workspace_id')),
            'workspace_user_resource_permission_query_set': QuerySet(WorkspaceUserResourcePermission).filter(
                workspace_id=self.data.get('workspace_id'), user=self.data.get('user_id'),
                auth_target_type=self.data.get('auth_target_type')),
            'resource_query_set': resource_query_set
        }

    def is_auth(self, resource_id: str):
        """Return True if the configured user may view the given resource.

        Workspace managers are always authorized. Otherwise a grant row must
        exist; ROLE-typed grants defer to the EE role/permission mapping models.
        """
        self.is_valid(raise_exception=True)
        auth_target_type = self.data.get('auth_target_type')
        workspace_id = self.data.get('workspace_id')
        user_id = self.data.get('user_id')
        workspace_manage = is_workspace_manage(user_id, workspace_id)
        if workspace_manage:
            return True
        wurp = QuerySet(WorkspaceUserResourcePermission).filter(auth_target_type=auth_target_type,
                                                                workspace_id=workspace_id, user=user_id,
                                                                target=resource_id).first()
        if wurp is None:
            return False
        workspace_user_role_mapping_model = DatabaseModelManage.get_model("workspace_user_role_mapping")
        role_permission_mapping_model = DatabaseModelManage.get_model("role_permission_mapping_model")
        if wurp.auth_type == ResourceAuthType.ROLE.value:
            if workspace_user_role_mapping_model and role_permission_mapping_model:
                # one of the user's roles in this workspace must carry '<RESOURCE>:READ'
                inner = QuerySet(workspace_user_role_mapping_model).filter(workspace_id=workspace_id, user_id=user_id)
                return QuerySet(role_permission_mapping_model).filter(role_id__in=inner,
                                                                      permission_id=(
                                                                              auth_target_type + ':READ')).exists()
            else:
                # ROLE grant but EE models absent: deny
                return False
        else:
            return wurp.permission_list.__contains__(ResourcePermission.VIEW.value)

    def auth_resource_batch(self, resource_id_list: list):
        """Grant the configured user access to every resource in the list.

        The auth_type mirrors the user's existing grant rows for this resource
        type; with no prior rows it defaults by edition (CE: explicit
        permission group, otherwise role-based).
        """
        self.is_valid(raise_exception=True)
        auth_target_type = self.data.get('auth_target_type')
        workspace_id = self.data.get('workspace_id')
        user_id = self.data.get('user_id')
        wurp = QuerySet(WorkspaceUserResourcePermission).filter(auth_target_type=auth_target_type,
                                                                workspace_id=workspace_id, user_id=user_id).first()
        auth_type = wurp.auth_type if wurp else (
            ResourceAuthType.RESOURCE_PERMISSION_GROUP if edition == 'CE' else ResourceAuthType.ROLE)
        workspace_user_resource_permission = [WorkspaceUserResourcePermission(
            target=resource_id,
            auth_target_type=auth_target_type,
            permission_list=[ResourcePermission.VIEW,
                             ResourcePermission.MANAGE] if auth_type == ResourceAuthType.RESOURCE_PERMISSION_GROUP else [
                ResourcePermissionRole.ROLE],
            workspace_id=workspace_id,
            user_id=user_id,
            auth_type=auth_type
        ) for resource_id in resource_id_list]
        QuerySet(WorkspaceUserResourcePermission).bulk_create(workspace_user_resource_permission)
        # Refresh the user's cached permission list
        version = Cache_Version.PERMISSION_LIST.get_version()
        key = Cache_Version.PERMISSION_LIST.get_key(user_id=user_id)
        cache.delete(key, version=version)
        return True

    def auth_resource(self, resource_id: str, is_folder=False):
        """Grant the configured user VIEW+MANAGE on a single resource.

        Note: is_folder is currently unused by the body.
        """
        self.is_valid(raise_exception=True)
        auth_target_type = self.data.get('auth_target_type')
        workspace_id = self.data.get('workspace_id')
        user_id = self.data.get('user_id')
        WorkspaceUserResourcePermission(
            target=resource_id,
            auth_target_type=auth_target_type,
            permission_list=[ResourcePermission.VIEW,
                             ResourcePermission.MANAGE],
            workspace_id=workspace_id,
            user_id=user_id,
            auth_type=ResourceAuthType.RESOURCE_PERMISSION_GROUP
        ).save()
        # Refresh the user's cached permission list
        version = Cache_Version.PERMISSION_LIST.get_version()
        key = Cache_Version.PERMISSION_LIST.get_key(user_id=user_id)
        cache.delete(key, version=version)
        return True

    def list(self, instance, user, with_valid=True):
        """Return the user's permission rows for the configured resource type."""
        if with_valid:
            self.is_valid(raise_exception=True)
            UserResourcePermissionUserListRequest(data=instance).is_valid(raise_exception=True)
        workspace_id = self.data.get("workspace_id")
        user_id = self.data.get("user_id")
        # User permission list, via the resource-type specific SQL template
        user_resource_permission_list = native_search(self.get_queryset(instance), get_file_content(
            os.path.join(PROJECT_DIR, "apps", "system_manage", 'sql', sql_map.get(self.data.get('auth_target_type')))))
        return [{**user_resource_permission}
                for user_resource_permission in user_resource_permission_list]

    def page(self, instance, current_page: int, page_size: int, user, with_valid=True):
        """Paginated variant of list()."""
        if with_valid:
            self.is_valid(raise_exception=True)
            UserResourcePermissionUserListRequest(data=instance).is_valid(raise_exception=True)
        workspace_id = self.data.get("workspace_id")
        user_id = self.data.get("user_id")
        # Paged permission list for the user
        user_resource_permission_page_list = native_page_search(current_page, page_size, self.get_queryset(instance),
                                                                get_file_content(
                                                                    os.path.join(PROJECT_DIR, "apps", "system_manage",
                                                                                 'sql', sql_map.get(
                                                                            self.data.get('auth_target_type')))
                                                                ))
        return user_resource_permission_page_list

    def edit(self, instance, user, with_valid=True):
        """Replace the user's grants for the submitted targets with new levels."""
        if with_valid:
            self.is_valid(raise_exception=True)
            UpdateUserResourcePermissionRequest(data={'user_resource_permission_list': instance}).is_valid(
                raise_exception=True,
                auth_target_type=self.data.get(
                    'auth_target_type'),
                workspace_id=self.data.get('workspace_id'))
        workspace_id = self.data.get("workspace_id")
        user_id = self.data.get("user_id")
        update_list = []
        save_list = []
        targets = [item['target_id'] for item in instance]
        # Delete-then-insert: existing grants for these targets are replaced wholesale
        QuerySet(WorkspaceUserResourcePermission).filter(
            workspace_id=workspace_id,
            user_id=user_id,
            auth_target_type=self.data.get('auth_target_type'),
            target__in=targets
        ).delete()
        workspace_user_resource_permission_exist_list = []
        for user_resource_permission in instance:
            permission = user_resource_permission['permission']
            # NOTE(review): permission_map has no None/'' key although the request
            # serializer allows null/blank — would raise KeyError; confirm upstream guards.
            auth_type, permission_list = permission_map[permission]
            # NOTE(review): the exist list above is never populated, so this update
            # branch appears to be dead code from an earlier update-in-place design.
            exist_list = [user_resource_permission_exist for user_resource_permission_exist in
                          workspace_user_resource_permission_exist_list if
                          user_resource_permission.get('target_id') == str(user_resource_permission_exist.target)]
            if len(exist_list) > 0:
                exist_list[0].permission_list = [key for key in user_resource_permission.get('permission').keys() if
                                                 user_resource_permission.get('permission').get(key)]
                exist_list[0].auth_type = user_resource_permission.get('auth_type')
                update_list.append(exist_list[0])
            else:
                save_list.append(WorkspaceUserResourcePermission(target=user_resource_permission.get('target_id'),
                                                                 auth_target_type=self.data.get('auth_target_type'),
                                                                 permission_list=permission_list,
                                                                 workspace_id=workspace_id,
                                                                 user_id=user_id,
                                                                 auth_type=auth_type))
        # Batch update
        QuerySet(WorkspaceUserResourcePermission).bulk_update(update_list, ['permission_list', 'auth_type']) if len(
            update_list) > 0 else None
        # Batch insert
        QuerySet(WorkspaceUserResourcePermission).bulk_create(save_list) if len(save_list) > 0 else None
        # Refresh the user's cached permission list
        version = Cache_Version.PERMISSION_LIST.get_version()
        key = Cache_Version.PERMISSION_LIST.get_key(user_id=user_id)
        cache.delete(key, version=version)
        return instance
class ResourceUserPermissionUserListRequest(serializers.Serializer):
    """Filter parameters for listing the users authorized on one resource."""
    # Fix: both labels previously read _('workspace id') — a copy-paste error
    nick_name = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_('nick name'))
    username = serializers.CharField(required=False, allow_null=True, allow_blank=True, label=_('username'))
    permission = serializers.MultipleChoiceField(required=False, allow_null=True, allow_blank=True,
                                                 choices=['NOT_AUTH', 'MANAGE', 'VIEW', 'ROLE'],
                                                 label=_('permission'))
class ResourceUserPermissionEditRequest(serializers.Serializer):
    """One user's new permission level on a resource."""
    # Fix: label previously read _('workspace id') — a copy-paste error
    user_id = serializers.CharField(required=True, label=_('user id'))
    permission = serializers.ChoiceField(required=True, choices=['NOT_AUTH', 'MANAGE', 'VIEW', 'ROLE'],
                                         label=_('permission'))
# Maps an API permission level to the (auth_type, permission_list) pair
# stored on the WorkspaceUserResourcePermission grant row.
permission_map = {
    "ROLE": ("ROLE", ["ROLE"]),
    "MANAGE": ("RESOURCE_PERMISSION_GROUP", ["MANAGE", "VIEW"]),
    "VIEW": ("RESOURCE_PERMISSION_GROUP", ["VIEW"]),
    "NOT_AUTH": ("RESOURCE_PERMISSION_GROUP", []),
}
class ResourceUserPermissionSerializer(serializers.Serializer):
    """Per-resource view of user authorizations (who may access one resource)."""
    workspace_id = serializers.CharField(required=True, label=_('workspace id'))
    target = serializers.CharField(required=True, label=_('resource id'))
    auth_target_type = serializers.CharField(required=True, label=_('resource'))
    users_permission = ResourceUserPermissionEditRequest(required=False, many=True, label=_('users_permission'))

    # NOTE(review): unlike module-level m_map this has no 'MODEL' entry — confirm
    # that models are intentionally excluded from per-resource authorization here.
    RESOURCE_MODEL_MAP = {
        'APPLICATION': Application,
        'KNOWLEDGE': Knowledge,
        'TOOL': Tool
    }

    def get_queryset(self, instance, is_x_pack_ee: bool):
        """Build the query sets for the resource-user SQL; EE adds role columns."""
        user_query_set = QuerySet(model=get_dynamics_model({
            'nick_name': models.CharField(),
            'username': models.CharField(),
            "permission": models.CharField(),
            "u.id": models.UUIDField(),
            "role": models.CharField(),
            "role_setting.type": models.CharField(),
            "user_role_relation.workspace_id": models.CharField(),
            'tmp.type_list': ArrayField(models.CharField()),
            'tmp.role_name_list_str': models.CharField()
        }))
        nick_name = instance.get('nick_name')
        username = instance.get('username')
        role_name = instance.get('role')
        permission = instance.get('permission')
        # NOTE(review): 'permission' is iterated before the falsy check below —
        # assumes callers always pass a list (possibly empty), never None. Confirm.
        query_p_list = [None if p == "NOT_AUTH" else p for p in permission]
        workspace_user_resource_permission_query_set = QuerySet(WorkspaceUserResourcePermission).filter(
            workspace_id=self.data.get('workspace_id'),
            auth_target_type=self.data.get('auth_target_type'),
            target=self.data.get('target'))
        if nick_name:
            user_query_set = user_query_set.filter(nick_name__contains=nick_name)
        if username:
            user_query_set = user_query_set.filter(username__contains=username)
        if permission:
            # NOT_AUTH is represented as NULL in the SQL result, hence the None handling
            if all([p is None for p in query_p_list]):
                user_query_set = user_query_set.filter(
                    permission=None)
            else:
                if any([p is None for p in query_p_list]):
                    user_query_set = user_query_set.filter(
                        Q(permission__in=query_p_list) | Q(permission=None))
                else:
                    user_query_set = user_query_set.filter(
                        permission__in=query_p_list)
        workspace_user_role_mapping_model = DatabaseModelManage.get_model("workspace_user_role_mapping")
        if workspace_user_role_mapping_model:
            # Restrict to users actually mapped into this workspace
            user_query_set = user_query_set.filter(
                **{"u.id__in": QuerySet(workspace_user_role_mapping_model).filter(
                    workspace_id=self.data.get('workspace_id')).values("user_id")})
        if is_x_pack_ee:
            user_query_set = user_query_set.filter(**{
                "tmp.type_list__contains": ["USER"]
            })
            role_name_and_type_query_set = QuerySet(model=get_dynamics_model({
                'user_role_relation.workspace_id': models.CharField(),
                'role_setting.type': models.CharField(),
            })).filter(**{
                "user_role_relation.workspace_id": self.data.get('workspace_id'),
                "role_setting.type": "USER",
            })
            if role_name:
                user_query_set = user_query_set.filter(
                    **{'tmp.role_name_list_str__icontains': str(role_name)}
                )
            return {
                'workspace_user_resource_permission_query_set': workspace_user_resource_permission_query_set,
                'user_query_set': user_query_set,
                'role_name_and_type_query_set': role_name_and_type_query_set
            }
        else:
            user_query_set = user_query_set.filter(
                **{'role': "USER"})
            return {
                'workspace_user_resource_permission_query_set': workspace_user_resource_permission_query_set,
                'user_query_set': user_query_set
            }

    def list(self, instance, with_valid=True):
        """List users and their permission level on the configured resource."""
        if with_valid:
            self.is_valid(raise_exception=True)
            ResourceUserPermissionUserListRequest(data=instance).is_valid(raise_exception=True)
        is_x_pack_ee = self.is_x_pack_ee()
        # Resource user authorization list (EE editions use the role-aware SQL template)
        resource_user_permission_list = native_search(self.get_queryset(instance, is_x_pack_ee), get_file_content(
            os.path.join(PROJECT_DIR, "apps", "system_manage",
                         'sql',
                         ('get_resource_user_permission_detail_ee.sql' if is_x_pack_ee else
                          'get_resource_user_permission_detail.sql')
                         )
        ))
        return resource_user_permission_list

    @staticmethod
    def is_x_pack_ee():
        """True when both EE role-mapping models are registered (X-Pack EE edition)."""
        workspace_user_role_mapping_model = DatabaseModelManage.get_model("workspace_user_role_mapping")
        role_permission_mapping_model = DatabaseModelManage.get_model("role_permission_mapping_model")
        return workspace_user_role_mapping_model is not None and role_permission_mapping_model is not None

    def page(self, instance, current_page: int, page_size: int, with_valid=True):
        """Paginated variant of list()."""
        if with_valid:
            self.is_valid(raise_exception=True)
            ResourceUserPermissionUserListRequest(data=instance).is_valid(raise_exception=True)
        # Paged list
        is_x_pack_ee = self.is_x_pack_ee()
        resource_user_permission_page_list = native_page_search(current_page, page_size,
                                                                self.get_queryset(instance, is_x_pack_ee),
                                                                get_file_content(
                                                                    os.path.join(PROJECT_DIR, "apps", "system_manage",
                                                                                 'sql',
                                                                                 (
                                                                                     'get_resource_user_permission_detail_ee.sql' if is_x_pack_ee else
                                                                                     'get_resource_user_permission_detail.sql')
                                                                                 )
                                                                ))
        return resource_user_permission_page_list

    def get_has_manage_permission_resource_under_folders(self, current_user_id, folder_ids):
        """Return ids of resources under the given folders that the user may manage."""
        workspace_id = self.data.get("workspace_id")
        auth_target_type = self.data.get("auth_target_type")
        workspace_manage = is_workspace_manage(current_user_id, workspace_id)
        resource_model = self.RESOURCE_MODEL_MAP[auth_target_type]
        # Imported lazily — presumably to avoid a circular import with the folders app
        from folders.serializers.folder import has_exact_permission_by_role
        permission_id = f"{auth_target_type}:READ+AUTH"
        if workspace_manage:
            role_type = RoleConstants.WORKSPACE_MANAGE.value.__str__()
            has_user_role_exact_permission = has_exact_permission_by_role(current_user_id, workspace_id, permission_id,role_type)
            if has_user_role_exact_permission:
                # Managers with the exact role permission manage everything in the folders
                current_user_managed_resources_ids = QuerySet(resource_model).filter(workspace_id=workspace_id,
                                                                                     folder__in=folder_ids).annotate(
                    id_str=Cast('id', TextField())
                ).values_list("id_str", flat=True)
            else:
                current_user_managed_resources_ids = []
        else:
            role_type = RoleConstants.USER.value.__str__()
            has_user_role_exact_permission = has_exact_permission_by_role(current_user_id, workspace_id, permission_id,role_type)
            permission_list = ['MANAGE']
            if has_user_role_exact_permission:
                permission_list = ['MANAGE','ROLE']
            # Ordinary users: intersect folder contents with their MANAGE (or ROLE) grants
            current_user_managed_resources_ids = QuerySet(WorkspaceUserResourcePermission).filter(
                workspace_id=workspace_id, user_id=current_user_id, auth_target_type=auth_target_type,
                target__in=QuerySet(resource_model).filter(workspace_id=workspace_id, folder__in=folder_ids).annotate(
                    id_str=Cast('id', TextField())
                ).values_list("id_str", flat=True),
                permission_list__overlap= permission_list).values_list('target', flat=True)
        return current_user_managed_resources_ids

    def edit(self, instance, with_valid=True, current_user_id=None):
        """Replace the listed users' grants on this resource (optionally recursing into folders)."""
        if with_valid:
            self.is_valid(raise_exception=True)
            ResourceUserPermissionEditRequest(data=instance, many=True).is_valid(
                raise_exception=True)
        workspace_id = self.data.get("workspace_id")
        target = self.data.get("target")
        auth_target_type = self.data.get("auth_target_type")
        users_permission = instance
        users_id = [item["user_id"] for item in users_permission]
        # NOTE(review): include_children/folder_ids are read from the first item only —
        # assumes the caller sets them identically on every item, and that the list
        # is never empty (would raise IndexError). Confirm upstream validation.
        include_children = users_permission[0].get('include_children')
        folder_ids = users_permission[0].get('folder_ids')
        # Remove the users' existing grants on every affected resource
        if include_children:
            managed_resource_ids = list(
                self.get_has_manage_permission_resource_under_folders(current_user_id, folder_ids,)) + folder_ids
        else:
            managed_resource_ids = [target]
        QuerySet(WorkspaceUserResourcePermission).filter(
            workspace_id=workspace_id,
            target__in=managed_resource_ids,
            auth_target_type=auth_target_type,
            user_id__in=users_id
        ).delete()
        # Cartesian product: one grant row per (resource, user) pair
        save_list = [
            WorkspaceUserResourcePermission(
                target=resource_id,
                auth_target_type=auth_target_type,
                workspace_id=workspace_id,
                auth_type=permission_map[item['permission']][0],
                user_id=item["user_id"],
                permission_list=permission_map[item['permission']][1]
            )
            for resource_id in managed_resource_ids
            for item in users_permission
        ]
        if save_list:
            QuerySet(WorkspaceUserResourcePermission).bulk_create(save_list)
        # Refresh each affected user's cached permission list
        version = Cache_Version.PERMISSION_LIST.get_version()
        for user_id in users_id:
            key = Cache_Version.PERMISSION_LIST.get_key(user_id=user_id)
            cache.delete(key, version=version)
        return instance
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/serializers/user_resource_permission.py",
"license": "GNU General Public License v3.0",
"lines": 465,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
1Panel-dev/MaxKB:apps/system_manage/urls.py | from django.urls import path
from . import views
app_name = "system_manage"
# @formatter:off
# URL routes for the system-manage app; long literal paths are intentionally
# kept on one line (auto-formatting disabled above).
urlpatterns = [
    # Per-user resource authorization: list, then the paginated variant
    path('workspace/<str:workspace_id>/user_resource_permission/user/<str:user_id>/resource/<str:resource>', views.WorkSpaceUserResourcePermissionView.as_view()),
    path('workspace/<str:workspace_id>/user_resource_permission/user/<str:user_id>/resource/<str:resource>/<int:current_page>/<int:page_size>', views.WorkSpaceUserResourcePermissionView.Page.as_view()),
    # Per-resource user authorization: list, then the paginated variant
    # NOTE(review): the repeated 'resource' path segment looks like a copy artifact
    # but matches the views' (target, resource) signature — confirm intentional.
    path('workspace/<str:workspace_id>/resource_user_permission/resource/<str:target>/resource/<str:resource>', views.WorkspaceResourceUserPermissionView.as_view()),
    path('workspace/<str:workspace_id>/resource_user_permission/resource/<str:target>/resource/<str:resource>/<int:current_page>/<int:page_size>', views.WorkspaceResourceUserPermissionView.Page.as_view()),
    # Paginated resource mapping lookup
    path('workspace/<str:workspace_id>/resource_mapping/<str:resource>/<str:resource_id>/<int:current_page>/<int:page_size>', views.ResourceMappingView.as_view()),
    # System email settings and profile
    path('email_setting', views.SystemSetting.Email.as_view()),
    path('profile', views.SystemProfile.as_view()),
    # Count-validation endpoint (semantics defined in views.Valid)
    path('valid/<str:valid_type>/<int:valid_count>', views.Valid.as_view())
]
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/urls.py",
"license": "GNU General Public License v3.0",
"lines": 14,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
1Panel-dev/MaxKB:apps/system_manage/views/user_resource_permission.py | # coding=utf-8
"""
@project: MaxKB
@Author:虎虎
@file: workspace_user_resource_permission.py
@date:2025/4/28 16:38
@desc:
"""
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from drf_spectacular.utils import extend_schema
from rest_framework.request import Request
from rest_framework.views import APIView
from common import result
from common.auth import TokenAuth
from common.auth.authentication import has_permissions
from common.constants.permission_constants import RoleConstants, Permission, Group, Operate, ViewPermission, \
CompareConstants
from common.log.log import log
from system_manage.api.user_resource_permission import UserResourcePermissionAPI, EditUserResourcePermissionAPI, \
ResourceUserPermissionAPI, ResourceUserPermissionPageAPI, ResourceUserPermissionEditAPI, \
UserResourcePermissionPageAPI
from system_manage.serializers.user_resource_permission import UserResourcePermissionSerializer, \
ResourceUserPermissionSerializer
from users.models import User
def get_user_operation_object(user_id):
    """Build the audit-log "operation object" payload for a user.

    Returns ``{"name": <username>}`` for an existing user id, or an empty
    dict when no matching user is found (e.g. the user was deleted).
    """
    user = QuerySet(model=User).filter(id=user_id).first()
    if user is None:
        return {}
    return {"name": user.username}
class WorkSpaceUserResourcePermissionView(APIView):
    """Per-user resource authorization inside a workspace.

    GET  -> list which resources of one type a user is authorized for.
    PUT  -> replace that authorization list.
    The nested ``Page`` view serves the same GET data paginated.
    """
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_('Obtain resource authorization list'),
        operation_id=_('Obtain resource authorization list'),  # type: ignore
        parameters=UserResourcePermissionAPI.get_parameters(),
        responses=UserResourcePermissionAPI.get_response(),
        tags=[_('Resources authorization')]  # type: ignore
    )
    @has_permissions(
        # Permission group name is derived from the URL's resource kind,
        # e.g. 'KNOWLEDGE' -> 'KNOWLEDGE_WORKSPACE_USER_RESOURCE_PERMISSION'.
        lambda r, kwargs: Permission(group=Group(kwargs.get('resource') + '_WORKSPACE_USER_RESOURCE_PERMISSION'),
                                     operate=Operate.READ),
        RoleConstants.ADMIN, RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, user_id: str, resource: str):
        """List the user's authorization entries, filtered by optional
        ``name`` and repeated ``permission[]`` query parameters."""
        return result.success(UserResourcePermissionSerializer(
            data={'workspace_id': workspace_id, 'user_id': user_id, 'auth_target_type': resource}
        ).list({'name': request.query_params.get('name'),
                'permission': request.query_params.getlist('permission[]')}, request.user))

    @extend_schema(
        methods=['PUT'],
        description=_('Modify the resource authorization list'),
        operation_id=_('Modify the resource authorization list'),  # type: ignore
        parameters=EditUserResourcePermissionAPI.get_parameters(),
        request=EditUserResourcePermissionAPI.get_request(),
        responses=EditUserResourcePermissionAPI.get_response(),
        tags=[_('Resources authorization')]  # type: ignore
    )
    @log(menu='System', operate='Modify the resource authorization list',
         get_operation_object=lambda r, k: get_user_operation_object(k.get('user_id'))
         )
    @has_permissions(
        lambda r, kwargs: Permission(group=Group(kwargs.get('resource') + '_WORKSPACE_USER_RESOURCE_PERMISSION'),
                                     operate=Operate.EDIT),
        RoleConstants.ADMIN, RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def put(self, request: Request, workspace_id: str, user_id: str, resource: str):
        """Replace the user's authorization entries with ``request.data``."""
        return result.success(UserResourcePermissionSerializer(
            data={'workspace_id': workspace_id, 'user_id': user_id, 'auth_target_type': resource}
        ).edit(request.data, request.user))

    class Page(APIView):
        """Paginated variant of the GET endpoint above."""
        authentication_classes = [TokenAuth]

        @extend_schema(
            methods=['GET'],
            description=_('Obtain resource authorization list by page'),
            summary=_('Obtain resource authorization list by page'),
            operation_id=_('Obtain resource authorization list by page'),  # type: ignore
            request=None,
            parameters=UserResourcePermissionPageAPI.get_parameters(),
            responses=UserResourcePermissionPageAPI.get_response(),
            tags=[_('Resources authorization')]  # type: ignore
        )
        @has_permissions(
            lambda r, kwargs: Permission(group=Group(kwargs.get('resource') + '_WORKSPACE_USER_RESOURCE_PERMISSION'),
                                         operate=Operate.READ),
            RoleConstants.ADMIN, RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
        def get(self, request: Request, workspace_id: str, user_id: str, resource: str, current_page: str,
                page_size: str):
            """Same filters as the unpaginated GET, sliced by
            ``current_page``/``page_size`` from the URL."""
            return result.success(UserResourcePermissionSerializer(
                data={'workspace_id': workspace_id, 'user_id': user_id, 'auth_target_type': resource}
            ).page({'name': request.query_params.get('name'),
                    'permission': request.query_params.getlist('permission[]')}, current_page, page_size, request.user))
class WorkspaceResourceUserPermissionView(APIView):
    """Per-resource user authorization: which users may access one resource.

    GET  -> user authorization status for the resource identified by ``target``.
    PUT  -> edit that status.
    The nested ``Page`` view serves the same GET data paginated.

    The ``resource`` URL kwarg may carry a '_FOLDER' suffix; it is stripped
    before being used as the serializer's ``auth_target_type`` and in the
    resource-path permission checks.
    """
    authentication_classes = [TokenAuth]

    @extend_schema(
        methods=['GET'],
        description=_('Get user authorization status of resource'),
        summary=_('Get user authorization status of resource'),
        operation_id=_('Get user authorization status of resource'),  # type: ignore
        parameters=ResourceUserPermissionAPI.get_parameters(),
        responses=ResourceUserPermissionAPI.get_response(),
        tags=[_('Resources authorization')]  # type: ignore
    )
    @has_permissions(
        # Grant if the caller has AUTH on the resource kind at workspace-manage
        # role scope, or AUTH on this specific resource instance, or (as a plain
        # workspace USER) SELF permission on the instance.
        lambda r, kwargs: Permission(group=Group(kwargs.get('resource')),
                                     operate=Operate.AUTH,
                                     resource_path=f"/WORKSPACE/{kwargs.get('workspace_id')}:ROLE/WORKSPACE_MANAGE"),
        lambda r, kwargs: Permission(group=Group(kwargs.get('resource')),
                                     operate=Operate.AUTH,
                                     resource_path=f"/WORKSPACE/{kwargs.get('workspace_id')}/{kwargs.get('resource').replace('_FOLDER','')}/{kwargs.get('target')}"),
        ViewPermission([RoleConstants.USER.get_workspace_role()],
                       [lambda r, kwargs: Permission(group=Group(kwargs.get('resource').replace('_FOLDER','')),
                                                     operate=Operate.SELF,
                                                     resource_path=f"/WORKSPACE/{kwargs.get('workspace_id')}/{kwargs.get('resource').replace('_FOLDER','')}/{kwargs.get('target')}")],
                       CompareConstants.AND),
        RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def get(self, request: Request, workspace_id: str, target: str, resource: str):
        """List users' authorization status for the resource, filtered by
        optional ``username``, ``nick_name`` and repeated ``permission[]``."""
        return result.success(ResourceUserPermissionSerializer(
            data={'workspace_id': workspace_id, "target": target, 'auth_target_type': resource.replace('_FOLDER',''),
                  }).list(
            {'username': request.query_params.get("username"), 'nick_name': request.query_params.get("nick_name"),
             'permission': request.query_params.getlist("permission[]")
             }))

    @extend_schema(
        methods=['PUT'],
        description=_('Edit user authorization status of resource'),
        summary=_('Edit user authorization status of resource'),
        operation_id=_('Edit user authorization status of resource'),  # type: ignore
        parameters=ResourceUserPermissionEditAPI.get_parameters(),
        request=ResourceUserPermissionEditAPI.get_request(),
        responses=ResourceUserPermissionEditAPI.get_response(),
        tags=[_('Resources authorization')]  # type: ignore
    )
    @log(menu='System', operate='Edit user authorization status of resource',
         get_operation_object=lambda r, k: get_user_operation_object(k.get('user_id'))
         )
    @has_permissions(
        lambda r, kwargs: Permission(group=Group(kwargs.get('resource')),
                                     operate=Operate.AUTH,
                                     resource_path=f"/WORKSPACE/{kwargs.get('workspace_id')}:ROLE/WORKSPACE_MANAGE"),
        lambda r, kwargs: Permission(group=Group(kwargs.get('resource')),
                                     operate=Operate.AUTH,
                                     resource_path=f"/WORKSPACE/{kwargs.get('workspace_id')}/{kwargs.get('resource').replace('_FOLDER','')}/{kwargs.get('target')}"),
        ViewPermission([RoleConstants.USER.get_workspace_role()],
                       [lambda r, kwargs: Permission(group=Group(kwargs.get('resource').replace('_FOLDER','')),
                                                     operate=Operate.SELF,
                                                     resource_path=f"/WORKSPACE/{kwargs.get('workspace_id')}/{kwargs.get('resource').replace('_FOLDER','')}/{kwargs.get('target')}")],
                       CompareConstants.AND),
        RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
    def put(self, request: Request, workspace_id: str, target: str, resource: str):
        """Apply the authorization edits in ``request.data`` on behalf of the
        current user."""
        return result.success(ResourceUserPermissionSerializer(
            data={'workspace_id': workspace_id, "target": target, 'auth_target_type': resource.replace('_FOLDER',''), })
                              .edit(instance=request.data, current_user_id=request.user.id))

    class Page(APIView):
        """Paginated variant of the GET endpoint above."""
        authentication_classes = [TokenAuth]

        @extend_schema(
            methods=['GET'],
            description=_('Get user authorization status of resource by page'),
            summary=_('Get user authorization status of resource by page'),
            operation_id=_('Get user authorization status of resource by page'),  # type: ignore
            parameters=ResourceUserPermissionPageAPI.get_parameters(),
            responses=ResourceUserPermissionPageAPI.get_response(),
            tags=[_('Resources authorization')]  # type: ignore
        )
        @has_permissions(
            lambda r, kwargs: Permission(group=Group(kwargs.get('resource')),
                                         operate=Operate.AUTH,
                                         resource_path=f"/WORKSPACE/{kwargs.get('workspace_id')}:ROLE/WORKSPACE_MANAGE"),
            lambda r, kwargs: Permission(group=Group(kwargs.get('resource')),
                                         operate=Operate.AUTH,
                                         resource_path=f"/WORKSPACE/{kwargs.get('workspace_id')}/{kwargs.get('resource').replace('_FOLDER','')}/{kwargs.get('target')}"),
            ViewPermission([RoleConstants.USER.get_workspace_role()],
                           [lambda r, kwargs: Permission(group=Group(kwargs.get('resource').replace('_FOLDER','')),
                                                         operate=Operate.SELF,
                                                         resource_path=f"/WORKSPACE/{kwargs.get('workspace_id')}/{kwargs.get('resource').replace('_FOLDER','')}/{kwargs.get('target')}")],
                           CompareConstants.AND),
            RoleConstants.WORKSPACE_MANAGE.get_workspace_role())
        def get(self, request: Request, workspace_id: str, target: str, resource: str, current_page: int,
                page_size: int):
            """Same data as the unpaginated GET, plus an optional ``role``
            filter, sliced by ``current_page``/``page_size``."""
            return result.success(ResourceUserPermissionSerializer(
                data={'workspace_id': workspace_id, "target": target, 'auth_target_type': resource.replace('_FOLDER',''), }
            ).page({'username': request.query_params.get("username"),
                    'role': request.query_params.get("role"),
                    'nick_name': request.query_params.get("nick_name"),
                    'permission': request.query_params.getlist("permission[]")}, current_page, page_size,
                   ))
| {
"repo_id": "1Panel-dev/MaxKB",
"file_path": "apps/system_manage/views/user_resource_permission.py",
"license": "GNU General Public License v3.0",
"lines": 187,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
666ghj/BettaFish:MindSpider/schema/models_bigdata.py | """
舆情大数据聚合主表ORM模型(自动由原tables.sql结构同步生成,对应大表批量搜索与内容入库)
数据模型定义位置:
- MindSpider/DeepSentimentCrawling/MediaCrawler/schema/tables.sql # 主表结构来源文件
- 本模块(自动映射SQL表,适配MySQL/PostgreSQL,推荐手动完善注释、唯一/索引补充)
- MindSpider/schema/models_sa.py # Base 定义来源
本模块以MindSpider\DeepSentimentCrawling\MediaCrawler\database\models.py为准
"""
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import Integer, String, BigInteger, Text, ForeignKey
# 使用 models_sa 中的 Base,确保所有表在同一个 metadata 中,外键引用可以正常工作
from models_sa import Base
class BilibiliVideo(Base):
    """One crawled Bilibili video.

    Most engagement counters are stored as Text exactly as received from the
    source rather than as parsed integers.
    """
    __tablename__ = "bilibili_video"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Platform video id; unique + indexed so re-crawls can dedupe on it.
    video_id: Mapped[int] = mapped_column(BigInteger, nullable=False, index=True, unique=True)
    video_url: Mapped[str] = mapped_column(Text, nullable=False)
    user_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    liked_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    video_type: Mapped[str | None] = mapped_column(Text, nullable=True)
    title: Mapped[str | None] = mapped_column(Text, nullable=True)
    desc: Mapped[str | None] = mapped_column(Text, nullable=True)
    create_time: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    disliked_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_play_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_favorite_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_share_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_coin_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_danmaku: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_comment: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_cover_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    source_keyword: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    # Scheduling links; SET NULL keeps the content row when topic/task rows go away.
    topic_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("daily_topics.topic_id", ondelete="SET NULL"), nullable=True)
    crawling_task_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("crawling_tasks.task_id", ondelete="SET NULL"), nullable=True)
class BilibiliVideoComment(Base):
    """A comment on a Bilibili video; threads are linked via parent_comment_id."""
    __tablename__ = "bilibili_video_comment"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    sex: Mapped[str | None] = mapped_column(Text, nullable=True)
    sign: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    comment_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    video_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    create_time: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    sub_comment_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    parent_comment_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    like_count: Mapped[str | None] = mapped_column(Text, default='0', nullable=True)
class BilibiliUpInfo(Base):
    """Profile statistics for a Bilibili uploader ("UP主")."""
    __tablename__ = "bilibili_up_info"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    sex: Mapped[str | None] = mapped_column(Text, nullable=True)
    sign: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    total_fans: Mapped[int | None] = mapped_column(Integer, nullable=True)
    total_liked: Mapped[int | None] = mapped_column(Integer, nullable=True)
    user_rank: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Integer flag as received from the source; semantics not shown here.
    is_official: Mapped[int | None] = mapped_column(Integer, nullable=True)
class BilibiliContactInfo(Base):
    """An uploader/fan relationship edge (up_id <- fan_id) with profile snapshots."""
    __tablename__ = "bilibili_contact_info"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    up_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    fan_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    up_name: Mapped[str | None] = mapped_column(Text, nullable=True)
    fan_name: Mapped[str | None] = mapped_column(Text, nullable=True)
    up_sign: Mapped[str | None] = mapped_column(Text, nullable=True)
    fan_sign: Mapped[str | None] = mapped_column(Text, nullable=True)
    up_avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    fan_avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
class BilibiliUpDynamic(Base):
    """A dynamic (feed post) published by a Bilibili uploader."""
    __tablename__ = "bilibili_up_dynamic"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    dynamic_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    user_name: Mapped[str | None] = mapped_column(Text, nullable=True)
    text: Mapped[str | None] = mapped_column(Text, nullable=True)
    type: Mapped[str | None] = mapped_column(Text, nullable=True)
    pub_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    total_comments: Mapped[int | None] = mapped_column(Integer, nullable=True)
    total_forwards: Mapped[int | None] = mapped_column(Integer, nullable=True)
    total_liked: Mapped[int | None] = mapped_column(Integer, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
class DouyinAweme(Base):
    """One crawled Douyin post (aweme), with author snapshot and media URLs."""
    __tablename__ = "douyin_aweme"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Douyin exposes several user identifiers; all are kept verbatim.
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    sec_uid: Mapped[str | None] = mapped_column(String(255), nullable=True)
    short_user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    user_unique_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    user_signature: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    aweme_id: Mapped[str | None] = mapped_column(String(64), index=True, nullable=True)
    aweme_type: Mapped[str | None] = mapped_column(Text, nullable=True)
    title: Mapped[str | None] = mapped_column(Text, nullable=True)
    desc: Mapped[str | None] = mapped_column(Text, nullable=True)
    create_time: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    # Counters kept as raw Text strings as received from the source.
    liked_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    comment_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    share_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    collected_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    aweme_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    cover_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_download_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    music_download_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    note_download_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    source_keyword: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    # Scheduling links; SET NULL keeps the content row when topic/task rows go away.
    topic_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("daily_topics.topic_id", ondelete="SET NULL"), nullable=True)
    crawling_task_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("crawling_tasks.task_id", ondelete="SET NULL"), nullable=True)
class DouyinAwemeComment(Base):
    """A comment on a Douyin post; threads are linked via parent_comment_id."""
    __tablename__ = "douyin_aweme_comment"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    sec_uid: Mapped[str | None] = mapped_column(String(255), nullable=True)
    short_user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    user_unique_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    user_signature: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    comment_id: Mapped[str | None] = mapped_column(String(64), index=True, nullable=True)
    aweme_id: Mapped[str | None] = mapped_column(String(64), index=True, nullable=True)
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    create_time: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    sub_comment_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    parent_comment_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    like_count: Mapped[str | None] = mapped_column(Text, default='0', nullable=True)
    pictures: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
class DyCreator(Base):
    """Profile snapshot of a Douyin creator."""
    __tablename__ = "dy_creator"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    desc: Mapped[str | None] = mapped_column(Text, nullable=True)
    gender: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Follower/engagement figures kept as raw strings as received.
    follows: Mapped[str | None] = mapped_column(Text, nullable=True)
    fans: Mapped[str | None] = mapped_column(Text, nullable=True)
    interaction: Mapped[str | None] = mapped_column(Text, nullable=True)
    videos_count: Mapped[str | None] = mapped_column(String(255), nullable=True)
class KuaishouVideo(Base):
    """One crawled Kuaishou video."""
    __tablename__ = "kuaishou_video"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(64), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    video_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    video_type: Mapped[str | None] = mapped_column(Text, nullable=True)
    title: Mapped[str | None] = mapped_column(Text, nullable=True)
    desc: Mapped[str | None] = mapped_column(Text, nullable=True)
    create_time: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    liked_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    # NOTE(review): 'viewd_count' is presumably a misspelling of 'viewed_count',
    # but it matches the existing DB schema — renaming would require a migration.
    viewd_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_cover_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_play_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    source_keyword: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    # Scheduling links; SET NULL keeps the content row when topic/task rows go away.
    topic_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("daily_topics.topic_id", ondelete="SET NULL"), nullable=True)
    crawling_task_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("crawling_tasks.task_id", ondelete="SET NULL"), nullable=True)
class KuaishouVideoComment(Base):
    """A comment on a Kuaishou video."""
    __tablename__ = "kuaishou_video_comment"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(Text, nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    comment_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    video_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    create_time: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    sub_comment_count: Mapped[str | None] = mapped_column(Text, nullable=True)
class WeiboNote(Base):
    """One crawled Weibo post, with author snapshot and engagement counters."""
    __tablename__ = "weibo_note"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    gender: Mapped[str | None] = mapped_column(Text, nullable=True)
    profile_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    note_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Publish time is kept both as an epoch value and a formatted string.
    create_time: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    create_date_time: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    liked_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    comments_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    shared_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    note_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    source_keyword: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    # Scheduling links; SET NULL keeps the content row when topic/task rows go away.
    topic_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("daily_topics.topic_id", ondelete="SET NULL"), nullable=True)
    crawling_task_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("crawling_tasks.task_id", ondelete="SET NULL"), nullable=True)
class WeiboNoteComment(Base):
    """A comment on a Weibo post; threads are linked via parent_comment_id."""
    __tablename__ = "weibo_note_comment"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    gender: Mapped[str | None] = mapped_column(Text, nullable=True)
    profile_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    comment_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    note_id: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    create_time: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    create_date_time: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    comment_like_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    sub_comment_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    parent_comment_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
class WeiboCreator(Base):
    """Profile snapshot of a Weibo creator."""
    __tablename__ = "weibo_creator"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    desc: Mapped[str | None] = mapped_column(Text, nullable=True)
    gender: Mapped[str | None] = mapped_column(Text, nullable=True)
    follows: Mapped[str | None] = mapped_column(Text, nullable=True)
    fans: Mapped[str | None] = mapped_column(Text, nullable=True)
    tag_list: Mapped[str | None] = mapped_column(Text, nullable=True)
class XhsCreator(Base):
    """Profile snapshot of a Xiaohongshu (XHS) creator."""
    __tablename__ = "xhs_creator"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    desc: Mapped[str | None] = mapped_column(Text, nullable=True)
    gender: Mapped[str | None] = mapped_column(Text, nullable=True)
    follows: Mapped[str | None] = mapped_column(Text, nullable=True)
    fans: Mapped[str | None] = mapped_column(Text, nullable=True)
    interaction: Mapped[str | None] = mapped_column(Text, nullable=True)
    tag_list: Mapped[str | None] = mapped_column(Text, nullable=True)
class XhsNote(Base):
    """One crawled Xiaohongshu note (post), text or video."""
    __tablename__ = "xhs_note"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    note_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    type: Mapped[str | None] = mapped_column(Text, nullable=True)
    title: Mapped[str | None] = mapped_column(Text, nullable=True)
    desc: Mapped[str | None] = mapped_column(Text, nullable=True)
    video_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    time: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    last_update_time: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    # Counters kept as raw Text strings as received from the source.
    liked_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    collected_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    comment_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    share_count: Mapped[str | None] = mapped_column(Text, nullable=True)
    image_list: Mapped[str | None] = mapped_column(Text, nullable=True)
    tag_list: Mapped[str | None] = mapped_column(Text, nullable=True)
    note_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    source_keyword: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    # Access token required by the platform to open the note URL.
    xsec_token: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Scheduling links; SET NULL keeps the content row when topic/task rows go away.
    topic_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("daily_topics.topic_id", ondelete="SET NULL"), nullable=True)
    crawling_task_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("crawling_tasks.task_id", ondelete="SET NULL"), nullable=True)
class XhsNoteComment(Base):
    """A comment on a Xiaohongshu note; threads are linked via parent_comment_id."""
    __tablename__ = "xhs_note_comment"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    comment_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    create_time: Mapped[int | None] = mapped_column(BigInteger, index=True, nullable=True)
    note_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    sub_comment_count: Mapped[int | None] = mapped_column(Integer, nullable=True)
    pictures: Mapped[str | None] = mapped_column(Text, nullable=True)
    parent_comment_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    like_count: Mapped[str | None] = mapped_column(Text, nullable=True)
class TiebaNote(Base):
    """One crawled Baidu Tieba thread (first post plus board metadata)."""
    __tablename__ = "tieba_note"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # NOTE(review): String(644) looks like a typo for String(64) — every other
    # note_id column here is String(255) or String(64). Harmless at runtime but
    # confirm against tables.sql before changing; a change needs a migration.
    note_id: Mapped[str | None] = mapped_column(String(644), index=True, nullable=True)
    title: Mapped[str | None] = mapped_column(Text, nullable=True)
    desc: Mapped[str | None] = mapped_column(Text, nullable=True)
    note_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    publish_time: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    user_link: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    user_nickname: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    user_avatar: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    tieba_id: Mapped[str | None] = mapped_column(String(255), default='', nullable=True)
    tieba_name: Mapped[str | None] = mapped_column(Text, nullable=True)
    tieba_link: Mapped[str | None] = mapped_column(Text, nullable=True)
    total_replay_num: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    total_replay_page: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    source_keyword: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    # Scheduling links; SET NULL keeps the content row when topic/task rows go away.
    topic_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("daily_topics.topic_id", ondelete="SET NULL"), nullable=True)
    crawling_task_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("crawling_tasks.task_id", ondelete="SET NULL"), nullable=True)
class TiebaComment(Base):
    """A crawled comment (reply) under a Baidu Tieba post."""

    __tablename__ = "tieba_comment"
    # surrogate primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    comment_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    # '' (default) presumably means a top-level reply -- TODO confirm
    parent_comment_id: Mapped[str | None] = mapped_column(String(255), default='', nullable=True)
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    # author info
    user_link: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    user_nickname: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    user_avatar: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    # forum the comment belongs to
    tieba_id: Mapped[str | None] = mapped_column(String(255), default='', nullable=True)
    tieba_name: Mapped[str | None] = mapped_column(Text, nullable=True)
    tieba_link: Mapped[str | None] = mapped_column(Text, nullable=True)
    publish_time: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, default='', nullable=True)
    sub_comment_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    # post the comment belongs to
    note_id: Mapped[str | None] = mapped_column(String(255), index=True, nullable=True)
    note_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    # crawler bookkeeping timestamps
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
class TiebaCreator(Base):
    """Profile of a Baidu Tieba content creator."""

    __tablename__ = "tieba_creator"
    # surrogate primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[str | None] = mapped_column(String(64), nullable=True)
    user_name: Mapped[str | None] = mapped_column(Text, nullable=True)
    nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    # crawler bookkeeping timestamps
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    gender: Mapped[str | None] = mapped_column(Text, nullable=True)
    # stored as text (platform may report e.g. "1.2w") -- TODO confirm format
    follows: Mapped[str | None] = mapped_column(Text, nullable=True)
    fans: Mapped[str | None] = mapped_column(Text, nullable=True)
    registration_duration: Mapped[str | None] = mapped_column(Text, nullable=True)
class ZhihuContent(Base):
    """A crawled Zhihu content item (answer / article / video etc. per content_type)."""

    __tablename__ = "zhihu_content"
    # surrogate primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    content_id: Mapped[str | None] = mapped_column(String(64), index=True, nullable=True)
    content_type: Mapped[str | None] = mapped_column(Text, nullable=True)
    content_text: Mapped[str | None] = mapped_column(Text, nullable=True)
    content_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    # only meaningful for answers (the question they answer) -- TODO confirm
    question_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    title: Mapped[str | None] = mapped_column(Text, nullable=True)
    desc: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_time: Mapped[str | None] = mapped_column(String(32), index=True, nullable=True)
    updated_time: Mapped[str | None] = mapped_column(Text, nullable=True)
    voteup_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    comment_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    source_keyword: Mapped[str | None] = mapped_column(Text, nullable=True)
    # author info
    user_id: Mapped[str | None] = mapped_column(String(255), nullable=True)
    user_link: Mapped[str | None] = mapped_column(Text, nullable=True)
    user_nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    user_avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    user_url_token: Mapped[str | None] = mapped_column(Text, nullable=True)
    # crawler bookkeeping timestamps
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    # optional links back to the scheduling tables; detached on parent delete
    topic_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("daily_topics.topic_id", ondelete="SET NULL"), nullable=True)
    crawling_task_id: Mapped[str | None] = mapped_column(String(64), ForeignKey("crawling_tasks.task_id", ondelete="SET NULL"), nullable=True)
class ZhihuComment(Base):
    """A crawled comment on a Zhihu content item."""

    __tablename__ = "zhihu_comment"
    # surrogate primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    comment_id: Mapped[str | None] = mapped_column(String(64), index=True, nullable=True)
    parent_comment_id: Mapped[str | None] = mapped_column(String(64), nullable=True)
    content: Mapped[str | None] = mapped_column(Text, nullable=True)
    publish_time: Mapped[str | None] = mapped_column(String(32), index=True, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    sub_comment_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    like_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    dislike_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    # content item the comment belongs to
    content_id: Mapped[str | None] = mapped_column(String(64), index=True, nullable=True)
    content_type: Mapped[str | None] = mapped_column(Text, nullable=True)
    # author info
    user_id: Mapped[str | None] = mapped_column(String(64), nullable=True)
    user_link: Mapped[str | None] = mapped_column(Text, nullable=True)
    user_nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    user_avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    # crawler bookkeeping timestamps
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
class ZhihuCreator(Base):
    """Profile of a Zhihu content creator, with per-content-type counters."""

    __tablename__ = "zhihu_creator"
    # surrogate primary key
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # platform user id; unlike the other models this one is unique
    user_id: Mapped[str | None] = mapped_column(String(64), unique=True, index=True, nullable=True)
    user_link: Mapped[str | None] = mapped_column(Text, nullable=True)
    user_nickname: Mapped[str | None] = mapped_column(Text, nullable=True)
    user_avatar: Mapped[str | None] = mapped_column(Text, nullable=True)
    url_token: Mapped[str | None] = mapped_column(Text, nullable=True)
    gender: Mapped[str | None] = mapped_column(Text, nullable=True)
    ip_location: Mapped[str | None] = mapped_column(Text, nullable=True)
    follows: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    fans: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    # NOTE(review): "anwser" is presumably a typo for "answer"; kept as-is
    # since renaming the column would change the database schema
    anwser_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    video_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    question_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    article_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    column_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    get_voteup_count: Mapped[int | None] = mapped_column(Integer, default=0, nullable=True)
    # crawler bookkeeping timestamps
    add_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_modify_ts: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
| {
"repo_id": "666ghj/BettaFish",
"file_path": "MindSpider/schema/models_bigdata.py",
"license": "GNU General Public License v2.0",
"lines": 428,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
9001/copyparty:copyparty/sftpd.py | # coding: utf-8
from __future__ import print_function, unicode_literals
import errno
import hashlib
import logging
import os
import select
import socket
import time
from threading import ExceptHookArgs
import paramiko
import paramiko.common
import paramiko.sftp_attr
from paramiko.common import AUTH_FAILED, AUTH_SUCCESSFUL
from paramiko.sftp import (
SFTP_FAILURE,
SFTP_NO_SUCH_FILE,
SFTP_OK,
SFTP_OP_UNSUPPORTED,
SFTP_PERMISSION_DENIED,
)
from .__init__ import ANYWIN, TYPE_CHECKING
from .authsrv import LEELOO_DALLAS, VFS, AuthSrv
from .bos import bos
from .util import (
VF_CAREFUL,
Daemon,
ODict,
Pebkac,
ipnorm,
min_ex,
read_utf8,
relchk,
runhook,
sanitize_fn,
ub64enc,
undot,
vjoin,
wunlink,
)
# typing-only import to avoid a runtime circular dependency
if TYPE_CHECKING:
    from .svchub import SvcHub
if True:  # pylint: disable=using-constant-test
    from typing import Any, BinaryIO, Optional, Union
# shorthand for paramiko's stat-result wrapper used throughout this module
SATTR = paramiko.sftp_attr.SFTPAttributes
class SSH_Srv(paramiko.ServerInterface):
    """paramiko ServerInterface impl; handles ssh-level auth for one client."""

    def __init__(self, hub: "SvcHub", addr: Any):
        self.hub = hub
        self.args = args = hub.args
        self.log_func = hub.log
        # authenticated username; "*" = anonymous until auth succeeds
        self.uname = "*"
        self.addr = addr
        self.ip = addr[0]
        # strip the ipv4-mapped-ipv6 prefix so bans/logs use the plain v4 form
        if self.ip.startswith("::ffff:"):
            self.ip = self.ip[7:]
        # advertise only the auth methods enabled by server config
        zsl = []
        if args.sftp_anon:
            zsl.append("none")
        if args.sftp_key2u:
            zsl.append("publickey")
        if args.sftp_pw or args.sftp_anon:
            zsl.append("password")
        self._auths = ",".join(zsl)
def log(self, msg: str, c: Union[int, str] = 0) -> None:
    """Log via the hub with an sftp:<client-ip> prefix; c is the color code."""
    self.hub.log("sftp:%s" % (self.ip,), msg, c)
def get_allowed_auths(self, username: str) -> str:
    """Return the comma-separated auth methods computed in __init__."""
    return self._auths
def get_banner(self) -> tuple[Optional[str], Optional[str]]:
    """Return (banner-text, language) shown before auth; (None, None) if unset."""
    if self.args.sftpv:
        self.log("get_banner")
    t = self.args.sftp_banner
    if not t:
        return (None, None)
    # a leading "@" means the banner option is a path to a utf-8 text file
    if t.startswith("@"):
        t = read_utf8(self.log, t[1:], False)
    if t and not t.endswith("\n"):
        t += "\n"
    return (t, "en-US")
def check_channel_request(self, kind: str, chanid: int) -> int:
    """Allow only "session" channels; sftp runs as a session subsystem."""
    if self.args.sftpv:
        self.log("channel-request: %r, %r" % (kind, chanid))
    if kind == "session":
        return paramiko.common.OPEN_SUCCEEDED
    return paramiko.common.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
def check_auth_none(self, username: str) -> int:
    """Exception-guard around _check_auth_none; any crash counts as failure."""
    try:
        return self._check_auth_none(username)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return AUTH_FAILED
def _check_auth_none(self, uname: str) -> int:
    """Anonymous auth: accept only the configured anon username, unless banned."""
    args = self.args
    if uname != args.sftp_anon or not uname:
        return AUTH_FAILED
    ipn = ipnorm(self.ip)
    bans = self.hub.bans
    if ipn in bans:
        rt = bans[ipn] - time.time()
        if rt < 0:
            # ban expired; lift it and continue
            self.log("client unbanned")
            del bans[ipn]
        else:
            self.log("client is banned")
            return AUTH_FAILED
    self.uname = "*"
    self.log("auth-none OK: *")
    return AUTH_SUCCESSFUL
def check_auth_password(self, username: str, password: str) -> int:
    """Exception-guard around _check_auth_password; any crash counts as failure."""
    try:
        return self._check_auth_password(username, password)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return AUTH_FAILED
def _check_auth_password(self, uname: str, pw: str) -> int:
    """Password auth: anon shortcut, account lookup, bans, ip-restrictions."""
    args = self.args
    if args.sftpv:
        # obfuscate the password in the debug log according to --log-badpwd:
        # 0 = omit entirely, 2 = log a truncated sha512 instead of plaintext
        logpw = pw
        if args.log_badpwd == 0:
            logpw = ""
        elif args.log_badpwd == 2:
            zb = hashlib.sha512(pw.encode("utf-8", "replace")).digest()
            logpw = "%" + ub64enc(zb[:12]).decode("ascii")
        self.log("auth-pw: %r, %r" % (uname, logpw))
    ipn = ipnorm(self.ip)
    bans = self.hub.bans
    if ipn in bans:
        rt = bans[ipn] - time.time()
        if rt < 0:
            # ban expired; lift it and continue
            self.log("client unbanned")
            del bans[ipn]
        else:
            self.log("client is banned")
            return AUTH_FAILED
    anon = args.sftp_anon
    if anon and uname == anon:
        self.uname = "*"
        self.log("auth-pw OK: *")
        return AUTH_SUCCESSFUL
    if not args.sftp_pw:
        return AUTH_FAILED
    # with --usernames only user:pass is accepted; otherwise the client may
    # have put the account password in either the username or password field
    if args.usernames:
        alts = ["%s:%s" % (uname, pw)]
    else:
        alts = [pw, uname]
    attempt = "%s:%s" % (uname, pw)
    uname = ""
    asrv = self.hub.asrv
    for zs in alts:
        zs = asrv.iacct.get(asrv.ah.hash(zs), "")
        if zs:
            uname = zs
            break
    if args.ipu and uname == "*":
        # map anonymous clients to a username based on source ip (--ipu)
        uname = args.ipu_iu[args.ipu_nm.map(self.ip)]
    if args.ipr and uname in args.ipr_u:
        # per-user source-ip restrictions (--ipr)
        if not args.ipr_u[uname].map(self.ip):
            logging.warning("username [%s] rejected by --ipr", uname)
            return AUTH_FAILED
    if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
        # unknown account or no permissions anywhere; feed the bruteforce guard
        g = self.hub.gpwd
        if g.lim:
            bonk, ip = g.bonk(self.ip, attempt)
            if bonk:
                logging.warning("client banned: invalid passwords")
                bans[self.ip] = bonk
                try:
                    # only possible if multiprocessing disabled
                    self.hub.broker.httpsrv.bans[ip] = bonk  # type: ignore
                    self.hub.broker.httpsrv.nban += 1  # type: ignore
                except:
                    pass
        return AUTH_FAILED
    self.uname = uname
    self.log("auth-pw OK: %s" % (uname,))
    return AUTH_SUCCESSFUL
def check_auth_publickey(self, username: str, key: paramiko.PKey) -> int:
    """Exception-guard around _check_auth_publickey; any crash counts as failure."""
    try:
        return self._check_auth_publickey(username, key)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return AUTH_FAILED
def _check_auth_publickey(self, uname: str, key: paramiko.PKey) -> int:
    """Pubkey auth: the key must map to this exact username via --sftp-key2u."""
    args = self.args
    if args.sftpv:
        zs = key.get_name() + "," + key.get_base64()[:32]
        self.log("auth-key: %r, %r" % (uname, zs))
    ipn = ipnorm(self.ip)
    bans = self.hub.bans
    if ipn in bans:
        rt = bans[ipn] - time.time()
        if rt < 0:
            # ban expired; lift it and continue
            self.log("client unbanned")
            del bans[ipn]
        else:
            self.log("client is banned")
            return AUTH_FAILED
    anon = args.sftp_anon
    if anon and uname == anon:
        self.uname = "*"
        self.log("auth-key OK: *")
        return AUTH_SUCCESSFUL
    # full "keytype base64" string is the lookup key in the key2u mapping
    attempt = "%s %s" % (key.get_name(), key.get_base64())
    ok = args.sftp_key2u.get(attempt) == uname
    if ok and args.ipr and uname in args.ipr_u:
        # per-user source-ip restrictions (--ipr)
        if not args.ipr_u[uname].map(self.ip):
            logging.warning("username [%s] rejected by --ipr", uname)
            return AUTH_FAILED
    asrv = self.hub.asrv
    if not ok or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
        self.log("auth-key REJECTED: %s" % (uname,))
        return AUTH_FAILED
    self.uname = uname
    self.log("auth-key OK: %s" % (uname,))
    return AUTH_SUCCESSFUL
class SFTP_FH(paramiko.SFTPHandle):
    """Open-file handle; wraps one python file object for reading and/or writing."""

    def __init__(self, flags: int = 0) -> None:
        self.filename = ""
        # readfile/writefile may reference the same underlying file object
        self.readfile: Optional[BinaryIO] = None
        self.writefile: Optional[BinaryIO] = None
        super(SFTP_FH, self).__init__(flags)

    def stat(self):
        """fstat the open file; on OSError return the matching sftp errorcode."""
        try:
            # NOTE(review): if both handles are None this raises AttributeError
            # which is not caught here -- presumably cannot happen; verify
            f = self.readfile or self.writefile
            return SATTR.from_stat(os.fstat(f.fileno()))
        except OSError as ex:
            return paramiko.SFTPServer.convert_errno(ex.errno)

    def chattr(self, attr):
        """Apply attribute changes (chmod/chown/utime) to the open file."""
        # python doesn't have equivalents to fchown or fchmod, so we have to
        # use the stored filename
        if not self.writefile:
            return SFTP_PERMISSION_DENIED
        try:
            paramiko.SFTPServer.set_file_attr(self.filename, attr)
            return SFTP_OK
        except OSError as ex:
            return paramiko.SFTPServer.convert_errno(ex.errno)
class SFTP_Srv(paramiko.SFTPServerInterface):
    """The sftp subsystem; one instance per authenticated ssh session."""

    def __init__(self, ssh: paramiko.ServerInterface, *a, **ka):
        super(SFTP_Srv, self).__init__(ssh, *a, **ka)
        self.ssh = ssh
        self.ip: str = ssh.ip  # type: ignore
        self.hub: "SvcHub" = ssh.hub  # type: ignore
        self.uname: str = ssh.uname  # type: ignore
        self.args = self.hub.args
        self.asrv: "AuthSrv" = self.hub.asrv
        # verbosity shortcuts
        self.v = self.args.sftpv
        self.vv = self.args.sftpvv
        if self.uname == LEELOO_DALLAS:
            raise Exception("send her back")
        # volumes where this user has read, write or get access
        self.vols = [
            vp
            for vp, vn in self.asrv.vfs.all_vols.items()
            if self.uname in vn.axs.uread
            or self.uname in vn.axs.uwrite
            or self.uname in vn.axs.uget
        ]
        # self.vis = every accessible volume plus all of its parent folders,
        # so intermediate directories are traversable on the way down
        self.vis = set()
        for zs in self.vols:
            self.vis.add(zs)
            while zs:
                zs = zs.rsplit("/", 1)[0] if "/" in zs else ""
                self.vis.add(zs)
def log(self, msg: str, c: Union[int, str] = 0) -> None:
    """Log via the hub with an sftp:<client-ip> prefix; c is the color code."""
    self.hub.log("sftp:%s" % (self.ip,), msg, c)
def v2a(
    self,
    vpath: str,
    r: bool = False,
    w: bool = False,
    m: bool = False,
    d: bool = False,
) -> tuple[str, VFS, str]:
    """Resolve a client-supplied virtual path to (abspath, vfs-node, remainder).

    r/w/m/d are the permissions (read/write/move/delete) required for the
    intended operation; raises Pebkac or OSError when the path is invalid,
    unmounted, or access is denied.
    """
    vpath = vpath.replace(os.sep, "/").strip("/")
    rd, fn = os.path.split(vpath)
    if relchk(rd):
        # BUGFIX: vpath was passed as the second positional arg of self.log,
        # which is the color code -- the %s was never formatted in
        self.log("malicious vpath: %s" % (vpath,))
        raise Exception("Unsupported characters in [%s]" % (vpath,))
    fn = sanitize_fn(fn or "")
    vpath = vjoin(rd, fn)
    vn, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
    if (
        w
        and fn.lower() in vn.flags["emb_all"]
        and self.uname not in vn.axs.uread
        and "wo_up_readme" not in vn.flags
    ):
        # write-only upload of an index/readme-type file: rename it so the
        # uploader cannot replace the file which is displayed to visitors
        fn = "_wo_" + fn
        vpath = vjoin(rd, fn)
        vn, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
    if not vn.realpath:
        # return "", vn, rem
        raise OSError(errno.ENOENT, "no filesystem mounted at [/%s]" % (vpath,))
    if "xdev" in vn.flags or "xvol" in vn.flags:
        # cross-device / cross-volume checks on the resolved abspath
        ap = vn.canonical(rem)
        avn = vn.chk_ap(ap)
        if not avn:
            raise OSError(errno.EPERM, "permission denied in [/%s]" % (vpath,))
        cr, cw, cm, cd, _, _, _, _, _ = avn.uaxs[self.uname]
        if r and not cr or w and not cw or m and not cm or d and not cd:
            raise OSError(errno.EPERM, "permission denied in [/%s]" % (vpath,))
    if "bcasechk" in vn.flags and not vn.casechk(rem, True):
        raise OSError(errno.ENOENT, "file does not exist case-sensitively")
    return os.path.join(vn.realpath, rem), vn, rem
def list_folder(self, path: str) -> list[SATTR] | int:
    """Exception-guard around _list_folder; maps errors to sftp status codes."""
    try:
        return self._list_folder(path)
    except Pebkac as ex:
        if ex.code == 404:
            self.log("folder 404: %s" % (path,))
            return SFTP_NO_SUCH_FILE
        return SFTP_PERMISSION_DENIED
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return SFTP_FAILURE
def _list_folder(self, path: str) -> list[SATTR] | int:
    """List a directory; outside any volume, synthesize a listing of volumes."""
    if self.v:
        self.log("ls(%s):" % (path,))
    path = path.strip("/")
    try:
        ap, vn, rem = self.v2a(path, r=True)
    except Pebkac:
        try:
            self.v2a(path, w=True)
            self.log("ls(%s): [] (write-only)" % (path,))
            return []  # display write-only folders as empty
        except:
            pass
        if path not in self.vis:
            self.log("ls(%s): EPERM" % (path,))
            return SFTP_PERMISSION_DENIED
        # list of accessible volumes
        ret = []
        zi = int(time.time())
        # fake directory stat (mode 0o40755) for the synthesized entries
        vst = os.stat_result((16877, -1, -1, 1, 1000, 1000, 8, zi, zi, zi))
        # BUGFIX: at the vfs root, path is "" and the old prefix of "/" would
        # mis-slice vname below (dropping the first character of every volume
        # name, and single-character top-level volumes entirely)
        prefix = path + "/" if path else ""
        for vn in self.asrv.vfs.all_nodes.values():
            if path and not vn.vpath.startswith(prefix):
                continue  # vn is parent
            vname = vn.vpath[len(prefix) :]
            if "/" in vname or not vname:
                continue  # only include vols at current level
            ret.append(SATTR.from_stat(vst, filename=vn.vpath))
        ret.sort(key=lambda x: x.filename)
        self.log("ls(%s): vfs-vols; |%d|" % (path, len(ret)))
        return ret
    # real folder inside a volume
    _, vfs_ls, vfs_virt = vn.ls(
        rem,
        self.uname,
        not self.args.no_scandir,
        [[True, False], [False, True]],
        throw=True,
    )
    ret = [SATTR.from_stat(x[1], filename=x[0]) for x in vfs_ls]
    # append mountpoints of nested volumes
    for zs, vn2 in vfs_virt.items():
        if not vn2.realpath:
            continue
        st = bos.stat(vn2.realpath)
        ret.append(SATTR.from_stat(st, filename=zs))
    if self.uname not in vn.axs.udot:
        # user is not allowed to see dotfiles; filter them out
        ret = [x for x in ret if not x.filename.split("/")[-1].startswith(".")]
    ret.sort(key=lambda x: x.filename)
    self.log("ls(%s): |%d|" % (path, len(ret)))
    return ret
def stat(self, path: str) -> SATTR | int:
    """Exception-guard around _stat (follows symlinks via bos.stat)."""
    try:
        return self._stat(path)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return SFTP_FAILURE
def lstat(self, path: str) -> SATTR | int:
    """Same as stat(); symlinks are not given special treatment here."""
    try:
        return self._stat(path)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return SFTP_FAILURE
def _stat(self, vp: str) -> SATTR | int:
    """Stat a vpath; folders above the volumes get a synthesized directory stat."""
    vp = vp.strip("/")
    try:
        ap, vn, _ = self.v2a(vp)
        if (
            self.uname not in vn.axs.uread
            and self.uname not in vn.axs.uwrite
            and self.uname not in vn.axs.uget
        ):
            self.log("stat(%s): EPERM" % (vp,))
            return SFTP_PERMISSION_DENIED
        st = bos.stat(ap)
        self.log("stat(%s): %s" % (vp, st))
    except:
        if vp not in self.vis:
            self.log("stat(%s): ENOENT" % (vp,))
            return SFTP_NO_SUCH_FILE
        # intermediate folder above the volumes; fake a directory (0o40755)
        zi = int(time.time())
        st = os.stat_result((16877, -1, -1, 1, 1000, 1000, 8, zi, zi, zi))
        # BUGFIX: the %s placeholder was never filled in (missing % args)
        self.log("stat(%s): vfs-vols" % (vp,))
    return SATTR.from_stat(st)
def open(self, path: str, flags: int, attr: SATTR) -> paramiko.SFTPHandle | int:
    """Exception-guard around _open."""
    try:
        return self._open(path, flags, attr)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return SFTP_FAILURE
def _open(self, vp: str, iflag: int, attr: SATTR) -> paramiko.SFTPHandle | int:
    """Open vp for read/write; enforces perms, upload-hooks, overwrite policy."""
    if ANYWIN:
        iflag |= os.O_BINARY
    # translate posix open-flags into a stdio mode + required vfs permissions
    if iflag & os.O_WRONLY:
        rd = False
        wr = True
        if iflag & os.O_APPEND:
            smode = "ab"
        else:
            smode = "wb"
    elif iflag & os.O_RDWR:
        rd = wr = True
        if iflag & os.O_APPEND:
            smode = "a+b"
        else:
            smode = "r+b"
    else:
        rd = True
        wr = False
        smode = "rb"
    try:
        vn, rem = self.asrv.vfs.get(vp, self.uname, rd, wr)
        ap = os.path.join(vn.realpath, rem)
        vf = vn.flags
    except Pebkac as ex:
        t = "denied open file [%s], iflag=%s, read=%s, write=%s: %s"
        self.log(t % (vp, iflag, rd, wr, ex))
        return SFTP_PERMISSION_DENIED
    self.log("open(%s, %x, %s)" % (vp, iflag, smode))
    if wr:
        # does the target already exist, and if so, how recently was it written
        try:
            st = bos.stat(ap)
            td = time.time() - st.st_mtime
            need_unlink = True
        except:
            need_unlink = False
            td = 0
        # xbu hook: lets server config inspect/reject the upload before it starts
        xbu = vn.flags.get("xbu")
        if xbu:
            hr = runhook(
                self.log,
                None,
                self.hub.up2k,
                "xbu.sftp",
                xbu,
                ap,
                vp,
                "",
                "",
                "",
                0,
                0,
                "7.3.8.7",
                time.time(),
                None,
            )
            t = hr.get("rejectmsg") or ""
            if t or hr.get("rc") != 0:
                if not t:
                    t = "upload blocked by xbu server config: %r" % (vp,)
                self.log(t, 3)
                return SFTP_PERMISSION_DENIED
        self.log("writing to [%s] => [%s]" % (vp, ap))
    if wr and need_unlink:  # type: ignore # !rm
        assert td  # type: ignore # !rm
        if td >= -1 and td <= self.args.ftp_wt:
            # within permitted timeframe; allow overwrite or resume
            do_it = True
        elif self.args.no_del or self.args.ftp_no_ow:
            # file too old, or overwrite not allowed; reject
            do_it = False
        else:
            # allow overwrite if user has delete permission
            do_it = self.uname in vn.axs.udel
        if not do_it:
            t = "file already exists and no permission to overwrite: %s"
            self.log(t % (vp,))
            return SFTP_PERMISSION_DENIED
        # Don't unlink file for append mode
        elif "a" not in smode:
            wunlink(self.log, ap, VF_CAREFUL)
    # file mode: client-requested chmod, or the volume's chmod_f default
    chmod = getattr(attr, "st_mode", None)
    if chmod is None:
        chmod = vf.get("chmod_f", 0o644)
        self.log("open(%s, %x): client did not chmod" % (vp, iflag))
    else:
        self.log("open(%s, %x): client set chmod 0%o" % (vp, iflag, chmod))
    try:
        fd = os.open(ap, iflag, chmod)
    except OSError as ex:
        t = "failed to os.open [%s] -> [%s] with iflag [%s] and chmod [%s]: %r"
        self.log(t % (vp, ap, iflag, chmod, ex), 3)
        return paramiko.SFTPServer.convert_errno(ex.errno)
    if iflag & os.O_CREAT:
        # apply remaining requested attributes (uid/gid/times) on new files
        paramiko.SFTPServer.set_file_attr(ap, attr)
    try:
        f = os.fdopen(fd, smode)
    except OSError as ex:
        # NOTE(review): "fdpen" in the log message is a typo for "fdopen"
        t = "failed to os.fdpen [%s] -> [%s] with smode [%s]: %r"
        self.log(t % (vp, ap, smode, ex), 3)
        return paramiko.SFTPServer.convert_errno(ex.errno)
    # hand the open file to a handle object; same file obj for both directions
    ret = SFTP_FH(iflag)
    ret.filename = ap
    ret.readfile = f if rd else None
    ret.writefile = f if wr else None
    return ret
def remove(self, path: str) -> int:
    """Exception-guard around _remove."""
    try:
        return self._remove(path)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return SFTP_FAILURE
def _remove(self, vp: str) -> int:
    """Delete vp via up2k so the dedup-links and database stay consistent."""
    self.log("rm(%s)" % (vp,))
    if self.args.no_del:
        self.log("The delete feature is disabled in server config")
        return SFTP_PERMISSION_DENIED
    try:
        self.hub.up2k.handle_rm(self.uname, self.ip, [vp], [], False, False)
        self.log("rm(%s): ok" % (vp,))
        return SFTP_OK
    except Pebkac as ex:
        t = "denied delete [%s]: %s"
        self.log(t % (vp, ex))
        if str(ex).startswith("file not found"):
            return SFTP_NO_SUCH_FILE
        try:
            # write-only client trying to rm before upload?
            ap, vn, _ = self.v2a(vp)
            if (
                self.uname not in vn.axs.uread
                and self.uname not in vn.axs.uwrite
                and self.uname not in vn.axs.uget
            ):
                self.log("rm(%s): EPERM" % (vp,))
                return SFTP_PERMISSION_DENIED
            if not bos.path.exists(ap):
                self.log(" `- file didn't exist; returning ENOENT")
                return SFTP_NO_SUCH_FILE
        except:
            pass
        return SFTP_PERMISSION_DENIED
    except OSError as ex:
        self.log("failed: rm(%s): %r" % (vp, ex))
        return paramiko.SFTPServer.convert_errno(ex.errno)
def rename(self, oldpath: str, newpath: str) -> int:
    """Exception-guard around _rename."""
    try:
        return self._rename(oldpath, newpath)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return SFTP_FAILURE
def _rename(self, svp: str, dvp: str) -> int:
    """Rename/move svp to dvp via up2k so dedup-links and the db stay consistent."""
    self.log("mv(%s, %s)" % (svp, dvp))
    if self.args.no_mv:
        # BUGFIX: previously only logged the refusal and then performed the
        # move anyways; now actually rejects (matching _remove / --no-del)
        self.log("The rename/move feature is disabled in server config")
        return SFTP_PERMISSION_DENIED
    svp = svp.strip("/")
    dvp = dvp.strip("/")
    try:
        self.hub.up2k.handle_mv("", self.uname, self.ip, svp, dvp)
        return SFTP_OK
    except Pebkac as ex:
        t = "denied rename [%s] to [%s]: %s"
        self.log(t % (svp, dvp, ex))
        return SFTP_PERMISSION_DENIED
    except OSError as ex:
        self.log("mv(%s, %s): %r" % (svp, dvp, ex))
        return paramiko.SFTPServer.convert_errno(ex.errno)
def mkdir(self, path: str, attr: SATTR) -> int:
    """Exception-guard around _mkdir."""
    try:
        return self._mkdir(path, attr)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return SFTP_FAILURE
def _mkdir(self, vp: str, attr: SATTR) -> int:
    """Create a directory (write-permission required); applies client attrs."""
    self.log("mkdir(%s)" % (vp,))
    try:
        vn, rem = self.asrv.vfs.get(vp, self.uname, False, True)
        ap = os.path.join(vn.realpath, rem)
        bos.makedirs(ap, vf=vn.flags)  # filezilla expects this
        if attr is not None:
            paramiko.SFTPServer.set_file_attr(ap, attr)
        return SFTP_OK
    except Pebkac as ex:
        t = "denied mkdir [%s]: %s"
        self.log(t % (vp, ex))
        return SFTP_PERMISSION_DENIED
    except OSError as ex:
        self.log("mkdir(%s): %r" % (vp, ex))
        return paramiko.SFTPServer.convert_errno(ex.errno)
def rmdir(self, path: str) -> int:
    """Exception-guard around _rmdir."""
    try:
        return self._rmdir(path)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return SFTP_FAILURE
def _rmdir(self, vp: str) -> int:
    """Remove an (empty) directory; requires delete-permission via will_del."""
    self.log("rmdir(%s)" % (vp,))
    try:
        vn, rem = self.asrv.vfs.get(vp, self.uname, False, False, will_del=True)
        ap = os.path.join(vn.realpath, rem)
        bos.rmdir(ap)
        return SFTP_OK
    except Pebkac as ex:
        t = "denied rmdir [%s]: %s"
        self.log(t % (vp, ex))
        return SFTP_PERMISSION_DENIED
    except OSError as ex:
        self.log("rmdir(%s): %r" % (vp, ex))
        return paramiko.SFTPServer.convert_errno(ex.errno)
def chattr(self, path: str, attr: SATTR) -> int:
    """Exception-guard around _chattr."""
    try:
        return self._chattr(path, attr)
    except:
        self.log("unhandled exception: %s" % (min_ex(),), 1)
        return SFTP_FAILURE
def _chattr(self, vp: str, attr: SATTR) -> int:
    """Apply chmod/chown/utime-style attribute changes to a path."""
    self.log("chattr(%s, %s)" % (vp, attr))
    try:
        # write-permission required; will_del since this can affect deletability
        vn, rem = self.asrv.vfs.get(vp, self.uname, False, True, will_del=True)
        ap = os.path.join(vn.realpath, rem)
        paramiko.SFTPServer.set_file_attr(ap, attr)
        return SFTP_OK
    except Pebkac as ex:
        t = "denied chattr [%s]: %s"
        self.log(t % (vp, ex))
        return SFTP_PERMISSION_DENIED
    except OSError as ex:
        self.log("chattr(%s): %r" % (vp, ex))
        return paramiko.SFTPServer.convert_errno(ex.errno)
def symlink(self, target_path: str, path: str) -> int:
    """Symlink creation is not supported."""
    return SFTP_OP_UNSUPPORTED

def readlink(self, path: str) -> str | int:
    # NOTE(review): returns the path itself rather than a resolved target;
    # presumably good enough for clients that readlink everything -- verify
    return path

def canonicalize(self, path: str) -> str:
    """Normalize a client path into an absolute, dot-free virtual path."""
    return "/%s" % (undot(path),)
class Sftpd(object):
    """Owner of the sftp listening sockets; accepts and dispatches sessions."""

    def __init__(self, hub: "SvcHub") -> None:
        self.hub = hub
        self.args = args = hub.args
        self.log_func = hub.log
        self.srv: list[socket.socket] = []
        self.bound: list[str] = []
        # addr -> (channel, transport, server); keeps sessions referenced/alive
        self.sessions = {}
        ips = args.sftp_i
        if "::" in ips:
            # listening on all-interfaces-v6; also cover v4
            ips.append("0.0.0.0")
        ips = [x for x in ips if not x.startswith(("unix:", "fd:"))]
        if args.sftp4:
            ips = [x for x in ips if ":" not in x]
        if not ips:
            self.log("cannot start sftp-server; no compatible IPs in -i", 1)
            return
        # load (or generate) one hostkey per supported algorithm
        self.hostkeys = []
        hostkeytypes = (
            ("ed25519", "Ed25519Key", {}),  # best
            ("ecdsa", "ECDSAKey", {"bits": 384}),
            ("rsa", "RSAKey", {"bits": 4096}),
            ("dsa", "DSSKey", {}),  # worst
        )
        for fname, aname, opts in hostkeytypes:
            fpath = "%s/ssh_host_%s_key" % (args.sftp_hostk, fname.lower())
            try:
                pkey = getattr(paramiko, aname).from_private_key_file(fpath)
            except Exception as ex:
                # keyfile missing or unreadable; generate one if paramiko can
                try:
                    genfun = getattr(paramiko, aname).generate
                except Exception as ex2:
                    if args.sftpv or fname not in ("dsa", "ed25519"):
                        # dsa dropped in 4.0
                        # ed25519 not supported yet
                        self.log("cannot generate %s hostkey: %r" % (aname, ex2), 3)
                    continue
                self.log("generating hostkey [%s] due to %r" % (fpath, ex))
                pkey = genfun(**opts)
                pkey.write_private_key_file(fpath)
                # reload from disk to make sure the written file is usable
                pkey = getattr(paramiko, aname).from_private_key_file(fpath)
            self.hostkeys.append(pkey)
            if args.sftpv:
                self.log("loaded hostkey %r" % (pkey,))
        ips = list(ODict.fromkeys(ips))  # dedup
        for ip in ips:
            self._bind(ip)
        self.log("listening @ %s port %s" % (self.bound, args.sftp))
def log(self, msg: str, c: Union[int, str] = 0) -> None:
    """Log via the hub under the "sftp" source tag; c is the color code."""
    self.hub.log("sftp", msg, c)
def _bind(self, ip: str) -> None:
    """Create, configure, bind and listen on one socket for ip + --sftp port."""
    port = self.args.sftp
    try:
        ipv = socket.AF_INET6 if ":" in ip else socket.AF_INET
        srv = socket.socket(ipv, socket.SOCK_STREAM)
        if not ANYWIN or self.args.reuseaddr:
            srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        srv.settimeout(0)  # == srv.setblocking(False)
        try:
            # prefer dualstack on v6 sockets
            srv.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False)
        except:
            pass  # will create another ipv4 socket instead
        if getattr(self.args, "freebind", False):
            srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
        srv.bind((ip, port))
        srv.listen(10)
        self.srv.append(srv)
        self.bound.append(ip)
    except Exception as ex:
        if ip == "0.0.0.0" and "::" in self.bound:
            # v4 covered by the dualstack v6 socket; this failure is fine
            try:
                srv.close()  # type: ignore
            except:
                pass
            return  # dualstack
        self.log("could not listen on (%s,%s): %r" % (ip, port, ex), 3)
def _accept(self, srv: socket.socket) -> None:
    """Accept one tcp connection and hand it to the ssh layer."""
    cli, addr = srv.accept()
    # cli.settimeout(0) # == srv.setblocking(False)
    self.log("%r is connecting" % (addr,))
    zs = "sftp-%s" % (addr[0],)  # thread-name for the disabled Daemon below
    # Daemon(self._accept2, zs, (cli, addr))
    self._accept2(cli, addr)
def _accept2(self, cli, addr) -> None:
    """Run the ssh handshake on a fresh connection and stash the session."""
    tra = paramiko.Transport(cli)
    for hkey in self.hostkeys:
        tra.add_server_key(hkey)
    tra.set_subsystem_handler("sftp", paramiko.SFTPServer, SFTP_Srv)
    psrv = SSH_Srv(self.hub, addr)
    try:
        tra.start_server(server=psrv)
    except Exception as ex:
        self.log("%r could not establish connection: %r" % (addr, ex), 3)
        cli.close()
        return
    chan = tra.accept()
    if chan is None:
        self.log("%r did not open an sftp channel" % (addr,), 3)
        cli.close()
        return
    # keep references alive; paramiko services the channel in its own thread
    self.sessions[addr] = (chan, tra, psrv)
    # tra.join()
    # self.log("%r disconnected" % (addr,))
def run(self):
    """Start the accept-loop daemon; select() or poll() depending on config."""
    lgr = logging.getLogger("paramiko.transport")
    lgr.setLevel(logging.DEBUG if self.args.sftpvv else logging.INFO)
    if self.args.no_poll:
        fun = self._run_select
    else:
        fun = self._run_poll
    Daemon(fun, "sftpd")
def _run_select(self):
    """Accept-loop using select(); fallback for platforms without poll()."""
    while not self.hub.stopping:
        rx, _, _ = select.select(self.srv, [], [], 180)
        for sck in rx:
            self._accept(sck)
def _run_poll(self):
    """Accept-loop using poll()."""
    fd2sck = {}
    poll = select.poll()
    for sck in self.srv:
        fd = sck.fileno()
        fd2sck[fd] = sck
        poll.register(fd, select.POLLIN)
    while not self.hub.stopping:
        # timeout is in milliseconds
        pr = poll.poll(180 * 1000)
        rx = [fd2sck[x[0]] for x in pr if x[1] & select.POLLIN]
        for sck in rx:
            self._accept(sck)
| {
"repo_id": "9001/copyparty",
"file_path": "copyparty/sftpd.py",
"license": "MIT License",
"lines": 767,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
9001/copyparty:bin/mtag/geotag.py | import json
import re
import sys
from copyparty.util import fsenc, runcmd
"""
uses exiftool to geotag images based on embedded gps coordinates in exif data
adds four new metadata keys:
.gps_lat = latitude
.gps_lon = longitude
.masl = meters above sea level
city = "city, subregion, region"
usage: -mtp .masl,.gps_lat,.gps_lon,city=ad,t10,bin/mtag/geotag.py
example: https://a.ocv.me/pub/blog/j7/8/?grid=0
"""
def main():
    """Read gps/geolocation tags from the file in argv[1]; print them as json."""
    # exiftool does the heavy lifting; -n requests raw numeric values
    cmd = b"exiftool -api geolocation -n".split(b" ")
    rc, so, se = runcmd(cmd + [fsenc(sys.argv[1])])

    # exiftool output lines look like "Tag Name        : value"
    line_ptn = re.compile("([^:]*[^ :]) *: (.*)")

    # region / subregion / city, coarse-to-fine
    city_parts = ["", "", ""]
    tags = {}
    for line in so.split("\n"):
        hit = line_ptn.match(line)
        if not hit:
            continue
        key, val = hit.groups()
        if key == "Geolocation City":
            city_parts[2] = val
        elif key == "Geolocation Subregion":
            city_parts[1] = val
        elif key == "Geolocation Region":
            city_parts[0] = val
        elif key == "GPS Latitude":
            tags[".gps_lat"] = "%.04f" % (float(val),)
        elif key == "GPS Longitude":
            tags[".gps_lon"] = "%.04f" % (float(val),)
        elif key == "GPS Altitude":
            tags[".masl"] = str(int(float(val)))

    # join whichever parts were found, dropping empty segments at the edges
    location = ", ".join(city_parts).strip(", ")
    if location:
        tags["city"] = location

    print(json.dumps(tags))


if __name__ == "__main__":
    main()
| {
"repo_id": "9001/copyparty",
"file_path": "bin/mtag/geotag.py",
"license": "MIT License",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
9001/copyparty:bin/hooks/wget-i.py | #!/usr/bin/env python3
import os
import threading
import subprocess as sp
_ = r"""
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
📟 message-to-server-log in the web-ui)
this hook is a modified copy of wget.py, modified to
make it import-safe so it can be run with the 'I' flag,
which speeds up the startup time of the hook by 140x
example usage as global config:
--xm aw,I,bin/hooks/wget-i.py
parameters explained,
xm = execute on message-to-server-log
aw = only users with write-access can use this
I = import; do not fork / subprocess
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xm=aw,I,bin/hooks/wget-i.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all messages with the params explained above)
example usage as a volflag in a copyparty config file:
[/inc]
srv/inc
accs:
r: *
rw: ed
flags:
xm: aw,I,bin/hooks/wget-i.py
the volflag examples only kicks in if you send the message
while you're in the /inc folder (or any folder below there)
IMPORTANT NOTE:
because this hook uses the 'I' flag to run inside copyparty,
many other flags will not work (f,j,c3,t3600 as seen in the
original wget.py), and furthermore + more importantly we
need to be EXCEPTIONALLY CAREFUL to avoid side-effects, so
the os.chdir has been replaced with cwd=dirpath for example
"""
def do_stuff(inf):
    """
    runs in a separate thread so the download never blocks copyparty
    (the 'f,t3600' hook-flags cannot be combined with 'I')
    """
    # first things first: borrow copyparty's logger-function
    log = inf["log"]

    url = inf["txt"]
    if url.startswith("upload-queue-empty;"):
        return

    if "://" not in url:
        url = "https://" + url

    # only a small whitelist of protocols is allowed
    proto = url.split("://", 1)[0].lower()
    if proto not in ("http", "https", "ftp", "ftps"):
        raise Exception("bad proto {}".format(proto))

    dirpath = inf["ap"]
    name = url.split("?")[0].split("/")[-1]

    banner = "-- DOWNLOADING " + name
    log(banner)

    # drop an empty placeholder file so the download shows up in the ui
    marker = os.path.join(dirpath, banner)
    open(marker, "wb").close()

    cmd = ["wget", "--trust-server-names", "-nv", "--", url]

    try:
        # two things to note here:
        # - cannot use the `c3` hook-flag with `I` so mute output with stdout=sp.DEVNULL instead;
        # - MUST NOT use os.chdir with 'I' so use cwd=dirpath instead
        sp.check_call(cmd, cwd=dirpath, stdout=sp.DEVNULL)
    except:
        failmsg = "-- FAILED TO DOWNLOAD " + name
        log(failmsg, 3)  # 3=yellow=warning
        open(os.path.join(dirpath, failmsg), "wb").close()
        raise  # have copyparty scream about the details in the log

    os.unlink(marker)
def main(inf):
    # fire-and-forget: hand the actual work to a daemon thread
    worker = threading.Thread(target=do_stuff, args=(inf,), daemon=True)
    worker.start()
| {
"repo_id": "9001/copyparty",
"file_path": "bin/hooks/wget-i.py",
"license": "MIT License",
"lines": 76,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
9001/copyparty:bin/hooks/reject-ramdisk.py | #!/usr/bin/env python3
import os
import threading
from argparse import Namespace
from jinja2.nodes import Name
from copyparty.fsutil import Fstab
from typing import Any, Optional
_ = r"""
reject an upload if the target folder is on a ramdisk; useful when you
have a volume where some folders inside are ramdisks but others aren't
example usage as global config:
--xbu I,bin/hooks/reject-ramdisk.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xbu=I,bin/hooks/reject-ramdisk.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all uploads with the params listed below)
example usage as a volflag in a copyparty config file:
[/inc]
srv/inc
accs:
r: *
rw: ed
flags:
xbu: I,bin/hooks/reject-ramdisk.py
parameters explained,
I = import; do not fork / subprocess
IMPORTANT NOTE:
because this hook is imported inside copyparty, you need to
be EXCEPTIONALLY CAREFUL to avoid side-effects, for example
DO NOT os.chdir() or anything like that, and also make sure
that the name of this file is unique (cannot be the same as
an existing python module/library)
"""
# guards lazy creation and use of the shared fstab cache below
mutex = threading.Lock()
# lazily-built filesystem table; created on the first upload (see main)
fstab: Optional[Fstab] = None
def main(ka: dict[str, Any]) -> dict[str, Any]:
    """Reject the upload when the destination folder lives on a ramdisk.

    ka: hook-info dict from copyparty; reads ka["log"] (NamedLogger) and
    ka["ap"] (absolute destination path).
    Returns {"rc": 0} to allow, or {"rc": 1, "rejectmsg": ...} to reject.
    """
    global fstab
    with mutex:
        log = ka["log"]  # this is a copyparty NamedLogger function
        if not fstab:
            # first call: build the fstab cache (kept for the process lifetime)
            log("<HOOK:RAMDISK> creating fstab", 6)
            args = Namespace()
            args.mtab_age = 1  # cache the filesystem info for 1 sec
            fstab = Fstab(log, args, False)
        ap = ka["ap"]  # abspath the upload is going to
        fs, mp = fstab.get(ap)  # figure out what the filesystem is
        ramdisk = fs in ("tmpfs", "overlay")  # looks like a ramdisk?
        # log("<HOOK:RAMDISK> fs=%r" % (fs,))
        if ramdisk:
            t = "Upload REJECTED because destination is a ramdisk"
            return {"rc": 1, "rejectmsg": t}
        return {"rc": 0}
| {
"repo_id": "9001/copyparty",
"file_path": "bin/hooks/reject-ramdisk.py",
"license": "MIT License",
"lines": 54,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
9001/copyparty:copyparty/qrkode.py | # coding: utf-8
from __future__ import print_function, unicode_literals
import os
# prefer the vendored qrcodegen copy unless an env-var asks for the system one
try:
    if os.environ.get("PRTY_SYS_ALL") or os.environ.get("PRTY_SYS_QRCG"):
        raise ImportError()  # forced fallback to the system package
    from .stolen.qrcodegen import QrCode
    qrgen = QrCode.encode_binary
    VENDORED = True
except ImportError:
    VENDORED = False
    from qrcodegen import QrCode
    if os.environ.get("PRTY_MODSPEC"):
        # debugging aid: show which qrcodegen module actually got imported
        from inspect import getsourcefile
        print("PRTY_MODSPEC: qrcode:", getsourcefile(QrCode))
# wrapped in `if True` so type-only imports stay grouped without indent noise
if True:  # pylint: disable=using-constant-test
    import typing
    from typing import Any, Optional, Sequence, Union
def _qrgen(data: Union[bytes, Sequence[int]]) -> "QrCode":
ret = None
V = QrCode.Ecc
for e in [V.HIGH, V.QUARTILE, V.MEDIUM, V.LOW]:
qr = QrCode.encode_binary(data, e)
qr.size = qr._size
qr.modules = qr._modules
if not ret or ret.size > qr.size:
ret = qr
return ret
qrgen = _qrgen
def qr2txt(qr: QrCode, zoom: int = 1, pad: int = 4) -> str:
    """Render *qr* as unicode block-art text.

    zoom=1 packs two module-rows into each output row using half-block
    glyphs; any other zoom prints one double-width char per module.
    pad is the quiet-zone thickness in modules on every side.
    """
    # BUGFIX: copy the row list; the original aliased qr.modules and the
    # odd-height padding below appended to it in place, so a second call
    # on the same qr object saw a corrupted module table
    tab = list(qr.modules)
    sz = qr.size
    if sz % 2 and zoom == 1:
        # half-block rendering consumes rows in pairs; add one blank row
        tab.append([False] * sz)

    # quiet zone: pad blank modules above/below, then left/right
    tab = [[False] * sz] * pad + tab + [[False] * sz] * pad
    tab = [[False] * pad + x + [False] * pad for x in tab]

    rows: list[str] = []
    if zoom == 1:
        # each output char encodes two vertically-stacked modules
        for y in range(0, len(tab), 2):
            row = ""
            for x in range(len(tab[y])):
                v = 2 if tab[y][x] else 0
                v += 1 if tab[y + 1][x] else 0
                row += " ▄▀█"[v]
            rows.append(row)
    else:
        for tr in tab:
            row = ""
            for zb in tr:
                row += " █"[int(zb)] * 2
            rows.append(row)
    return "\n".join(rows)
def qr2png(
    qr: QrCode,
    zoom: int,
    pad: int,
    bg: Optional[tuple[int, int, int]],
    fg: Optional[tuple[int, int, int]],
    ap: str,
) -> None:
    """Write *qr* to png file *ap*; bg=None gives a transparent background."""
    from PIL import Image

    cells = qr.modules
    n = qr.size
    canvas = n + pad * 2
    if bg:
        img = Image.new("RGB", (canvas, canvas), bg)
    else:
        # transparent canvas; force the foreground fully opaque
        img = Image.new("RGBA", (canvas, canvas), (0, 0, 0, 0))
        fg = (fg[0], fg[1], fg[2], 255)

    for row in range(n):
        for col in range(n):
            if cells[row][col]:
                img.putpixel((col + pad, row + pad), fg)

    if zoom != 1:
        # NOTE(review): target is sz*zoom, not (sz+2*pad)*zoom, so the pad
        # shrinks when zooming — confirm this is intended
        img = img.resize((n * zoom, n * zoom), Image.Resampling.NEAREST)

    img.save(ap)
def qr2svg(qr: QrCode, border: int) -> str:
    """Return *qr* as a standalone svg document with a *border*-module quiet zone."""
    # one tiny closed 1x1 path per dark module
    parts: list[str] = []
    for y in range(qr.size):
        sy = border + y
        for x in range(qr.size):
            if qr.modules[y][x]:
                parts.append("M%d,%dh1v1h-1z" % (border + x, sy))

    t = """\
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 {0} {0}" stroke="none">
<rect width="100%" height="100%" fill="#F7F7F7"/>
<path d="{1}" fill="#111111"/>
</svg>
"""
    return t.format(qr.size + border * 2, " ".join(parts))
| {
"repo_id": "9001/copyparty",
"file_path": "copyparty/qrkode.py",
"license": "MIT License",
"lines": 92,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
9001/copyparty:bin/hooks/import-me.py | #!/usr/bin/env python3
from typing import Any
_ = r"""
the fastest hook in the west
(runs directly inside copyparty, not as a subprocess)
example usage as global config:
--xbu I,bin/hooks/import-me.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xbu=I,bin/hooks/import-me.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all uploads with the params listed below)
example usage as a volflag in a copyparty config file:
[/inc]
srv/inc
accs:
r: *
rw: ed
flags:
xbu: I,bin/hooks/import-me.py
parameters explained,
I = import; do not fork / subprocess
IMPORTANT NOTE:
because this hook is running inside copyparty, you need to
be EXCEPTIONALLY CAREFUL to avoid side-effects, for example
DO NOT os.chdir() or anything like that, and also make sure
that the name of this file is unique (cannot be the same as
an existing python module/library)
"""
def main(ka: dict[str, Any]) -> dict[str, Any]:
    # "ka" is a dictionary with info from copyparty...
    # but since we run in-process, we can just inspect the interpreter instead;
    import inspect

    caller = inspect.currentframe().f_back.f_back.f_back
    t = "hello from hook; I am able to peek into copyparty's memory like so:\n function name: %s\n variables:\n %s\n"
    shown = [("%r: %r" % (k, v))[:99] for k, v in caller.f_locals.items()]
    t2 = "\n ".join(shown[:9])
    logger = ka["log"]
    logger(t % (caller.f_code, t2))

    # must return a dictionary with:
    # "rc": the retcode; 0 is ok
    return {"rc": 0}
| {
"repo_id": "9001/copyparty",
"file_path": "bin/hooks/import-me.py",
"license": "MIT License",
"lines": 42,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
9001/copyparty:bin/hooks/reject-and-explain.py | #!/usr/bin/env python3
import json
import os
import re
import sys
_ = r"""
reject file upload (with a nice explanation why)
example usage as global config:
--xbu j,c1,bin/hooks/reject-and-explain.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:r:rw,ed:c,xbu=j,c1,bin/hooks/reject-and-explain.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
(share filesystem-path srv/inc as volume /inc,
readable by everyone, read-write for user 'ed',
running this plugin on all uploads with the params listed below)
example usage as a volflag in a copyparty config file:
[/inc]
srv/inc
accs:
r: *
rw: ed
flags:
xbu: j,c1,bin/hooks/reject-and-explain.py
parameters explained,
xbu = execute-before-upload (can also be xau, execute-after-upload)
j = this hook needs upload information as json (not just the filename)
c1 = this hook returns json on stdout, so tell copyparty to read that
"""
def main():
    """Accept or reject the upload described by the json blob in argv[1]."""
    inf = json.loads(sys.argv[1])
    vdir, _fn = os.path.split(inf["vp"])
    print("inf[vp] = %r" % (inf["vp"],), file=sys.stderr)

    # accept only uploads whose folder url ends in day<number> (any case)
    if re.search(r"(^|/)day[0-9]+$", vdir, re.IGNORECASE):
        # allow the upload
        print("{}")
        return

    # the upload was rejected; display the following errortext:
    errmsg = "Files can only be uploaded into a folder named 'DayN' where N is a number, for example 'Day573'. This file was REJECTED: "
    errmsg += inf["vp"]  # if you want to mention the file's url
    print(json.dumps({"rejectmsg": errmsg}))


if __name__ == "__main__":
    main()
| {
"repo_id": "9001/copyparty",
"file_path": "bin/hooks/reject-and-explain.py",
"license": "MIT License",
"lines": 45,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
9001/copyparty:contrib/package/nix/partftpy/update.py | #!/usr/bin/env python3
# Update the Nix package pin
#
# Usage: ./update.sh
import base64
import json
import hashlib
import sys
from pathlib import Path
# where the resulting pin info is written
OUTPUT_FILE = Path("pin.json")
# filename of the release tarball asset for a given version
TARGET_ASSET = lambda version: f"partftpy-{version}.tar.gz"
# hash algorithm used for the SRI-style pin string
HASH_TYPE = "sha256"
# github api endpoint describing the newest partftpy release
LATEST_RELEASE_URL = "https://api.github.com/repos/9001/partftpy/releases/latest"
def get_formatted_hash(binary):
    """Return the SRI-style "<algo>-<base64-digest>" pin string for *binary*.

    Uses HASH_TYPE for the digest so the algorithm always matches the
    prefix (previously "sha256" was hard-coded here while the prefix
    came from HASH_TYPE — an inconsistency waiting to bite).
    """
    hasher = hashlib.new(HASH_TYPE)
    hasher.update(binary)
    asset_hash = hasher.digest()
    encoded_hash = base64.b64encode(asset_hash).decode("ascii")
    return f"{HASH_TYPE}-{encoded_hash}"
def remote_release_pin():
    """Fetch url, version and content-hash of the newest partftpy release."""
    import requests

    meta = requests.get(LATEST_RELEASE_URL).json()
    version = meta["tag_name"].lstrip("v")

    wanted = TARGET_ASSET(version)
    asset_info = [a for a in meta["assets"] if a["name"] == wanted][0]
    download_url = asset_info["browser_download_url"]

    tarball = requests.get(download_url)
    return {
        "url": download_url,
        "version": version,
        "hash": get_formatted_hash(tarball.content),
    }
def main():
    """Resolve the latest release and persist the pin as pretty-printed json."""
    pin = remote_release_pin()
    print(pin)
    OUTPUT_FILE.write_text(json.dumps(pin, indent=4))


if __name__ == "__main__":
    main()
| {
"repo_id": "9001/copyparty",
"file_path": "contrib/package/nix/partftpy/update.py",
"license": "MIT License",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
9001/copyparty:tests/test_shr.py | #!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals
import json
import os
import shutil
import sqlite3
import tempfile
import unittest
from copyparty.__init__ import ANYWIN
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
from copyparty.util import absreal
from tests import util as tu
from tests.util import Cfg
class TestShr(unittest.TestCase):
    """End-to-end tests for the share ("shr") feature: creates shares over
    the raw-http api and verifies directory listings and which files are
    downloadable afterwards (anonymously and authenticated)."""

    def log(self, src, msg, c=0):
        # test logger; mutes a handful of expected, noisy startup warnings
        m = "%s" % (msg,)
        if (
            "warning: filesystem-path does not exist:" in m
            or "you are sharing a system directory:" in m
            or "symlink-based deduplication is enabled" in m
            or m.startswith("hint: argument")
        ):
            return
        print(("[%s] %s" % (src, msg)).encode("ascii", "replace").decode("ascii"))

    def assertLD(self, url, auth, els, edl):
        # list `url` and assert its dirs/files equal els=[dirs, files];
        # els=[] means the listing itself is expected to fail.
        # then assert the set of actually-downloadable files equals edl
        # (edl=None means "expect every file in els[1] to be downloadable")
        ls = self.ls(url, auth)
        self.assertEqual(ls[0], len(els) == 2)
        if not ls[0]:
            return
        a = [list(sorted(els[0])), list(sorted(els[1]))]
        b = [list(sorted(ls[1])), list(sorted(ls[2]))]
        self.assertEqual(a, b)
        if edl is None:
            edl = els[1]
        can_dl = []
        for fn in b[1]:
            if fn == "a.db":
                continue
            furl = url + "/" + fn
            if auth:
                furl += "?pw=p1"
            h, zb = self.curl(furl, True)
            if h.startswith("HTTP/1.1 200 "):
                can_dl.append(fn)
        self.assertEqual(edl, can_dl)

    def setUp(self):
        # build a small tree (d1/f1, d2/f2, d2/d3/f3 + symlinks l1..l3 in
        # every folder) on a ramdisk, plus an empty shares-database
        self.td = tu.get_ramdisk()
        td = os.path.join(self.td, "vfs")
        os.mkdir(td)
        os.chdir(td)
        os.mkdir("d1")
        os.mkdir("d2")
        os.mkdir("d2/d3")
        for zs in ("d1/f1", "d2/f2", "d2/d3/f3"):
            with open(zs, "wb") as f:
                f.write(zs.encode("utf-8"))
            for dst in ("d1", "d2", "d2/d3"):
                src, fn = zs.rsplit("/", 1)
                os.symlink(absreal(zs), dst + "/l" + fn[-1:])
        db = sqlite3.connect("a.db")
        with db:
            zs = r"create table sh (k text, pw text, vp text, pr text, st int, un text, t0 int, t1 int)"
            db.execute(zs)
        db.close()

    def tearDown(self):
        # leave the ramdisk before deleting it
        os.chdir(tempfile.gettempdir())
        shutil.rmtree(self.td)

    def cinit(self):
        # (re)create the auth-server and a fake http connection against it
        self.asrv = AuthSrv(self.args, self.log)
        self.conn = tu.VHttpConn(self.args, self.asrv, self.log, b"", True)

    def test1(self):
        # baseline: create shares "r" (of /) and "d2" (of /d2/), restart,
        # and verify the shares are still listable/downloadable
        self.args = Cfg(
            a=["u1:p1"],
            v=["::A,u1", "d1:v1:A,u1", "d2/d3:d2/d3:A,u1"],
            shr="/shr/",
            shr1="shr/",
            shr_db="a.db",
            shr_v=False,
        )
        self.cinit()
        self.assertLD("", True, [["d1", "d2", "v1"], ["a.db"]], [])
        self.assertLD("d1", True, [[], ["f1", "l1", "l2", "l3"]], None)
        self.assertLD("v1", True, [[], ["f1", "l1", "l2", "l3"]], None)
        self.assertLD("d2", True, [["d3"], ["f2", "l1", "l2", "l3"]], None)
        self.assertLD("d2/d3", True, [[], ["f3", "l1", "l2", "l3"]], None)
        self.assertLD("d3", True, [], [])
        jt = {
            "k": "r",
            "vp": ["/"],
            "pw": "",
            "exp": "99",
            "perms": ["read"],
        }
        print(self.post_json("?pw=p1&share", jt)[1])
        jt = {
            "k": "d2",
            "vp": ["/d2/"],
            "pw": "",
            "exp": "99",
            "perms": ["read"],
        }
        print(self.post_json("?pw=p1&share", jt)[1])
        # restart the server to prove the shares survive in the db
        self.conn.shutdown()
        self.cinit()
        self.assertLD("", True, [["d1", "d2", "v1"], ["a.db"]], [])
        self.assertLD("d1", True, [[], ["f1", "l1", "l2", "l3"]], None)
        self.assertLD("v1", True, [[], ["f1", "l1", "l2", "l3"]], None)
        self.assertLD("d2", True, [["d3"], ["f2", "l1", "l2", "l3"]], None)
        self.assertLD("d2/d3", True, [[], ["f3", "l1", "l2", "l3"]], None)
        self.assertLD("d3", True, [], [])
        self.assertLD("shr/d2", False, [[], ["f2", "l1", "l2", "l3"]], None)
        self.assertLD("shr/d2/d3", False, [], None)
        self.assertLD("shr/r", False, [["d1"], ["a.db"]], [])
        self.assertLD("shr/r/d1", False, [[], ["f1", "l1", "l2", "l3"]], None)
        self.assertLD("shr/r/d2", False, [], None)  # unfortunate
        self.assertLD("shr/r/d2/d3", False, [], None)
        self.conn.shutdown()

    def test2(self):
        # same as test1 but with xvol enabled, which must block downloads
        # of symlinks that escape their volume
        self.args = Cfg(
            a=["u1:p1"],
            v=["::A,u1", "d1:v1:A,u1", "d2/d3:d2/d3:A,u1"],
            shr="/shr/",
            shr1="shr/",
            shr_db="a.db",
            shr_v=False,
            xvol=True,
        )
        self.cinit()
        self.assertLD("", True, [["d1", "d2", "v1"], ["a.db"]], [])
        self.assertLD("d1", True, [[], ["f1", "l1", "l2", "l3"]], None)
        self.assertLD("v1", True, [[], ["f1", "l1", "l2", "l3"]], None)
        self.assertLD("d2", True, [["d3"], ["f2", "l1", "l2", "l3"]], None)
        self.assertLD("d2/d3", True, [[], ["f3", "l1", "l2", "l3"]], None)
        self.assertLD("d3", True, [], [])
        jt = {
            "k": "r",
            "vp": ["/"],
            "pw": "",
            "exp": "99",
            "perms": ["read"],
        }
        print(self.post_json("?pw=p1&share", jt)[1])
        jt = {
            "k": "d2",
            "vp": ["/d2/"],
            "pw": "",
            "exp": "99",
            "perms": ["read"],
        }
        print(self.post_json("?pw=p1&share", jt)[1])
        self.conn.shutdown()
        self.cinit()
        self.assertLD("", True, [["d1", "d2", "v1"], ["a.db"]], [])
        self.assertLD("d1", True, [[], ["f1", "l1", "l2", "l3"]], None)
        self.assertLD("v1", True, [[], ["f1", "l1", "l2", "l3"]], None)
        self.assertLD("d2", True, [["d3"], ["f2", "l1", "l2", "l3"]], None)
        self.assertLD("d2/d3", True, [[], ["f3", "l1", "l2", "l3"]], None)
        self.assertLD("d3", True, [], [])
        self.assertLD("shr/d2", False, [[], ["f2", "l1", "l2", "l3"]], ["f2", "l2"])
        self.assertLD("shr/d2/d3", False, [], [])
        self.assertLD("shr/r", False, [["d1"], ["a.db"]], [])
        self.assertLD(
            "shr/r/d1", False, [[], ["f1", "l1", "l2", "l3"]], ["f1", "l1", "l2"]
        )
        self.assertLD("shr/r/d2", False, [], [])  # unfortunate
        self.assertLD("shr/r/d2/d3", False, [], [])
        self.conn.shutdown()

    def ls(self, url: str, auth: bool):
        # hit the ?ls json api; returns (ok, dir-hrefs, file-hrefs)
        zs = url + "?ls" + ("&pw=p1" if auth else "")
        h, b = self.curl(zs)
        if not h.startswith("HTTP/1.1 200 "):
            return (False, [], [])
        jo = json.loads(b)
        return (
            True,
            [x["href"].rstrip("/") for x in jo.get("dirs") or {}],
            [x["href"] for x in jo.get("files") or {}],
        )

    def curl(self, url: str, binary=False):
        # raw http GET through the fake connection; returns (headers, body)
        h = "GET /%s HTTP/1.1\r\nConnection: close\r\n\r\n"
        HttpCli(self.conn.setbuf((h % (url,)).encode("utf-8"))).run()
        if binary:
            h, b = self.conn.s._reply.split(b"\r\n\r\n", 1)
            return [h.decode("utf-8"), b]
        return self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

    def post_json(self, url: str, data):
        # hand-rolled http POST with a json body; returns (headers, body)
        buf = json.dumps(data).encode("utf-8")
        msg = [
            "POST /%s HTTP/1.1" % (url,),
            "Connection: close",
            "Content-Type: application/json",
            "Content-Length: %d" % (len(buf),),
            "\r\n",
        ]
        buf = "\r\n".join(msg).encode("utf-8") + buf
        print("PUT -->", buf)
        HttpCli(self.conn.setbuf(buf)).run()
        return self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
| {
"repo_id": "9001/copyparty",
"file_path": "tests/test_shr.py",
"license": "MIT License",
"lines": 200,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Aider-AI/aider:tests/basic/test_utils.py | import os
from aider.utils import safe_abs_path
def test_safe_abs_path_symlink_loop(tmp_path):
    # build a circular symlink pair: a -> b -> a
    first = tmp_path / "link_a"
    second = tmp_path / "link_b"
    first.symlink_to(second)
    second.symlink_to(first)

    # resolving the loop must neither raise nor yield a relative path
    resolved = safe_abs_path(str(first))
    assert os.path.isabs(resolved)
| {
"repo_id": "Aider-AI/aider",
"file_path": "tests/basic/test_utils.py",
"license": "Apache License 2.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Aider-AI/aider:aider/openrouter.py | """
OpenRouter model metadata caching and lookup.
This module keeps a local cached copy of the OpenRouter model list
(downloaded from ``https://openrouter.ai/api/v1/models``) and exposes a
helper class that returns metadata for a given model in a format compatible
with litellm’s ``get_model_info``.
"""
from __future__ import annotations
import json
import time
from pathlib import Path
from typing import Dict
import requests
def _cost_per_token(val: str | None) -> float | None:
"""Convert a price string (USD per token) to a float."""
if val in (None, "", "0"):
return 0.0 if val == "0" else None
try:
return float(val)
except Exception: # noqa: BLE001
return None
class OpenRouterModelManager:
    """Downloads, caches (on disk, 24 h) and serves OpenRouter model metadata."""

    # public model-catalog endpoint
    MODELS_URL = "https://openrouter.ai/api/v1/models"
    CACHE_TTL = 60 * 60 * 24  # 24 h

    def __init__(self) -> None:
        self.cache_dir = Path.home() / ".aider" / "caches"
        self.cache_file = self.cache_dir / "openrouter_models.json"
        # parsed model-list payload once loaded; None until then
        self.content: Dict | None = None
        self.verify_ssl: bool = True
        # guarantees the disk cache is read at most once per instance
        self._cache_loaded = False

    # ------------------------------------------------------------------ #
    # Public API                                                         #
    # ------------------------------------------------------------------ #
    def set_verify_ssl(self, verify_ssl: bool) -> None:
        """Enable/disable SSL verification for API requests."""
        self.verify_ssl = verify_ssl

    def get_model_info(self, model: str) -> Dict:
        """
        Return metadata for *model* or an empty ``dict`` when unknown.

        ``model`` should use the aider naming convention, e.g.
        ``openrouter/nousresearch/deephermes-3-mistral-24b-preview:free``.
        """
        self._ensure_content()
        if not self.content or "data" not in self.content:
            return {}
        route = self._strip_prefix(model)
        # Consider both the exact id and id without any ":suffix".
        candidates = {route}
        if ":" in route:
            candidates.add(route.split(":", 1)[0])
        record = next((item for item in self.content["data"] if item.get("id") in candidates), None)
        if not record:
            return {}
        # provider-specific context length takes precedence over the model default
        context_len = (
            record.get("top_provider", {}).get("context_length")
            or record.get("context_length")
            or None
        )
        pricing = record.get("pricing", {})
        return {
            "max_input_tokens": context_len,
            "max_tokens": context_len,
            "max_output_tokens": context_len,
            "input_cost_per_token": _cost_per_token(pricing.get("prompt")),
            "output_cost_per_token": _cost_per_token(pricing.get("completion")),
            "litellm_provider": "openrouter",
        }

    # ------------------------------------------------------------------ #
    # Internal helpers                                                   #
    # ------------------------------------------------------------------ #
    def _strip_prefix(self, model: str) -> str:
        # drop the leading "openrouter/" so ids match the upstream catalog
        return model[len("openrouter/") :] if model.startswith("openrouter/") else model

    def _ensure_content(self) -> None:
        # try the disk cache first; fall back to a network refresh when empty
        self._load_cache()
        if not self.content:
            self._update_cache()

    def _load_cache(self) -> None:
        """Read the cached model list from disk, once, if it is still fresh."""
        if self._cache_loaded:
            return
        try:
            self.cache_dir.mkdir(parents=True, exist_ok=True)
            if self.cache_file.exists():
                cache_age = time.time() - self.cache_file.stat().st_mtime
                if cache_age < self.CACHE_TTL:
                    try:
                        self.content = json.loads(self.cache_file.read_text())
                    except json.JSONDecodeError:
                        # corrupt cache file; treat as missing
                        self.content = None
        except OSError:
            # Cache directory might be unwritable; ignore.
            pass
        self._cache_loaded = True

    def _update_cache(self) -> None:
        """Fetch the model list from the network and persist it to disk."""
        try:
            response = requests.get(self.MODELS_URL, timeout=10, verify=self.verify_ssl)
            if response.status_code == 200:
                self.content = response.json()
                try:
                    self.cache_file.write_text(json.dumps(self.content, indent=2))
                except OSError:
                    pass  # Non-fatal if we can't write the cache
        except Exception as ex:  # noqa: BLE001
            print(f"Failed to fetch OpenRouter model list: {ex}")
            try:
                # leave an empty payload behind so the failure is recorded
                self.cache_file.write_text("{}")
            except OSError:
                pass
| {
"repo_id": "Aider-AI/aider",
"file_path": "aider/openrouter.py",
"license": "Apache License 2.0",
"lines": 107,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
Aider-AI/aider:tests/basic/test_openrouter.py | from pathlib import Path
from aider.models import ModelInfoManager
from aider.openrouter import OpenRouterModelManager
class DummyResponse:
    """Minimal stand-in for requests.Response used in tests."""

    def __init__(self, json_data):
        self._json_data = json_data
        self.status_code = 200

    def json(self):
        # mirror requests.Response.json(): hand back the canned payload
        return self._json_data
def test_openrouter_get_model_info_from_cache(monkeypatch, tmp_path):
    """
    OpenRouterModelManager should return correct metadata taken from the
    downloaded (and locally cached) models JSON payload.
    """
    models_payload = {
        "data": [
            {
                "id": "mistralai/mistral-medium-3",
                "context_length": 32768,
                "pricing": {"prompt": "100", "completion": "200"},
                "top_provider": {"context_length": 32768},
            }
        ]
    }

    # stub out the network call and point the cache at a throwaway HOME
    monkeypatch.setattr("requests.get", lambda *a, **k: DummyResponse(models_payload))
    monkeypatch.setattr(Path, "home", staticmethod(lambda: tmp_path))

    info = OpenRouterModelManager().get_model_info("openrouter/mistralai/mistral-medium-3")

    assert info["max_input_tokens"] == 32768
    assert info["input_cost_per_token"] == 100.0
    assert info["output_cost_per_token"] == 200.0
    assert info["litellm_provider"] == "openrouter"
def test_model_info_manager_uses_openrouter_manager(monkeypatch):
    """
    ModelInfoManager should delegate to OpenRouterModelManager when litellm
    provides no data for an OpenRouter-prefixed model.
    """
    # litellm must come up empty so the fallback logic triggers
    monkeypatch.setattr("aider.models.litellm.get_model_info", lambda *a, **k: {})

    expected = {
        "max_input_tokens": 512,
        "max_tokens": 512,
        "max_output_tokens": 512,
        "input_cost_per_token": 100.0,
        "output_cost_per_token": 200.0,
        "litellm_provider": "openrouter",
    }

    # force OpenRouterModelManager to hand back our stub info
    monkeypatch.setattr(
        "aider.models.OpenRouterModelManager.get_model_info",
        lambda self, model: expected,
    )

    assert ModelInfoManager().get_model_info("openrouter/fake/model") == expected
| {
"repo_id": "Aider-AI/aider",
"file_path": "tests/basic/test_openrouter.py",
"license": "Apache License 2.0",
"lines": 57,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Aider-AI/aider:aider/coders/shell.py | shell_cmd_prompt = """
4. *Concisely* suggest any shell commands the user might want to run in ```bash blocks.
Just suggest shell commands this way, not example code.
Only suggest complete shell commands that are ready to execute, without placeholders.
Only suggest at most a few shell commands at a time, not more than 1-3, one per line.
Do not suggest multi-line shell commands.
All shell commands will run from the root directory of the user's project.
Use the appropriate shell based on the user's system info:
{platform}
Examples of when to suggest shell commands:
- If you changed a self-contained html file, suggest an OS-appropriate command to open a browser to view it to see the updated content.
- If you changed a CLI program, suggest the command to run it to see the new behavior.
- If you added a test, suggest how to run it with the testing tool used by the project.
- Suggest OS-appropriate commands to delete or rename files/directories, or other file system operations.
- If your code changes add new dependencies, suggest the command to install them.
- Etc.
""" # noqa
no_shell_cmd_prompt = """
Keep in mind these details about the user's platform and environment:
{platform}
""" # noqa
shell_cmd_reminder = """
Examples of when to suggest shell commands:
- If you changed a self-contained html file, suggest an OS-appropriate command to open a browser to view it to see the updated content.
- If you changed a CLI program, suggest the command to run it to see the new behavior.
- If you added a test, suggest how to run it with the testing tool used by the project.
- Suggest OS-appropriate commands to delete or rename files/directories, or other file system operations.
- If your code changes add new dependencies, suggest the command to install them.
- Etc.
""" # noqa
| {
"repo_id": "Aider-AI/aider",
"file_path": "aider/coders/shell.py",
"license": "Apache License 2.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | documentation |
ArchiveBox/ArchiveBox:archivebox/api/middleware.py | __package__ = 'archivebox.api'
from django.http import HttpResponse
class ApiCorsMiddleware:
    """Attach permissive CORS headers for API routes (token-based auth)."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # non-api paths pass straight through, untouched
        if not request.path.startswith('/api/'):
            return self.get_response(request)

        # preflight: answer OPTIONS directly with an empty 204
        if request.method == 'OPTIONS' and request.META.get('HTTP_ACCESS_CONTROL_REQUEST_METHOD'):
            return self._add_cors_headers(request, HttpResponse(status=204))

        return self._add_cors_headers(request, self.get_response(request))

    def _add_cors_headers(self, request, response):
        origin = request.META.get('HTTP_ORIGIN')
        if not origin:
            # same-origin or non-browser request: nothing to add
            return response
        response['Access-Control-Allow-Origin'] = '*'
        response['Access-Control-Allow-Methods'] = 'GET, POST, PUT, PATCH, DELETE, OPTIONS'
        response['Access-Control-Allow-Headers'] = (
            'Authorization, X-ArchiveBox-API-Key, Content-Type, X-CSRFToken'
        )
        response['Access-Control-Max-Age'] = '600'
        return response
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/api/middleware.py",
"license": "MIT License",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/core/host_utils.py | from __future__ import annotations
from __future__ import annotations
import re
from urllib.parse import urlparse
from archivebox.config.common import SERVER_CONFIG
# snapshot ids look like (partial) uuids: 8-36 hex digits and/or dashes
_SNAPSHOT_ID_RE = re.compile(r"^[0-9a-fA-F-]{8,36}$")
def split_host_port(host: str) -> tuple[str, str | None]:
    """Split "host[:port]" into (lowercased hostname, port string or None)."""
    # urlparse needs the netloc-style "//" prefix to see this as a host
    parts = urlparse(f"//{host}")
    name = (parts.hostname or host or "").lower()
    return name, (str(parts.port) if parts.port else None)
def _normalize_base_url(value: str | None) -> str:
if not value:
return ""
base = value.strip()
if not base:
return ""
if "://" not in base:
base = f"http://{base}"
parsed = urlparse(base)
if not parsed.netloc:
return ""
return f"{parsed.scheme}://{parsed.netloc}"
def normalize_base_url(value: str | None) -> str:
    """Public wrapper around _normalize_base_url."""
    return _normalize_base_url(value)


def get_listen_host() -> str:
    # raw "host[:port]" value from config, whitespace-stripped
    return (SERVER_CONFIG.LISTEN_HOST or "").strip()


def get_listen_parts() -> tuple[str, str | None]:
    # configured listen host split into (hostname, port-or-None)
    return split_host_port(get_listen_host())
def _build_listen_host(subdomain: str | None) -> str:
    """Return "[sub.]listen-host[:port]", or "" when no listen host is configured."""
    host, port = get_listen_parts()
    if not host:
        return ""
    name = f"{subdomain}.{host}" if subdomain else host
    return f"{name}:{port}" if port else name
def get_admin_host() -> str:
    # explicit ADMIN_BASE_URL override wins; otherwise "admin." subdomain
    override = _normalize_base_url(SERVER_CONFIG.ADMIN_BASE_URL)
    if override:
        return urlparse(override).netloc.lower()
    return _build_listen_host("admin")


def get_web_host() -> str:
    # explicit ARCHIVE_BASE_URL override wins; otherwise "web." subdomain
    override = _normalize_base_url(SERVER_CONFIG.ARCHIVE_BASE_URL)
    if override:
        return urlparse(override).netloc.lower()
    return _build_listen_host("web")


def get_api_host() -> str:
    return _build_listen_host("api")


def get_public_host() -> str:
    return _build_listen_host("public")


def get_snapshot_host(snapshot_id: str) -> str:
    # each snapshot gets its own subdomain named after its id
    return _build_listen_host(snapshot_id)


def get_original_host(domain: str) -> str:
    # archived-site passthrough: original domain as a subdomain
    return _build_listen_host(domain)


def is_snapshot_subdomain(subdomain: str) -> bool:
    # True when the subdomain looks like a snapshot id (hex/dashes)
    return bool(_SNAPSHOT_ID_RE.match(subdomain or ""))
def get_listen_subdomain(request_host: str) -> str:
    """Return the subdomain part of *request_host* relative to the configured
    listen host, or "" when it is not a subdomain of it (or ports clash)."""
    req_host, req_port = split_host_port(request_host)
    base_host, base_port = get_listen_parts()
    if not base_host:
        return ""
    if base_port and req_port and base_port != req_port:
        # both sides specify a port and they disagree
        return ""
    if req_host == base_host:
        # exact match: no subdomain at all
        return ""
    suffix = f".{base_host}"
    if not req_host.endswith(suffix):
        return ""
    return req_host[: -len(suffix)]
def host_matches(request_host: str, target_host: str) -> bool:
    """True when request_host names the same host as target_host.

    Ports only have to agree when both sides actually specify one.
    """
    if not (request_host and target_host):
        return False
    req_host, req_port = split_host_port(request_host)
    tgt_host, tgt_port = split_host_port(target_host)
    if req_host != tgt_host:
        return False
    return not (tgt_port and req_port and tgt_port != req_port)
def _scheme_from_request(request=None) -> str:
    # mirror the scheme of the current request; default to http otherwise
    if request:
        return request.scheme
    return "http"


def _build_base_url_for_host(host: str, request=None) -> str:
    # "" host propagates to "" url so callers can chain safely
    if not host:
        return ""
    scheme = _scheme_from_request(request)
    return f"{scheme}://{host}"
def get_admin_base_url(request=None) -> str:
override = _normalize_base_url(SERVER_CONFIG.ADMIN_BASE_URL)
if override:
return override
return _build_base_url_for_host(get_admin_host(), request=request)
def get_web_base_url(request=None) -> str:
override = _normalize_base_url(SERVER_CONFIG.ARCHIVE_BASE_URL)
if override:
return override
return _build_base_url_for_host(get_web_host(), request=request)
def get_api_base_url(request=None) -> str:
return _build_base_url_for_host(get_api_host(), request=request)
# Backwards-compat aliases (archive == web)
def get_archive_base_url(request=None) -> str:
return get_web_base_url(request=request)
def get_snapshot_base_url(snapshot_id: str, request=None) -> str:
return _build_base_url_for_host(get_snapshot_host(snapshot_id), request=request)
def get_original_base_url(domain: str, request=None) -> str:
    """Base URL for an original domain's subdomain."""
    host = get_original_host(domain)
    return _build_base_url_for_host(host, request=request)
def build_admin_url(path: str = "", request=None) -> str:
return _build_url(get_admin_base_url(request), path)
def build_web_url(path: str = "", request=None) -> str:
return _build_url(get_web_base_url(request), path)
def build_api_url(path: str = "", request=None) -> str:
return _build_url(get_api_base_url(request), path)
def build_archive_url(path: str = "", request=None) -> str:
return _build_url(get_archive_base_url(request), path)
def build_snapshot_url(snapshot_id: str, path: str = "", request=None) -> str:
return _build_url(get_snapshot_base_url(snapshot_id, request=request), path)
def build_original_url(domain: str, path: str = "", request=None) -> str:
return _build_url(get_original_base_url(domain, request=request), path)
def _build_url(base_url: str, path: str) -> str:
if not base_url:
if not path:
return ""
return path if path.startswith("/") else f"/{path}"
if not path:
return base_url
return f"{base_url}{path if path.startswith('/') else f'/{path}'}"
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/core/host_utils.py",
"license": "MIT License",
"lines": 128,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ArchiveBox/ArchiveBox:archivebox/plugins/dns/tests/test_dns.py | """
Tests for the DNS plugin.
Tests the real DNS hook with an actual URL to verify
DNS resolution capture.
"""
import json
import shutil
import subprocess
import sys
import tempfile
import time
from pathlib import Path
from django.test import TestCase
# Make the chrome plugin's shared test helpers importable from this test module.
sys.path.insert(0, str(Path(__file__).parent.parent.parent / 'chrome' / 'tests'))
from chrome_test_helpers import (
    chrome_session,
    CHROME_NAVIGATE_HOOK,
    get_plugin_dir,
    get_hook_script,
)
# Resolve the DNS hook script shipped with this plugin (any file extension).
# NOTE(review): get_hook_script presumably returns None when nothing matches —
# the test below asserts non-None, so that appears to be the contract.
PLUGIN_DIR = get_plugin_dir(__file__)
DNS_HOOK = get_hook_script(PLUGIN_DIR, 'on_Snapshot__*_dns.*')
class TestDNSPlugin(TestCase):
    """Test the DNS plugin."""

    def test_dns_hook_exists(self):
        """DNS hook script should exist."""
        # DNS_HOOK is None when the glob in the module header matched nothing.
        self.assertIsNotNone(DNS_HOOK, "DNS hook not found in plugin directory")
        self.assertTrue(DNS_HOOK.exists(), f"Hook not found: {DNS_HOOK}")
class TestDNSWithChrome(TestCase):
    """Integration tests for DNS plugin with Chrome."""

    def setUp(self):
        """Set up test environment."""
        # Scratch directory standing in for a snapshot output tree.
        self.temp_dir = Path(tempfile.mkdtemp())

    def tearDown(self):
        """Clean up."""
        shutil.rmtree(self.temp_dir, ignore_errors=True)

    def test_dns_records_captured(self):
        """DNS hook should capture DNS records from a real URL."""
        test_url = 'https://example.com'
        snapshot_id = 'test-dns-snapshot'
        # chrome_session launches Chrome and yields the env the hooks expect;
        # navigate=False so the DNS hook can be started *before* the page load.
        with chrome_session(
            self.temp_dir,
            crawl_id='test-dns-crawl',
            snapshot_id=snapshot_id,
            test_url=test_url,
            navigate=False,
            timeout=30,
        ) as (_process, _pid, snapshot_chrome_dir, env):
            # The DNS hook writes into a sibling 'dns' dir next to the chrome dir.
            dns_dir = snapshot_chrome_dir.parent / 'dns'
            dns_dir.mkdir(exist_ok=True)
            # Start the long-running DNS hook first (Popen, not run)...
            result = subprocess.Popen(
                ['node', str(DNS_HOOK), f'--url={test_url}', f'--snapshot-id={snapshot_id}'],
                cwd=str(dns_dir),
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                env=env
            )
            # ...then trigger the navigation whose DNS lookups it should observe.
            nav_result = subprocess.run(
                ['node', str(CHROME_NAVIGATE_HOOK), f'--url={test_url}', f'--snapshot-id={snapshot_id}'],
                cwd=str(snapshot_chrome_dir),
                capture_output=True,
                text=True,
                timeout=120,
                env=env
            )
            self.assertEqual(nav_result.returncode, 0, f"Navigation failed: {nav_result.stderr}")
            # Poll up to ~30s for the hook to flush at least one record to disk.
            dns_output = dns_dir / 'dns.jsonl'
            for _ in range(30):
                if dns_output.exists() and dns_output.stat().st_size > 0:
                    break
                time.sleep(1)
            # The hook runs until terminated; stop it and collect its output.
            if result.poll() is None:
                result.terminate()
                try:
                    stdout, stderr = result.communicate(timeout=5)
                except subprocess.TimeoutExpired:
                    result.kill()
                    stdout, stderr = result.communicate()
            else:
                stdout, stderr = result.communicate()
            self.assertNotIn('Traceback', stderr)
            self.assertTrue(dns_output.exists(), "dns.jsonl not created")
            content = dns_output.read_text().strip()
            self.assertTrue(content, "DNS output should not be empty")
            # Parse JSONL leniently: skip blank and malformed lines.
            records = []
            for line in content.split('\n'):
                line = line.strip()
                if not line:
                    continue
                try:
                    records.append(json.loads(line))
                except json.JSONDecodeError:
                    pass
            self.assertTrue(records, "No DNS records parsed")
            # At least one record must pair a hostname with a resolved IP.
            has_ip_record = any(r.get('hostname') and r.get('ip') for r in records)
            self.assertTrue(has_ip_record, f"No DNS record with hostname + ip: {records}")
if __name__ == '__main__':
    # Bug fix: `pytest` was referenced here without ever being imported
    # (the module only imports json/shutil/subprocess/sys/tempfile/time),
    # so running this file directly raised NameError instead of the tests.
    import pytest
    pytest.main([__file__, '-v'])
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/dns/tests/test_dns.py",
"license": "MIT License",
"lines": 101,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ArchiveBox/ArchiveBox:archivebox/tests/test_savepagenow.py | """Integration tests for /web/https://... shortcut (Save Page Now)."""
import os
import subprocess
import sys
import textwrap
from pathlib import Path
from archivebox.tests.conftest import create_test_url
def _run_savepagenow_script(initialized_archive: Path, request_url: str, expected_url: str, *, login: bool, public_add_view: bool):
    """Run an in-process Django client script that hits /web/<request_url> twice.

    The generated script asserts that the first request creates exactly one
    Snapshot for *expected_url*, that both requests redirect (302) to the
    snapshot's page, and that the second request reuses the snapshot rather
    than creating a duplicate.  Returns the CompletedProcess of the python
    subprocess that ran the script.
    """
    project_root = Path(__file__).resolve().parents[2]
    # {login!r}/{request_url!r}/{expected_url!r} are baked into the script
    # source; doubled braces ({{...}}) survive as literal f-string braces
    # inside the generated script.
    script = textwrap.dedent(
        f"""
        import os
        os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'archivebox.core.settings')
        from archivebox.config.django import setup_django
        setup_django()
        from django.test import Client
        from django.contrib.auth import get_user_model
        from archivebox.core.models import Snapshot
        client = Client()
        if {login!r}:
            user = get_user_model().objects.create_user(username='tester', password='pw')
            client.force_login(user)
        target_url = {request_url!r}
        resp = client.get('/web/' + target_url, HTTP_HOST='web.archivebox.localhost:8000')
        assert resp.status_code == 302, resp.status_code
        snapshot = Snapshot.objects.filter(url={expected_url!r}).order_by('-created_at').first()
        if snapshot is None:
            raise AssertionError(
                "snapshot not created; status=%s location=%s count=%s"
                % (
                    resp.status_code,
                    resp.get('Location'),
                    Snapshot.objects.count(),
                )
            )
        assert resp['Location'] == f"/{{snapshot.url_path}}"
        resp2 = client.get('/web/' + target_url, HTTP_HOST='web.archivebox.localhost:8000')
        assert resp2.status_code == 302, resp2.status_code
        assert Snapshot.objects.filter(url={expected_url!r}).count() == 1
        assert resp2['Location'] == f"/{{snapshot.url_path}}"
        """
    )
    # Every extractor is disabled so adding the URL only creates DB rows,
    # keeping the test fast and fully offline.
    env = {
        **os.environ,
        'DATA_DIR': str(initialized_archive),
        'USE_COLOR': 'False',
        'SHOW_PROGRESS': 'False',
        'PUBLIC_ADD_VIEW': 'True' if public_add_view else 'False',
        'SAVE_ARCHIVEDOTORG': 'False',
        'SAVE_TITLE': 'False',
        'SAVE_FAVICON': 'False',
        'SAVE_WGET': 'False',
        'SAVE_WARC': 'False',
        'SAVE_PDF': 'False',
        'SAVE_SCREENSHOT': 'False',
        'SAVE_DOM': 'False',
        'SAVE_SINGLEFILE': 'False',
        'SAVE_READABILITY': 'False',
        'SAVE_MERCURY': 'False',
        'SAVE_GIT': 'False',
        'SAVE_YTDLP': 'False',
        'SAVE_HEADERS': 'False',
        'SAVE_HTMLTOTEXT': 'False',
    }
    return subprocess.run(
        [sys.executable, '-c', script],
        cwd=project_root,
        env=env,
        text=True,
        capture_output=True,
        timeout=60,
    )
def _run_savepagenow_not_found_script(initialized_archive: Path, request_url: str):
    """Run a Django client script asserting /web/<request_url> returns 404.

    Used for the case where PUBLIC_ADD_VIEW is off and the visitor is
    anonymous: the URL is unknown, so no Snapshot may be created and the
    view must 404.  Returns the CompletedProcess of the python subprocess.
    """
    project_root = Path(__file__).resolve().parents[2]
    script = textwrap.dedent(
        f"""
        import os
        os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'archivebox.core.settings')
        from archivebox.config.django import setup_django
        setup_django()
        from django.test import Client
        from archivebox.core.models import Snapshot
        client = Client()
        target_url = {request_url!r}
        resp = client.get('/web/' + target_url, HTTP_HOST='web.archivebox.localhost:8000')
        assert resp.status_code == 404, resp.status_code
        assert Snapshot.objects.count() == 0
        """
    )
    # All extractors disabled: the test only exercises routing and the DB.
    env = {
        **os.environ,
        'DATA_DIR': str(initialized_archive),
        'USE_COLOR': 'False',
        'SHOW_PROGRESS': 'False',
        'PUBLIC_ADD_VIEW': 'False',
        'SAVE_ARCHIVEDOTORG': 'False',
        'SAVE_TITLE': 'False',
        'SAVE_FAVICON': 'False',
        'SAVE_WGET': 'False',
        'SAVE_WARC': 'False',
        'SAVE_PDF': 'False',
        'SAVE_SCREENSHOT': 'False',
        'SAVE_DOM': 'False',
        'SAVE_SINGLEFILE': 'False',
        'SAVE_READABILITY': 'False',
        'SAVE_MERCURY': 'False',
        'SAVE_GIT': 'False',
        'SAVE_YTDLP': 'False',
        'SAVE_HEADERS': 'False',
        'SAVE_HTMLTOTEXT': 'False',
    }
    return subprocess.run(
        [sys.executable, '-c', script],
        cwd=project_root,
        env=env,
        text=True,
        capture_output=True,
        timeout=60,
    )
def _run_savepagenow_existing_snapshot_script(initialized_archive: Path, request_url: str, stored_url: str):
    """Run a Django client script asserting /web/<request_url> redirects to a
    pre-existing Snapshot for *stored_url*, even with public add disabled and
    no login.  The script seeds the Snapshot (plus its Crawl) first, then
    checks the 302 Location.  Returns the subprocess's CompletedProcess.
    """
    project_root = Path(__file__).resolve().parents[2]
    script = textwrap.dedent(
        f"""
        import os
        os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'archivebox.core.settings')
        from archivebox.config.django import setup_django
        setup_django()
        from django.test import Client
        from archivebox.core.models import Snapshot
        from archivebox.crawls.models import Crawl
        from archivebox.base_models.models import get_or_create_system_user_pk
        target_url = {request_url!r}
        stored_url = {stored_url!r}
        created_by_id = get_or_create_system_user_pk()
        crawl = Crawl.objects.create(urls=stored_url, created_by_id=created_by_id)
        snapshot = Snapshot.objects.create(url=stored_url, crawl=crawl)
        client = Client()
        resp = client.get('/web/' + target_url, HTTP_HOST='web.archivebox.localhost:8000')
        assert resp.status_code == 302, resp.status_code
        assert resp['Location'] == f"/{{snapshot.url_path}}"
        """
    )
    # All extractors disabled: the test only exercises routing and the DB.
    env = {
        **os.environ,
        'DATA_DIR': str(initialized_archive),
        'USE_COLOR': 'False',
        'SHOW_PROGRESS': 'False',
        'PUBLIC_ADD_VIEW': 'False',
        'SAVE_ARCHIVEDOTORG': 'False',
        'SAVE_TITLE': 'False',
        'SAVE_FAVICON': 'False',
        'SAVE_WGET': 'False',
        'SAVE_WARC': 'False',
        'SAVE_PDF': 'False',
        'SAVE_SCREENSHOT': 'False',
        'SAVE_DOM': 'False',
        'SAVE_SINGLEFILE': 'False',
        'SAVE_READABILITY': 'False',
        'SAVE_MERCURY': 'False',
        'SAVE_GIT': 'False',
        'SAVE_YTDLP': 'False',
        'SAVE_HEADERS': 'False',
        'SAVE_HTMLTOTEXT': 'False',
    }
    return subprocess.run(
        [sys.executable, '-c', script],
        cwd=project_root,
        env=env,
        text=True,
        capture_output=True,
        timeout=60,
    )
def test_web_add_creates_and_reuses_snapshot_logged_in(initialized_archive):
    """/web/https://... should work for authenticated users even when public add is off."""
    url = create_test_url(domain='example.com', path='savepagenow-auth')
    result = _run_savepagenow_script(
        initialized_archive,
        url.replace('https://', ''),
        url,
        login=True,
        public_add_view=False,
    )
    assert result.returncode == 0, (
        "SavePageNow shortcut (logged-in) test failed.\n"
        f"stdout:\n{result.stdout}\n"
        f"stderr:\n{result.stderr}"
    )
def test_web_add_creates_and_reuses_snapshot_public(initialized_archive):
    """/web/https://... should work when PUBLIC_ADD_VIEW is enabled without login."""
    url = create_test_url(domain='example.com', path='savepagenow-public')
    result = _run_savepagenow_script(
        initialized_archive,
        url.replace('https://', ''),
        url,
        login=False,
        public_add_view=True,
    )
    assert result.returncode == 0, (
        "SavePageNow shortcut (public add) test failed.\n"
        f"stdout:\n{result.stdout}\n"
        f"stderr:\n{result.stderr}"
    )
def test_web_add_requires_login_when_public_off(initialized_archive):
    """/web/https://... should 404 for new URLs when PUBLIC_ADD_VIEW is false and not logged in."""
    url = create_test_url(domain='example.com', path='savepagenow-404')
    stripped = url.replace('https://', '')
    result = _run_savepagenow_not_found_script(initialized_archive, stripped)
    assert result.returncode == 0, (
        "SavePageNow shortcut (no public add) test failed.\n"
        f"stdout:\n{result.stdout}\n"
        f"stderr:\n{result.stderr}"
    )
def test_web_add_redirects_existing_snapshot_when_public_off(initialized_archive):
    """/web/https://... should redirect to existing snapshot even when public add is off and not logged in."""
    url = create_test_url(domain='example.com', path='savepagenow-existing')
    stripped = url.replace('https://', '')
    result = _run_savepagenow_existing_snapshot_script(initialized_archive, stripped, url)
    assert result.returncode == 0, (
        "SavePageNow shortcut (existing snapshot) test failed.\n"
        f"stdout:\n{result.stdout}\n"
        f"stderr:\n{result.stderr}"
    )
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/tests/test_savepagenow.py",
"license": "MIT License",
"lines": 212,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ArchiveBox/ArchiveBox:archivebox/tests/test_urls.py | import os
import sys
import subprocess
import textwrap
from pathlib import Path
import pytest
# Repository root (three directory levels above this test file); prepended to
# PYTHONPATH so the subprocess scripts can import the archivebox package.
REPO_ROOT = Path(__file__).resolve().parents[3]
def _merge_pythonpath(env: dict[str, str]) -> dict[str, str]:
    """Mutate *env* in place: drop DATA_DIR and make REPO_ROOT lead PYTHONPATH."""
    env.pop("DATA_DIR", None)
    existing = env.get("PYTHONPATH", "")
    if existing:
        env["PYTHONPATH"] = f"{REPO_ROOT}{os.pathsep}{existing}"
    else:
        env["PYTHONPATH"] = str(REPO_ROOT)
    return env
def _run_python(script: str, cwd: Path, timeout: int = 60) -> subprocess.CompletedProcess:
    """Feed *script* to a fresh interpreter on stdin and capture its output."""
    child_env = _merge_pythonpath(os.environ.copy())
    command = [sys.executable, "-"]
    return subprocess.run(
        command,
        cwd=cwd,
        env=child_env,
        input=script,
        capture_output=True,
        text=True,
        timeout=timeout,
    )
def _build_script(body: str) -> str:
    """Prepend the shared Django-setup prelude to *body* and return the script.

    The prelude boots Django, imports the host helpers under test, and defines
    utilities (response_body, ensure_admin_user, get_snapshot,
    get_snapshot_files) that the per-test bodies rely on.
    """
    prelude = textwrap.dedent(
        """
        import os
        from pathlib import Path
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "archivebox.core.settings")
        import django
        django.setup()
        from django.test import Client
        from django.contrib.auth import get_user_model
        from archivebox.core.models import Snapshot, ArchiveResult
        from archivebox.config.common import SERVER_CONFIG
        from archivebox.core.host_utils import (
            get_admin_host,
            get_api_host,
            get_web_host,
            get_snapshot_host,
            get_original_host,
            get_listen_subdomain,
            split_host_port,
            host_matches,
            is_snapshot_subdomain,
        )
        def response_body(resp):
            if getattr(resp, "streaming", False):
                return b"".join(resp.streaming_content)
            return resp.content
        def ensure_admin_user():
            User = get_user_model()
            admin, _ = User.objects.get_or_create(
                username="testadmin",
                defaults={"email": "admin@example.com", "is_staff": True, "is_superuser": True},
            )
            admin.set_password("testpassword")
            admin.save()
            return admin
        def get_snapshot():
            snapshot = Snapshot.objects.order_by("-created_at").first()
            assert snapshot is not None
            return snapshot
        def get_snapshot_files(snapshot):
            output_rel = None
            for output in snapshot.discover_outputs():
                candidate = output.get("path")
                if not candidate:
                    continue
                if candidate.startswith("responses/"):
                    continue
                if Path(snapshot.output_dir, candidate).is_file():
                    output_rel = candidate
                    break
            if output_rel is None:
                fallback = Path(snapshot.output_dir, "index.jsonl")
                if fallback.exists():
                    output_rel = "index.jsonl"
            assert output_rel is not None
            responses_root = Path(snapshot.output_dir) / "responses" / snapshot.domain
            assert responses_root.exists()
            response_file = None
            response_rel = None
            for candidate in responses_root.rglob("*"):
                if not candidate.is_file():
                    continue
                rel = candidate.relative_to(responses_root)
                if not (Path(snapshot.output_dir) / rel).exists():
                    response_file = candidate
                    response_rel = str(rel)
                    break
            if response_file is None:
                response_file = next(p for p in responses_root.rglob("*") if p.is_file())
                response_rel = str(response_file.relative_to(responses_root))
            response_output_path = Path(snapshot.output_dir) / response_rel
            return output_rel, response_file, response_rel, response_output_path
        """
    )
    return prelude + "\n" + textwrap.dedent(body)
@pytest.mark.usefixtures("real_archive_with_example")
class TestUrlRouting:
    """Routing/host-splitting tests run as subprocess scripts against a real archive.

    Each test builds a Django script via _build_script() and executes it with
    _run_python(); success is signalled by exit code 0 plus "OK" on stdout.
    """
    # Populated by the real_archive_with_example fixture — TODO confirm.
    data_dir: Path

    def _run(self, body: str, timeout: int = 120) -> None:
        """Run *body* (with the shared prelude) in a subprocess and assert success."""
        script = _build_script(body)
        result = _run_python(script, cwd=self.data_dir, timeout=timeout)
        assert result.returncode == 0, result.stderr
        assert "OK" in result.stdout

    def test_host_utils_and_public_redirect(self) -> None:
        """Host helper outputs and the public-index redirects behave as configured."""
        self._run(
            """
            snapshot = get_snapshot()
            snapshot_id = str(snapshot.id)
            domain = snapshot.domain
            web_host = get_web_host()
            admin_host = get_admin_host()
            api_host = get_api_host()
            snapshot_host = get_snapshot_host(snapshot_id)
            original_host = get_original_host(domain)
            base_host = SERVER_CONFIG.LISTEN_HOST
            host_only, port = split_host_port(base_host)
            assert host_only == "archivebox.localhost"
            assert port == "8000"
            assert web_host == "web.archivebox.localhost:8000"
            assert admin_host == "admin.archivebox.localhost:8000"
            assert api_host == "api.archivebox.localhost:8000"
            assert snapshot_host == f"{snapshot_id}.archivebox.localhost:8000"
            assert original_host == f"{domain}.archivebox.localhost:8000"
            assert get_listen_subdomain(web_host) == "web"
            assert get_listen_subdomain(admin_host) == "admin"
            assert get_listen_subdomain(api_host) == "api"
            assert get_listen_subdomain(snapshot_host) == snapshot_id
            assert get_listen_subdomain(original_host) == domain
            assert get_listen_subdomain(base_host) == ""
            assert host_matches(web_host, get_web_host())
            assert is_snapshot_subdomain(snapshot_id)
            client = Client()
            resp = client.get("/public.html", HTTP_HOST=web_host)
            assert resp.status_code in (301, 302)
            assert resp["Location"].endswith("/public/")
            resp = client.get("/public/", HTTP_HOST=base_host)
            assert resp.status_code in (301, 302)
            assert resp["Location"].startswith(f"http://{web_host}/public/")
            resp = client.get("/", HTTP_HOST=api_host)
            assert resp.status_code in (301, 302)
            assert resp["Location"].startswith("/api/")
            print("OK")
            """
        )

    def test_web_admin_routing(self) -> None:
        """Admin pages redirect to the admin host; /add/ is served on the web host."""
        self._run(
            """
            ensure_admin_user()
            client = Client()
            web_host = get_web_host()
            admin_host = get_admin_host()
            resp = client.get("/add/", HTTP_HOST=web_host)
            assert resp.status_code == 200
            resp = client.get("/admin/login/", HTTP_HOST=web_host)
            assert resp.status_code in (301, 302)
            assert admin_host in resp["Location"]
            resp = client.get("/admin/login/", HTTP_HOST=admin_host)
            assert resp.status_code == 200
            print("OK")
            """
        )

    def test_snapshot_routing_and_hosts(self) -> None:
        """Snapshot pages/files resolve on the web, snapshot-id, and original-domain hosts."""
        self._run(
            """
            snapshot = get_snapshot()
            output_rel, response_file, response_rel, response_output_path = get_snapshot_files(snapshot)
            snapshot_id = str(snapshot.id)
            snapshot_host = get_snapshot_host(snapshot_id)
            original_host = get_original_host(snapshot.domain)
            web_host = get_web_host()
            client = Client()
            snapshot_path = f"/{snapshot.url_path}/"
            resp = client.get(snapshot_path, HTTP_HOST=web_host)
            assert resp.status_code == 200
            resp = client.get(f"/web/{snapshot.domain}", HTTP_HOST=web_host)
            assert resp.status_code in (301, 302)
            assert resp["Location"].endswith(f"/{snapshot.url_path}")
            resp = client.get(f"/{snapshot.url_path}", HTTP_HOST=web_host)
            assert resp.status_code == 200
            date_segment = snapshot.url_path.split("/")[1]
            resp = client.get(f"/web/{date_segment}/{date_segment}/{snapshot_id}/", HTTP_HOST=web_host)
            assert resp.status_code == 404
            resp = client.get(f"/{snapshot.url_path}/{output_rel}", HTTP_HOST=web_host)
            assert resp.status_code in (301, 302)
            assert snapshot_host in resp["Location"]
            resp = client.get(f"/{output_rel}", HTTP_HOST=snapshot_host)
            assert resp.status_code == 200
            assert response_body(resp) == Path(snapshot.output_dir, output_rel).read_bytes()
            resp = client.get(f"/{response_rel}", HTTP_HOST=snapshot_host)
            assert resp.status_code == 200
            snapshot_body = response_body(resp)
            if response_output_path.exists():
                assert snapshot_body == response_output_path.read_bytes()
            else:
                assert snapshot_body == response_file.read_bytes()
            resp = client.get(f"/{response_rel}", HTTP_HOST=original_host)
            assert resp.status_code == 200
            assert response_body(resp) == response_file.read_bytes()
            print("OK")
            """
        )

    def test_template_and_admin_links(self) -> None:
        """Rendered templates and admin pages embed the correct absolute host URLs."""
        self._run(
            """
            ensure_admin_user()
            snapshot = get_snapshot()
            snapshot.write_html_details()
            snapshot_id = str(snapshot.id)
            snapshot_host = get_snapshot_host(snapshot_id)
            admin_host = get_admin_host()
            web_host = get_web_host()
            client = Client()
            resp = client.get("/public/", HTTP_HOST=web_host)
            assert resp.status_code == 200
            public_html = response_body(resp).decode("utf-8", "ignore")
            assert "http://web.archivebox.localhost:8000" in public_html
            resp = client.get(f"/{snapshot.url_path}/index.html", HTTP_HOST=web_host)
            assert resp.status_code == 200
            live_html = response_body(resp).decode("utf-8", "ignore")
            assert f"http://{snapshot_host}/" in live_html
            assert "http://web.archivebox.localhost:8000" in live_html
            static_html = Path(snapshot.output_dir, "index.html").read_text(encoding="utf-8", errors="ignore")
            assert f"http://{snapshot_host}/" in static_html
            client.login(username="testadmin", password="testpassword")
            resp = client.get(f"/admin/core/snapshot/{snapshot_id}/change/", HTTP_HOST=admin_host)
            assert resp.status_code == 200
            admin_html = response_body(resp).decode("utf-8", "ignore")
            assert f"http://web.archivebox.localhost:8000/{snapshot.archive_path}" in admin_html
            assert f"http://{snapshot_host}/" in admin_html
            result = ArchiveResult.objects.filter(snapshot=snapshot).first()
            assert result is not None
            resp = client.get(f"/admin/core/archiveresult/{result.id}/change/", HTTP_HOST=admin_host)
            assert resp.status_code == 200
            ar_html = response_body(resp).decode("utf-8", "ignore")
            assert f"http://{snapshot_host}/" in ar_html
            print("OK")
            """
        )

    def test_api_available_on_admin_and_api_hosts(self) -> None:
        """The API docs page is served on both the admin and API hosts."""
        self._run(
            """
            client = Client()
            admin_host = get_admin_host()
            api_host = get_api_host()
            resp = client.get("/api/v1/docs", HTTP_HOST=admin_host)
            assert resp.status_code == 200
            resp = client.get("/api/v1/docs", HTTP_HOST=api_host)
            assert resp.status_code == 200
            print("OK")
            """
        )

    def test_api_post_with_token_on_admin_and_api_hosts(self) -> None:
        """Token-authenticated API POSTs succeed on both the admin and API hosts."""
        self._run(
            """
            ensure_admin_user()
            from archivebox.api.auth import get_or_create_api_token
            token = get_or_create_api_token(get_user_model().objects.get(username="testadmin"))
            assert token is not None
            client = Client()
            admin_host = get_admin_host()
            api_host = get_api_host()
            payload = '{"name": "apitest-tag"}'
            headers = {"HTTP_X_ARCHIVEBOX_API_KEY": token.token}
            resp = client.post(
                "/api/v1/core/tags/create/",
                data=payload,
                content_type="application/json",
                HTTP_HOST=admin_host,
                **headers,
            )
            assert resp.status_code == 200
            data = resp.json()
            assert data.get("success") is True
            assert data.get("tag_name") == "apitest-tag"
            resp = client.post(
                "/api/v1/core/tags/create/",
                data=payload,
                content_type="application/json",
                HTTP_HOST=api_host,
                **headers,
            )
            assert resp.status_code == 200
            data = resp.json()
            assert data.get("success") is True
            assert data.get("tag_name") == "apitest-tag"
            print("OK")
            """
        )
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/tests/test_urls.py",
"license": "MIT License",
"lines": 295,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ArchiveBox/ArchiveBox:archivebox/workers/management/commands/orchestrator_watch.py | from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Watch the runserver autoreload PID file and restart orchestrator on reloads."
def add_arguments(self, parser):
parser.add_argument(
"--pidfile",
default=None,
help="Path to runserver pidfile to watch",
)
parser.add_argument(
"--interval",
type=float,
default=1.0,
help="Polling interval in seconds",
)
def handle(self, *args, **kwargs):
import os
import time
from archivebox.config.common import STORAGE_CONFIG
from archivebox.machine.models import Process, Machine
from archivebox.workers.orchestrator import Orchestrator
os.environ['ARCHIVEBOX_ORCHESTRATOR_WATCHER'] = '1'
pidfile = kwargs.get("pidfile") or os.environ.get("ARCHIVEBOX_RUNSERVER_PIDFILE")
if not pidfile:
pidfile = str(STORAGE_CONFIG.TMP_DIR / "runserver.pid")
interval = max(0.2, float(kwargs.get("interval", 1.0)))
last_pid = None
def restart_orchestrator():
Process.cleanup_stale_running()
machine = Machine.current()
running = Process.objects.filter(
machine=machine,
status=Process.StatusChoices.RUNNING,
process_type__in=[
Process.TypeChoices.ORCHESTRATOR,
Process.TypeChoices.WORKER,
Process.TypeChoices.HOOK,
],
)
for proc in running:
try:
if proc.process_type == Process.TypeChoices.HOOK:
proc.kill_tree(graceful_timeout=0.5)
else:
proc.terminate(graceful_timeout=1.0)
except Exception:
continue
if not Orchestrator.is_running():
Orchestrator(exit_on_idle=False).start()
while True:
try:
if os.path.exists(pidfile):
with open(pidfile, "r") as handle:
pid = handle.read().strip() or None
else:
pid = None
if pid and pid != last_pid:
restart_orchestrator()
last_pid = pid
elif not Orchestrator.is_running():
Orchestrator(exit_on_idle=False).start()
except Exception:
pass
time.sleep(interval)
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/workers/management/commands/orchestrator_watch.py",
"license": "MIT License",
"lines": 64,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ArchiveBox/ArchiveBox:archivebox/plugins/chrome/on_Crawl__70_chrome_install.py | #!/usr/bin/env python3
"""
Emit Chromium Binary dependency for the crawl.
NOTE: We use Chromium instead of Chrome because Chrome 137+ removed support for
--load-extension and --disable-extensions-except flags, which are needed for
loading unpacked extensions in headless mode.
"""
import json
import os
import sys
def main():
    """Emit a Chromium Binary dependency record unless Chrome is disabled."""
    # CHROME_ENABLED defaults to enabled; only explicit falsy values opt out.
    flag = os.environ.get('CHROME_ENABLED', 'true').lower()
    if flag in ('false', '0', 'no', 'off'):
        sys.exit(0)
    record = {
        'type': 'Binary',
        'name': 'chromium',
        'binproviders': 'puppeteer,env',
        'overrides': {
            'puppeteer': ['chromium@latest', '--install-deps'],
        },
    }
    print(json.dumps(record))
    sys.exit(0)


if __name__ == '__main__':
    main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/chrome/on_Crawl__70_chrome_install.py",
"license": "MIT License",
"lines": 27,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/forumdl/on_Crawl__25_forumdl_install.py | #!/usr/bin/env python3
"""
Emit forum-dl Binary dependency for the crawl.
"""
import json
import os
import sys
def get_env(name: str, default: str = '') -> str:
    """Read an environment variable, stripping surrounding whitespace."""
    raw = os.environ.get(name, default)
    return raw.strip()
def get_env_bool(name: str, default: bool = False) -> bool:
    """Parse a boolean env var; unrecognized or empty values yield *default*."""
    text = get_env(name, '').lower()
    if text in ('true', '1', 'yes', 'on'):
        return True
    if text in ('false', '0', 'no', 'off'):
        return False
    return default
def output_binary(name: str, binproviders: str, overrides: dict | None = None):
    """Print a Binary JSONL record describing a crawl dependency to stdout."""
    record = {
        'type': 'Binary',
        'name': name,
        'binproviders': binproviders,
        'machine_id': os.environ.get('MACHINE_ID', ''),
    }
    if overrides:
        record['overrides'] = overrides
    print(json.dumps(record))
def main():
    """Emit the forum-dl Binary dependency unless the plugin is disabled."""
    if not get_env_bool('FORUMDL_ENABLED', True):
        sys.exit(0)
    output_binary(
        name='forum-dl',
        binproviders='pip,env',
        overrides={
            'pip': {
                'packages': [
                    '--no-deps',
                    '--prefer-binary',
                    'forum-dl',
                    'chardet==5.2.0',
                    'pydantic',
                    'pydantic-core',
                    'typing-extensions',
                    'annotated-types',
                    'typing-inspection',
                    'beautifulsoup4',
                    'soupsieve',
                    'lxml',
                    'requests',
                    'urllib3',
                    'certifi',
                    'idna',
                    'charset-normalizer',
                    'tenacity',
                    'python-dateutil',
                    'six',
                    'html2text',
                    'warcio',
                ]
            }
        },
    )
    sys.exit(0)


if __name__ == '__main__':
    main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/forumdl/on_Crawl__25_forumdl_install.py",
"license": "MIT License",
"lines": 67,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/gallerydl/on_Crawl__20_gallerydl_install.py | #!/usr/bin/env python3
"""
Emit gallery-dl Binary dependency for the crawl.
"""
import json
import os
import sys
def get_env(name: str, default: str = '') -> str:
    """Read an environment variable, stripping surrounding whitespace."""
    raw = os.environ.get(name, default)
    return raw.strip()
def get_env_bool(name: str, default: bool = False) -> bool:
    """Parse a boolean env var; unrecognized or empty values yield *default*."""
    text = get_env(name, '').lower()
    if text in ('true', '1', 'yes', 'on'):
        return True
    if text in ('false', '0', 'no', 'off'):
        return False
    return default
def output_binary(name: str, binproviders: str):
    """Print a Binary JSONL record describing a crawl dependency to stdout."""
    record = {
        'type': 'Binary',
        'name': name,
        'binproviders': binproviders,
        'machine_id': os.environ.get('MACHINE_ID', ''),
    }
    print(json.dumps(record))
def main():
    """Emit the gallery-dl Binary dependency unless the plugin is disabled."""
    if not get_env_bool('GALLERYDL_ENABLED', True):
        sys.exit(0)
    output_binary(name='gallery-dl', binproviders='pip,brew,apt,env')
    sys.exit(0)


if __name__ == '__main__':
    main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/gallerydl/on_Crawl__20_gallerydl_install.py",
"license": "MIT License",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/git/on_Crawl__05_git_install.py | #!/usr/bin/env python3
"""
Emit git Binary dependency for the crawl.
"""
import json
import os
import sys
def get_env(name: str, default: str = '') -> str:
    """Read an environment variable, stripping surrounding whitespace."""
    raw = os.environ.get(name, default)
    return raw.strip()
def get_env_bool(name: str, default: bool = False) -> bool:
    """Parse a boolean env var; unrecognized or empty values yield *default*."""
    text = get_env(name, '').lower()
    if text in ('true', '1', 'yes', 'on'):
        return True
    if text in ('false', '0', 'no', 'off'):
        return False
    return default
def output_binary(name: str, binproviders: str):
    """Print a Binary JSONL record describing a crawl dependency to stdout."""
    record = {
        'type': 'Binary',
        'name': name,
        'binproviders': binproviders,
        'machine_id': os.environ.get('MACHINE_ID', ''),
    }
    print(json.dumps(record))
def main():
    """Emit the git Binary dependency unless the plugin is disabled."""
    if not get_env_bool('GIT_ENABLED', True):
        sys.exit(0)
    output_binary(name='git', binproviders='apt,brew,env')
    sys.exit(0)


if __name__ == '__main__':
    main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/git/on_Crawl__05_git_install.py",
"license": "MIT License",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/mercury/on_Crawl__40_mercury_install.py | #!/usr/bin/env python3
"""
Emit postlight-parser Binary dependency for the crawl.
"""
import json
import os
import sys
def get_env(name: str, default: str = '') -> str:
    """Read an environment variable, stripping surrounding whitespace."""
    raw = os.environ.get(name, default)
    return raw.strip()
def get_env_bool(name: str, default: bool = False) -> bool:
    """Parse a boolean env var; unrecognized or empty values yield *default*."""
    text = get_env(name, '').lower()
    if text in ('true', '1', 'yes', 'on'):
        return True
    if text in ('false', '0', 'no', 'off'):
        return False
    return default
def output_binary(name: str, binproviders: str):
    """Print a Binary JSONL record; pins the npm package to @postlight/parser."""
    record = {
        'type': 'Binary',
        'name': name,
        'binproviders': binproviders,
        'overrides': {
            'npm': {
                'packages': ['@postlight/parser'],
            }
        },
        'machine_id': os.environ.get('MACHINE_ID', ''),
    }
    print(json.dumps(record))
def main():
    """Emit the postlight-parser Binary dependency unless the plugin is disabled."""
    if not get_env_bool('MERCURY_ENABLED', True):
        sys.exit(0)
    output_binary(name='postlight-parser', binproviders='npm,env')
    sys.exit(0)


if __name__ == '__main__':
    main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/mercury/on_Crawl__40_mercury_install.py",
"license": "MIT License",
"lines": 39,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/npm/on_Crawl__00_npm_install.py | #!/usr/bin/env python3
"""
Emit node/npm Binary dependencies for the crawl.
This hook runs early in the Crawl lifecycle so node/npm are installed
before any npm-based extractors (e.g., puppeteer) run.
"""
import json
import os
import sys
def get_env(name: str, default: str = '') -> str:
    """Read an environment variable, stripping surrounding whitespace."""
    raw = os.environ.get(name, default)
    return raw.strip()
def output_binary(name: str, binproviders: str, overrides: dict | None = None) -> None:
    """Print a Binary JSONL record describing a crawl dependency to stdout."""
    record = {
        'type': 'Binary',
        'name': name,
        'binproviders': binproviders,
        'machine_id': os.environ.get('MACHINE_ID', ''),
    }
    if overrides:
        record['overrides'] = overrides
    print(json.dumps(record))
def main() -> None:
output_binary(
name='node',
binproviders='apt,brew,env',
overrides={'apt': {'packages': ['nodejs']}},
)
output_binary(
name='npm',
binproviders='apt,brew,env',
overrides={
'apt': {'packages': ['nodejs', 'npm']},
'brew': {'packages': ['node']},
},
)
sys.exit(0)
if __name__ == '__main__':
main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/npm/on_Crawl__00_npm_install.py",
"license": "MIT License",
"lines": 39,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/papersdl/on_Crawl__30_papersdl_install.py | #!/usr/bin/env python3
"""
Emit papers-dl Binary dependency for the crawl.
"""
import json
import os
import sys
def get_env(name: str, default: str = '') -> str:
return os.environ.get(name, default).strip()
def get_env_bool(name: str, default: bool = False) -> bool:
val = get_env(name, '').lower()
if val in ('true', '1', 'yes', 'on'):
return True
if val in ('false', '0', 'no', 'off'):
return False
return default
def output_binary(name: str, binproviders: str):
"""Output Binary JSONL record for a dependency."""
machine_id = os.environ.get('MACHINE_ID', '')
record = {
'type': 'Binary',
'name': name,
'binproviders': binproviders,
'machine_id': machine_id,
}
print(json.dumps(record))
def main():
papersdl_enabled = get_env_bool('PAPERSDL_ENABLED', True)
if not papersdl_enabled:
sys.exit(0)
output_binary(name='papers-dl', binproviders='pip,env')
sys.exit(0)
if __name__ == '__main__':
main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/papersdl/on_Crawl__30_papersdl_install.py",
"license": "MIT License",
"lines": 34,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/puppeteer/on_Binary__12_puppeteer_install.py | #!/usr/bin/env python3
"""
Install Chromium via the Puppeteer CLI.
Usage: on_Binary__12_puppeteer_install.py --binary-id=<uuid> --machine-id=<uuid> --name=<name>
Output: Binary JSONL record to stdout after installation
"""
import json
import os
import re
import sys
from pathlib import Path
import rich_click as click
from abx_pkg import Binary, EnvProvider, NpmProvider, BinProviderOverrides
# Fix pydantic forward reference issue
NpmProvider.model_rebuild()
@click.command()
@click.option('--machine-id', required=True, help='Machine UUID')
@click.option('--binary-id', required=True, help='Binary UUID')
@click.option('--name', required=True, help='Binary name to install')
@click.option('--binproviders', default='*', help='Allowed providers (comma-separated)')
@click.option('--overrides', default=None, help='JSON-encoded overrides dict')
def main(machine_id: str, binary_id: str, name: str, binproviders: str, overrides: str | None) -> None:
if binproviders != '*' and 'puppeteer' not in binproviders.split(','):
sys.exit(0)
if name not in ('chromium', 'chrome'):
sys.exit(0)
lib_dir = os.environ.get('LIB_DIR', '').strip()
if not lib_dir:
click.echo('ERROR: LIB_DIR environment variable not set', err=True)
sys.exit(1)
npm_prefix = Path(lib_dir) / 'npm'
npm_prefix.mkdir(parents=True, exist_ok=True)
npm_provider = NpmProvider(npm_prefix=npm_prefix)
cache_dir = Path(lib_dir) / 'puppeteer'
cache_dir.mkdir(parents=True, exist_ok=True)
os.environ.setdefault('PUPPETEER_CACHE_DIR', str(cache_dir))
puppeteer_binary = Binary(
name='puppeteer',
binproviders=[npm_provider, EnvProvider()],
overrides={'npm': {'packages': ['puppeteer']}},
).load()
if not puppeteer_binary.abspath:
click.echo('ERROR: puppeteer binary not found (install puppeteer first)', err=True)
sys.exit(1)
install_args = _parse_override_packages(overrides, default=['chromium@latest', '--install-deps'])
cmd = ['browsers', 'install', *install_args]
proc = puppeteer_binary.exec(cmd=cmd, timeout=300)
if proc.returncode != 0:
click.echo(proc.stdout.strip(), err=True)
click.echo(proc.stderr.strip(), err=True)
click.echo(f'ERROR: puppeteer install failed ({proc.returncode})', err=True)
sys.exit(1)
chromium_binary = _load_chromium_binary(proc.stdout + '\n' + proc.stderr)
if not chromium_binary or not chromium_binary.abspath:
click.echo('ERROR: failed to locate Chromium after install', err=True)
sys.exit(1)
_emit_chromium_binary_record(
binary=chromium_binary,
machine_id=machine_id,
binary_id=binary_id,
)
config_patch = {
'CHROME_BINARY': str(chromium_binary.abspath),
'CHROMIUM_VERSION': str(chromium_binary.version) if chromium_binary.version else '',
}
print(json.dumps({
'type': 'Machine',
'config': config_patch,
}))
sys.exit(0)
def _parse_override_packages(overrides: str | None, default: list[str]) -> list[str]:
if not overrides:
return default
try:
overrides_dict = json.loads(overrides)
except json.JSONDecodeError:
return default
if isinstance(overrides_dict, dict):
provider_overrides = overrides_dict.get('puppeteer')
if isinstance(provider_overrides, dict):
packages = provider_overrides.get('packages')
if isinstance(packages, list) and packages:
return [str(arg) for arg in packages]
if isinstance(provider_overrides, list) and provider_overrides:
return [str(arg) for arg in provider_overrides]
if isinstance(overrides_dict, list) and overrides_dict:
return [str(arg) for arg in overrides_dict]
return default
def _emit_chromium_binary_record(binary: Binary, machine_id: str, binary_id: str) -> None:
record = {
'type': 'Binary',
'name': 'chromium',
'abspath': str(binary.abspath),
'version': str(binary.version) if binary.version else '',
'sha256': binary.sha256 or '',
'binprovider': 'puppeteer',
'machine_id': machine_id,
'binary_id': binary_id,
}
print(json.dumps(record))
def _load_chromium_binary(output: str) -> Binary | None:
candidates: list[Path] = []
match = re.search(r'(?:chromium|chrome)@[^\s]+\s+(\S+)', output)
if match:
candidates.append(Path(match.group(1)))
cache_dirs: list[Path] = []
cache_env = os.environ.get('PUPPETEER_CACHE_DIR')
if cache_env:
cache_dirs.append(Path(cache_env))
home = Path.home()
cache_dirs.extend([
home / '.cache' / 'puppeteer',
home / 'Library' / 'Caches' / 'puppeteer',
])
for base in cache_dirs:
for root in (base, base / 'chromium', base / 'chrome'):
try:
candidates.extend(root.rglob('Chromium.app/Contents/MacOS/Chromium'))
except Exception:
pass
try:
candidates.extend(root.rglob('chrome'))
except Exception:
pass
for candidate in candidates:
try:
binary = Binary(
name='chromium',
binproviders=[EnvProvider()],
overrides={'env': {'abspath': str(candidate)}},
).load()
except Exception:
continue
if binary.abspath:
return binary
return None
if __name__ == '__main__':
main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/puppeteer/on_Binary__12_puppeteer_install.py",
"license": "MIT License",
"lines": 138,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ArchiveBox/ArchiveBox:archivebox/plugins/puppeteer/on_Crawl__60_puppeteer_install.py | #!/usr/bin/env python3
"""
Emit Puppeteer Binary dependency for the crawl.
"""
import json
import os
import sys
def main() -> None:
enabled = os.environ.get('PUPPETEER_ENABLED', 'true').lower() not in ('false', '0', 'no', 'off')
if not enabled:
sys.exit(0)
record = {
'type': 'Binary',
'name': 'puppeteer',
'binproviders': 'npm,env',
'overrides': {
'npm': {
'packages': ['puppeteer'],
}
},
}
print(json.dumps(record))
sys.exit(0)
if __name__ == '__main__':
main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/puppeteer/on_Crawl__60_puppeteer_install.py",
"license": "MIT License",
"lines": 25,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/puppeteer/tests/test_puppeteer.py | """Integration tests for puppeteer plugin."""
import json
import os
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path
import pytest
from archivebox.plugins.chrome.tests.chrome_test_helpers import (
get_plugin_dir,
get_hook_script,
)
PLUGIN_DIR = get_plugin_dir(__file__)
CRAWL_HOOK = get_hook_script(PLUGIN_DIR, 'on_Crawl__*_puppeteer_install.py')
BINARY_HOOK = get_hook_script(PLUGIN_DIR, 'on_Binary__*_puppeteer_install.py')
NPM_BINARY_HOOK = PLUGIN_DIR.parent / 'npm' / 'on_Binary__10_npm_install.py'
def test_hook_scripts_exist():
assert CRAWL_HOOK and CRAWL_HOOK.exists(), f"Hook not found: {CRAWL_HOOK}"
assert BINARY_HOOK and BINARY_HOOK.exists(), f"Hook not found: {BINARY_HOOK}"
def test_crawl_hook_emits_puppeteer_binary():
with tempfile.TemporaryDirectory() as tmpdir:
env = os.environ.copy()
result = subprocess.run(
[sys.executable, str(CRAWL_HOOK)],
cwd=tmpdir,
capture_output=True,
text=True,
env=env,
timeout=30,
)
assert result.returncode == 0, f"crawl hook failed: {result.stderr}"
records = [json.loads(line) for line in result.stdout.splitlines() if line.strip().startswith('{')]
binaries = [r for r in records if r.get('type') == 'Binary' and r.get('name') == 'puppeteer']
assert binaries, f"Expected Binary record for puppeteer, got: {records}"
assert 'npm' in binaries[0].get('binproviders', ''), "puppeteer should be installable via npm provider"
def test_puppeteer_installs_chromium():
assert shutil.which('npm'), "npm is required for puppeteer installation"
with tempfile.TemporaryDirectory() as tmpdir:
tmpdir = Path(tmpdir)
lib_dir = tmpdir / 'lib' / 'arm64-darwin'
lib_dir.mkdir(parents=True, exist_ok=True)
env = os.environ.copy()
env['LIB_DIR'] = str(lib_dir)
crawl_result = subprocess.run(
[sys.executable, str(CRAWL_HOOK)],
cwd=tmpdir,
capture_output=True,
text=True,
env=env,
timeout=30,
)
assert crawl_result.returncode == 0, f"crawl hook failed: {crawl_result.stderr}"
crawl_records = [json.loads(line) for line in crawl_result.stdout.splitlines() if line.strip().startswith('{')]
puppeteer_record = next(
(r for r in crawl_records if r.get('type') == 'Binary' and r.get('name') == 'puppeteer'),
None,
)
assert puppeteer_record, f"Expected puppeteer Binary record, got: {crawl_records}"
npm_result = subprocess.run(
[
sys.executable,
str(NPM_BINARY_HOOK),
'--machine-id=test-machine',
'--binary-id=test-puppeteer',
'--name=puppeteer',
f"--binproviders={puppeteer_record.get('binproviders', '*')}",
'--overrides=' + json.dumps(puppeteer_record.get('overrides') or {}),
],
cwd=tmpdir,
capture_output=True,
text=True,
env=env,
timeout=120,
)
assert npm_result.returncode == 0, (
"puppeteer npm install failed\n"
f"stdout:\n{npm_result.stdout}\n"
f"stderr:\n{npm_result.stderr}"
)
result = subprocess.run(
[
sys.executable,
str(BINARY_HOOK),
'--machine-id=test-machine',
'--binary-id=test-binary',
'--name=chromium',
'--binproviders=puppeteer',
'--overrides=' + json.dumps({'puppeteer': ['chromium@latest', '--install-deps']}),
],
cwd=tmpdir,
capture_output=True,
text=True,
env=env,
timeout=120,
)
assert result.returncode == 0, (
"puppeteer binary hook failed\n"
f"stdout:\n{result.stdout}\n"
f"stderr:\n{result.stderr}"
)
records = [json.loads(line) for line in result.stdout.splitlines() if line.strip().startswith('{')]
binaries = [r for r in records if r.get('type') == 'Binary' and r.get('name') == 'chromium']
assert binaries, f"Expected Binary record for chromium, got: {records}"
abspath = binaries[0].get('abspath')
assert abspath and Path(abspath).exists(), f"Chromium binary path invalid: {abspath}"
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/puppeteer/tests/test_puppeteer.py",
"license": "MIT License",
"lines": 106,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ArchiveBox/ArchiveBox:archivebox/plugins/readability/on_Crawl__35_readability_install.py | #!/usr/bin/env python3
"""
Emit readability-extractor Binary dependency for the crawl.
"""
import json
import os
import sys
def get_env(name: str, default: str = '') -> str:
return os.environ.get(name, default).strip()
def get_env_bool(name: str, default: bool = False) -> bool:
val = get_env(name, '').lower()
if val in ('true', '1', 'yes', 'on'):
return True
if val in ('false', '0', 'no', 'off'):
return False
return default
def output_binary(name: str, binproviders: str):
"""Output Binary JSONL record for a dependency."""
machine_id = os.environ.get('MACHINE_ID', '')
record = {
'type': 'Binary',
'name': name,
'binproviders': binproviders,
'overrides': {
'npm': {
'packages': ['https://github.com/ArchiveBox/readability-extractor'],
},
},
'machine_id': machine_id,
}
print(json.dumps(record))
def main():
readability_enabled = get_env_bool('READABILITY_ENABLED', True)
if not readability_enabled:
sys.exit(0)
output_binary(name='readability-extractor', binproviders='npm,env')
sys.exit(0)
if __name__ == '__main__':
main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/readability/on_Crawl__35_readability_install.py",
"license": "MIT License",
"lines": 39,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/search_backend_ripgrep/on_Crawl__50_ripgrep_install.py | #!/usr/bin/env python3
"""
Emit ripgrep Binary dependency for the crawl.
"""
import os
import sys
import json
def main():
# Only proceed if ripgrep backend is enabled
search_backend_engine = os.environ.get('SEARCH_BACKEND_ENGINE', 'ripgrep').strip()
if search_backend_engine != 'ripgrep':
# Not using ripgrep, exit successfully without output
sys.exit(0)
machine_id = os.environ.get('MACHINE_ID', '')
print(json.dumps({
'type': 'Binary',
'name': 'rg',
'binproviders': 'apt,brew,env',
'overrides': {
'apt': {'packages': ['ripgrep']},
},
'machine_id': machine_id,
}))
sys.exit(0)
if __name__ == '__main__':
main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/search_backend_ripgrep/on_Crawl__50_ripgrep_install.py",
"license": "MIT License",
"lines": 26,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/singlefile/on_Crawl__45_singlefile_install.py | #!/usr/bin/env python3
"""
Emit single-file Binary dependency for the crawl.
"""
import json
import os
import sys
def get_env(name: str, default: str = '') -> str:
return os.environ.get(name, default).strip()
def get_env_bool(name: str, default: bool = False) -> bool:
val = get_env(name, '').lower()
if val in ('true', '1', 'yes', 'on'):
return True
if val in ('false', '0', 'no', 'off'):
return False
return default
def output_binary(name: str, binproviders: str, overrides: dict | None = None):
"""Output Binary JSONL record for a dependency."""
machine_id = os.environ.get('MACHINE_ID', '')
record = {
'type': 'Binary',
'name': name,
'binproviders': binproviders,
'machine_id': machine_id,
}
if overrides:
record['overrides'] = overrides
print(json.dumps(record))
def main():
singlefile_enabled = get_env_bool('SINGLEFILE_ENABLED', True)
if not singlefile_enabled:
sys.exit(0)
output_binary(
name='single-file',
binproviders='npm,env',
overrides={'npm': {'packages': ['single-file-cli']}},
)
sys.exit(0)
if __name__ == '__main__':
main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/singlefile/on_Crawl__45_singlefile_install.py",
"license": "MIT License",
"lines": 40,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/plugins/wget/on_Crawl__10_wget_install.py | #!/usr/bin/env python3
"""
Emit wget Binary dependency for the crawl.
"""
import json
import os
import sys
# Read config from environment (already validated by JSONSchema)
def get_env(name: str, default: str = '') -> str:
return os.environ.get(name, default).strip()
def get_env_bool(name: str, default: bool = False) -> bool:
val = get_env(name, '').lower()
if val in ('true', '1', 'yes', 'on'):
return True
if val in ('false', '0', 'no', 'off'):
return False
return default
def get_env_int(name: str, default: int = 0) -> int:
try:
return int(get_env(name, str(default)))
except ValueError:
return default
def output_binary(name: str, binproviders: str):
"""Output Binary JSONL record for a dependency."""
machine_id = os.environ.get('MACHINE_ID', '')
record = {
'type': 'Binary',
'name': name,
'binproviders': binproviders,
'machine_id': machine_id,
}
print(json.dumps(record))
def output_machine_config(config: dict):
"""Output Machine config JSONL patch."""
if not config:
return
record = {
'type': 'Machine',
'config': config,
}
print(json.dumps(record))
def main():
warnings = []
errors = []
# Get config values
wget_enabled = get_env_bool('WGET_ENABLED', True)
wget_save_warc = get_env_bool('WGET_SAVE_WARC', True)
wget_timeout = get_env_int('WGET_TIMEOUT') or get_env_int('TIMEOUT', 60)
wget_binary = get_env('WGET_BINARY', 'wget')
# Compute derived values (USE_WGET for backward compatibility)
use_wget = wget_enabled
# Validate timeout with warning (not error)
if use_wget and wget_timeout < 20:
warnings.append(
f"WGET_TIMEOUT={wget_timeout} is very low. "
"wget may fail to archive sites if set to less than ~20 seconds. "
"Consider setting WGET_TIMEOUT=60 or higher."
)
if use_wget:
output_binary(name='wget', binproviders='apt,brew,pip,env')
# Output computed config patch as JSONL
output_machine_config({
'USE_WGET': use_wget,
'WGET_BINARY': wget_binary,
})
for warning in warnings:
print(f"WARNING:{warning}", file=sys.stderr)
for error in errors:
print(f"ERROR:{error}", file=sys.stderr)
# Exit with error if any hard errors
sys.exit(1 if errors else 0)
if __name__ == '__main__':
main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/wget/on_Crawl__10_wget_install.py",
"license": "MIT License",
"lines": 73,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
ArchiveBox/ArchiveBox:archivebox/plugins/ytdlp/on_Crawl__15_ytdlp_install.py | #!/usr/bin/env python3
"""
Emit yt-dlp (and related) Binary dependencies for the crawl.
"""
import json
import os
import sys
def get_env(name: str, default: str = '') -> str:
return os.environ.get(name, default).strip()
def get_env_bool(name: str, default: bool = False) -> bool:
val = get_env(name, '').lower()
if val in ('true', '1', 'yes', 'on'):
return True
if val in ('false', '0', 'no', 'off'):
return False
return default
def output_binary(name: str, binproviders: str, overrides: dict | None = None):
"""Output Binary JSONL record for a dependency."""
machine_id = os.environ.get('MACHINE_ID', '')
record = {
'type': 'Binary',
'name': name,
'binproviders': binproviders,
'machine_id': machine_id,
}
if overrides:
record['overrides'] = overrides
print(json.dumps(record))
def main():
ytdlp_enabled = get_env_bool('YTDLP_ENABLED', True)
if not ytdlp_enabled:
sys.exit(0)
output_binary(
name='yt-dlp',
binproviders='pip,brew,apt,env',
overrides={'pip': {'packages': ['yt-dlp[default]']}},
)
# Node.js (required by several JS-based extractors, declared here per legacy binaries.jsonl)
output_binary(
name='node',
binproviders='apt,brew,env',
overrides={'apt': {'packages': ['nodejs']}},
)
# ffmpeg (used by media extraction)
output_binary(name='ffmpeg', binproviders='apt,brew,env')
sys.exit(0)
if __name__ == '__main__':
main()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/plugins/ytdlp/on_Crawl__15_ytdlp_install.py",
"license": "MIT License",
"lines": 48,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/tests/test_cli_add_interrupt.py | import os
import signal
import sqlite3
import subprocess
import sys
import time
from pathlib import Path
def _run(cmd, data_dir: Path, env: dict, timeout: int = 120):
return subprocess.run(
cmd,
cwd=data_dir,
env=env,
capture_output=True,
text=True,
timeout=timeout,
)
def _make_env(data_dir: Path) -> dict:
env = os.environ.copy()
env["DATA_DIR"] = str(data_dir)
env["USE_COLOR"] = "False"
env["SHOW_PROGRESS"] = "False"
env["ARCHIVEBOX_ALLOW_NO_UNIX_SOCKETS"] = "true"
env["PLUGINS"] = "title,favicon"
# Keep it fast but still real hooks
env["SAVE_TITLE"] = "True"
env["SAVE_FAVICON"] = "True"
env["SAVE_WGET"] = "False"
env["SAVE_WARC"] = "False"
env["SAVE_PDF"] = "False"
env["SAVE_SCREENSHOT"] = "False"
env["SAVE_DOM"] = "False"
env["SAVE_SINGLEFILE"] = "False"
env["SAVE_READABILITY"] = "False"
env["SAVE_MERCURY"] = "False"
env["SAVE_GIT"] = "False"
env["SAVE_YTDLP"] = "False"
env["SAVE_HEADERS"] = "False"
env["SAVE_HTMLTOTEXT"] = "False"
return env
def _count_running_processes(db_path: Path, where: str) -> int:
for _ in range(50):
try:
conn = sqlite3.connect(db_path, timeout=1)
cur = conn.cursor()
count = cur.execute(
f"SELECT COUNT(*) FROM machine_process WHERE status = 'running' AND {where}"
).fetchone()[0]
conn.close()
return count
except sqlite3.OperationalError:
time.sleep(0.1)
return 0
def _wait_for_count(db_path: Path, where: str, target: int, timeout: int = 20) -> bool:
start = time.time()
while time.time() - start < timeout:
if _count_running_processes(db_path, where) >= target:
return True
time.sleep(0.1)
return False
def test_add_parents_workers_to_orchestrator(tmp_path):
data_dir = tmp_path / "data"
data_dir.mkdir()
env = _make_env(data_dir)
init = _run([sys.executable, "-m", "archivebox", "init", "--quick"], data_dir, env)
assert init.returncode == 0, init.stderr
add = _run([sys.executable, "-m", "archivebox", "add", "https://example.com"], data_dir, env, timeout=120)
assert add.returncode == 0, add.stderr
conn = sqlite3.connect(data_dir / "index.sqlite3")
cur = conn.cursor()
orchestrator = cur.execute(
"SELECT id FROM machine_process WHERE process_type = 'orchestrator' ORDER BY created_at DESC LIMIT 1"
).fetchone()
assert orchestrator is not None
orchestrator_id = orchestrator[0]
worker_count = cur.execute(
"SELECT COUNT(*) FROM machine_process WHERE process_type = 'worker' AND worker_type = 'crawl' "
"AND parent_id = ?",
(orchestrator_id,),
).fetchone()[0]
conn.close()
assert worker_count >= 1, "Expected crawl worker to be parented to orchestrator"
def test_add_interrupt_cleans_orphaned_processes(tmp_path):
data_dir = tmp_path / "data"
data_dir.mkdir()
env = _make_env(data_dir)
init = _run([sys.executable, "-m", "archivebox", "init", "--quick"], data_dir, env)
assert init.returncode == 0, init.stderr
proc = subprocess.Popen(
[sys.executable, "-m", "archivebox", "add", "https://example.com"],
cwd=data_dir,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
db_path = data_dir / "index.sqlite3"
saw_worker = _wait_for_count(db_path, "process_type = 'worker'", 1, timeout=20)
assert saw_worker, "Expected at least one worker to start before interrupt"
proc.send_signal(signal.SIGINT)
proc.wait(timeout=30)
# Wait for workers/hooks to be cleaned up
start = time.time()
while time.time() - start < 30:
running = _count_running_processes(db_path, "process_type IN ('worker','hook')")
if running == 0:
break
time.sleep(0.2)
assert _count_running_processes(db_path, "process_type IN ('worker','hook')") == 0, (
"Expected no running worker/hook processes after interrupt"
)
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/tests/test_cli_add_interrupt.py",
"license": "MIT License",
"lines": 110,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ArchiveBox/ArchiveBox:archivebox/tests/test_real_world_add.py | import os
import sqlite3
import subprocess
from pathlib import Path
def _find_snapshot_dir(data_dir: Path, snapshot_id: str) -> Path | None:
candidates = {snapshot_id}
if len(snapshot_id) == 32:
hyphenated = f"{snapshot_id[:8]}-{snapshot_id[8:12]}-{snapshot_id[12:16]}-{snapshot_id[16:20]}-{snapshot_id[20:]}"
candidates.add(hyphenated)
elif len(snapshot_id) == 36 and '-' in snapshot_id:
candidates.add(snapshot_id.replace('-', ''))
for needle in candidates:
for path in data_dir.rglob(needle):
if path.is_dir():
return path
return None
def _find_html_with_text(root: Path, needle: str) -> list[Path]:
hits: list[Path] = []
for path in root.rglob("*.htm*"):
if not path.is_file():
continue
try:
if needle in path.read_text(errors="ignore"):
hits.append(path)
except Exception:
continue
return hits
def test_add_real_world_example_domain(tmp_path):
os.chdir(tmp_path)
tmp_short = Path("/tmp") / f"abx-{tmp_path.name}"
tmp_short.mkdir(parents=True, exist_ok=True)
env = os.environ.copy()
env["TMP_DIR"] = str(tmp_short)
env["ARCHIVEBOX_ALLOW_NO_UNIX_SOCKETS"] = "true"
init = subprocess.run(
["archivebox", "init"],
capture_output=True,
text=True,
timeout=120,
env=env,
)
assert init.returncode == 0, f"archivebox init failed: {init.stderr}"
result = subprocess.run(
["archivebox", "add", "https://example.com"],
capture_output=True,
text=True,
timeout=900,
env=env,
)
assert result.returncode == 0, (
"archivebox add failed.\n"
f"stdout:\n{result.stdout}\n"
f"stderr:\n{result.stderr}"
)
conn = sqlite3.connect(tmp_path / "index.sqlite3")
c = conn.cursor()
snapshot_row = c.execute(
"SELECT id, url, title FROM core_snapshot WHERE url = ?",
("https://example.com",),
).fetchone()
assert snapshot_row is not None, "Snapshot for https://example.com not found in DB"
snapshot_id, snapshot_url, snapshot_title = snapshot_row
assert snapshot_title and "Example Domain" in snapshot_title, (
f"Expected title to contain Example Domain, got: {snapshot_title}"
)
failed_results = c.execute(
"SELECT COUNT(*) FROM core_archiveresult WHERE snapshot_id = ? AND status = 'failed'",
(snapshot_id,),
).fetchone()[0]
assert failed_results == 0, "Some archive results failed for example.com snapshot"
binary_workers = c.execute(
"SELECT COUNT(*) FROM machine_process WHERE process_type = 'worker' AND worker_type = 'binary'"
).fetchone()[0]
assert binary_workers > 0, "Expected BinaryWorker to run installs via BinaryMachine"
failed_binary_workers = c.execute(
"SELECT COUNT(*) FROM machine_process WHERE process_type = 'worker' AND worker_type = 'binary' "
"AND exit_code IS NOT NULL AND exit_code != 0"
).fetchone()[0]
assert failed_binary_workers == 0, "BinaryWorker reported non-zero exit codes"
queued_binaries = c.execute(
"SELECT name FROM machine_binary WHERE status != 'installed'"
).fetchall()
assert not queued_binaries, f"Some binaries did not install: {queued_binaries}"
conn.close()
snapshot_dir = _find_snapshot_dir(tmp_path, str(snapshot_id))
assert snapshot_dir is not None, "Snapshot output directory not found"
title_path = snapshot_dir / "title" / "title.txt"
assert title_path.exists(), f"Missing title output: {title_path}"
assert "Example Domain" in title_path.read_text(errors="ignore")
html_sources = []
for candidate in ("wget", "singlefile", "dom"):
for candidate_dir in (snapshot_dir / candidate, *snapshot_dir.glob(f"*_{candidate}")):
if candidate_dir.exists():
html_sources.extend(_find_html_with_text(candidate_dir, "Example Domain"))
assert len(html_sources) >= 2, (
"Expected HTML outputs from multiple extractors to contain Example Domain "
f"(found {len(html_sources)})."
)
text_hits = 0
for path in (
*snapshot_dir.glob("*_readability/content.txt"),
snapshot_dir / "readability" / "content.txt",
):
if path.exists() and "Example Domain" in path.read_text(errors="ignore"):
text_hits += 1
for path in (
*snapshot_dir.glob("*_htmltotext/htmltotext.txt"),
snapshot_dir / "htmltotext" / "htmltotext.txt",
):
if path.exists() and "Example Domain" in path.read_text(errors="ignore"):
text_hits += 1
assert text_hits >= 2, (
"Expected multiple text extractors to contain Example Domain "
f"(readability/htmltotext hits={text_hits})."
)
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/tests/test_real_world_add.py",
"license": "MIT License",
"lines": 115,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ArchiveBox/ArchiveBox:archivebox/tests/test_settings_signal_webhooks.py | from django.test import TestCase
class TestSignalWebhooksSettings(TestCase):
def test_task_handler_is_sync_in_tests(self):
from signal_webhooks.settings import webhook_settings
assert webhook_settings.TASK_HANDLER.__name__ == "sync_task_handler"
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/tests/test_settings_signal_webhooks.py",
"license": "MIT License",
"lines": 5,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
ArchiveBox/ArchiveBox:archivebox/config/ldap.py | __package__ = "archivebox.config"
from typing import Optional
from pydantic import Field
from archivebox.config.configset import BaseConfigSet
class LDAPConfig(BaseConfigSet):
"""
LDAP authentication configuration.
Only loads and validates if django-auth-ldap is installed.
These settings integrate with Django's LDAP authentication backend.
"""
toml_section_header: str = "LDAP_CONFIG"
LDAP_ENABLED: bool = Field(default=False)
LDAP_SERVER_URI: Optional[str] = Field(default=None)
LDAP_BIND_DN: Optional[str] = Field(default=None)
LDAP_BIND_PASSWORD: Optional[str] = Field(default=None)
LDAP_USER_BASE: Optional[str] = Field(default=None)
LDAP_USER_FILTER: str = Field(default="(uid=%(user)s)")
LDAP_USERNAME_ATTR: str = Field(default="username")
LDAP_FIRSTNAME_ATTR: str = Field(default="givenName")
LDAP_LASTNAME_ATTR: str = Field(default="sn")
LDAP_EMAIL_ATTR: str = Field(default="mail")
LDAP_CREATE_SUPERUSER: bool = Field(default=False)
def validate_ldap_config(self) -> tuple[bool, str]:
"""
Validate that all required LDAP settings are configured.
Returns:
Tuple of (is_valid, error_message)
"""
if not self.LDAP_ENABLED:
return True, ""
required_fields = [
"LDAP_SERVER_URI",
"LDAP_BIND_DN",
"LDAP_BIND_PASSWORD",
"LDAP_USER_BASE",
]
missing = [field for field in required_fields if not getattr(self, field)]
if missing:
return False, f"LDAP_* config options must all be set if LDAP_ENABLED=True\nMissing: {', '.join(missing)}"
return True, ""
# Singleton instance
LDAP_CONFIG = LDAPConfig()
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/config/ldap.py",
"license": "MIT License",
"lines": 42,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
ArchiveBox/ArchiveBox:archivebox/ldap/apps.py | """Django app configuration for LDAP authentication."""
__package__ = "archivebox.ldap"
from django.apps import AppConfig
class LDAPConfig(AppConfig):
"""Django app config for LDAP authentication."""
default_auto_field = 'django.db.models.BigAutoField'
name = 'archivebox.ldap'
verbose_name = 'LDAP Authentication'
| {
"repo_id": "ArchiveBox/ArchiveBox",
"file_path": "archivebox/ldap/apps.py",
"license": "MIT License",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.