Skip to content

Commit 7df1e20

Browse files
authored
Feat/2.4.0 beta1 (#1906)
2 parents 2557ce8 + aabae4d commit 7df1e20

File tree

146 files changed

+12713
-9530
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

146 files changed

+12713
-9530
lines changed

docker/docker-compose.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ services:
4040

4141
backend:
4242
container_name: bisheng-backend
43-
image: dataelement/bisheng-backend:v2.3.0
43+
image: dataelement/bisheng-backend:v2.4.0-beta1
4444
ports:
4545
- "7860:7860"
4646
environment:
@@ -78,7 +78,7 @@ services:
7878

7979
backend_worker:
8080
container_name: bisheng-backend-worker
81-
image: dataelement/bisheng-backend:v2.3.0
81+
image: dataelement/bisheng-backend:v2.4.0-beta1
8282
environment:
8383
TZ: Asia/Shanghai
8484
BS_MILVUS_CONNECTION_ARGS: '{"host":"milvus","port":"19530","user":"","password":"","secure":false}'
@@ -109,7 +109,7 @@ services:
109109

110110
frontend:
111111
container_name: bisheng-frontend
112-
image: dataelement/bisheng-frontend:v2.3.0
112+
image: dataelement/bisheng-frontend:v2.4.0-beta1
113113
ports:
114114
- "3001:3001"
115115
environment:

src/backend/bisheng/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
try:
99
# Subject to automatic modification
10-
__version__ = '2.3.0'
10+
__version__ = '2.4.0-beta1'
1111
except metadata.PackageNotFoundError:
1212
# Case where package metadata is not available.
1313
__version__ = ''

src/backend/bisheng/api/services/audit_log.py

Lines changed: 239 additions & 78 deletions
Large diffs are not rendered by default.

src/backend/bisheng/api/services/evaluation.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535
from bisheng.user.domain.models.user import UserDao
3636
from bisheng.utils import generate_uuid
3737
from bisheng.worker.workflow.redis_callback import RedisCallback
38-
from bisheng.worker.workflow.tasks import execute_workflow, continue_workflow
38+
from bisheng.worker.workflow.tasks import execute_workflow, continue_workflow, workflow_stateful_worker
3939
from bisheng.workflow.common.workflow import WorkflowStatus
4040

4141
expire = 600
@@ -145,7 +145,7 @@ def upload_file(cls, file: UploadFile):
145145

146146
file_ext = os.path.basename(file.filename).split('.')[-1]
147147
file_path = f'evaluation/dataset/{file_id}.{file_ext}'
148-
minio_client.put_object_sync(bucket_name=minio_client.bucket, object_name=file_path, file=file.file.read(),
148+
minio_client.put_object_sync(bucket_name=minio_client.bucket, object_name=file_path, file=file.file,
149149
content_type=file.content_type)
150150
return file_name, file_path
151151

@@ -247,7 +247,10 @@ def execute_workflow_get_answer(workflow_info: FlowVersion, evaluation: Evaluati
247247
workflow = RedisCallback(unique_id, workflow_id, chat_id, user_id)
248248
workflow.set_workflow_data(workflow_info.data)
249249
workflow.set_workflow_status(WorkflowStatus.WAITING.value)
250-
execute_workflow.delay(unique_id, workflow_id, chat_id, user_id)
250+
hash_key = generate_uuid()
251+
worker_node = workflow_stateful_worker.find_task_node_sync(hash_key)
252+
253+
execute_workflow.apply_async([unique_id, workflow_id, chat_id, user_id], queue=worker_node)
251254

252255
# Listen for execution results of workflows
253256
input_event = None
@@ -265,7 +268,8 @@ def execute_workflow_get_answer(workflow_info: FlowVersion, evaluation: Evaluati
265268
# Only workflows entered in dialog boxes are entered by default
266269
workflow.set_user_input({input_event.message.get('node_id'): {"user_input": question}})
267270
workflow.set_workflow_status(WorkflowStatus.INPUT_OVER.value)
268-
continue_workflow.delay(unique_id, workflow_id, chat_id, user_id)
271+
worker_node = workflow_stateful_worker.find_task_node_sync(hash_key)
272+
continue_workflow.apply_async([unique_id, workflow_id, chat_id, user_id], queue=worker_node)
269273
events = []
270274
for event in workflow.sync_get_response_until_break():
271275
events.append(event)

src/backend/bisheng/api/services/flow.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -204,7 +204,7 @@ async def update_version_info(cls, request: Request, user: UserPayload, version_
204204

205205
flow_version = await FlowVersionDao.aupdate_version(version_info)
206206

207-
if flow_version.flow_type == FlowType.FLOW.value:
207+
if flow_info.flow_type == FlowType.FLOW.value:
208208
try:
209209
# Refresh this version of the form data
210210
if not get_L2_param_from_flow(flow_version.data, flow_version.flow_id, flow_version.id):

src/backend/bisheng/api/services/md_from_excel.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -93,6 +93,9 @@ def unmerge_and_read_sheet(sheet_obj):
9393
for merged_range in merged_cell_ranges:
9494
min_col, min_row, max_col, max_row = merged_range.bounds
9595
top_left_cell_value = sheet_obj.cell(row=min_row, column=min_col).value
96+
# ignore empty rows
97+
if min_row > len(data_grid):
98+
continue
9699
for r in range(min_row, max_row + 1):
97100
for c in range(min_col, max_col + 1):
98101
data_grid[r - 1][c - 1] = top_left_cell_value
@@ -184,10 +187,12 @@ def process_dataframe_to_markdown_files(
184187
if append_header:
185188
# Handle header index outliers based on user rules
186189
if start_header_idx >= rows:
187-
logger.warning(f" Table Header Start Row {start_header_idx} Total lines exceeded {rows}. The first row will be used as the table header.")
190+
logger.warning(
191+
f"Table Header Start Row {start_header_idx} Total lines exceeded {rows}. The first row will be used as the table header.")
188192
start_header_idx, end_header_idx = 0, 0
189193
elif end_header_idx >= rows:
190-
logger.warning(f" Table Header End Row {end_header_idx} Total lines exceeded {rows}. will be truncated to the last line.")
194+
logger.warning(
195+
f"Table Header End Row {end_header_idx} Total lines exceeded {rows}. will be truncated to the last line.")
191196
end_header_idx = rows - 1
192197

193198
# Make sure the index is legitimate

src/backend/bisheng/api/services/role_group_service.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -374,22 +374,22 @@ async def get_group_dashboards(self, group_id: int, keyword: str, page_size: int
374374
return paged_res, len(res)
375375
return res, len(res)
376376

377-
def get_manage_resources(self, login_user: UserPayload, keyword: str, page: int, page_size: int) -> (list, int):
377+
async def get_manage_resources(self, login_user: UserPayload, keyword: str, page: int, page_size: int) -> (list, int):
378378
""" Get a list of apps under a user group managed by a user Contains skills, assistants, workflows"""
379379
groups = []
380380
if not login_user.is_admin():
381-
groups = [str(one.group_id) for one in UserGroupDao.get_user_admin_group(login_user.user_id)]
381+
groups = [str(one.group_id) for one in await UserGroupDao.aget_user_admin_group(login_user.user_id)]
382382
if not groups:
383383
return [], 0
384384

385385
resource_ids = []
386386
# Description is a user group administrator, need to filter to get the resources under the corresponding group
387387
if groups:
388-
group_resources = GroupResourceDao.get_groups_resource(groups, resource_types=[ResourceTypeEnum.FLOW,
388+
group_resources = await GroupResourceDao.get_groups_resource(groups, resource_types=[ResourceTypeEnum.FLOW,
389389
ResourceTypeEnum.ASSISTANT,
390390
ResourceTypeEnum.WORK_FLOW])
391391
if not group_resources:
392392
return [], 0
393393
resource_ids = [one.third_id for one in group_resources]
394394

395-
return FlowDao.get_all_apps(keyword, id_list=resource_ids, page=page, limit=page_size)
395+
return await FlowDao.aget_all_apps(keyword, id_list=resource_ids, page=page, limit=page_size)

src/backend/bisheng/api/v1/audit.py

Lines changed: 53 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -11,20 +11,20 @@
1111

1212

1313
@router.get('')
14-
def get_audit_logs(*,
15-
group_ids: Optional[List[str]] = Query(default=[], description='GroupingidVertical'),
16-
operator_ids: Optional[List[int]] = Query(default=[], description='WhoidVertical'),
17-
start_time: Optional[datetime] = Query(default=None, description='Start when'),
18-
end_time: Optional[datetime] = Query(default=None, description='End time'),
19-
system_id: Optional[str] = Query(default=None, description='Module Item'),
20-
event_type: Optional[str] = Query(default=None, description='Operation behaviors'),
21-
page: Optional[int] = Query(default=0, description='Page'),
22-
limit: Optional[int] = Query(default=0, description='Listings Per Page'),
23-
login_user: UserPayload = Depends(UserPayload.get_login_user)):
14+
async def get_audit_logs(*,
15+
group_ids: Optional[List[str]] = Query(default=[], description='GroupingidVertical'),
16+
operator_ids: Optional[List[int]] = Query(default=[], description='WhoidVertical'),
17+
start_time: Optional[datetime] = Query(default=None, description='Start when'),
18+
end_time: Optional[datetime] = Query(default=None, description='End time'),
19+
system_id: Optional[str] = Query(default=None, description='Module Item'),
20+
event_type: Optional[str] = Query(default=None, description='Operation behaviors'),
21+
page: Optional[int] = Query(default=0, description='Page'),
22+
limit: Optional[int] = Query(default=0, description='Listings Per Page'),
23+
login_user: UserPayload = Depends(UserPayload.get_login_user)):
2424
group_ids = [one for one in group_ids if one]
2525
operator_ids = [one for one in operator_ids if one]
26-
return AuditLogService.get_audit_log(login_user, group_ids, operator_ids,
27-
start_time, end_time, system_id, event_type, page, limit)
26+
return await AuditLogService.get_audit_log(login_user, group_ids, operator_ids,
27+
start_time, end_time, system_id, event_type, page, limit)
2828

2929

3030
@router.get('/operators')
@@ -36,56 +36,63 @@ def get_all_operators(*, login_user: UserPayload = Depends(UserPayload.get_login
3636

3737

3838
@router.get('/session')
39-
def get_session_list(login_user: UserPayload = Depends(UserPayload.get_login_user),
40-
flow_ids: Optional[List[str]] = Query(default=[], description='ApplicationsidVertical'),
41-
user_ids: Optional[List[int]] = Query(default=[], description='UsersidVertical'),
42-
group_ids: Optional[List[int]] = Query(default=[], description='User GroupsidVertical'),
43-
start_date: Optional[datetime] = Query(default=None, description='Start when'),
44-
end_date: Optional[datetime] = Query(default=None, description='End time'),
45-
feedback: Optional[str] = Query(default=None, description='like LikedislikeUnlikecopiedCopy:'),
46-
sensitive_status: Optional[int] = Query(default=None, description='Sensitive word review status'),
47-
page: Optional[int] = Query(default=1, description='Page'),
48-
page_size: Optional[int] = Query(default=10, description='Listings Per Page')):
39+
async def get_session_list(login_user: UserPayload = Depends(UserPayload.get_login_user),
40+
flow_ids: Optional[List[str]] = Query(default=[], description='ApplicationsidVertical'),
41+
user_ids: Optional[List[int]] = Query(default=[], description='UsersidVertical'),
42+
group_ids: Optional[List[int]] = Query(default=[], description='User GroupsidVertical'),
43+
start_date: Optional[datetime] = Query(default=None, description='Start when'),
44+
end_date: Optional[datetime] = Query(default=None, description='End time'),
45+
feedback: Optional[str] = Query(default=None,
46+
description='like LikedislikeUnlikecopiedCopy:'),
47+
sensitive_status: Optional[int] = Query(default=None,
48+
description='Sensitive word review status'),
49+
page: Optional[int] = Query(default=1, description='Page'),
50+
page_size: Optional[int] = Query(default=10, description='Listings Per Page')):
4951
""" Filter all session lists """
50-
data, total = AuditLogService.get_session_list(login_user, flow_ids, user_ids, group_ids, start_date, end_date,
51-
feedback, sensitive_status, page, page_size)
52+
data, total = await AuditLogService.get_session_list(login_user, flow_ids, user_ids, group_ids, start_date,
53+
end_date,
54+
feedback, sensitive_status, page, page_size)
5255
return resp_200(data={
5356
'data': data,
5457
'total': total
5558
})
5659

5760

5861
@router.get('/session/export')
59-
def export_session_messages(login_user: UserPayload = Depends(UserPayload.get_login_user),
60-
flow_ids: Optional[List[str]] = Query(default=[], description='ApplicationsidVertical'),
61-
user_ids: Optional[List[int]] = Query(default=[], description='UsersidVertical'),
62-
group_ids: Optional[List[int]] = Query(default=[], description='User GroupsidVertical'),
63-
start_date: Optional[datetime] = Query(default=None, description='Start when'),
64-
end_date: Optional[datetime] = Query(default=None, description='End time'),
65-
feedback: Optional[str] = Query(default=None,
66-
description='like LikedislikeUnlikecopiedCopy:'),
67-
sensitive_status: Optional[int] = Query(default=None, description='Sensitive word review status')):
62+
async def export_session_messages(login_user: UserPayload = Depends(UserPayload.get_login_user),
63+
flow_ids: Optional[List[str]] = Query(default=[],
64+
description='ApplicationsidVertical'),
65+
user_ids: Optional[List[int]] = Query(default=[], description='UsersidVertical'),
66+
group_ids: Optional[List[int]] = Query(default=[],
67+
description='User GroupsidVertical'),
68+
start_date: Optional[datetime] = Query(default=None, description='Start when'),
69+
end_date: Optional[datetime] = Query(default=None, description='End time'),
70+
feedback: Optional[str] = Query(default=None,
71+
description='like LikedislikeUnlikecopiedCopy:'),
72+
sensitive_status: Optional[int] = Query(default=None,
73+
description='Sensitive word review status')):
6874
""" Export the session details list as a CSV file. """
69-
url = AuditLogService.export_session_messages(login_user, flow_ids, user_ids, group_ids, start_date, end_date,
70-
feedback, sensitive_status)
75+
url = await AuditLogService.export_session_messages(login_user, flow_ids, user_ids, group_ids, start_date, end_date,
76+
feedback, sensitive_status)
7177
return resp_200(data={
7278
'url': url
7379
})
7480

7581

7682
@router.get('/session/export/data')
77-
def get_session_messages(login_user: UserPayload = Depends(UserPayload.get_login_user),
78-
flow_ids: Optional[List[str]] = Query(default=[], description='ApplicationsidVertical'),
79-
user_ids: Optional[List[int]] = Query(default=[], description='UsersidVertical'),
80-
group_ids: Optional[List[int]] = Query(default=[], description='User GroupsidVertical'),
81-
start_date: Optional[datetime] = Query(default=None, description='Start when'),
82-
end_date: Optional[datetime] = Query(default=None, description='End time'),
83-
feedback: Optional[str] = Query(default=None,
84-
description='like LikedislikeUnlikecopiedCopy:'),
85-
sensitive_status: Optional[int] = Query(default=None, description='Sensitive word review status')):
83+
async def get_session_messages(login_user: UserPayload = Depends(UserPayload.get_login_user),
84+
flow_ids: Optional[List[str]] = Query(default=[], description='ApplicationsidVertical'),
85+
user_ids: Optional[List[int]] = Query(default=[], description='UsersidVertical'),
86+
group_ids: Optional[List[int]] = Query(default=[], description='User GroupsidVertical'),
87+
start_date: Optional[datetime] = Query(default=None, description='Start when'),
88+
end_date: Optional[datetime] = Query(default=None, description='End time'),
89+
feedback: Optional[str] = Query(default=None,
90+
description='like LikedislikeUnlikecopiedCopy:'),
91+
sensitive_status: Optional[int] = Query(default=None,
92+
description='Sensitive word review status')):
8693
""" Export data for a list of session details """
87-
result = AuditLogService.get_session_messages(login_user, flow_ids, user_ids, group_ids, start_date, end_date,
88-
feedback, sensitive_status)
94+
result = await AuditLogService.get_session_messages(login_user, flow_ids, user_ids, group_ids, start_date, end_date,
95+
feedback, sensitive_status)
8996
return resp_200(data={
9097
'data': result
9198
})

0 commit comments

Comments
 (0)