迁移工作台 (migrate the workstation module)
Signed-off-by: yinzijian <yinzijian@haomozhixing.onaliyun.com>
This commit is contained in:
parent c54f10cc71
commit 298d340018
|
|
@ -85,6 +85,7 @@ def update_cross_examine_record_state_api():
|
|||
|
||||
from app.user_views import *
|
||||
from app.views_task import *
|
||||
from app.views_workstation import *
|
||||
|
||||
# Entry-point guard: this module only wires up Flask routes via the
# star-imports above, so there is nothing to run when executed directly.
if __name__ == '__main__':
    pass
|
||||
|
|
@ -6,6 +6,7 @@ from app.models import *
|
|||
from app.models_wave import RoadLinkManager
|
||||
from app.phase_db_func import PhaseTableDbHelper
|
||||
from app.task_db_func import TaskDbHelper
|
||||
from app.workstation_db_function import WorkstationDbHelper
|
||||
from tool.mysql_common_connector_pool import *
|
||||
from app.user_db_func import *
|
||||
from app.tmnet_db_func import *
|
||||
|
|
# Shared database-helper singletons, one per functional area, each bound
# to the connection pool it queries.
db_cross = CrossDbHelper(g_cross_delay_pool)
db_task = TaskDbHelper(g_db_pool)
db_phasetable = PhaseTableDbHelper(g_db_pool)
db_tmnet = TmnetDbHelper(g_roadnet_pool)
db_workstation = WorkstationDbHelper(g_db_pool)

# NOTE(review): module-level accumulator; writers are not visible in this
# chunk — confirm where node ids are appended.
nodeid_list = []
|
||||
|
|
|
|||
|
|
@ -240,3 +240,62 @@ class TmnetDbHelper(TableDbHelperBase):
|
|||
except Exception as error:
|
||||
self.close(conn, cursor)
|
||||
return None, error
|
||||
|
||||
def query_cross_info_all(self, crossids: list):
    """Fetch name/crossid/location/nodeid/area_id for the given cross ids.

    Combines the canonical `cross` table (preferring any override from
    `cross_ledger_update_info`) with `user_defined_cross` rows via
    UNION ALL.

    Args:
        crossids: cross ids expanded into both IN (...) clauses.

    Returns:
        (rows, None) on success, (None, error) on failure.
    """
    conn, cursor = self.connect()
    try:
        # crossids is bound twice because both halves of the UNION filter on it.
        sql = f'''select IFNULL(clui.name, c2.name) as name,
                         c2.crossid,
                         IFNULL(clui.location, c2.location) as location,
                         c2.nodeid,
                         c2.area_id
                  from `cross` as c2
                  left join `cross_ledger_update_info` as clui on clui.crossid = c2.crossid
                  where c2.crossid in %s
                  union all
                  select udc.name,
                         udc.crossid,
                         udc.location,
                         udc.nodeid,
                         udc.area_id
                  from `user_defined_cross` as udc
                  where udc.crossid in %s'''
        # NOTE(review): debug print of the rendered SQL; consider a logger.
        print(cursor.mogrify(sql, (crossids, crossids)))
        cursor.execute(sql, (crossids, crossids))
        # Fetch all query results
        result = cursor.fetchall()
        self.close(conn, cursor)
        return result, None
    except Exception as e:
        self.close(conn, cursor)
        return None, e
|
||||
|
||||
def query_road_info_all(self, roadids: list):
    """Fetch name / endpoint crossids / direction / nodeid for the given roads.

    Combines the canonical `road` table (preferring overrides from
    `road_ledger_update_info`) with `user_defined_roads` via UNION ALL.

    Args:
        roadids: road ids expanded into both IN (...) clauses.

    Returns:
        (rows, None) on success, (None, error) on failure.
    """
    conn, cursor = self.connect()
    try:
        # FIX: the user-defined branch previously read "select name
        # from_crossid, ..." (missing comma after `name`), which aliased
        # `name` AS `from_crossid` and yielded 4 columns against the first
        # branch's 5 — MySQL rejects a UNION ALL with differing column counts.
        sql = '''select
                        IFNULL(rlui.name, r.name) as name,
                        IFNULL(rlui.from_crossid, r.from_crossid) as from_crossid,
                        IFNULL(rlui.to_crossid, r.to_crossid) as to_crossid,
                        IFNULL(rlui.src_direct, r.src_direct) as src_direct,
                        r.nodeid
                 from `road` as r
                 left join `road_ledger_update_info` as rlui on rlui.roadid = r.roadid
                 where r.roadid in %s
                 union all
                 select name,
                        from_crossid,
                        to_crossid,
                        src_direct,
                        nodeid
                 from `user_defined_roads`
                 where roadid in %s'''
        # NOTE(review): debug print of the rendered SQL; consider a logger.
        print(cursor.mogrify(sql, (roadids, roadids)))
        cursor.execute(sql, (roadids, roadids))
        # Fetch all query results
        result = cursor.fetchall()
        self.close(conn, cursor)
        return result, None
    except Exception as e:
        self.close(conn, cursor)
        return None, e
|
||||
|
|
@ -0,0 +1,77 @@
|
|||
from app.cross_eva_views import app
|
||||
from app.workstation_worker import *
|
||||
|
||||
@app.route('/api/favorite')
def add_favorite():
    """Add a favorite for the calling user, identified by the token header."""
    # A missing or empty token header is normalized to None.
    token = request.headers.get('token') or None
    return favorite(dict(request.args), token)
|
||||
|
||||
@app.route('/api/favorite_list')
def favorite_list():
    """Return the calling user's favorite list."""
    # A missing or empty token header is normalized to None.
    token = request.headers.get('token') or None
    return get_favorite_list(dict(request.args), token)
|
||||
|
||||
@app.route('/api/favorite_data_list')
def favorite_data_list():
    """Return metric data for the calling user's favorites."""
    # A missing or empty token header is normalized to None.
    token = request.headers.get('token') or None
    return get_favorite_data_list(dict(request.args), token)
|
||||
|
||||
@app.route('/api/workstation_task_list')
def workstation_task_list():
    """Return the workstation task list for the calling user."""
    # A missing or empty token header is normalized to None.
    token = request.headers.get('token') or None
    return get_workstation_task_list(dict(request.args), token)
|
||||
|
||||
@app.route('/api/del_favorite_list', methods=['POST'])
def del_favorite_list():
    """Delete favorites listed in the POSTed JSON body."""
    # A missing or empty token header is normalized to None.
    token = request.headers.get('token') or None
    return delete_favorite_list(request.get_json(), token)
|
||||
|
||||
# @app.route('/api/get_optimize_crosses_list')
|
||||
# def crosses_list():
|
||||
# return get_crosses_list(dict(request.args))
|
||||
#
|
||||
# @app.route('/api/cross_phase_symptom_info')
|
||||
# def cross_phase_symptom_info():
|
||||
# return get_cross_phase_symptom_info(dict(request.args))
|
||||
#
|
||||
# @app.route('/api/cross_optimize_plan_detail')
|
||||
# def cross_optimize_plan_detail():
|
||||
# return get_cross_optimize_plan_detail(dict(request.args))
|
||||
#
|
||||
# @app.route('/api/cross_init_info', methods=['POST'])
|
||||
# def cross_init_info():
|
||||
# return get_cross_init_info(request.get_json())
|
||||
#
|
||||
# @app.route('/api/cross_optimize', methods=['POST'])
|
||||
# def cross_optimize():
|
||||
# return gen_cross_optimize(request.get_json())
|
||||
#
|
||||
# @app.route('/api/save_edit_plan', methods=['POST'])
|
||||
# def save_edit_plan_detail():
|
||||
# return save_edit_plan(request.get_json())
|
||||
#
|
||||
# @app.route('/api/issued_edit_plan', methods=['POST'])
|
||||
# def issued_edit_plan_detail():
|
||||
# return issued_edit_plan(request.get_json())
|
||||
#
|
||||
# @app.route('/api/update_optimize_plan_status')
|
||||
# def update_optimize_plan_status_detail():
|
||||
# return update_optimize_plan_status(dict(request.args))
|
||||
#
|
||||
# @app.route('/api/download_optimize_plan', methods=['POST'])
|
||||
# def down_optimize_plan():
|
||||
# return download_optimize_plan(request.get_json())
|
||||
|
||||
@app.route('/api/get_road_net_detail')
def get_road_net_detail_detail():
    """Expose road-net detail lookup over HTTP (query-string args only)."""
    return get_road_net_detail(dict(request.args))
|
||||
|
|
@ -0,0 +1,174 @@
|
|||
import json
|
||||
from collections import Counter
|
||||
|
||||
def gen_work_station_cross_data_list(cross_data_list, cross_info):
    """Build display rows of per-cross metrics for the workstation view.

    Args:
        cross_data_list: raw metric dicts, each carrying at least 'crossid';
            metric values may be missing or empty strings.
        cross_info: mapping crossid -> info dict with a 'name' key.

    Returns:
        List of dicts with normalized numeric metrics; a missing queue
        length is rendered as the placeholder string '-'.
    """
    def _metric(record, key, cast, default):
        # Truthy guard mirrors the upstream data: missing keys, None and
        # empty strings all fall back to the default.
        value = record.get(key)
        return cast(value) if value else default

    rows = []
    for record in cross_data_list:
        crossid = record['crossid']
        queue_len = _metric(record, 'queue_len', float, -1)
        if queue_len == -1:
            # -1 marks "no queue data"; show a dash instead of a number.
            queue_len = '-'
        info = cross_info.get(crossid)
        rows.append({
            'id': crossid,
            'name': info['name'] if info else '',
            'jam_index': _metric(record, 'jam_index', float, 0.0),
            'unbalance_index': _metric(record, 'unbalance_index', float, 0.0),
            'flow': _metric(record, 'flow', int, 0),
            'queue_len': queue_len,
            'stop_times': _metric(record, 'stop_times', float, 0.0),
            'delay_time': _metric(record, 'delay_time', float, 0.0),
        })
    return rows
|
||||
|
||||
def gen_work_station_artery_data_list(artery_data_list):
    """Build display rows of per-artery metrics for the workstation view.

    NOTE(review): relies on a module-level `workstation_db_pool` that is
    not defined in this file — presumably injected via a star-import;
    confirm.
    """
    def _metric(record, key):
        # Missing keys, None and empty strings fall back to 0.0.
        value = record.get(key)
        return float(value) if value else 0.0

    rows = []
    for record in artery_data_list:
        artery_id = record['arteryid']
        # First row of the name lookup; assumes the artery exists — TODO confirm.
        arteryname = (workstation_db_pool.query_artery_name(artery_id))[0]['name']
        rows.append({
            'id': artery_id,
            'name': arteryname,
            'jam_index': _metric(record, 'jam_index'),
            'speed': _metric(record, 'speed'),
            'stop_times': _metric(record, 'stop_times'),
            'un_stop_pass': _metric(record, 'un_stop_pass'),
            'travel_time': _metric(record, 'travel_time'),
            'delay_time': _metric(record, 'delay_time'),
        })
    return rows
|
||||
|
||||
def cal_task_type_num(data_list):
    """Summarize a task list by task_type.

    Args:
        data_list: task dicts, each with an integer 'task_type' key.

    Returns:
        Dict with total count ('list_len'), per-type counts in fixed
        label order ('detail'), and the original list ('task_list').
    """
    # Counter replaces the hand-rolled num1..num5 tally; types outside
    # 1..5 are ignored by both forms.
    counts = Counter(item['task_type'] for item in data_list)
    labels = ['方案优化', '交办任务', '交通舆情', '档案优化', '效果巡检']
    return {
        'list_len': len(data_list),
        'detail': [
            {'label': label, 'value': counts.get(kind, 0)}
            for kind, label in enumerate(labels, start=1)
        ],
        'task_list': data_list,
    }
|
||||
|
||||
|
||||
def gen_task_statistics_res(last_month_should_done, last_month_done, last_month_should_done_but_not, need_todo,
                            last_week_should_done, last_week_done, last_week_should_done_but_not, week_need_todo,
                            yesterday_res, last_day_of_last_month_obj, formatted_last_week, yesterday_date):
    """Assemble the month / week / yesterday task-statistics payload.

    Returns a dict with 'month' and 'week' summary sections (done,
    overdue, upcoming, expected counts) plus a 'yesterday' section built
    from the raw task-history rows in *yesterday_res*.
    """
    def _period(date_label, done_list, overdue, todo, should):
        # One summary section, shared by the month and week views.
        return {
            'date': date_label,
            'data_list': [
                {
                    'label': '已完成',
                    'value': len(done_list),
                    'detail_list': gen_task_done_detail(done_list)
                },
                {'label': '应完成逾期', 'value': overdue},
                {'label': '后续待完成', 'value': todo},
                {'label': '应完成', 'value': should},
            ],
        }

    yesterday_data = []
    for row in yesterday_res:
        # Each history row stores a JSON operation record in 'content'.
        op_info = json.loads(row['content'])
        progress_info, content = '', ''
        if op_info['operation'] == '编辑任务':
            inner = op_info['content']
            if 'progress_info' in inner.keys():
                progress_info = inner['progress_info']
            if 'content' in inner.keys():
                content = inner['content']
        yesterday_data.append({
            'task_name': row['task_name'],
            'progress_info': progress_info,
            'content': content
        })

    last_month = last_day_of_last_month_obj.strftime('%Y年%m月')
    return {
        'month': _period('上月-' + last_month, last_month_done,
                         last_month_should_done_but_not, need_todo, last_month_should_done),
        'week': _period('上周-' + formatted_last_week, last_week_done,
                        last_week_should_done_but_not, week_need_todo, last_week_should_done),
        'yesterday': {
            'date': '昨天-' + yesterday_date,
            'data_list': yesterday_data
        }
    }
|
||||
|
||||
def gen_task_done_detail(data_list):
    """Count completed tasks per type code.

    Args:
        data_list: iterable of task-type codes (1..5).

    Returns:
        List of {'type': <label or None>, 'count': n} dicts, ordered by
        first occurrence of each type in *data_list*.
    """
    type_names = {1: '方案优化', 2: '交办任务', 3: '交通舆情', 4: '档案优化', 5: '效果巡检'}
    return [
        {'type': type_names.get(kind), 'count': count}
        for kind, count in Counter(data_list).items()
    ]
|
||||
|
|
@ -0,0 +1,196 @@
|
|||
import hashlib
|
||||
from PIL import Image
|
||||
import base64
|
||||
import io
|
||||
from app.db_func_base import *
|
||||
from flask import g, has_app_context
|
||||
|
||||
|
||||
# MD5 hashing helper (NOTE: MD5 is not collision-resistant; it is used
# here for legacy password-hash compatibility, not as a security primitive).
def md5_hash(text):
    """Return the hex MD5 digest of *text* (UTF-8 encoded)."""
    return hashlib.md5(text.encode('utf-8')).hexdigest()
|
||||
|
||||
|
||||
def image_to_base64(image_path):
    """Load an image file, re-encode it as JPEG and return a base64 string.

    Args:
        image_path: path to any image format PIL can open.

    Returns:
        Base64-encoded JPEG bytes, decoded to an ASCII str.
    """
    buffer = io.BytesIO()
    with Image.open(image_path) as img:
        # Re-encode to JPEG in memory regardless of the source format.
        img.save(buffer, format='JPEG')
    # Encode the raw bytes as a Base64 string.
    return base64.b64encode(buffer.getvalue()).decode('utf-8')
|
||||
|
||||
|
||||
class WorkstationDbHelper(TableDbHelperBase):
    """DB access helper for the workstation feature set.

    Issues SQL against the `workstation`, `task`, `tmnet` and per-node
    `traffic_*` schemas through the shared connection pool.
    """

    def __init__(self, pool):
        # Connection pool supplied by the caller.
        self.db_pool = pool
        # Default schema for workstation-owned tables.
        self.DB_Name = 'workstation'

    def get_traffic_db_name(self, nodeid=None):
        """Resolve the per-node traffic schema name.

        Node 9660 maps to the plain `traffic` schema; any other node uses
        `traffic_<nodeid>`.  When a Flask app context is active and carries
        `g.nodeid`, that value takes precedence over the explicit argument.
        NOTE(review): the g-based override silently wins over *nodeid* —
        confirm that precedence is intended.
        """
        db_traffic_name = 'traffic'
        if nodeid and str(nodeid) != '9660':
            db_traffic_name = f'traffic_{nodeid}'
        if has_app_context() and hasattr(g, "nodeid") and str(g.nodeid) != '9660':
            db_traffic_name = f'traffic_{g.nodeid}'
        return db_traffic_name
|
||||
|
||||
# Create a user account.
def create_user(self, user_name, password, user_role=2, image=None):
    """Insert a user row with an MD5-hashed password and optional avatar.

    FIX: the string values (user name, password hash, avatar base64) were
    interpolated without surrounding quotes, producing invalid SQL; they
    are now quoted.
    NOTE(review): this still builds SQL by string interpolation and is
    injectable — switch to parameterized queries once do_execute
    supports bind parameters.
    """
    pwd_md5 = md5_hash(password)
    image_base64 = ''
    if image:
        image_base64 = image_to_base64(image)
    sql = (
        f"""
        insert into {self.DB_Name}.users(user_name, pwd, user_role, image) value ('{user_name}', '{pwd_md5}', {user_role}, '{image_base64}');
        """
    )
    return self.do_execute(sql)
|
||||
|
||||
def create_gen_corss_report_task(self, nodeid, crossid, cross_name, report_type, items, tasks, create_user_id,
                                 create_user_name):
    """Queue a cross-report generation task row.

    NOTE(review): method name typo ("corss") kept for caller compatibility.
    NOTE(review): values are interpolated directly into the SQL string —
    injectable; prefer parameterized queries.
    """
    sql = (
        f"""
        insert into {self.DB_Name}.gen_report_task(nodeid, report_type, crossid, cross_name, items, tasks, create_user_id, create_user_name)
        value({nodeid}, {report_type}, '{crossid}', '{cross_name}', '{items}', '{tasks}', '{create_user_id}', '{create_user_name}');
        """
    )
    return self.do_execute(sql)
|
||||
|
||||
def query_report_task_list(self, nodeid, create_user_id=None, create_user_name=None):
    """List report-generation tasks for a node, optionally per user.

    When both user id and user name are truthy, the result is narrowed to
    that user's tasks; otherwise all of the node's tasks are returned.
    NOTE(review): values are interpolated into the SQL — injectable.
    """
    if create_user_id and create_user_name:
        sql = (
            f"""
            select * from {self.DB_Name}.gen_report_task where nodeid = {nodeid} and create_user_id = '{create_user_id}' and create_user_name = '{create_user_name}';
            """
        )
    else:
        sql = (
            f"""
            select * from {self.DB_Name}.gen_report_task where nodeid = {nodeid};
            """
        )
    return self.do_select(sql)
|
||||
|
||||
def update_report_task_status(self, nodeid, create_user_id, create_user_name, crossid, cross_name, report_type,
                              items, tasks, status, download_url=None, failed_reason=None, file_name=None):
    """Update status/result columns of the matching gen_report_task row.

    NOTE(review): when download_url / failed_reason / file_name are None,
    the f-string stores the literal text 'None' in the column — confirm
    this is intended (NULL would normally be expected).
    NOTE(review): interpolated SQL — injectable.
    """
    sql = (
        f"""update {self.DB_Name}.gen_report_task set status = {status},failed_reason = '{failed_reason}',download_url = '{download_url}',file_name = '{file_name}'
        where nodeid = {nodeid} and report_type = {report_type} and crossid = '{crossid}' and cross_name = '{cross_name}'
        and items = '{items}' and tasks = '{tasks}' and create_user_id = '{create_user_id}'
        and create_user_name = '{create_user_name}';"""
    )
    return self.do_execute(sql)
|
||||
|
||||
def create_favorite_cross_artery(self, nodeid, create_user_id, create_user_name, favorite_type, favorite_id,
                                 favorite_name, area_id: int):
    """Insert a favorite (cross or artery) row for a user.

    NOTE(review): interpolated SQL — injectable; prefer parameterized
    queries.
    """
    sql = (
        f"""insert into {self.DB_Name}.user_favorite(nodeid, user_id, user_name,favorite_type, favorite_id, favorite_name,area_id)
        value ({nodeid},'{create_user_id}', '{create_user_name}', {favorite_type}, '{favorite_id}', '{favorite_name}',{area_id});"""
    )
    return self.do_execute(sql)
|
||||
|
||||
def query_favorite_info_list(self, nodeid, create_user_id, create_user_name, area_id: int):
    """Return all favorite rows for a user within a node and area.

    NOTE(review): interpolated SQL — injectable.
    """
    sql = (
        f"""
        select * from {self.DB_Name}.user_favorite where nodeid = {nodeid} and user_id = '{create_user_id}' and user_name = '{create_user_name}' and area_id = {area_id};
        """
    )
    return self.do_select(sql)
|
||||
|
||||
def query_cross_rt_info(self):
    """Return real-time cross delay rows at the latest timestamp.

    The latest timestamp is read from delay_update_info ('latest_tp') in
    the schema resolved by get_traffic_db_name() (which consults the
    Flask app context when present).
    """
    sql = (
        f"""
        select * from {self.get_traffic_db_name()}.cross_delay_rt where timestamp = (select value from {self.get_traffic_db_name()}.delay_update_info where param = 'latest_tp');
        """
    )
    return self.do_select(sql)
|
||||
|
||||
# def query_artery_rt_info(self):
|
||||
# sql = (
|
||||
# f"""
|
||||
# select * from {self.get_traffic_db_name()}.artery_delay_rt where timestamp = (select value from {self.get_traffic_db_name()}.delay_update_info where param = 'latest_tp') and duration = 1
|
||||
# """
|
||||
# )
|
||||
# return self.do_select(sql)
|
||||
|
||||
def query_artery_name(self, arteryid):
    """Look up an artery's display name in the tmnet schema.

    NOTE(review): arteryid is interpolated into the SQL — injectable.
    """
    sql = (
        f"""select `name` from tmnet.artery where arteryid = '{arteryid}'"""
    )
    return self.do_select(sql)
|
||||
|
||||
def delete_favorite_ids(self, nodeid, ids, user_id, user_name, area_id):
    """Delete the user's favorites whose favorite_id appears in *ids*.

    *ids* is spliced verbatim into the IN (...) clause, so it must already
    be a comma-separated, properly quoted list.
    NOTE(review): interpolated SQL — injectable.
    """
    sql = (
        f"""delete from workstation.user_favorite where user_id = '{user_id}' and user_name = '{user_name}' and nodeid = '{nodeid}' and area_id = {area_id} and favorite_id in ({ids})"""
    )
    return self.do_execute(sql)
|
||||
|
||||
def query_task_list(self, nodeid, user_id, area_id):
    """Return open tasks (record_state = 0) assigned to a user.

    NOTE(review): interpolated SQL — injectable.
    """
    sql = (
        f"""
        select * from task.task where executor = '{user_id}' and nodeid = {nodeid} and record_state = 0 and area_id = {area_id};
        """
    )
    return self.do_select(sql)
|
||||
|
||||
def query_task_yesterday_operator(self, yesterday_date, nodeid):
    """Return (task_name, content) pairs for task-history rows on a date.

    Joins yesterday's task.task_history rows with task.task to resolve
    the human-readable task name.
    NOTE(review): interpolated SQL — injectable.
    """
    sql = (
        f"""
        select task_name, content
        from (select taskno, nodeid, history_date, content, operator
              from task.task_history
              where date(history_date) = '{yesterday_date}'
                and nodeid = {nodeid}) t1
        left join
             (select taskno, task_name from task.task) t2
        on t1.taskno = t2.taskno
        """
    )
    return self.do_select(sql)
|
||||
|
||||
def check_favorite_info_exists(self, nodeid, user_id, user_name, f_id, f_name, area_id: int):
    """Return matching favorite rows (non-empty result means it exists).

    NOTE(review): interpolated SQL — injectable.
    """
    sql = (
        f"""
        select * from workstation.user_favorite where user_id = '{user_id}' and user_name = '{user_name}' and nodeid = '{nodeid}' and favorite_id = '{f_id}' and favorite_name = '{f_name}' and area_id = {area_id};
        """
    )
    return self.do_select(sql)
|
||||
|
||||
def query_yesterday_task_data(self, yesterday_date, nodeid, user_name, area_id):
    """Return yesterday's progress-update history rows for a user's open tasks.

    Filters task_history by date/node/area/operator and a LIKE pattern on
    the JSON content, then joins against open (record_state = 0) tasks to
    resolve the task name.
    NOTE(review): the '%%' in the LIKE pattern assumes a %-formatting
    layer inside do_select collapses it to '%' — confirm; if the SQL is
    executed verbatim the pattern will match literal double percents.
    NOTE(review): interpolated SQL — injectable.
    """
    sql_query = (f"""
    select
        t2.task_name,
        content
    from
        (select * from task.task_history where date(history_date)='{yesterday_date}' and nodeid='{nodeid}' and content like '%%operation%%任务进度%%content%%' and operator = '{user_name}' and area_id = {area_id} order by history_date) t1
    left join
        (select task_name,taskno from task.task where record_state = 0 and nodeid = '{nodeid}' and executor = '{user_name}') t2
    on t1.taskno = t2.taskno;
    """)

    return self.do_select(sql_query)
|
||||
|
||||
|
||||
def query_cross_delay_info(self, nodeid, start, cross_list):
    """Latest-day cross delay rows for a given time-of-day slot.

    For each cross in *cross_list*, finds the most recent `day` that has
    data at tp_start = *start* and returns that day's rows.

    FIX: the inner max(day) subquery was hard-coded to the
    `traffic_350100` schema while the outer query resolved the schema
    from *nodeid* via get_traffic_db_name(); both now use the same
    resolved schema so non-350100 nodes query their own data
    consistently.

    Returns:
        (rows, None) on success, (None, error) on failure.
    """
    conn, cursor = self.connect()
    try:
        db_name = self.get_traffic_db_name(nodeid)
        sql = f'''select cd1.*
                  from {db_name}.cross_delay as cd1
                  join (select crossid, max(day) max_day
                        from {db_name}.cross_delay
                        where tp_start = %s
                          and crossid in %s
                        group by crossid) as t1 on cd1.crossid = t1.crossid and cd1.day = t1.max_day
                  where cd1.crossid in %s
                    and cd1.tp_start = %s'''
        # NOTE(review): debug print of the rendered SQL; consider a logger.
        print(cursor.mogrify(sql, (start, cross_list, cross_list, start)))
        cursor.execute(sql, (start, cross_list, cross_list, start))
        result = cursor.fetchall()
        self.close(conn, cursor)
        return result, None
    except Exception as error:
        self.close(conn, cursor)
        return None, error
|
||||
File diff suppressed because it is too large
Load Diff
Loading…
Reference in New Issue