新增导出指定日期合并后的路口延误数据列表
This commit is contained in:
parent
f0668a309e
commit
021dbd83bc
|
|
@ -91,6 +91,11 @@ def explode_cross_problem_detail_api():
|
|||
return explode_cross_problem_detail(dict(request.args))
|
||||
|
||||
|
||||
@app.route('/api/explode_cross_info_merge', methods=['GET'])
def explode_cross_info_merge_api():
    """GET endpoint returning the merged cross-delay list for a date range.

    Thin wrapper: flattens the request query string into a plain dict and
    delegates all validation and assembly to
    explode_cross_problem_detail_merge.
    """
    query_params = dict(request.args)
    return explode_cross_problem_detail_merge(query_params)
|
||||
|
||||
|
||||
from app.user_views import *
|
||||
from app.views_task import *
|
||||
from app.views_workstation import *
|
||||
|
|
|
|||
|
|
@ -498,3 +498,87 @@ def explode_cross_problem_detail(params):
|
|||
return json.dumps(clean_dict_nan(res, '-'), ensure_ascii=False)
|
||||
|
||||
|
||||
def _time_point_code(hhmm):
    """Convert an 'HH:MM' string to the integer time-point code HH*100+MM."""
    hh, mm = hhmm.split(':')
    return int(hh) * 100 + int(mm)


def _merged_cross_row(crossid, cross_name, time_range, day_label, avg_delay):
    """Build one merged result row for a cross.

    Metric fields degrade to '-' when the averaged sample is too small
    (fewer than 10 cars) to be meaningful; color/rate fields are always 0
    placeholders in the merged view. Key insertion order is preserved so
    the serialized JSON matches the original column order.
    """
    info = avg_delay.delay_info if avg_delay else None
    # Same validity guard the original repeated per field, hoisted once.
    valid = bool(avg_delay) and info.car_num >= 10
    row = {
        'cross_id': crossid,
        'cross_name': cross_name,
        'weekday': '-',
        'time_range': time_range,
        'day': day_label,
        'service_level': calc_service_level(info.delay_time, info.stop_times) if valid else '-',
        'stop_times': round(info.stop_times, 2) if valid else '-',
        'high_park_percent': info.high_park_percent if valid else '-',
        'park_time': info.park_time if valid else '-',
        'delay_time': info.delay_time if valid else '-',
        # speed values are stored x100; present as real km/h-style floats.
        'speed': round(info.speed / 100, 2) if valid else '-',
        'move_speed': round(info.move_speed / 100, 2) if valid else '-',
        'relative_flow_rate': 0,
        # flow intentionally only requires avg_delay, not the >=10 guard,
        # matching the original behavior.
        'flow': info.car_num if avg_delay else 0,
        'jam_index': round(info.jam_index, 2) if valid else '-',
        'imbalance_index': round(info.imbalance_index, 2) if valid else '-',
    }
    # Placeholder columns, in the original serialization order.
    for key in ('service_level', 'stop_times', 'high_park_percent',
                'park_time', 'delay_time', 'speed', 'move_speed',
                'relative_flow_rate', 'flow', 'jam_index', 'imbalance_index'):
        row[key + '_color'] = 0
    for key in ('stop_times', 'high_park_percent', 'park_time', 'delay_time',
                'speed', 'move_speed', 'relative_flow_rate', 'flow',
                'jam_index', 'imbalance_index'):
        row[key + '_rate'] = 0
    return row


def explode_cross_problem_detail_merge(params):
    """Export the merged (averaged over a date span) cross delay list.

    For every routing cross in the given area, averages the per-day delay
    data over [start_date, end_date] at the requested time point and emits
    one merged row per cross.

    Args:
        params: request-argument dict. Required keys: nodeid, area_id,
            time_range ('HH:MM-HH:MM'), start_date, end_date. Optional:
            excel ('1' to return an Excel export instead of JSON).

    Returns:
        JSON string {code, msg, data: [row, ...]} on success, a JSON error
        envelope when a required parameter is missing, or the Excel
        response when excel == 1.
    """
    # --- required-parameter validation, one error code per field ---------
    nodeid = check_param(params, 'nodeid')
    if not nodeid:
        return json.dumps(make_common_res(2, '缺少城市信息, 请刷新后重试'))
    area_id = check_param(params, 'area_id')
    if not area_id:
        return json.dumps(make_common_res(3, '缺少辖区信息, 请刷新后重试'))
    time_range = check_param(params, 'time_range')
    if not time_range:
        return json.dumps(make_common_res(4, '缺少时间范围信息, 请刷新后重试'))
    start_date = check_param(params, 'start_date')
    if not start_date:
        return json.dumps(make_common_res(5, '缺少开始时间, 请选择开始时间'))
    end_date = check_param(params, 'end_date')
    if not end_date:
        return json.dumps(make_common_res(6, '缺少结束时间, 请选择结束时间'))
    excel = int(check_param(params, 'excel') or 0)

    # --- cross metadata for the area ------------------------------------
    routing_crosses = db_tmnet.query_routing_crosses(nodeid, area_id)
    routing_crosses_dict = {item['crossid']: item for item in routing_crosses}
    cross_roads_dir_dict = gen_crossids_roads_dir_dict_by_mysql(
        list(routing_crosses_dict.keys()), nodeid)
    for crossid, cross in routing_crosses_dict.items():
        # NOTE(review): original indexed cross_roads_dir_dict[crossid] and
        # would KeyError on a cross missing from the MySQL mapping; .get()
        # degrades to None instead — confirm downstream consumers accept it.
        cross['roads_dir_dict'] = cross_roads_dir_dict.get(crossid)

    # Only the range start is consumed as the DB time-point key; the
    # original also computed an unused tp_end from the range end — dropped.
    tp_start = 't' + str(_time_point_code(time_range.split('-')[0]))

    date_list = generate_date_range(start_date, end_date)
    day_label = str(start_date) + '-' + str(end_date)
    all_cross_data = []
    for crossid, cross in routing_crosses_dict.items():
        days_data = db_cross.query_cross_delay_info(crossid, nodeid, date_list, tp_start)
        # Average across all weekdays (1..7) of the span.
        avg_delay = gen_avg_cross_delay_pb(days_data, '1,2,3,4,5,6,7')
        all_cross_data.append(
            _merged_cross_row(crossid, cross['name'], time_range, day_label, avg_delay))

    if excel == 1:
        return export_excel(all_cross_data)
    res = make_common_res(0, 'ok')
    res['data'] = all_cross_data
    return json.dumps(clean_dict_nan(res, '-'), ensure_ascii=False)
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue