diff --git a/app/compare_views.py b/app/compare_views.py
index d7ceb5e..d343e7f 100644
--- a/app/compare_views.py
+++ b/app/compare_views.py
@@ -30,4 +30,19 @@ def query_cross_survey_jobs_api():
 
 @app.route('/api/rerun_cross_survey', methods=['GET'])
 def rerun_cross_survey_api():
-    return rerun_cross_survey_job(request.args)
\ No newline at end of file
+    return rerun_cross_survey_job(request.args)
+
+
+@app.route('/api/query_cross_survey_result', methods=['GET'])
+def query_cross_survey_job_detail_api():
+    return query_cross_survey_result(request.args)
+
+
+@app.route('/api/rerun_cross_survey_part', methods=['POST'])
+def rerun_cross_survey_part_api():
+    return rerun_cross_survey_dir(request.json)
+
+
+@app.route('/api/query_usable_survey_crosses', methods=['GET'])
+def query_usable_survey_crosses_api():
+    return query_usable_survey_crosses(request.args)
\ No newline at end of file
diff --git a/app/cross_compare_worker.py b/app/cross_compare_worker.py
index a33e90d..e891045 100644
--- a/app/cross_compare_worker.py
+++ b/app/cross_compare_worker.py
@@ -235,7 +235,7 @@ def query_cross_survey_job_list(params):
     end_index = start_index + int(page_size)
     job_list = db_tmnet.query_survey_job_info_by_area_id(area_id)
     for row in job_list:
-        row['cross_name'] = g_roadnet.query_cross(row['cross_id']).name
+        row['cross_name'] = g_roadnet.query_cross(row['crossid']).name
         complete_day = '-'
         status = row['status']
         if row['end_day'] < int(datetime.now().strftime('%Y%m%d')) and status != 2:
@@ -247,9 +247,10 @@ def query_cross_survey_job_list(params):
         row['status'] = status
         row['complete_day'] = complete_day
         row['update_time'] = row['update_time'].strftime('%Y%m%d')
+        row['create_time'] = row['create_time'].strftime('%Y%m%d')
 
     if keyword and keyword != '':
-        job_list = find_job_info(job_list, keyword)
+        job_list = find_job_info(keyword, job_list)
     if start_date and start_date != '' and end_date and end_date != '':
         job_list = list(filter(lambda item: str(item['end_day']) > str(end_date), job_list))
         job_list = list(filter(lambda item: str(item['start_day']) < str(start_date), job_list))
@@ -297,6 +298,35 @@ def rerun_cross_survey_job(params):
     return json.dumps(make_common_res(9, '重跑任务失败'))
 
+def query_usable_survey_crosses(params):
+    nodeid = check_param(params, 'nodeid')
+    if not nodeid:
+        return json.dumps(make_common_res(2, '缺少nodeid, 请刷新后重试'))
+    area_id = check_param(params, 'area_id')
+    if not area_id:
+        return json.dumps(make_common_res(3, '缺少area_id, 请刷新后重试'))
+    userid = check_param(params, 'userid')
+    if not userid:
+        return json.dumps(make_common_res(4, '缺少userid, 请刷新后重试'))
+    area_list = db_user.query_areaid_list(userid)
+    if not area_list or len(area_list) < 1:
+        return json.dumps(make_common_res(5, '用户信息异常'))
+    area_list = map(int, area_list)
+    if not str(area_id).lstrip('-').isdigit() or int(area_id) not in area_list:
+        return json.dumps(make_common_res(5, '辖区id异常,请检查后重试'))
+
+    area_survey_jobs = db_tmnet.query_survey_job_info_by_area_id(area_id)
+    usable_crossid_list = list(set([job['crossid'] for job in area_survey_jobs]))
+    res_list = []
+    for crossid in usable_crossid_list:
+        cross_name = g_roadnet.query_cross(crossid).name
+        res_list.append({'crossid': crossid, 'cross_name': cross_name})
+    res = make_common_res(0, 'ok')
+    res['data'] = res_list
+    return json.dumps(res)
+
+
 def query_cross_survey_usable_dates(params):
     nodeid = check_param(params, 'nodeid')
     if not nodeid:
         return json.dumps(make_common_res(2, '缺少nodeid, 请刷新后重试'))
@@ -405,6 +435,73 @@ def query_cross_survey_result(params):
                 'dist': dist
             })
         src_images[src_dir] = image_list
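+    # Build per-approach rerun flags from done_inroads entries of the form '<src_dir>:<status>'.
+    # A status of '2' is assumed to mean the direction has finished and may be rerun.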
+    dir_info = {}
+    for item in done_inroads_list:
+        dir_info[item.split(':')[0]] = 0 if item.split(':')[1] != '2' else 1
+
+    res = make_common_res(0, 'ok')
+    res['data'] = {
+        'crossid': crossid,
+        'name': cross_static_info['name'],
+        'location': cross_static_info['location'],
+        'src_images': src_images,
+        'ledger_info': cross_ledger_info,
+        'can_rerun_dir': dir_info
+    }
+    return json.dumps(res, ensure_ascii=False)
+
+
+def rerun_cross_survey_dir(params):
+    nodeid = check_param(params, 'nodeid')
+    if not nodeid:
+        return json.dumps(make_common_res(2, '缺少nodeid, 请刷新后重试'))
+    area_id = check_param(params, 'area_id')
+    if not area_id:
+        return json.dumps(make_common_res(3, '缺少area_id, 请刷新后重试'))
+    userid = check_param(params, 'userid')
+    if not userid:
+        return json.dumps(make_common_res(4, '缺少userid, 请刷新后重试'))
+    area_list = db_user.query_areaid_list(userid)
+    if not area_list or len(area_list) < 1:
+        return json.dumps(make_common_res(5, '用户信息异常'))
+    area_list = map(int, area_list)
+    if not str(area_id).lstrip('-').isdigit() or int(area_id) not in area_list:
+        return json.dumps(make_common_res(5, '辖区id异常,请检查后重试'))
+    crossid = check_param(params, 'crossid')
+    if not crossid:
+        return json.dumps(make_common_res(6, '缺少路口id,请刷新后重试'))
+    jobid = check_param(params, 'jobid')
+    if not jobid:
+        return json.dumps(make_common_res(7, '缺少任务id,请刷新后重试'))
+    rerun_list = check_param(params, 'rerun_list')
+    if not rerun_list or len(rerun_list) < 1:
+        return json.dumps(make_common_res(8, '缺少重跑方向,请刷新后重试'))
+
+    job_info = db_tmnet.query_survey_job_info_by_id(jobid)
+    done_inroads = job_info[0]['done_inroads']
+    done_inroads_list = done_inroads.split('|')
+    done_src_dir_dict = {item.split(':')[0]: item.split(':')[1] for item in done_inroads_list}
+    fail_dir_list, fail_desc = [], '失败的方向有:'
+    for src_dir in rerun_list:
+        if src_dir not in done_src_dir_dict:
+            fail_dir_list.append(src_dir)
+            fail_desc += f'{srcDir_toStr(src_dir)}' + '进口: 方向信息异常'
+            continue
+        if done_src_dir_dict[src_dir] != '2':
+            fail_dir_list.append(src_dir)
+            fail_desc += f'{srcDir_toStr(src_dir)}' + '进口: 当前方向任务状态未完成'
+            continue
+        done_src_dir_dict[src_dir] = '0'
+    done_inroads_str = '|'.join([key + ':' + done_src_dir_dict[key] for key in done_src_dir_dict])
+    ret = db_tmnet.update_cross_survey_job_status(job_info, jobid, done_inroads_str)
+    if not ret:
+        return json.dumps(make_common_res(9, '更新任务状态失败'))
+    res = make_common_res(0, 'ok')
+    res['data'] = {
+        'fail_dir_list': fail_dir_list,
+        'fail_desc': fail_desc
+    }
+    return json.dumps(res)
diff --git a/app/cross_monitor_worker.py b/app/cross_monitor_worker.py
index 7aead81..f5552cb 100644
--- a/app/cross_monitor_worker.py
+++ b/app/cross_monitor_worker.py
@@ -29,21 +29,23 @@ def query_monitor_task_usable_date_list(params):
     tp_desc = db_tmnet.query_city_tp_info(nodeid, area_id)
     if not tp_desc:
         tp_info = [
+            "00:00-23:59",
            "00:00-07:00",
            "07:00-09:00",
            "09:00-17:00",
            "17:00-19:00",
            "19:00-22:00",
-            "22:00-23:59",
-            "00:00-23:59"
+            "22:00-23:59"
         ]
         peak_tp = [
            "07:00-09:00",
            "17:00-19:00"
         ]
     else:
-        tp_info = tp_desc[0]['tp_desc'].split(',')
-        tp_info.append("00:00-23:59")
+        tp_info = ["00:00-23:59"]
+        tps = tp_desc[0]['tp_desc'].split(',')
+        for item in tps:
+            tp_info.append(item)
         peak_tp = tp_desc[0]['peak_tp'].split(',')
     res = make_common_res(0, 'ok')
     res['data'] = {
@@ -280,7 +282,7 @@ def query_monitor_problems(params):
         routing_crosses_dict[crossid]['cross_ledger_info'] = cross_ledger_info
     cross_report_pb = pb.xl_cross_report_t()
     cross_report_pb.ParseFromString(row_list[0]['data'])
-    cross_problem_records = db_cross.query_cross_problem_record(area_id, datetime.strptime(query_date, '%Y%m%d').strftime('%Y-%m-%d'), date_type)
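+    # query_date is assumed to already be a YYYYMMDD string, matching the day column
+    # filtered in db_cross_delay.query_cross_problem_record, so no reformatting is needed.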
+    cross_problem_records = db_cross.query_cross_problem_record(area_id, query_date, date_type)
     records = {
         'normal': {},
         'tide': {}
diff --git a/app/db_cross_delay.py b/app/db_cross_delay.py
index 04fc1ca..f72d1f3 100644
--- a/app/db_cross_delay.py
+++ b/app/db_cross_delay.py
@@ -132,7 +132,7 @@ class CrossDbHelper(TableDbHelperBase):
 
     def query_cross_problem_record(self, area_id, update_date, query_type):
         sql = """
-        select * from cross_doctor_matedata.cross_tp_problem_record where area_id = %s and date(update_time) = '%s' and date_type = '%s'
+        select * from cross_doctor_matedata.cross_tp_problem_record where area_id = %s and day = '%s' and date_type = '%s'
         """ % (area_id, update_date, query_type)
         return self.do_select(sql)
 
@@ -159,7 +159,20 @@ class CrossDbHelper(TableDbHelperBase):
         select * from traffic_{nodeid}.csr_data where key = '{key}' and crossid = '{crossid}'
         """
         return self.do_select(sql)
-
+
+    def check_err_data(self):
+        sql = """
+        select * from cross_doctor_matedata.cross_phase_problems_record
+        """
+        return self.do_select(sql)
+
+    def update_err_data(self, crossid, start_hm, end_hm, cont_times, final_state, level_color, first_date, new_first_date):
+        sql = """
+        update cross_doctor_matedata.cross_phase_problems_record set first_date = %s where crossid = '%s' and start_hm = %s and end_hm = %s and cont_times = %s and final_state = %s and level_color = %s and first_date = %s
+        """ % (new_first_date, crossid, start_hm, end_hm, cont_times, final_state, level_color, first_date)
+        return self.do_execute(sql)
+
+
     def query_cross_flow_usable_date_sql(self, nodeid, crossid):
         conn, cursor = self.connect()
         try:
@@ -172,7 +185,7 @@ class CrossDbHelper(TableDbHelperBase):
             return None, error
         finally:
             self.close(conn, cursor)
-
+
     def query_cross_flowdata(self, nodeid, crossid, date_list):
         conn, cursor = self.connect()
         try:
diff --git a/app/eva_common.py b/app/eva_common.py
index 86a01da..bfa9bb7 100644
--- a/app/eva_common.py
+++ b/app/eva_common.py
@@ -284,9 +284,9 @@ def gen_overview_index(avg_cross_delay_info, inroad_static_info_dict, nodeid, da
     delay_time = avg_cross_delay_info.delay_info.delay_time if car_num >= 10 else '-'
     high_stop_turn_ratio_desc = gen_high_stop_turn_ratio_desc(avg_cross_delay_info.inroad_delay_infos, inroad_static_info_dict, car_num) if car_num >= 10 else []
     tide_index_list = calc_tide_index(crossid, nodeid, date_list, roads_dir_dict)
-    usable_tide_list = [item for item in tide_index_list if item != 0]
+    usable_tide_list = [item for item in tide_index_list if item != '-' and item != 0]
     tide_index = round(sum(usable_tide_list) / len(usable_tide_list), 2) if len(usable_tide_list) > 0 else 0
-    service_level = calc_service_level(delay_time)
+    service_level = calc_service_level(delay_time, stop_times)
     overview_res = {
         'jam_index': jam_index,
         'stop_times': stop_times,
@@ -473,7 +473,7 @@ def gen_road_delay_index(avg_cross_delay_info, roads_dir_dict):
         roadid = road_index.inroadid
         if roadid not in road_dir.keys():
             continue
-        service_level = calc_service_level(road_index.delay_info.delay_time)
+        service_level = calc_service_level(road_index.delay_info.delay_time, road_index.delay_info.stop_times)
         road_flow_index[roadid] = {
             'src_dir': road_dir[roadid],
             'stop_times': round(road_index.delay_info.stop_times, 2) if road_index.delay_info.car_num >= 5 else '-',
@@ -490,7 +490,7 @@ def gen_road_delay_index(avg_cross_delay_info, roads_dir_dict):
         for flow_delay_info in flow_delay_infos:
             if flow_delay_info.turn_type not in (0, 1):
                 continue
-            flow_service_level = calc_service_level(flow_delay_info.delay_info.delay_time)
+            flow_service_level = calc_service_level(flow_delay_info.delay_info.delay_time, flow_delay_info.delay_info.stop_times)
             road_flow_index[roadid]['flow_delays'][flow_delay_info.turn_type] = {
                 'stop_times': round(flow_delay_info.delay_info.stop_times, 2) if flow_delay_info.delay_info.car_num >= 5 else '-',
                 'high_park_percent': str(flow_delay_info.delay_info.high_park_percent) + '%' if flow_delay_info.delay_info.car_num >= 5 else '-',
@@ -640,21 +640,31 @@ def gen_flow_turn_rate_index(avg_cross_delay_info, roads_dir_dict):
     return res
 
 
-def calc_service_level(delay_time):
+def calc_service_level(delay_time, stop_times):
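+    # Assumed intent: delay_time still drives the grade, but a low stop rate
+    # (below 0.3/0.5/0.65/0.8/1.0 for bands B through F) softens the result by one level.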
     if delay_time == '-' or delay_time == '':
         return '-'
     if delay_time <= 10:
         service_level = 'A'
     elif 10 < delay_time <= 20:
         service_level = 'B'
+        if stop_times < 0.3:
+            service_level = 'A'
     elif 20 < delay_time <= 35:
         service_level = 'C'
+        if stop_times < 0.5:
+            service_level = 'B'
     elif 35 < delay_time <= 55:
         service_level = 'D'
+        if stop_times < 0.65:
+            service_level = 'C'
     elif 55 < delay_time <= 80:
         service_level = 'E'
+        if stop_times < 0.8:
+            service_level = 'D'
     else:
         service_level = 'F'
+        if stop_times < 1:
+            service_level = 'E'
 
     return service_level
 
@@ -672,7 +682,7 @@ def calc_tide_index(crossid, nodeid, date_list, roads_dir_dict, date_type=None):
     # 构建对向进口道对
     subtend_road_pair = gen_subtend_road_pair(roads_dir_dict)
     for date in date_list:
-        max_am_flow, max_pm_flow, tide_index, max_am_flow_road, max_pm_flow_road = 0, 0, 0, '', ''
+        max_am_flow, max_pm_flow, tide_index, max_am_flow_road, max_pm_flow_road = 0, 0, '-', '', ''
         if date not in date_am_delay_info_dict.keys() or date not in date_pm_delay_info_dict.keys():
             tide_index_list.append(tide_index)
             continue
@@ -809,7 +819,7 @@ def parse_single_cross_delay_info(crossid, nodeid, data_list, data_type, roads_d
             # 20251104新增流量真实数值需求
             cross_car_num,
             # 路口失衡系数 潮汐指数(当为小时级指标时,不计算该值) 服务水平
-            cross_imbalance_index, tide_index[0], calc_service_level(item_cross_delay_info.delay_info.delay_time),
+            cross_imbalance_index, tide_index[0], calc_service_level(item_cross_delay_info.delay_info.delay_time, item_cross_delay_info.delay_info.stop_times),
             # 指标变化率颜色展示flag, 不含服务水平
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0
         ]
@@ -860,7 +870,7 @@ def parse_single_cross_delay_info(crossid, nodeid, data_list, data_type, roads_d
             # 进口道流量占比
             round(road_car_num / cross_car_num * 100, 2) if cross_car_num > 0 else '-',
             # 服务水平
-            calc_service_level(road_data_dict[roads_dir_dict[src_dir]['in']].delay_info.delay_time),
+            calc_service_level(road_data_dict[roads_dir_dict[src_dir]['in']].delay_info.delay_time, road_data_dict[roads_dir_dict[src_dir]['in']].delay_info.stop_times),
             # 指标变化率颜色展示flag, 顺序为上述指标顺序, 不含服务水平
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0
         ]
@@ -891,7 +901,7 @@ def parse_single_cross_delay_info(crossid, nodeid, data_list, data_type, roads_d
                 # 分流转向占比
                 round(flow_delay_info.delay_info.car_num / road_car_num * 100, 2) if road_car_num > 0 else '-',
                 # 服务水平
-                calc_service_level(flow_delay_info.delay_info.delay_time),
+                calc_service_level(flow_delay_info.delay_info.delay_time, flow_delay_info.delay_info.stop_times),
                 # 指标变化率颜色展示flag 不含服务水平
                 0, 0, 0, 0, 0, 0, 0, 0, 0
             ]
diff --git a/app/monitor_common.py b/app/monitor_common.py
index 7eebc0b..bd61945 100644
--- a/app/monitor_common.py
+++ b/app/monitor_common.py
@@ -139,7 +139,7 @@ def gen_monitor_overview_data(cross_report_pb, date_type, routing_crosses, speci
     for cross_delay_info in cross_delay_info_list:
         if special_time_range != '' and not is_overlap_greater_than_one_hour(cross_delay_info.tp.start_hm, cross_delay_info.tp.end_hm, special_time_range):
             continue
-        service_level = calc_service_level(cross_delay_info.delay_info.delay_time)
+        service_level = calc_service_level(cross_delay_info.delay_info.delay_time, cross_delay_info.delay_info.stop_times)
         if service_level != '-':
             overview['cross_service_levels']['total'] += 1
             if service_level in ('A', 'B'):
@@ -252,18 +252,24 @@ def gen_monitor_cross_ledger_info(routing_crosses, nodeid, area_id, slc_company_
             'avg_num': round(phase_info.data.avg_tp_num, 2),
             'tp_num': tp_num_res,
         }
-    slc_company_info = db_tmnet.query_crosses_slc_company_info(crossid_list)
-    internet_info = db_tmnet.query_crosses_internet_info(crossid_list)
+    slc_company_info = db_tmnet.query_crosses_slc_company_info(nodeid, area_id, crossid_list)
+    internet_info = db_tmnet.query_crosses_internet_info(nodeid, area_id, crossid_list)
     slc_company_info_list, internet_info_list, has_reverse_turn, reversible_lane_num = [], [], 0, 0
     for row in slc_company_info:
-        if row['slc_company'] not in slc_company_dict.keys():
-            continue
-        slc_company_name = slc_company_dict[row['slc_company']]
-        slc_num = row['num']
-        slc_company_info_list.append({
-            'name': slc_company_name,
-            'num': slc_num
-        })
+        if not row['slc_company']:
+            slc_company_info_list.append({
+                'name': '未配置',
+                'num': row['num']
+            })
+        else:
+            if int(row['slc_company']) not in slc_company_dict.keys():
+                continue
+            slc_company_name = slc_company_dict[int(row['slc_company'])]
+            slc_num = row['num']
+            slc_company_info_list.append({
+                'name': slc_company_name,
+                'num': slc_num
+            })
 
     for row in internet_info:
         internet_code = row['internet']
@@ -281,12 +287,11 @@ def gen_monitor_cross_ledger_info(routing_crosses, nodeid, area_id, slc_company_
             'num': num
         })
 
-    for croosid in crossid_list:
-        inroads = db_tmnet.query_cross_inroads(croosid, nodeid)
-        inroadid_list = [item['roadid'] for item in inroads]
-        cross_has_reverse_lane = db_tmnet.check_reverse_turn(inroadid_list)
-        if cross_has_reverse_lane:
-            has_reverse_turn += 1
+    all_crosses_inroads = db_tmnet.query_inroads_by_crossids(nodeid, crossid_list)
+    inroadid_list = [item['roadid'] for item in all_crosses_inroads]
+    cross_has_reverse_lane = db_tmnet.check_reverse_turn(inroadid_list)
+    virtual_roads = db_tmnet.query_virtual_roads(crossid_list)
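+    # Assumed semantics: check_reverse_turn returns a count of matching lanes, and
+    # user-defined (virtual) approaches contribute their reverse_turn flags to the same total.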
+    has_reverse_turn = int(cross_has_reverse_lane) + int(sum([item['reverse_turn'] for item in virtual_roads]))
     reversible_lane_num = db_tmnet.calc_has_reversible_lane_crosses(crossid_list)[0]['num']
     special_info = {
         'reverse_turn': has_reverse_turn,
@@ -432,7 +437,7 @@ def parse_cross_index_dict(delay_index_list, special_time_range, routing_crosses
         'weekdays': weekdays,
         'tp_type': tp_type,  # 时段类型,默认0表示普通时段,1表示小时级时段,2表示单日典型时段
         'weekdays_str': weekdays_str,
-        'service_level': calc_service_level(delay_index.delay_info.delay_time),
+        'service_level': calc_service_level(delay_index.delay_info.delay_time, delay_index.delay_info.stop_times),
         'stop_times': round(delay_index.delay_info.stop_times, 2),
         'high_park_percent': delay_index.delay_info.high_park_percent,
         'park_time': delay_index.delay_info.park_time,
@@ -603,7 +608,7 @@ def parse_monitor_trend_data(monitor_datas, date_type, special_time_range):
         for cross_delay_info in cross_delay_info_list:
             if special_time_range != '' and not is_overlap_greater_than_one_hour(cross_delay_info.tp.start_hm, cross_delay_info.tp.end_hm, special_time_range):
                 continue
-            service_level = calc_service_level(cross_delay_info.delay_info.delay_time)
+            service_level = calc_service_level(cross_delay_info.delay_info.delay_time, cross_delay_info.delay_info.stop_times)
             if service_level != '-':
                 total += 1
                 if service_level in('E', 'F'):
@@ -734,7 +739,7 @@ def parse_single_cross_delay_info4monitor(crossid, nodeid, data_list, date_type,
         if date_type != 'day':
             date_list = item['week_dates']
             tide_index_list = calc_tide_index(crossid, nodeid, date_list, roads_dir_dict)
-            usable_tide_list = [item for item in tide_index_list if item != 0]
+            usable_tide_list = [item for item in tide_index_list if item != '-' and item != 0]
             tide_index = round(sum(usable_tide_list) / len(usable_tide_list), 2) if len(usable_tide_list) > 0 else 0
         else:
             date_list = [day]
@@ -744,7 +749,7 @@ def parse_single_cross_delay_info4monitor(crossid, nodeid, data_list, date_type,
         item_res['high_park_percent'] = item_cross_delay_info.delay_info.high_park_percent
         item_res['park_time'] = item_cross_delay_info.delay_info.park_time
         item_res['delay_time'] = item_cross_delay_info.delay_info.delay_time
-        item_res['service_level'] = calc_service_level(item_cross_delay_info.delay_info.delay_time)
+        item_res['service_level'] = calc_service_level(item_cross_delay_info.delay_info.delay_time, item_cross_delay_info.delay_info.stop_times)
         item_res['speed'] = round(item_cross_delay_info.delay_info.speed / 100, 2)
         item_res['move_speed'] = round(item_cross_delay_info.delay_info.move_speed / 100, 2)
         item_res['relative_flow_rate'] = relative_flow_rate
@@ -1427,6 +1432,8 @@ def monitor_phase_problems(nodeid, area_id, date_type, query_date, special_time_
     elif date_type == 'weekend':
         weekdays = '6,7'
     phase_problems = []
+    if special_time_range == '':
+        special_time_range = '00:00-23:59'
     res, e = GetCrossPhaseDiagnosis(int(nodeid), int(area_id), weekdays, special_time_range)
     if not e and len(res) > 0:
         items = res
diff --git a/app/tmnet_db_func.py b/app/tmnet_db_func.py
index 2b100bd..ab8a088 100644
--- a/app/tmnet_db_func.py
+++ b/app/tmnet_db_func.py
@@ -341,10 +341,10 @@ class TmnetDbHelper(TableDbHelperBase):
         sql = """
         select
         t1.crossid,
-        if(t2.name != t1.name, t2.name, t1.name) as name,
-        if(t2.cross_no != t1.cross_no, t2.cross_no, t1.cross_no) as crossno,
-        if(t2.company != t1.company, t2.company, t1.company) as company,
-        if(t2.location != t1.location, t2.location, t1.location) as location,
+        if(t2.name is not null, t2.name, t1.name) as name,
+        if(t2.cross_no is not null, t2.cross_no, t1.cross_no) as crossno,
+        if(t2.company is not null, t2.company, t1.company) as company,
+        if(t2.location is not null, t2.location, t1.location) as location,
         t1.nodeid,
         t1.area_id
         from
@@ -359,18 +359,32 @@ class TmnetDbHelper(TableDbHelperBase):
         """ % (nodeid, area_id, nodeid, area_id, area_id, nodeid, area_id, nodeid, area_id, nodeid, area_id, nodeid)
         return self.do_select(sql)
 
-    def query_crosses_slc_company_info(self, crossid_list):
+    def query_crosses_slc_company_info(self, nodeid, area_id, crossid_list):
         crossids = "'" + "', '".join(item for item in crossid_list) + "'"
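+        # The query below is assumed to prefer the cross_ledger_update_info value when one
+        # exists and differs, falling back to the base `cross` table via the left join.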
         sql = """
-        select slc_company, count(1) as num from cross_ledger_update_info where crossid in (%s) group by slc_company
-        """ % (crossids)
+        select t3.slc_company, count(1) as num
+        from
+        (select
+        if((t1.slc_company is null or t2.slc_company != t1.slc_company), t2.slc_company, t1.slc_company) as slc_company
+        from
+        (select crossid, slc_company from `cross` where nodeid = %s and area_id = %s and crossid in (%s)) t1
+        left join
+        (select crossid, slc_company from `cross_ledger_update_info` where nodeid = %s and area_id = %s and crossid in (%s)) t2 on t1.crossid = t2.crossid) t3 group by t3.slc_company
+        """ % (nodeid, area_id, crossids, nodeid, area_id, crossids)
         return self.do_select(sql)
 
-    def query_crosses_internet_info(self, crossid_list):
+    def query_crosses_internet_info(self, nodeid, area_id, crossid_list):
         crossids = "'" + "', '".join(item for item in crossid_list) + "'"
         sql = """
-        select internet, count(1) as num from cross_ledger_update_info where crossid in (%s) group by internet
-        """ % (crossids)
+        select t3.internet, count(1) as num
+        from
+        (select
+        if((t1.internet is null or t2.internet != t1.internet), t2.internet, t1.internet) as internet
+        from
+        (select crossid, internet from `cross` where nodeid = %s and area_id = %s and crossid in (%s)) t1
+        left join
+        (select crossid, internet from `cross_ledger_update_info` where nodeid = %s and area_id = %s and crossid in (%s)) t2 on t1.crossid = t2.crossid) t3 group by t3.internet
+        """ % (nodeid, area_id, crossids, nodeid, area_id, crossids)
         return self.do_select(sql)
 
     def check_reverse_turn(self, roads):
@@ -382,7 +396,25 @@ class TmnetDbHelper(TableDbHelperBase):
         check_res = 0
         if res:
             check_res = res[0]['num']
-        return False if check_res == 0 else True
+        return check_res
+
+    def query_inroads_by_crossids(self, nodeid, crossid_list):
+        crossids = "'" + "', '".join(item for item in crossid_list) + "'"
+        sql = """
+        select
+        t1.roadid,
+        if(t2.from_crossid is not null, t2.from_crossid, t1.from_crossid) as from_crossid,
+        if(t2.to_crossid is not null, t2.to_crossid, t1.to_crossid) as to_crossid,
+        if(t2.name is not null, t2.name, t1.name) as name,
+        if(t2.src_direct is not null, t2.src_direct, t1.src_direct) as src_direct,
+        if(t2.lane_turn_info is not null, t2.lane_turn_info, t1.lane_turn_info) as lane_turn_info
+        from
+        (select * from road where nodeid = '%s' and recordstate=0 and to_crossid in (%s) and (is_sup_road is null or is_sup_road<>1)) t1
+        left join
+        (select * from road_ledger_update_info where nodeid = '%s' and recordstate=0 and to_crossid in (%s) and (is_sup_road is null or is_sup_road<>1)) t2
+        on t1.roadid = t2.roadid;
+        """ % (nodeid, crossids, nodeid, crossids)
+        return self.do_select(sql)
 
     def calc_has_reversible_lane_crosses(self, crossid_list):
         crossids = "'" + "', '".join(item for item in crossid_list) + "'"
@@ -395,11 +427,11 @@ class TmnetDbHelper(TableDbHelperBase):
         sql = """
         select
         t1.crossid,
-        if(t2.name != t1.name, t2.name, t1.name) as name,
-        if(t2.location != t1.location, t2.location, t1.location) as location,
-        if(t2.internet != t1.internet, t2.internet, t1.internet) as internet,
-        if(t2.cross_model != t1.cross_model, t2.cross_model, t1.cross_model) as cross_model,
-        if(t2.slc_company != t1.slc_company, t2.slc_company, t1.slc_company) as slc_company,
+        if(t2.name is not null, t2.name, t1.name) as name,
+        if(t2.location is not null, t2.location, t1.location) as location,
+        if(t2.internet is not null, t2.internet, t1.internet) as internet,
+        if(t2.cross_model is not null, t2.cross_model, t1.cross_model) as cross_model,
+        if(t2.slc_company is not null, t2.slc_company, t1.slc_company) as slc_company,
         t1.nodeid,
         t1.area_id
         from
@@ -411,6 +443,13 @@ class TmnetDbHelper(TableDbHelperBase):
         """ % (nodeid, area_id, crossid, nodeid, area_id, crossid, area_id, nodeid)
         return self.do_select(sql)
 
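+    # user_defined_roads is assumed to keep a per-road reverse_turn flag for manually
+    # drawn approaches; gen_monitor_cross_ledger_info counts these alongside real inroads.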
+    def query_virtual_roads(self, crossids):
+        crossids = "'" + "', '".join(item for item in crossids) + "'"
+        sql = """
+        select roadid, reverse_turn from user_defined_roads where to_crossid in (%s)
+        """ % (crossids)
+        return self.do_select(sql)
+
     def query_virtual_cross_info(self, crossid, nodeid, area_id):
         sql = """
         select * from user_defined_cross where nodeid = %s and area_id = %s and crossid = '%s'
@@ -425,7 +464,7 @@ class TmnetDbHelper(TableDbHelperBase):
 
     def insert_cross_survey_job(self, values):
         sql = """
-        insert into cross_survey.cross_survey_jobs (crossid, start_day, end_day, status, done_inroads, inroads_dir, nodeid, area_id) values('%s', %s, %s, %s, '%s', '%s', %s, %s)
+        insert into cross_survey.cross_survey_jobs (crossid, start_day, end_day, status, done_inroads, inroads_dir, nodeid, area_id) values(%s, %s, %s, %s, %s, %s, %s, %s)
         """
         return self.do_executemany(sql, values)
 
@@ -443,12 +482,14 @@ class TmnetDbHelper(TableDbHelperBase):
 
     def query_survey_job_info_by_area_id(self, area_id):
         sql = """
-        select id, crossid, start_day, end_day, update_time,status from cross_survey.cross_survey_jobs where area_id = %s
+        select id, crossid, start_day, end_day, update_time,status,create_time from cross_survey.cross_survey_jobs where area_id = %s
         """ % (area_id)
         return self.do_select(sql)
 
-    def update_cross_survey_job_status(self, job_info, jobid):
+    def update_cross_survey_job_status(self, job_info, jobid, done_inroads=None):
+        if not done_inroads:
+            done_inroads = job_info[0]['done_inroads'].replace('1', '0')
         sql = """
-        update cross_survey.cross_survey_job set status = 0, done_inroads = '%s' where id = %s
-        """ % (job_info[0]['done_inroads'].replace('1', '0'), jobid)
+        update cross_survey.cross_survey_jobs set status = 0, done_inroads = '%s' where id = %s
+        """ % (done_inroads, jobid)
         return self.do_execute(sql)
\ No newline at end of file
diff --git a/test.py b/test.py
index 6902306..d6549e9 100644
--- a/test.py
+++ b/test.py
@@ -6,6 +6,7 @@ import configparser
 
 from google.protobuf.json_format import MessageToJson
 
+from app.common_worker import generate_date_range, convert_time
 from app.global_source import *
 
 import proto.xlcomm_pb2 as pb
@@ -47,6 +48,31 @@ def test_get_cross_delay_data():
     print(MessageToJson(cross_delay, indent=None, always_print_fields_with_no_presence=True))
 
 
+def check_err_data():
+    row_list = db_cross.check_err_data()
+    prev_date = (datetime.now() - timedelta(days=1)).strftime('%Y%m%d')
+    for row in row_list:
+        if row['final_state'] == 6 or row['level_color'] == 4:
+            continue
+        first_date = row['first_date']
+        end_date = row['end_date']
+        if end_date:
+            date_list = generate_date_range(first_date, end_date)
+        else:
+            date_list = generate_date_range(first_date, prev_date)
+        phase_types = row['phase_type']
+        phase_details = row['phase_detail']
+        phase_type_list = phase_types.split('^')
+        phase_detail_list = phase_details.split('^')
+        if len(date_list) < len(phase_type_list):
+            # new_first_date = (datetime.strptime(first_date, '%Y%m%d') - timedelta(days=1)).strftime('%Y%m%d')
+            # ret = db_cross.update_err_data(row['crossid'], row['start_hm'], row['end_hm'], row['cont_times'], row['final_state'], row['level_color'], first_date, new_first_date)
+            # if ret != 1:
+            #     print(row)
+
+            print(len(date_list), len(phase_type_list), row['cont_times'])
+
+
 if __name__ == '__main__':
     init()
-    test_get_cross_delay_data()
\ No newline at end of file
+    check_err_data()
\ No newline at end of file