Commit 03a8930d authored by 张晓彤

topo first

parent 11c09e4a
File added
@@ -2,7 +2,7 @@
 <module type="PYTHON_MODULE" version="4">
   <component name="NewModuleRootManager">
     <content url="file://$MODULE_DIR$" />
-    <orderEntry type="jdk" jdkName="Python 3.7 (waytous)" jdkType="Python SDK" />
+    <orderEntry type="jdk" jdkName="Python 3.7 (diapatch) (2)" jdkType="Python SDK" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
 </module>
\ No newline at end of file
@@ -3,5 +3,5 @@
   <component name="JavaScriptSettings">
     <option name="languageLevel" value="ES6" />
   </component>
-  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (waytous)" project-jdk-type="Python SDK" />
+  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (diapatch) (2)" project-jdk-type="Python SDK" />
 </project>
\ No newline at end of file
 {
   "para": {
-    "log_path": "/usr/local/fleet-log/dispatch",
+    "log_path": "/Users/guoao/Desktop/work_log/wek33/integrated-scheduling-v3-master/dispatch",
     "empty_speed": 25,
     "heavy_speed": 22,
     "dump_target_mass": 5000,
......
/venv/
/.idea/
/Logs/
/waytous/
^^^^^^^^^^^^^^^^^^^^^^^^^^
Changelog for package Dispatch
^^^^^^^^^^^^^^^^^^^^^^^^^^
--------------------
[version]: 1.0.1
[message]: add
[feature]:
[fix ]:
[TODO ]:
[info ]: author: zxt ; time: 2021-07-07 15:46:00 ; email: ; tel: ;
--------------------
--------------------
[version]: 1.1.1
[message]: refactor real-time dispatch function
[feature]:
[fix ]:
[TODO ]:
[info ]: author: zxt ; time: 2021-07-23 16:59:00 ; email: ; tel: ;
--------------------
--------------------
[version]: 1.2.1
[message]: add traffic flow planning
[feature]:
[fix ]:
[TODO ]:
[info ]: author: zxt ; time: 2021-07-23 16:59:00 ; email: ; tel: ;
--------------------
--------------------
[version]: 3.0.0
[message]: change dynamic dispatch mode
[feature]:
[fix ]:
[TODO ]:
[info ]: author: zxt ; time: 2021-11-03 10:13:00 ; email: ; tel: ;
--------------------
\ No newline at end of file
{
"para": {
"log_path": "/Users/guoao/Desktop/work_log/wek33/integrated-scheduling-v3-master/dispatch",
"empty_speed": 25,
"heavy_speed": 22,
"dump_target_mass": 5000,
"excavator_target_mass": 5000
},
"mysql": {
"host": "192.168.28.111",
"port": "3306",
"user": "root",
"password": "Huituo@123",
"database": "waytous"
},
"postgresql": {
"host": "192.168.28.111",
"port": "5432",
"user": "postgres",
"password": "Huituo@123",
"database": "shenbao_2021520"
},
"redis": {
"host": "192.168.28.111",
"password": "Huituo@123"
}
}
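For orientation, this is the same config.json structure that settings.py consumes (its loader is currently commented out there). A minimal loading sketch, assuming the file sits next to the code under the name config.json:

import json

# Minimal loader sketch mirroring the commented-out code in settings.py
with open("config.json") as f:
    config = json.load(f)

para_config = config["para"]        # speeds, target masses, log_path
mysql_config = config["mysql"]      # MySQL connection parameters
redis_config = config["redis"]      # redis host/password

print(para_config["empty_speed"], para_config["heavy_speed"])   # 25 22 with the values above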
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/8/24 11:31
# @Author : Opfer
# @Site :
# @File : __init__.py
# @Software: PyCharm
\ No newline at end of file
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/8/24 11:28
# @Author : Opfer
# @Site :
# @File : dump.py
# @Software: PyCharm
from para_config import *
from settings import *
# Unloading equipment (dump) class
class DumpInfo(WalkManage):
    def __init__(self):
        # Number of dump sites
        self.dynamic_dump_num = get_value("dynamic_dump_num")
        # Set of dump sites available for dynamic dispatch
        self.dynamic_dump_set = get_value("dynamic_dump_set")
        # Start time
        self.start_time = datetime.now()
        # Unloading time
        self.unloading_time = np.zeros(self.dynamic_dump_num)
        # Entrance time
        self.entrance_time = np.zeros(self.dynamic_dump_num)
        # Exit time
        self.exit_time = np.zeros(self.dynamic_dump_num)
        # Material type handled at each dump site
        self.dump_material = {}
        # Dump site priority
        self.dump_priority_coefficient = np.ones(self.dynamic_dump_num)
        # Dump site unloading capacity
        self.dump_strength = np.zeros(self.dynamic_dump_num)
        # Initialize mapping and road-network parameters
        self.period_map_para_load()
        self.period_walk_para_load()
        # Get logger
        self.logger = get_logger("zxt.dump")
        # Initialize parameters
        self.para_period_update()

    def get_unloading_time(self):
        return self.unloading_time

    def get_dump_num(self):
        return self.dynamic_dump_num

    def get_dynamic_dump_set(self):
        return self.dynamic_dump_set

    def get_unloading_task_time(self):
        unloading_time = self.unloading_time
        dump_entrance_time = self.entrance_time
        dump_exit_time = self.exit_time
        unloading_task_time = unloading_time + dump_entrance_time + dump_exit_time
        return unloading_task_time

    # Update the unloading time of each dump site
    def update_dump_unloadtime(self):
        self.unloading_time = np.zeros(self.dynamic_dump_num)
        for dump_id in self.dump_uuid_to_index_dict.keys():
            ave_unload_time = 0
            unload_count = 0
            # try:
            #     for query in (
            #         session_mysql.query(JobRecord.start_time, JobRecord.end_time)
            #         .join(Equipment, JobRecord.equipment_id == Equipment.equipment_id)
            #         .filter(Equipment.id == dump_id, JobRecord.end_time != None)
            #         .order_by(JobRecord.start_time.desc())
            #         .limit(10)
            #     ):
            #         ave_unload_time = ave_unload_time + float(
            #             (query.end_time - query.start_time)
            #             / timedelta(hours=0, minutes=1, seconds=0)
            #         )
            #         unload_count = unload_count + 1
            #     self.unloading_time[self.dump_uuid_to_index_dict[dump_id]] = (
            #         ave_unload_time / unload_count
            #     )
            # except Exception as es:
            #     self.logger.error(f"Dump site {dump_id} has no unloading-time records; using the default (1 min)")
            #     self.logger.error(es)
            self.unloading_time[self.dump_uuid_to_index_dict[dump_id]] = 5.00
            # print("average_unload_time: ", self.unloading_time[self.dump_uuid_to_index_dict[dump_id]])

    # Update the entrance/exit times of each dump site
    def update_dump_entrance_exit_time(self):
        self.entrance_time = np.zeros(self.dynamic_dump_num)
        self.exit_time = np.zeros(self.dynamic_dump_num)
        now = datetime.now().strftime("%Y-%m-%d")
        for dump_id in self.dump_uuid_to_index_dict.keys():
            # try:
            #     for query in (
            #         session_mysql.query(WorkRecord)
            #         .filter(
            #             WorkRecord.equipment_id == dump_id, WorkRecord.work_day >= now
            #         )
            #         .first()
            #     ):
            #         self.entrance_time[self.dump_uuid_to_index_dict[dump_id]] = float(
            #             query.load_entrance_time / query.load_entrance_count
            #         )
            #         self.exit_time[self.dump_uuid_to_index_dict[dump_id]] = float(
            #             query.exit_entrance_time / query.exit_entrance_count
            #         )
            # except Exception as es:
            #     self.logger.error(f"Dump site {dump_id} has no entrance/exit-time records; using the default (1 min)")
            #     self.logger.error(es)
            self.entrance_time[self.dump_uuid_to_index_dict[dump_id]] = 0.50
            self.exit_time[self.dump_uuid_to_index_dict[dump_id]] = 0.50

    def update_dump_material(self):
        self.dump_material = {}
        for dump_id in get_value("dynamic_dump_set"):
            try:
                unload_area_id = session_mysql.query(Dispatch).filter_by(dump_id=dump_id, isauto=1, isdeleted=0).first().unload_area_id
                dump_material_id = session_postgre.query(DumpArea).filter_by(Id=unload_area_id).first().Material
                self.dump_material[dump_id] = dump_material_id
            except Exception as es:
                self.logger.error("No dynamic dispatch plan available")
                self.logger.error(es)

    def update_dump_priority(self):
        self.dump_priority_coefficient = np.ones(self.dynamic_dump_num)
        for dump_id in self.dynamic_dump_set:
            try:
                unload_area_index = self.dump_index_to_unload_area_index_dict[self.dump_uuid_to_index_dict[dump_id]]
                unload_area_id = unload_area_index_to_uuid_dict[unload_area_index]
                item = session_postgre.query(DumpArea).filter_by(Id=unload_area_id).first()
                self.dump_priority_coefficient[self.dump_uuid_to_index_dict[dump_id]] += item.Priority
            except Exception as es:
                self.logger.error("No dynamic dispatch plan available")
                self.logger.error(es)

    def update_unload_ability(self):
        try:
            rule3 = session_mysql.query(DispatchRule).filter_by(id=3).first()
            if not rule3.disabled:
                for dump_index in range(self.dynamic_dump_num):
                    unload_area_id = unload_area_index_to_uuid_dict[self.dump_index_to_unload_area_index_dict[dump_index]]
                    unload_ability = session_postgre.query(DumpArea).filter_by(Id=unload_area_id).first().UnloadAbililty
                    self.dump_strength[dump_index] = unload_ability  # maximum unloading capacity of the dump site, in tons per hour
                    # if unload_ability < 200:
                    #     raise Exception("Abnormal unloading capacity at dump site")
            else:
                self.dump_strength = np.full(self.dynamic_dump_num, 5000)
        except Exception as es:
            self.logger.error(es)
            self.logger.error("Failed to read dump site unloading information")

    def reset(self):
        # Number of dump sites
        self.dynamic_dump_num = get_value("dynamic_dump_num")
        # Set of dump sites available for dynamic dispatch
        self.dynamic_dump_set = get_value("dynamic_dump_set")
        # Unloading time
        self.unloading_time = np.zeros(self.dynamic_dump_num)
        # Entrance time
        self.entrance_time = np.zeros(self.dynamic_dump_num)
        # Exit time
        self.exit_time = np.zeros(self.dynamic_dump_num)
        # Material type handled at each dump site
        self.dump_material = {}
        # Dump site priority
        self.dump_priority_coefficient = np.ones(self.dynamic_dump_num)
        # Dump site unloading capacity
        self.dump_strength = np.zeros(self.dynamic_dump_num)

    def para_period_update(self):
        self.reset()
        # print("Dump update!")
        self.logger.info("Dump update!")
        # Load period parameters
        self.period_map_para_load()
        self.period_walk_para_load()
        # Compute average unloading time
        self.update_dump_unloadtime()
        # Compute average entrance/exit time
        self.update_dump_entrance_exit_time()
        # Update dump site materials
        self.update_dump_material()
        # Update equipment priority
        self.update_dump_priority()
        # Update dump site unloading capacity
        self.update_unload_ability()
        #
        # # Target unloading mass
        # self.dump_target_mass = np.full(self.dynamic_dump_num, dump_target_mass)
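get_unloading_task_time simply adds the three per-dump time vectors elementwise; a tiny standalone numpy illustration using the default values assigned above (5.00 min unloading, 0.50 min entrance, 0.50 min exit; the dump count is made up):

import numpy as np

dump_count = 3                                   # hypothetical number of dump sites
unloading_time = np.full(dump_count, 5.00)
entrance_time = np.full(dump_count, 0.50)
exit_time = np.full(dump_count, 0.50)

# Same elementwise sum as DumpInfo.get_unloading_task_time()
unloading_task_time = unloading_time + entrance_time + exit_time
print(unloading_task_time)                       # [6. 6. 6.]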
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/11/26 11:38
# @Author : Opfer
# @Site :
# @File : __init__.py
# @Software: PyCharm
\ No newline at end of file
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/8/3 10:51
# @Author : Opfer
# @Site :
# @File : __init__.py.py
# @Software: PyCharm
\ No newline at end of file
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/9/3 14:44
# @Author : Opfer
# @Site :
# @File : priority_control.py
# @Software: PyCharm
from equipment.truck import *
from equipment.dump import *
from equipment.excavator import *
from para_config import *
from path_plan.topo_graph import *
class PriorityController():
    def __init__(self, dump, excavator, truck):
        # Equipment objects
        self.dump = dump
        self.excavator = excavator
        self.truck = truck
        self.topo = Topo()
        # Get logger
        self.logger = get_logger("zxt.prio")

    def weighted_walk_calc(self):
        """
        Compute haul-route weights; influenced by equipment priority and material priority.
        :return:
            walk_weight: road-network weights for dump -> loading area routes
            park_walk_weight: road-network weights for parking area -> loading area routes
        """
        dynamic_dump_num = get_value("dynamic_dump_num")
        dynamic_excavator_num = get_value("dynamic_excavator_num")
        dynamic_dump_set = get_value("dynamic_dump_set")
        dynamic_excavator_set = get_value("dynamic_excavator_set")
        walk_to_excavator_weight = np.ones((dynamic_dump_num, dynamic_excavator_num))
        walk_to_dump_weight = np.ones((dynamic_excavator_num, dynamic_dump_num))
        excavator_priority = self.excavator.excavator_priority_coefficient
        excavator_material_priority = self.excavator.excavator_material_priority
        dump_priority = self.dump.dump_priority_coefficient
        dump_material_priority = np.ones(dynamic_dump_num)
        park_walk_weight = np.ones((park_num, dynamic_excavator_num))
        # Check whether the equipment-priority rule is enabled
        rule6 = session_mysql.query(DispatchRule).filter_by(id=6).first()
        if not rule6.disabled:
            for dump_id in dynamic_dump_set:
                for excavator_id in dynamic_excavator_set:
                    dump_index = self.dump.dump_uuid_to_index_dict[dump_id]
                    excavator_inedx = self.excavator.excavator_uuid_to_index_dict[excavator_id]
                    walk_to_excavator_weight[dump_index][excavator_inedx] += excavator_priority[excavator_inedx]
                    walk_to_dump_weight[excavator_inedx][dump_index] += dump_priority[dump_index]
            park_walk_weight = park_walk_weight * self.excavator.excavator_priority_coefficient
        # Check whether the material-priority rule is enabled
        rule7 = session_mysql.query(DispatchRule).filter_by(id=7).first()
        if not rule7.disabled:
            for dump_id in dynamic_dump_set:
                for excavator_id in dynamic_excavator_set:
                    dump_index = self.dump.dump_uuid_to_index_dict[dump_id]
                    excavator_inedx = self.excavator.excavator_uuid_to_index_dict[excavator_id]
                    walk_to_excavator_weight[dump_index][excavator_inedx] += excavator_material_priority[excavator_inedx]
                    walk_to_dump_weight[excavator_inedx][dump_index] += dump_material_priority[dump_index]
            park_walk_weight = park_walk_weight * self.excavator.excavator_material_priority
        try:
            walk_to_excavator_weight = walk_to_excavator_weight - (walk_to_excavator_weight.min() - 1)
            walk_to_dump_weight = walk_to_dump_weight - (walk_to_dump_weight.min() - 1)
            park_walk_weight = park_walk_weight - (park_walk_weight.min() - 1)
        except Exception as es:
            self.logger.error(es)
            self.logger.error("Priority normalization failed")
        return walk_to_excavator_weight, walk_to_dump_weight, park_walk_weight

    def walk_available_calc(self):
        """
        Compute road-network passability (combination of material, map and grouping constraints).
        :return:
            walk_available: passability matrix (dump_num, excavator_num)
        """
        map_walk_available = self.update_map_walk_available()
        group_walk_available = self.update_group_walk_available()
        material_walk_available = self.update_material_walk_available()
        walk_available = map_walk_available * group_walk_available * material_walk_available
        return walk_available

    def update_group_walk_available(self):
        """
        Compute passability between dispatch groups; routes between different groups are not passable.
        :return:
            group_walk_available: dispatch-group passability matrix (dump_num, excavator_num)
        """
        group_walk_available = np.zeros((get_value("dynamic_dump_num"), get_value("dynamic_excavator_num")))
        for dump_id in get_value("dynamic_dump_set"):
            for excavator_id in get_value("dynamic_excavator_set"):
                item = session_mysql.query(Dispatch).filter_by(dump_id=dump_id, exactor_id=excavator_id, isauto=1,
                                                               isdeleted=0).first()
                if item is not None:
                    dump_index = self.dump.dump_uuid_to_index_dict[dump_id]
                    excavator_index = self.excavator.excavator_uuid_to_index_dict[excavator_id]
                    group_walk_available[dump_index][excavator_index] = 1
        return group_walk_available

    def update_material_walk_available(self):
        """
        Update road-network passability under material compatibility.
        :return:
            walk_available: material-compatibility passability matrix (dump_num, excavator_num)
        """
        dynamic_dump_num = get_value("dynamic_dump_num")
        dynamic_excavator_num = get_value("dynamic_excavator_num")
        dynamic_dump_set = get_value("dynamic_dump_set")
        dynamic_excavator_set = get_value("dynamic_excavator_set")
        walk_available = np.ones((dynamic_dump_num, dynamic_excavator_num))
        try:
            for dump_id in dynamic_dump_set:
                for excavator_id in dynamic_excavator_set:
                    dump_index = self.dump.dump_uuid_to_index_dict[dump_id]
                    excavator_inedx = self.excavator.excavator_uuid_to_index_dict[excavator_id]
                    # If the two devices handle different materials, the route between them is not passable
                    if self.excavator.excavator_material[excavator_id] != self.dump.dump_material[dump_id]:
                        walk_available[dump_index][excavator_inedx] = 0
        except Exception as es:
            self.logger.info(es)
            self.logger.info("error-12")
        return walk_available

    def update_map_walk_available(self):
        """
        Update physical road-network passability.
        :return:
            walk_available: physical passability matrix (dump_num, excavator_num)
        """
        dynamic_dump_num = get_value("dynamic_dump_num")
        dynamic_excavator_num = get_value("dynamic_excavator_num")
        walk_available = np.ones((dynamic_dump_num, dynamic_excavator_num))
        walk_manage.period_walk_para_update()
        for dump_index in range(dynamic_dump_num):
            for excavator_index in range(dynamic_excavator_num):
                if walk_manage.distance_to_excavator[dump_index][excavator_index] > M / 2:
                    walk_available[dump_index][excavator_index] = 0
        return walk_available
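The normalization at the end of weighted_walk_calc shifts each weight matrix so that its smallest entry becomes exactly 1 while keeping relative differences; a small standalone numpy illustration with made-up values:

import numpy as np

# Hypothetical weight matrix after the priority bonuses have been added
w = np.array([[2.0, 3.5],
              [4.0, 2.5]])

# The same shift used in weighted_walk_calc: subtract (min - 1) so the minimum becomes 1
w_shifted = w - (w.min() - 1)
print(w_shifted)    # [[1.  2.5]
                    #  [3.  1.5]]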
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/9/3 14:44
# @Author : Opfer
# @Site :
# @File : priority_control.py
# @Software: PyCharm
from equipment.truck import *
from equipment.dump import *
from equipment.excavator import *
from para_config import *
from path_plan.topo_graph import *
class PriorityController():
    def __init__(self, dump, excavator, truck):
        # Equipment objects
        self.dump = dump
        self.excavator = excavator
        self.truck = truck
        # Get logger
        self.logger = get_logger("zxt.prio")

    def weighted_walk_calc(self):
        """
        Compute haul-route weights; influenced by equipment priority and material priority.
        :return:
            walk_weight: road-network weights for dump -> loading area routes
            park_walk_weight: road-network weights for parking area -> loading area routes
        """
        dynamic_dump_num = get_value("dynamic_dump_num")
        dynamic_excavator_num = get_value("dynamic_excavator_num")
        dynamic_dump_set = get_value("dynamic_dump_set")
        dynamic_excavator_set = get_value("dynamic_excavator_set")
        walk_to_excavator_weight = np.ones((dynamic_dump_num, dynamic_excavator_num))
        walk_to_dump_weight = np.ones((dynamic_excavator_num, dynamic_dump_num))
        excavator_priority = self.excavator.excavator_priority_coefficient
        excavator_material_priority = self.excavator.excavator_material_priority
        dump_priority = self.dump.dump_priority_coefficient
        dump_material_priority = np.ones(dynamic_dump_num)
        park_walk_weight = np.ones((park_num, dynamic_excavator_num))
        rule6 = session_mysql.query(DispatchRule).filter_by(id=6).first()
        if not rule6.disabled:
            for dump_id in dynamic_dump_set:
                for excavator_id in dynamic_excavator_set:
                    dump_index = self.dump.dump_uuid_to_index_dict[dump_id]
                    excavator_inedx = self.excavator.excavator_uuid_to_index_dict[excavator_id]
                    walk_to_excavator_weight[dump_index][excavator_inedx] += excavator_priority[excavator_inedx]
                    walk_to_dump_weight[excavator_inedx][dump_index] += dump_priority[dump_index]
            park_walk_weight = park_walk_weight * self.excavator.excavator_priority_coefficient
        rule7 = session_mysql.query(DispatchRule).filter_by(id=7).first()
        if not rule7.disabled:
            for dump_id in dynamic_dump_set:
                for excavator_id in dynamic_excavator_set:
                    dump_index = self.dump.dump_uuid_to_index_dict[dump_id]
                    excavator_inedx = self.excavator.excavator_uuid_to_index_dict[excavator_id]
                    walk_to_excavator_weight[dump_index][excavator_inedx] += excavator_material_priority[excavator_inedx]
                    walk_to_dump_weight[excavator_inedx][dump_index] += dump_material_priority[dump_index]
            park_walk_weight = park_walk_weight * self.excavator.excavator_material_priority
        try:
            walk_to_excavator_weight = walk_to_excavator_weight - (walk_to_excavator_weight.min() - 1)
            walk_to_dump_weight = walk_to_dump_weight - (walk_to_dump_weight.min() - 1)
            park_walk_weight = park_walk_weight - (park_walk_weight.min() - 1)
        except Exception as es:
            self.logger.error(es)
            self.logger.error("Priority normalization failed")
        return walk_to_excavator_weight, walk_to_dump_weight, park_walk_weight

    def walk_available_calc(self):
        """
        Compute road-network passability (combination of material, map and grouping constraints).
        :return:
            walk_available: passability matrix (dump_num, excavator_num)
        """
        map_walk_available = self.update_map_walk_available()
        group_walk_available = self.update_group_walk_available()
        material_walk_available = self.update_material_walk_available()
        walk_available = map_walk_available * group_walk_available * material_walk_available
        return walk_available

    def update_group_walk_available(self):
        """
        Compute passability between dispatch groups; routes between different groups are not passable.
        :return:
            group_walk_available: dispatch-group passability matrix (dump_num, excavator_num)
        """
        group_walk_available = np.zeros((get_value("dynamic_dump_num"), get_value("dynamic_excavator_num")))
        for dump_id in get_value("dynamic_dump_set"):
            for excavator_id in get_value("dynamic_excavator_set"):
                item = session_mysql.query(Dispatch).filter_by(dump_id=dump_id, exactor_id=excavator_id, isauto=1,
                                                               isdeleted=0).first()
                if item is not None:
                    dump_index = self.dump.dump_uuid_to_index_dict[dump_id]
                    excavator_index = self.excavator.excavator_uuid_to_index_dict[excavator_id]
                    group_walk_available[dump_index][excavator_index] = 1
        return group_walk_available

    def update_material_walk_available(self):
        """
        Update road-network passability under material compatibility.
        :return:
            walk_available: material-compatibility passability matrix (dump_num, excavator_num)
        """
        dynamic_dump_num = get_value("dynamic_dump_num")
        dynamic_excavator_num = get_value("dynamic_excavator_num")
        dynamic_dump_set = get_value("dynamic_dump_set")
        dynamic_excavator_set = get_value("dynamic_excavator_set")
        walk_available = np.ones((dynamic_dump_num, dynamic_excavator_num))
        try:
            for dump_id in dynamic_dump_set:
                for excavator_id in dynamic_excavator_set:
                    dump_index = self.dump.dump_uuid_to_index_dict[dump_id]
                    excavator_inedx = self.excavator.excavator_uuid_to_index_dict[excavator_id]
                    # If the two devices handle different materials, the route between them is not passable
                    if self.excavator.excavator_material[excavator_id] != self.dump.dump_material[dump_id]:
                        walk_available[dump_index][excavator_inedx] = 0
        except Exception as es:
            self.logger.info(es)
            self.logger.info("error-12")
        return walk_available

    def update_map_walk_available(self):
        """
        Update physical road-network passability.
        :return:
            walk_available: physical passability matrix (dump_num, excavator_num)
        """
        dynamic_dump_num = get_value("dynamic_dump_num")
        dynamic_excavator_num = get_value("dynamic_excavator_num")
        walk_available = np.ones((dynamic_dump_num, dynamic_excavator_num))
        walk_manage.period_walk_para_update()
        for dump_index in range(dynamic_dump_num):
            for excavator_index in range(dynamic_excavator_num):
                if walk_manage.distance_to_excavator[dump_index][excavator_index] > M / 2:
                    walk_available[dump_index][excavator_index] = 0
        return walk_available
from topo_graph import *
import networkx as nx
import matplotlib.pyplot as plt
#
# topo = Topo()
# topo.generate_topo_graph()
# load_G = topo.get_load_G()
# pos = nx.shell_layout(load_G)
# nx.draw(load_G, pos, with_labels=True, node_color='red', edge_color='blue', font_size=18, width=5, node_size=600,
# alpha=0.5)
# plt.show()
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/7/21 16:45
# @Author : Opfer
# @Site :
# @File : realtime_dispatch.py
# @Software: PyCharm
# Real-time dispatch module
from traffic_flow.traffic_flow_planner import *
from para_config import *
from equipment.truck import TruckInfo
from equipment.excavator import ExcavatorInfo
from equipment.dump import DumpInfo
import sched
import time
from path_plan.topo_graph import *
from dispatcher import Dispatcher, PreSchedule


def process(dispatcher):
    # try:
    # Update period parameters
    logger.info("##################################### period update started #####################################")
    period_para_update()
    if get_value("dynamic_dump_num") * get_value("dynamic_excavator_num") == 0:
        raise Exception("No dynamic dispatch plan available")
    if get_value("dynamic_truck_num") == 0:
        raise Exception("No truck available for dynamic dispatch")
    # Flush the database session cache
    session_mysql.commit()
    session_mysql.flush()
    # Flush the database session cache
    session_postgre.commit()
    session_postgre.flush()
    # Periodic update
    dispatcher.dispatcher_period_update(topo)
    # try:
    # Dispatch computation
    dispatcher.schedule_construct()
    # except Exception as es:
    #     logger.error("Update not completed in time")
    #     logger.error(es)
    logger.info("##################################### period update finished #####################################")
    # except Exception as es:
    #     logger.error("Outermost exception caught")
    #     logger.error(es)


def perform(inc, dispatcher):
    """
    Drive the periodic execution of the program.
    :param inc: (int) cycle period, in seconds
    :param dispatcher: (Dispatch Class) truck dispatcher object
    :return: None
    """
    scheduler.enter(inc, 0, perform, (inc, dispatcher))
    process(dispatcher)


def main(inc, dispatcher):
    """
    Entry function for the program loop.
    :param inc: (int) cycle period, in seconds
    :param dispatcher: (Dispatch Class) truck dispatcher object
    :return: None
    """
    # topo.generate_topo_graph()
    scheduler.enter(0, 0, perform, (inc, dispatcher))
    scheduler.run()


if __name__ == "__main__":
    # Initialize logging
    set_log()
    # Get logger
    logger = get_logger("zxt.main")
    # Global parameter update
    period_para_update()
    # Instantiate equipment objects
    dump = DumpInfo()
    excavator = ExcavatorInfo()
    truck = TruckInfo(dump, excavator)
    # Instantiate the topology object and build the topology graph
    # print('start')
    topo = Topo()
    topo.generate_topo_graph()
    logger.info('Topology graph generated')
    # print(topo.get_load_G().edges(data=True))
    # print(topo.get_unload_G().edges(data=True))
    # Instantiate the program scheduler
    scheduler = sched.scheduler(time.time, time.sleep)
    # Instantiate the schedule predictor
    pre_sch = PreSchedule(truck, excavator, dump)
    # Instantiate the truck dispatcher
    dispatcher = Dispatcher(dump, excavator, truck, pre_sch, topo)
    logger.info(" ")
    logger.info("Dispatch system started")
    main(10, dispatcher)
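perform() re-arms itself on the scheduler before running process(), which is what keeps the dispatch loop firing roughly every inc seconds. A minimal standalone sketch of that sched pattern (the tick function and the 3-cycle cutoff are illustrative only):

import sched
import time

scheduler = sched.scheduler(time.time, time.sleep)

def tick(inc, remaining=3):
    # Re-schedule first, then do the work, exactly like perform() above
    if remaining > 0:
        scheduler.enter(inc, 0, tick, (inc, remaining - 1))
    print("cycle at", time.strftime("%H:%M:%S"))

scheduler.enter(0, 0, tick, (1,))
scheduler.run()     # blocks until no events remain, like main(10, dispatcher)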
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/7/23 11:25
# @Author : Opfer
# @Site :
# @File : settings.py
# @Software: PyCharm
# Database, redis and logging settings
from tables import *
# from urllib.parse import quote
# import logging.handlers
# import numpy as np
# import os
# from redis import StrictRedis, ConnectionPool
# import redis
# from datetime import datetime, timedelta
# import copy
# import json

# json_file = "config.json"
#
# with open(json_file) as f:
#     para_config = json.load(f)["para"]
#
# with open(json_file) as f:
#     mysql_config = json.load(f)["mysql"]
#
# with open(json_file) as f:
#     postgre_config = json.load(f)["postgresql"]
#
# with open(json_file) as f:
#     redis_config = json.load(f)["redis"]

# Global parameters
########################################################################################################################
# Speed of an empty truck, in km/h
empty_speed = para_config["empty_speed"]
# Speed of a loaded truck, in km/h
heavy_speed = para_config["heavy_speed"]
# Target unloading mass per dump site
dump_target_mass = para_config["dump_target_mass"]
# Target loading mass per excavator
excavator_target_mass = para_config["excavator_target_mass"]

# def set_log():
#
#     # Create the logger
#     ########################################################################################################################
#     # Log directory
#     log_path = para_config["log_path"]
#
#     # Create the log directory
#
#     if not os.path.exists(log_path):
#         os.mkdir(log_path)
#     # Initialize logging
#     logging.basicConfig()
#
#     logger = logging.getLogger("zxt")
#     logger.setLevel(logging.INFO)
#
#
#     # timefilehandler = logging.handlers.TimedRotatingFileHandler(log_path + "/dispatch.log", when='M', interval=1, backupCount=60)
#     filehandler = logging.handlers.RotatingFileHandler(log_path + "/dispatch.log", maxBytes=3*1024*1024, backupCount=10)
#     # filehandler = logging.handlers.RotatingFileHandler("./Logs/dispatch.log", maxBytes=3 * 1024 * 1024, backupCount=10)
#     # Set the suffix, same format as strftime
#     filehandler.suffix = "%Y-%m-%d_%H-%M.log"
#
#     formatter = logging.Formatter("%(asctime)s - %(name)s: %(levelname)s %(filename)s %(message)s")
#     filehandler.setFormatter(formatter)
#     logger.addHandler(filehandler)
#
#
# def get_logger(module_name):
#     logger = logging.getLogger(module_name)
#     return logger

# Connect to redis
########################################################################################################################
# redis db 5 stores equipment state
pool5 = ConnectionPool(host=redis_config["host"], db=5, port=6379, password=redis_config["password"])
redis5 = StrictRedis(connection_pool=pool5)
# redis db 2 stores dispatch plans
pool2 = ConnectionPool(host=redis_config["host"], db=2, port=6379, password=redis_config["password"])
redis2 = StrictRedis(connection_pool=pool2)

# Database connection settings
########################################################################################################################
# Base class for ORM model objects:
Base = declarative_base()
sql_str = str("mysql+mysqlconnector://" + mysql_config["user"] + ":%s@" + mysql_config["host"] + \
              ":" + mysql_config["port"] + "/" + mysql_config["database"])
postgre_str = str("postgresql://" + postgre_config["user"] + ":%s@" + postgre_config["host"] + \
                  ":" + postgre_config["port"] + "/" + postgre_config["database"])
try:
    engine_mysql = create_engine(
        # "mysql+mysqlconnector://root:%s@192.168.28.111:3306/waytous"
        # % quote("Huituo@123")
        # "mysql+mysqlconnector://" + mysql_config["user"] + ":" + mysql_config["password"] + "@" + mysql_config[
        #     "host"] + ":" + mysql_config["port"] + "/" + mysql_config["database"]
        sql_str % quote(mysql_config["password"])
    )
    engine_postgre = create_engine(
        # "postgresql://postgres:%s@192.168.28.111:5432/shenbao_2021520"
        # % quote("Huituo@123")
        # "postgresql://" + postgre_config["user"] + ":" + postgre_config["password"] + "@" + postgre_config[
        #     "host"] + ":" + postgre_config["port"] + "/" + postgre_config["database"]
        postgre_str % quote(postgre_config["password"])
    )
    # Create the DBsession_mysql session factory:
    DBsession_mysql = sessionmaker(bind=engine_mysql)
    DBsession_mysql = scoped_session(DBsession_mysql)
    DBsession_postgre = sessionmaker(bind=engine_postgre)
    DBsession_postgre = scoped_session(DBsession_postgre)
    # Create the session_mysql object:
    session_mysql = DBsession_mysql()
    session_mysql.expire_on_commit = False
    session_postgre = DBsession_postgre()
    session_postgre.expire_on_commit = False
except Exception as es:
    logger.error("Database connection failed")
    logger.error(es)


def str_to_byte(item):
    return bytes(item, encoding='utf8')


def byte_to_str(item):
    return str(item, encoding='utf-8')
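The %s placeholder in sql_str and postgre_str is filled with a percent-encoded password because the password itself contains an '@', which would otherwise be read as the host separator in the connection URL. A quick illustration of what quote() produces here (URL taken from the commented-out example above):

from urllib.parse import quote

print(quote("Huituo@123"))     # Huituo%40123
print("mysql+mysqlconnector://root:%s@192.168.28.111:3306/waytous" % quote("Huituo@123"))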
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
# G = nx.Graph(name= 'dispatch_topo')
# # print(G.graph)
# point = np.array([1,2,3,'a','b','c','m','n'])
# for i in point:
# G.add_node(i)
# # print(G.graph)
# G.add_edge(1,'m',weight = 100)
# G.add_edge(2,'m',weight = 80)
# G.add_edge('a','m', weight = 120)
# G.add_edge('n','m',weight = 200)
# G.add_edge('b','m',weight = 200)
# G.add_edge('c','m',weight = 210)
# print(G.edges(data = True))
# nx.draw(G,node_color = 'red')
# plt.show()
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
G = nx.Graph()
# G.add_edge('a', 'b', weight=0.6)
# G.add_edge('a', 'c', weight=0.2)
# G.add_edge('c', 'd', weight=0.1)
# G.add_edge('c', 'e', weight=0.7)
# G.add_edge('c', 'f', weight=0.9)
# G.add_edge('a', 'd', weight=0.3)
G.add_edge(1,'m',length = 100)
G.add_edge(2,'m',length = 80)
G.add_edge('a','m', length = 120)
G.add_edge('n','m',length = 10)
G.add_edge('b','n',length = 100)
G.add_edge('h','n',length = 10)
G.add_edge('b','h',length = 80)
G.add_edge('c','n',length = 210)
# elarge = [(u, v) for (u, v, d) in G.edges(data=True) if d['weight'] > 0.5]
# esmall = [(u, v) for (u, v, d) in G.edges(data=True) if d['weight'] <= 0.5]
#
# pos = nx.spring_layout(G) # positions for all nodes
#
# # nodes
# nx.draw_networkx_nodes(G, pos, node_size=700)
#
# # edges
# nx.draw_networkx_edges(G, pos, edgelist=elarge,
# width=6)
# nx.draw_networkx_edges(G, pos, edgelist=esmall,
# width=6, alpha=0.5, edge_color='b', style='dashed')
#
# # labels
# nx.draw_networkx_labels(G, pos, font_size=20, font_family='sans-serif')
# plt.axis('off')
# plt.savefig("weight.jpg")
pos = nx.shell_layout(G)
nx.draw(G, pos, with_labels=True, node_color='red', edge_color='blue', font_size=18, width=5, node_size=600, alpha=0.5)
# pylab.title('Self_Define Net',fontsize=15)
plt.show()
# print('Shortest path via Dijkstra:')
# path = nx.dijkstra_path(G, source='a', target='b', weight='length')
# print('Path from node a to b:', path)
G_node = np.array(G.edges)
GG = G.number_of_edges()
print (G_node[0][1], type(G_node[0][1]))
print(GG,type(GG))
# G1 = nx.Graph()
# print(np.array(G1.nodes))
# G2 = nx.Graph()  # create an empty undirected graph
# G2.add_weighted_edges_from([(1,2,2),(1,3,8),(1,4,1),
# (2,3,6),(2,5,1),
# (3,4,7),(3,5,5),(3,6,1),(3,7,2),
# (4,7,9),
# (5,6,3),(5,8,2),(5,9,9),
# (6,7,4),(6,9,6),
# (7,9,3),(7,10,1),
# (8,9,7),(8,11,9),
# (9,10,1),(9,11,2),
# (10,11,4)])  # add multiple weighted edges to the graph: (node1, node2, weight)
#
from path_plan.topo_graph import *
import networkx as nx
import matplotlib.pyplot as plt
# from traffic_flow.traffic_flow_planner import *
# from path_plan.path_planner_2 import *
# from dispatcher import Dispatcher, PreSchedule
topo = Topo()
topo.generate_topo_graph()
# path_planner = PathPlanner(topo)
# a, b = path_planner.path_cost_generate("ef89f721-5134-3ef1-91e2-95b4e5004675", "e5d0ac4e-c134-3ef1-901b-867980c8453e", False)
# print(a,b)
load_G = topo.get_load_G()
unload_G = topo.get_unload_G()
print(unload_G.edges(data=True))
print(load_G.edges(data=True))
# alpha = 1
# beta = 1
# source_area = "e44052d4-1134-6602-ea64-f14d05ecfb3f"
# target_area = "659ca4d5-2134-6602-ed05-1deddfca63f9"
# unload_source_node = str(session_postgre.query(DiggingWorkArea).filter_by(Id=source_area).first().ExitNodeId)
# unload_end_node = str(session_postgre.query(DumpArea).filter_by(Id=target_area).first().EntranceNodeId)
# # _, park_path = nx.single_source_dijkstra(unload_G, source=source_node, target=source_node, weight="real_distance")
# # path = park_path
# # for i in range(len(path)-1):
# # pair_now = [path[i], path[i+1]]
# # # edge_lane = nx.get_edge_attributes(load_G,'lane')
# # data = dict(unload_G[path[i]][path[i+1]]['lane'])
# # for u,v in data.items():
# # data[u] = [v[0],beta*3]
# # val = list(data.values())
# # unload_G[path[i]][path[i+1]]['locked_distance'] = sum([beta * sum(i[0] for i in val) + alpha * sum(i[-1] for i in val)])
#
# unload_cost = nx.dijkstra_path_length(unload_G, source=unload_source_node, target=unload_end_node,
# weight="locked_distance")
# print(unload_cost)
# # print([path[i], path[i+1]], unload_G[path[i]][path[i+1]]['locked_distance'])
# # for i in range(len(park_path) - 1):
# # data = dict(unload_G[park_path[i]][park_path[i + 1]]['lane'])
# # for u, v in data.items():
# # # u_lane_cost = lane_cost_generate(u)
# # data[u] = [v[0], beta *3]
# # val = list(data.values())
# # # unload_G[park_path[i]][park_path[i + 1]]['locked_distance'] = sum(beta * sum(i[0] for i in val) + alpha * sum(i[-1] for i in val))
# # unload_G[park_path[i]][park_path[i + 1]]['locked_distance'] = sum( beta * sum(i[0] for i in val))
# # # unload_G[park_path[i]][park_path[i + 1]]['locked_distance'] = sum(beta * sum(i[0] for i in val) + alpha * sum(i[-1] for i in val))
# # unload_G[park_path[i]][park_path[i + 1]]['locked_distance'] = sum( beta * sum(i[0] for i in val))
#
# # print(load_G.edges(data=True))
# print(unload_G.edges(data=True))
# # distance = nx.dijkstra_path_length(load_G, source="81be6a9e-8134-3ef2-141e-4bb045b8d097", target= "1c85bc2c-9134-6281-1265-fd19d0dcbd52", weight="locked_distance")
# # print(distance)
# # print(distance,path)
# # aaa = topo.get_load_source_node("7b9c8e89-7134-63ac-9c44-f183674b090c")
# # print(aaa)
# # path,_, _= topo.get_load_target_node_real("7b9c8e89-7134-63ac-9c44-f183674b090c",None)
# # print(path)
# # updata_edge = {}
# # for i in range(len(path)-1):
# # pair_now = [path[i], path[i+1]]
# # # edge_lane = nx.get_edge_attributes(load_G,'lane')
# # data = dict(load_G[path[i]][path[i+1]]['lane'])
# # for u,v in data.items():
# # data[u] = [v[0],v[0]]
# # load_G[path[i]][path[i+1]]['locked_distance'] = sum(i[-1] for i in list(data.values()))
# # print([path[i], path[i+1]], load_G[path[i]][path[i+1]]['locked_distance'])
#
#
# unload_G = topo.get_unload_G()
# print(unload_G.edges(data=True))
pos = nx.shell_layout(load_G)
nx.draw(load_G, pos, with_labels=True, node_color='red', edge_color='blue', font_size=18, width=5, node_size=600,
alpha=0.5)
plt.show()
pos = nx.shell_layout(unload_G)
nx.draw(unload_G, pos, with_labels=True, node_color='red', edge_color='blue', font_size=18, width=5, node_size=600,
alpha=0.5)
plt.show()
# unload_G = topo.get_unload_G()
# print(unload_G.edges(data=True))
#
# edge_arrtibutes = dict(nx.get_edge_attributes(unload_G,'lane'))
# print(edge_arrtibutes)
# print(type(edge_arrtibutes))
# print(edge_arrtibutes.values())
# print(type(edge_arrtibutes.values()))
# pos = nx.shell_layout(load_G)
# nx.draw(load_G, pos, with_labels=True, node_color='red', edge_color='blue', font_size=18, width=5, node_size=600,
# alpha=0.5)
# plt.show()
# a = "-rw-r--r-- 1 cs9315 22439 Feb 18 2021"
# print(len(a))
# end= time.process_time()
# print('Running time: %s Seconds'%(end-start))
import copy
import numpy as np
# # Question 1 optimisation problem
A = np.array([[1., 2., 1., -1.],
[-1., 1., 0., 2.],
[0., -1., -2., 1.]])
b = np.array([[3.], [2.], [-2.]])
x = np.array([[1.], [1.], [1.], [1.]])
k = 0
alpha = 0.1
gamma = 0.2
def gradient_step_1(A, b, x, gamma):
    # Gradient of 0.5*||Ax - b||^2 + 0.5*gamma*||x||^2 at x, rounded to 4 decimals
    updated_x = np.array(A.T.dot((A.dot(x) - b)) + gamma*x)
    return np.round(updated_x, 4)


print_form = []


def edit_form(print_form, x):
    # Keep a sliding window of the most recent iterates
    print_form.pop(0)
    x_k = copy.copy(x)
    print_form.append([k, x_k])


while True:
    if k > 0 and k <= 5:
        x_k = copy.copy(x)
        print_form.append((k, x_k))
    if k == 6:
        for item in print_form:
            print(f"k = {item[0]}, x({item[0]}) = {item[1]}")
    if k > 6:
        edit_form(print_form, x)
    k += 1
    x -= alpha*gradient_step_1(A, b, x, gamma)
    if np.linalg.norm(gradient_step_1(A, b, x, gamma), ord=2) < 0.001:
        edit_form(print_form, x)
        k += 1
        x -= alpha * gradient_step_1(A, b, x, gamma)
        edit_form(print_form, x)
        for item in print_form:
            print(f"k = {item[0]}, x({item[0]}) = {item[1]}")
        break
# x = np.array([[1.123], [1.123], [1.123], [1.123]])
# x =np.array([[-4.59627504],
# [8.95686683],
# [-9.76586351],
# [ 8.87335576]])
# print(np.around(x,2))
# for i in range(10):
# x = A.T.dot((A.dot(x) - b)) + gamma*x
# print(np.round(x,4))
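For readability, the objective this script appears to minimize, inferred from gradient_step_1 rather than stated anywhere in the code (a ridge-regularized least-squares problem with step size alpha and regularization gamma):

f(x) = \tfrac{1}{2}\,\lVert Ax - b \rVert_2^2 + \tfrac{\gamma}{2}\,\lVert x \rVert_2^2,
\qquad
\nabla f(x) = A^{\top}(Ax - b) + \gamma x,
\qquad
x^{(k+1)} = x^{(k)} - \alpha\,\nabla f\bigl(x^{(k)}\bigr).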
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/8/31 15:17
# @Author : Opfer
# @Site :
# @File : __init__.py
# @Software: PyCharm
\ No newline at end of file
@@ -157,19 +157,21 @@ class Dispatcher(WalkManage):
     """
-    def __init__(self, dump, excavator, truck, predict_schedule):
+    def __init__(self, dump, excavator, truck, predict_schedule,topo):
         # Dispatch start time
         self.start_time = datetime.now()
         # Path planner object
-        self.path = PathPlanner()
+        self.path = PathPlanner(topo)
+        # self.path = PathPlanner()
+        # self.topo = Topo()
         # Traffic flow object
         self.traffic_flow = Traffic_flow(dump, excavator, truck)
         # Group controller object
-        self.group = Group(dump, excavator, truck, self.traffic_flow)
+        self.group = Group(dump, excavator, truck, self.traffic_flow, topo)
         # Trip prediction object
         self.pre_sch = predict_schedule
@@ -185,7 +187,7 @@ class Dispatcher(WalkManage):
         # Get logger
         self.logger = get_logger("zxt.dispatcher")

-    def dispatcher_period_update(self):
+    def dispatcher_period_update(self,topo):
         # Load mapping parameters
         self.period_map_para_load()
@@ -210,7 +212,7 @@ class Dispatcher(WalkManage):
         self.cost_to_excavator, self.cost_to_dump, self.cost_park_to_excavator = self.path.walk_cost_cal()
         # Dispatch group update
-        self.group.period_update()
+        self.group.period_update(topo)

     def truck_schedule(self, truck_id):
         # Read rules
......
@@ -42,6 +42,7 @@ class DumpInfo(WalkManage):
         # Initialize parameters
         self.para_period_update()

     def get_unloading_time(self):
         return self.unloading_time
......
@@ -57,7 +57,8 @@ class ExcavatorInfo(WalkManage):
     # Update excavator loading time
     def update_excavator_loadtime(self):
+        load_area_id = {}
+        dump_area_id = {}
         self.loading_time = np.zeros(self.dynamic_excavator_num)
         for excavator_id in self.excavator_uuid_to_index_dict.keys():
......
@@ -8,11 +8,12 @@
 from settings import *
 from para_config import *
-from path_plan.path_plannner import PathPlanner
+from path_plan.path_planner_2 import *
+# from path_plan.path_plannner import *
 from traffic_flow.traffic_flow_planner import traffic_flow_plan


 class Group(WalkManage):
-    def __init__(self, dump, excavator, truck, traffic_flow):
+    def __init__(self, dump, excavator, truck, traffic_flow, topo):
         self.dump = dump
         self.excavator = excavator
         self.truck = truck
@@ -40,7 +41,9 @@ class Group(WalkManage):
         self.group_excavator_material_bind_modify = {}
         self.group_dump_material_bind_modify = {}

-        self.path = PathPlanner()
+        self.path = PathPlanner(topo)
+        # self.path = PathPlanner()
+        # self.topo = Topo()

     def update_dispatch_truck_group(self):
         # Update truck-to-dispatch-group membership
@@ -94,15 +97,11 @@ class Group(WalkManage):
         dynamic_truck_num = get_value("dynamic_truck_num")

         self.goto_dump_truck_num = np.zeros((dynamic_excavator_num, dynamic_dump_num))
-        self.actual_goto_dump_traffic_flow = np.zeros(
-            (dynamic_excavator_num, dynamic_dump_num)
-        )
-        self.goto_excavator_truck_num = np.zeros(
-            (dynamic_dump_num, dynamic_excavator_num)
-        )
-        self.actual_goto_excavator_traffic_flow = np.zeros(
-            (dynamic_dump_num, dynamic_excavator_num)
-        )
+        self.actual_goto_dump_traffic_flow = np.zeros((dynamic_excavator_num, dynamic_dump_num))
+        self.goto_excavator_truck_num = np.zeros((dynamic_dump_num, dynamic_excavator_num))
+        self.actual_goto_excavator_traffic_flow = np.zeros((dynamic_dump_num, dynamic_excavator_num))

         # try:
         logger.info("dynamic_truck_num")
@@ -150,7 +149,7 @@ class Group(WalkManage):
             )
         )

-    def update_group_truck_flow(self):
+    def update_group_truck_flow(self, topo):
         # Update the actual/optimal truck flow within each dispatch group
@@ -159,7 +158,7 @@ class Group(WalkManage):
         actual_goto_excavator_traffic_flow, actual_goto_dump_traffic_flow = \
             self.traffic_flow.actual_goto_excavator_traffic_flow, self.traffic_flow.actual_goto_dump_traffic_flow
-        opt_goto_dump_traffic_flow, opt_goto_excavator_traffic_flow = traffic_flow_plan(self.truck)
+        opt_goto_dump_traffic_flow, opt_goto_excavator_traffic_flow = traffic_flow_plan(self.truck, topo)
         try:
@@ -174,6 +173,7 @@ class Group(WalkManage):
             print(self.device_group)
             local_opt_goto_dump_traffic_flow = np.zeros((len(excavator_group), len(dump_group)))
             local_opt_goto_excavator_traffic_flow = np.zeros((len(dump_group), len(excavator_group)))
             local_actual_goto_dump_traffic_flow = np.zeros((len(excavator_group), len(dump_group)))
             local_actual_goto_excavator_traffic_flow = np.zeros((len(dump_group), len(excavator_group)))
             for excavator_id in excavator_group:
@@ -384,14 +384,14 @@ class Group(WalkManage):
         return group_allow_flow_to_dump

-    def period_update(self):
+    def period_update(self,topo):
         self.reset()
         self.update_dispatch_truck_group()
         self.update_group_set()
         self.update_device_group()
         self.update_group_device_map()
         self.update_group_walk_cost()
-        self.update_group_truck_flow()
+        self.update_group_truck_flow(topo)
         self.update_modify()

     def get_diaptch_truck_group(self):
......
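Taken together, these hunks thread one shared Topo instance from realtime_dispatch.py down through Dispatcher, Group, PathPlanner and traffic_flow_plan instead of each component building its own graph. A toy mock of that injection pattern (the classes below are simplified stand-ins, not the real constructors, which take more arguments):

# Stand-in classes only: illustrates the shared-topo wiring shown in the hunks above
class Topo:
    def generate_topo_graph(self):
        self.ready = True

class PathPlanner:
    def __init__(self, topo):
        self.topo = topo            # reuse the already-built graph instead of rebuilding it

class Group:
    def __init__(self, topo):
        self.path = PathPlanner(topo)

class Dispatcher:
    def __init__(self, topo):
        self.path = PathPlanner(topo)
        self.group = Group(topo)

topo = Topo()
topo.generate_topo_graph()          # built once at startup, as in realtime_dispatch.py
dispatcher = Dispatcher(topo)
print(dispatcher.path.topo is dispatcher.group.path.topo)   # True: one shared topology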