Commit ac1dfe25 authored by Allvey

github commit

parent 5140e068
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/7/23 11:25
# @Author : Opfer
# @Site :
# @File : settings.py
# @Software: PyCharm
from tables import *
from urllib.parse import quote
import logging.handlers
# Logging setup
########################################################################################################################
# Log file location
log_path = '/usr/local/fleet-log/dispatch'
# log_path = '/root/WatoDisp/logs'
# # Create the log directory
# if not os.path.exists(log_path):
# os.mkdir(log_path)
# Initialize logging
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# timefilehandler = logging.handlers.TimedRotatingFileHandler(log_path + "/dispatch.log", when='M', interval=1, backupCount=60)
# filehandler = logging.handlers.RotatingFileHandler(log_path + "/dispatch.log", maxBytes=3*1024*1024, backupCount=10)
filehandler = logging.handlers.RotatingFileHandler("logs/dispatch.log", maxBytes=3 * 1024 * 1024, backupCount=10)
# Rotated-file suffix, in strftime format
filehandler.suffix = "%Y-%m-%d_%H-%M.log"
formatter = logging.Formatter('%(asctime)s - %(name)s: %(levelname)s %(message)s')
filehandler.setFormatter(formatter)
logger.addHandler(filehandler)
# Connect to Redis
########################################################################################################################
# Redis db 5 stores device status
pool5 = ConnectionPool(host='192.168.28.111', db=5, port=6379, password='Huituo@123')
redis5 = StrictRedis(connection_pool=pool5)
# Redis db 2 stores dispatch plans
pool2 = ConnectionPool(host='192.168.28.111', db=2, port=6379, password='Huituo@123')
redis2 = StrictRedis(connection_pool=pool2)
# Database connection settings
########################################################################################################################
# Declarative base class for ORM models:
Base = declarative_base()
try:
engine_mysql = create_engine('mysql+mysqlconnector://root:%s@192.168.28.111:3306/waytous' % quote('Huituo@123'))
engine_postgre = create_engine('postgresql://postgres:%s@192.168.28.111:5432/shenbao_2021520' % quote('Huituo@123'))
# Create the DBsession_mysql session factory:
DBsession_mysql = sessionmaker(bind=engine_mysql)
DBsession_postgre = sessionmaker(bind=engine_postgre)
# Create the session_mysql instance:
session_mysql = DBsession_mysql()
session_postgre = DBsession_postgre()
except Exception as es:
logger.error("数据库连接失败")
logger.error(es)
\ No newline at end of file
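# --- Editor's illustrative sketch (not part of this commit) -----------------------------------
# The RotatingFileHandler above writes to the relative path "logs/dispatch.log", so the "logs"
# directory must exist before the first record is emitted, much like the commented-out
# os.mkdir() logic for log_path. A minimal, hedged variant of that setup; names here
# (log_dir, sketch_logger) are hypothetical:
import os
import logging
import logging.handlers

log_dir = "logs"  # assumed to be the same relative directory used above
os.makedirs(log_dir, exist_ok=True)  # create it (and parents) if missing

sketch_logger = logging.getLogger("dispatch.sketch")
sketch_logger.setLevel(logging.INFO)
handler = logging.handlers.RotatingFileHandler(
    os.path.join(log_dir, "dispatch.log"), maxBytes=3 * 1024 * 1024, backupCount=10)
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s: %(levelname)s %(message)s'))
sketch_logger.addHandler(handler)
# Note: the `suffix` attribute set above is only consulted by TimedRotatingFileHandler;
# a plain RotatingFileHandler names rotated files dispatch.log.1, dispatch.log.2, ...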
#!E:\Pycharm Projects\Waytous
# -*- coding: utf-8 -*-
# @Time : 2021/7/23 11:23
# @Author : Opfer
# @Site :
# @File : static_data_process.py
# @Software: PyCharm
from settings import *
# Static data processing helpers
########################################################################################################################
def build_work_area_uuid_index_map():
# load_area_id <-> load_area_index
# unload_area_id <-> unload_area_index
load_area_uuid_to_index_dict = {}
unload_area_uuid_to_index_dict = {}
load_area_index_to_uuid_dict = {}
unload_area_index_to_uuid_dict = {}
unload_area_num = 0
load_area_num = 0
try:
for item in session_postgre.query(WalkTime).all():
load_area = str(item.load_area_id)
unload_area = str(item.unload_area_id)
if load_area not in load_area_uuid_to_index_dict:
load_area_uuid_to_index_dict[load_area] = load_area_num
load_area_index_to_uuid_dict[load_area_num] = load_area
load_area_num = load_area_num + 1
if unload_area not in unload_area_uuid_to_index_dict:
unload_area_uuid_to_index_dict[unload_area] = unload_area_num
unload_area_index_to_uuid_dict[unload_area_num] = unload_area
unload_area_num = unload_area_num + 1
if unload_area_num < 1 or load_area_num < 1:
raise Exception("无路网信息")
except Exception as es:
logger.error(es)
return load_area_uuid_to_index_dict, unload_area_uuid_to_index_dict, \
load_area_index_to_uuid_dict, unload_area_index_to_uuid_dict
def build_park_uuid_index_map():
# park_id <-> park_index
park_uuid_to_index_dict = {}
park_index_to_uuid_dict = {}
park_num = 0
try:
for item in session_postgre.query(WalkTimePort).all():
park = str(item.park_area_id)
if park not in park_uuid_to_index_dict:
park_uuid_to_index_dict[park] = park_num
park_index_to_uuid_dict[park_num] = park
park_num = park_num + 1
if park_num < 1:
raise Exception("无备停区路网信息")
except Exception as es:
logger.error(es)
return park_uuid_to_index_dict, park_index_to_uuid_dict
def build_truck_uuid_name_map():
# truck_id <-> truck_name
truck_uuid_to_name_dict = {}
truck_name_to_uuid_dict = {}
try:
for item in session_mysql.query(Equipment).filter_by(device_type=1).all():
truck_id = item.id
truck_name = item.equipment_id
truck_name_to_uuid_dict[truck_name] = truck_id
truck_uuid_to_name_dict[truck_id] = truck_name
if len(truck_uuid_to_name_dict) < 1 or len(truck_name_to_uuid_dict) < 1:
raise Exception("无矿卡设备可用-矿卡设备映射异常")
except Exception as es:
logger.warning(es)
return truck_uuid_to_name_dict, truck_name_to_uuid_dict
def update_deveices_map(unload_area_uuid_to_index_dict, load_area_uuid_to_index_dict):
excavator_uuid_to_index_dict = {}  # maps excavator ids (Excavator table) to indices
dump_uuid_to_index_dict = {}  # maps dump ids (Dump table) to indices
excavator_index_to_uuid_dict = {}  # maps indices back to excavator ids
dump_index_to_uuid_dict = {}  # maps indices back to dump ids
dump_uuid_to_unload_area_uuid_dict = {}
excavator_uuid_to_load_area_uuid_dict = {}
excavator_index_to_load_area_index_dict = {}
dump_index_to_unload_area_index_dict = {}
try:
excavator_num = 0
dump_num = 0
for item in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
# excavator_id <-> excavator_index
# dump_id <-> dump_index
# excavator_id <-> load_area_id
# dump_id <-> unload_area_id
# excavator_index <-> load_area_index
# dump_index <-> unload_area_index
excavator_id = item.exactor_id
load_area_id = item.load_area_id
unload_area_id = item.unload_area_id
dump_id = item.dump_id
if dump_id not in dump_uuid_to_unload_area_uuid_dict:
dump_uuid_to_index_dict[dump_id] = dump_num
dump_index_to_uuid_dict[dump_num] = dump_id
dump_uuid_to_unload_area_uuid_dict[dump_id] = unload_area_id
dump_index_to_unload_area_index_dict[dump_uuid_to_index_dict[dump_id]] = \
unload_area_uuid_to_index_dict[unload_area_id]
dump_num = dump_num + 1
if excavator_id not in excavator_uuid_to_index_dict:
excavator_uuid_to_index_dict[excavator_id] = excavator_num
excavator_index_to_uuid_dict[excavator_num] = excavator_id
excavator_uuid_to_load_area_uuid_dict[excavator_id] = load_area_id
excavator_index_to_load_area_index_dict[excavator_uuid_to_index_dict[excavator_id]] = \
load_area_uuid_to_index_dict[load_area_id]
excavator_num = excavator_num + 1
if excavator_num < 1 or dump_num < 1:
raise Exception("无动态派车计划可用-动态派车挖机/卸点映射失败")
except Exception as es:
logger.warning(es)
return {'excavator_uuid_to_index_dict': excavator_uuid_to_index_dict,
'dump_uuid_to_index_dict': dump_uuid_to_index_dict,
'excavator_index_to_uuid_dict': excavator_index_to_uuid_dict,
'dump_index_to_uuid_dict': dump_index_to_uuid_dict,
'dump_uuid_to_unload_area_uuid_dict': dump_uuid_to_unload_area_uuid_dict,
'excavator_uuid_to_load_area_uuid_dict': excavator_uuid_to_load_area_uuid_dict,
'excavator_index_to_load_area_index_dict': excavator_index_to_load_area_index_dict,
'dump_index_to_unload_area_index_dict': dump_index_to_unload_area_index_dict}
def update_truck_uuid_index_map(dynamic_truck_set):
truck_uuid_to_index_dict = {}
truck_index_to_uuid_dict = {}
# truck_id <-> truck_index
truck_num = 0
for truck_id in dynamic_truck_set:
truck_uuid_to_index_dict[truck_id] = truck_num
truck_index_to_uuid_dict[truck_num] = truck_id
truck_num = truck_num + 1
return {'truck_uuid_to_index_dict': truck_uuid_to_index_dict,
'truck_index_to_uuid_dict': truck_index_to_uuid_dict}
def update_total_truck():
# Full set of mining trucks
truck_list = []
try:
query = np.array(session_mysql.query(Equipment).filter_by(device_type=1, isdeleted=0).all())
for item in query:
truck_list.append(item.id)
if len(truck_list) < 1:
raise Exception("无矿卡设备可用-矿卡集合读取异常")
except Exception as es:
logger.error(es)
return truck_list
def update_fixdisp_truck():
# Trucks under fixed dispatch plans
fixed_truck_list = []
try:
query = np.array(session_mysql.query(Dispatch).filter_by(isauto=0, isdeleted=0).all())
for item in query:
fixed_truck_list.append(item.truck_id)
if len(fixed_truck_list) < 1:
raise Exception("无固定派车计划可用-固定派车矿卡集合读取异常")
except Exception as es:
logger.error(es)
return fixed_truck_list
def update_autodisp_excavator():
# Excavators available for dynamic dispatch
dynamic_excavator_list = []
try:
for item in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
dynamic_excavator_list.append(item.exactor_id)
if len(dynamic_excavator_list) < 1:
raise Exception("无动态派车计划可用-动态派车挖机/卸点集合读取异常")
except Exception as es:
logger.warning(es)
return dynamic_excavator_list
def update_autodisp_dump():
# Dump sites available for dynamic dispatch
dynamic_dump_list = []
try:
for item in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
dynamic_dump_list.append(item.dump_id)
if len(dynamic_dump_list) < 1:
raise Exception("无动态派车计划可用-动态派车挖机/卸点集合读取异常")
except Exception as es:
logger.warning(es)
return dynamic_dump_list
def update_load_area():
load_area_list = []
for walk_time in session_postgre.query(WalkTime).all():
load_area_list.append(walk_time.load_area_id)
return load_area_list
def update_unload_area():
unload_area_list = []
for walk_time in session_postgre.query(WalkTime).all():
unload_area_list.append(walk_time.unload_area_id)
return unload_area_list
\ No newline at end of file
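# --- Editor's illustrative sketch (not part of this commit) -----------------------------------
# How the helpers in this module are typically chained by the dispatcher: work-area maps first
# (the device maps need the area uuid->index dictionaries), then device maps, then the truck
# maps restricted to trucks not covered by a fixed dispatch plan. The printed counts are
# illustrative only.
if __name__ == "__main__":
    (load_area_uuid_to_index_dict, unload_area_uuid_to_index_dict,
     load_area_index_to_uuid_dict, unload_area_index_to_uuid_dict) = build_work_area_uuid_index_map()

    device_map = update_deveices_map(unload_area_uuid_to_index_dict, load_area_uuid_to_index_dict)

    # Trucks eligible for dynamic dispatch = all trucks minus fixed-dispatch trucks.
    dynamic_truck_set = set(update_total_truck()).difference(set(update_fixdisp_truck()))
    truck_map = update_truck_uuid_index_map(dynamic_truck_set)

    logger.info("excavators: %d, dumps: %d, dynamic trucks: %d",
                len(device_map['excavator_uuid_to_index_dict']),
                len(device_map['dump_uuid_to_index_dict']),
                len(truck_map['truck_uuid_to_index_dict']))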
@@ -32,6 +32,8 @@ import logging
import logging.handlers
import os
from traffic_flow_planner import *
from settings import *
from static_data_process import *
# Global parameter settings
########################################################################################################################
@@ -60,210 +62,209 @@ task_set = [-2, 0, 1, 2, 3, 4, 5]
# Big integer
M = 100000000
# 创建日志
########################################################################################################################
# 日志存储地址
log_path = '/usr/local/fleet-log/dispatch'
# log_path = '/root/WatoDisp/logs'
# # 创建日志目录
# if not os.path.exists(log_path):
# os.mkdir(log_path)
# logging初始化工作
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# timefilehandler = logging.handlers.TimedRotatingFileHandler(log_path + "/dispatch.log", when='M', interval=1, backupCount=60)
# filehandler = logging.handlers.RotatingFileHandler(log_path + "/dispatch.log", maxBytes=3*1024*1024, backupCount=10)
filehandler = logging.handlers.RotatingFileHandler("logs/dispatch.log", maxBytes=3 * 1024 * 1024, backupCount=10)
# 设置后缀名称,跟strftime的格式一样
filehandler.suffix = "%Y-%m-%d_%H-%M.log"
formatter = logging.Formatter('%(asctime)s - %(name)s: %(levelname)s %(message)s')
filehandler.setFormatter(formatter)
logger.addHandler(filehandler)
# 连接reids
########################################################################################################################
# redis 5 存储设备状态
pool5 = ConnectionPool(host='192.168.28.111', db=5, port=6379, password='Huituo@123')
redis5 = StrictRedis(connection_pool=pool5)
# redis 2 存储派车计划
pool2 = ConnectionPool(host='192.168.28.111', db=2, port=6379, password='Huituo@123')
redis2 = StrictRedis(connection_pool=pool2)
# 数据库连接设置
########################################################################################################################
# 创建对象的基类:
Base = declarative_base()
try:
engine_mysql = create_engine('mysql+mysqlconnector://root:%s@192.168.28.111:3306/waytous' % quote('Huituo@123'))
engine_postgre = create_engine('postgresql://postgres:%s@192.168.28.111:5432/shenbao_2021520' % quote('Huituo@123'))
# 创建DBsession_mysql类型:
DBsession_mysql = sessionmaker(bind=engine_mysql)
DBsession_postgre = sessionmaker(bind=engine_postgre)
# 创建session_mysql对象:
session_mysql = DBsession_mysql()
session_postgre = DBsession_postgre()
except Exception as es:
logger.error("数据库连接失败")
logger.error(es)
# 下面开始数据处理部分
########################################################################################################################
def build_work_area_uuid_index_map():
# load_area_id <-> load_area_index
# unload_area_id <-> unload_area_index
load_area_uuid_to_index_dict = {}
unload_area_uuid_to_index_dict = {}
load_area_index_to_uuid_dict = {}
unload_area_index_to_uuid_dict = {}
unload_area_num = 0
load_area_num = 0
try:
for item in session_postgre.query(WalkTime).all():
load_area = str(item.load_area_id)
unload_area = str(item.unload_area_id)
if load_area not in load_area_uuid_to_index_dict:
load_area_uuid_to_index_dict[load_area] = load_area_num
load_area_index_to_uuid_dict[load_area_num] = load_area
load_area_num = load_area_num + 1
if unload_area not in unload_area_uuid_to_index_dict:
unload_area_uuid_to_index_dict[unload_area] = unload_area_num
unload_area_index_to_uuid_dict[unload_area_num] = unload_area
unload_area_num = unload_area_num + 1
if unload_area_num < 1 or load_area_num < 1:
raise Exception("无路网信息")
except Exception as es:
logger.error(es)
return load_area_uuid_to_index_dict, unload_area_uuid_to_index_dict, \
load_area_index_to_uuid_dict, unload_area_index_to_uuid_dict
def build_park_uuid_index_map():
# park_id <-> park_index
park_uuid_to_index_dict = {}
park_index_to_uuid_dict = {}
park_num = 0
try:
for item in session_postgre.query(WalkTimePort).all():
park = str(item.park_area_id)
if park not in park_uuid_to_index_dict:
park_uuid_to_index_dict[park] = park_num
park_index_to_uuid_dict[park_num] = park
park_num = park_num + 1
if park_num < 1:
raise Exception("无备停区路网信息")
except Exception as es:
logger.error(es)
return park_uuid_to_index_dict, park_index_to_uuid_dict
def build_truck_uuid_name_map():
# truck_id <-> truck_name
truck_uuid_to_name_dict = {}
truck_name_to_uuid_dict = {}
try:
for item in session_mysql.query(Equipment).filter_by(device_type=1).all():
truck_id = item.id
truck_name = item.equipment_id
truck_name_to_uuid_dict[truck_name] = truck_id
truck_uuid_to_name_dict[truck_id] = truck_name
if len(truck_uuid_to_name_dict) < 1 or len(truck_name_to_uuid_dict) < 1:
raise Exception("无矿卡设备可用-矿卡设备映射异常")
except Exception as es:
logger.warning(es)
return truck_uuid_to_name_dict, truck_name_to_uuid_dict
def update_deveices_map(unload_area_uuid_to_index_dict, load_area_uuid_to_index_dict):
excavator_uuid_to_index_dict = {} # 用于将Excavator表中的area_id映射到index
dump_uuid_to_index_dict = {} # 用于将Dump表中的area_id映射到index
excavator_index_to_uuid_dict = {} # 用于将index映射到Excavator表中的area_id
dump_index_to_uuid_dict = {} # 用于将index映射到Dump表中的area_id
dump_uuid_to_unload_area_uuid_dict = {}
excavator_uuid_to_load_area_uuid_dict = {}
excavator_index_to_load_area_index_dict = {}
dump_index_to_unload_area_index_dict = {}
try:
excavator_num = 0
dump_num = 0
for item in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
# excavator_id <-> excavator_index
# dump_id <-> dump_index
# excavator_id <-> load_area_id
# dump_id <-> unload_area_id
# excavator_index <-> load_area_index
# dump_index <-> unload_area_index
excavator_id = item.exactor_id
load_area_id = item.load_area_id
unload_area_id = item.unload_area_id
dump_id = item.dump_id
if dump_id not in dump_uuid_to_unload_area_uuid_dict:
dump_uuid_to_index_dict[dump_id] = dump_num
dump_index_to_uuid_dict[dump_num] = dump_id
dump_uuid_to_unload_area_uuid_dict[dump_id] = unload_area_id
dump_index_to_unload_area_index_dict[dump_uuid_to_index_dict[dump_id]] = \
unload_area_uuid_to_index_dict[unload_area_id]
dump_num = dump_num + 1
if excavator_id not in excavator_uuid_to_index_dict:
excavator_uuid_to_index_dict[excavator_id] = excavator_num
excavator_index_to_uuid_dict[excavator_num] = excavator_id
excavator_uuid_to_load_area_uuid_dict[excavator_id] = load_area_id
excavator_index_to_load_area_index_dict[excavator_uuid_to_index_dict[excavator_id]] = \
load_area_uuid_to_index_dict[load_area_id]
excavator_num = excavator_num + 1
if excavator_num < 1 or dump_num < 1:
raise Exception("无动态派车计划可用-动态派车挖机/卸点映射失败")
except Exception as es:
logger.warning(es)
return {'excavator_uuid_to_index_dict': excavator_uuid_to_index_dict,
'dump_uuid_to_index_dict': dump_uuid_to_index_dict,
'excavator_index_to_uuid_dict': excavator_index_to_uuid_dict,
'dump_index_to_uuid_dict': dump_index_to_uuid_dict,
'dump_uuid_to_unload_area_uuid_dict': dump_uuid_to_unload_area_uuid_dict,
'excavator_uuid_to_load_area_uuid_dict': excavator_uuid_to_load_area_uuid_dict,
'excavator_index_to_load_area_index_dict': excavator_index_to_load_area_index_dict,
'dump_index_to_unload_area_index_dict': dump_index_to_unload_area_index_dict}
def update_truck_uuid_index_map(dynamic_truck_set):
truck_uuid_to_index_dict = {}
truck_index_to_uuid_dict = {}
# truck_id <-> truck_index
truck_num = 0
for truck_id in dynamic_truck_set:
truck_uuid_to_index_dict[truck_id] = truck_num
truck_index_to_uuid_dict[truck_num] = truck_id
truck_num = truck_num + 1
return {'truck_uuid_to_index_dict': truck_uuid_to_index_dict,
'truck_index_to_uuid_dict': truck_index_to_uuid_dict}
# # 创建日志
# ########################################################################################################################
# # 日志存储地址
# log_path = '/usr/local/fleet-log/dispatch'
# # log_path = '/root/WatoDisp/logs'
#
# # # 创建日志目录
# # if not os.path.exists(log_path):
# # os.mkdir(log_path)
#
# # logging初始化工作
# logging.basicConfig()
#
# logger = logging.getLogger(__name__)
# logger.setLevel(logging.INFO)
#
# # timefilehandler = logging.handlers.TimedRotatingFileHandler(log_path + "/dispatch.log", when='M', interval=1, backupCount=60)
# # filehandler = logging.handlers.RotatingFileHandler(log_path + "/dispatch.log", maxBytes=3*1024*1024, backupCount=10)
# filehandler = logging.handlers.RotatingFileHandler("logs/dispatch.log", maxBytes=3 * 1024 * 1024, backupCount=10)
# # 设置后缀名称,跟strftime的格式一样
# filehandler.suffix = "%Y-%m-%d_%H-%M.log"
#
# formatter = logging.Formatter('%(asctime)s - %(name)s: %(levelname)s %(message)s')
# filehandler.setFormatter(formatter)
# logger.addHandler(filehandler)
#
# # 连接reids
# ########################################################################################################################
# # redis 5 存储设备状态
# pool5 = ConnectionPool(host='192.168.28.111', db=5, port=6379, password='Huituo@123')
#
# redis5 = StrictRedis(connection_pool=pool5)
#
# # redis 2 存储派车计划
# pool2 = ConnectionPool(host='192.168.28.111', db=2, port=6379, password='Huituo@123')
#
# redis2 = StrictRedis(connection_pool=pool2)
#
# # 数据库连接设置
# ########################################################################################################################
# # 创建对象的基类:
# Base = declarative_base()
#
# try:
# engine_mysql = create_engine('mysql+mysqlconnector://root:%s@192.168.28.111:3306/waytous' % quote('Huituo@123'))
#
# engine_postgre = create_engine('postgresql://postgres:%s@192.168.28.111:5432/shenbao_2021520' % quote('Huituo@123'))
#
# # 创建DBsession_mysql类型:
# DBsession_mysql = sessionmaker(bind=engine_mysql)
#
# DBsession_postgre = sessionmaker(bind=engine_postgre)
#
# # 创建session_mysql对象:
# session_mysql = DBsession_mysql()
#
# session_postgre = DBsession_postgre()
# except Exception as es:
# logger.error("数据库连接失败")
# logger.error(es)
# # 下面开始数据处理部分
# ########################################################################################################################
#
# def build_work_area_uuid_index_map():
# # load_area_id <-> load_area_index
# # unload_area_id <-> unload_area_index
# load_area_uuid_to_index_dict = {}
# unload_area_uuid_to_index_dict = {}
# load_area_index_to_uuid_dict = {}
# unload_area_index_to_uuid_dict = {}
#
# unload_area_num = 0
# load_area_num = 0
#
# try:
# for item in session_postgre.query(WalkTime).all():
# load_area = str(item.load_area_id)
# unload_area = str(item.unload_area_id)
# if load_area not in load_area_uuid_to_index_dict:
# load_area_uuid_to_index_dict[load_area] = load_area_num
# load_area_index_to_uuid_dict[load_area_num] = load_area
# load_area_num = load_area_num + 1
# if unload_area not in unload_area_uuid_to_index_dict:
# unload_area_uuid_to_index_dict[unload_area] = unload_area_num
# unload_area_index_to_uuid_dict[unload_area_num] = unload_area
# unload_area_num = unload_area_num + 1
# if unload_area_num < 1 or load_area_num < 1:
# raise Exception("无路网信息")
# except Exception as es:
# logger.error(es)
# return load_area_uuid_to_index_dict, unload_area_uuid_to_index_dict, \
# load_area_index_to_uuid_dict, unload_area_index_to_uuid_dict
#
#
# def build_park_uuid_index_map():
# # park_id <-> park_index
# park_uuid_to_index_dict = {}
# park_index_to_uuid_dict = {}
#
# park_num = 0
#
# try:
# for item in session_postgre.query(WalkTimePort).all():
# park = str(item.park_area_id)
# if park not in park_uuid_to_index_dict:
# park_uuid_to_index_dict[park] = park_num
# park_index_to_uuid_dict[park_num] = park
# park_num = park_num + 1
# if park_num < 1:
# raise Exception("无备停区路网信息")
# except Exception as es:
# logger.error(es)
#
# return park_uuid_to_index_dict, park_index_to_uuid_dict
#
#
# def build_truck_uuid_name_map():
# # truck_id <-> truck_name
# truck_uuid_to_name_dict = {}
# truck_name_to_uuid_dict = {}
#
# try:
# for item in session_mysql.query(Equipment).filter_by(device_type=1).all():
# truck_id = item.id
# truck_name = item.equipment_id
#
# truck_name_to_uuid_dict[truck_name] = truck_id
# truck_uuid_to_name_dict[truck_id] = truck_name
# if len(truck_uuid_to_name_dict) < 1 or len(truck_name_to_uuid_dict) < 1:
# raise Exception("无矿卡设备可用-矿卡设备映射异常")
# except Exception as es:
# logger.warning(es)
# return truck_uuid_to_name_dict, truck_name_to_uuid_dict
#
#
# def update_deveices_map(unload_area_uuid_to_index_dict, load_area_uuid_to_index_dict):
# excavator_uuid_to_index_dict = {} # 用于将Excavator表中的area_id映射到index
# dump_uuid_to_index_dict = {} # 用于将Dump表中的area_id映射到index
# excavator_index_to_uuid_dict = {} # 用于将index映射到Excavator表中的area_id
# dump_index_to_uuid_dict = {} # 用于将index映射到Dump表中的area_id
#
# dump_uuid_to_unload_area_uuid_dict = {}
# excavator_uuid_to_load_area_uuid_dict = {}
# excavator_index_to_load_area_index_dict = {}
# dump_index_to_unload_area_index_dict = {}
#
# try:
# excavator_num = 0
# dump_num = 0
# for item in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
# # excavator_id <-> excavator_index
# # dump_id <-> dump_index
# # excavator_id <-> load_area_id
# # dump_id <-> unload_area_id
# # excavator_index <-> load_area_index
# # dump_index <-> unload_area_index
# excavator_id = item.exactor_id
# load_area_id = item.load_area_id
# unload_area_id = item.unload_area_id
# dump_id = item.dump_id
# if dump_id not in dump_uuid_to_unload_area_uuid_dict:
# dump_uuid_to_index_dict[dump_id] = dump_num
# dump_index_to_uuid_dict[dump_num] = dump_id
# dump_uuid_to_unload_area_uuid_dict[dump_id] = unload_area_id
# dump_index_to_unload_area_index_dict[dump_uuid_to_index_dict[dump_id]] = \
# unload_area_uuid_to_index_dict[unload_area_id]
# dump_num = dump_num + 1
# if excavator_id not in excavator_uuid_to_index_dict:
# excavator_uuid_to_index_dict[excavator_id] = excavator_num
# excavator_index_to_uuid_dict[excavator_num] = excavator_id
# excavator_uuid_to_load_area_uuid_dict[excavator_id] = load_area_id
# excavator_index_to_load_area_index_dict[excavator_uuid_to_index_dict[excavator_id]] = \
# load_area_uuid_to_index_dict[load_area_id]
# excavator_num = excavator_num + 1
# if excavator_num < 1 or dump_num < 1:
# raise Exception("无动态派车计划可用-动态派车挖机/卸点映射失败")
# except Exception as es:
# logger.warning(es)
#
# return {'excavator_uuid_to_index_dict': excavator_uuid_to_index_dict,
# 'dump_uuid_to_index_dict': dump_uuid_to_index_dict,
# 'excavator_index_to_uuid_dict': excavator_index_to_uuid_dict,
# 'dump_index_to_uuid_dict': dump_index_to_uuid_dict,
# 'dump_uuid_to_unload_area_uuid_dict': dump_uuid_to_unload_area_uuid_dict,
# 'excavator_uuid_to_load_area_uuid_dict': excavator_uuid_to_load_area_uuid_dict,
# 'excavator_index_to_load_area_index_dict': excavator_index_to_load_area_index_dict,
# 'dump_index_to_unload_area_index_dict': dump_index_to_unload_area_index_dict}
#
# def update_truck_uuid_index_map(dynamic_truck_set):
# truck_uuid_to_index_dict = {}
# truck_index_to_uuid_dict = {}
#
# # truck_id <-> truck_index
# truck_num = 0
# for truck_id in dynamic_truck_set:
# truck_uuid_to_index_dict[truck_id] = truck_num
# truck_index_to_uuid_dict[truck_num] = truck_id
# truck_num = truck_num + 1
#
# return {'truck_uuid_to_index_dict': truck_uuid_to_index_dict,
# 'truck_index_to_uuid_dict': truck_index_to_uuid_dict}
load_area_uuid_to_index_dict, unload_area_uuid_to_index_dict, \
load_area_index_to_uuid_dict, unload_area_index_to_uuid_dict = build_work_area_uuid_index_map()
@@ -276,43 +277,6 @@ park_num = len(park_uuid_to_index_dict)
truck_uuid_to_name_dict, truck_name_to_uuid_dict = build_truck_uuid_name_map()
########################################################################################################################
def update_total_truck():
# 矿卡集合
truck_list = []
try:
query = np.array(session_mysql.query(Equipment).filter_by(device_type=1, isdeleted=0).all())
for item in query:
truck_list.append(item.id)
if len(truck_set) < 1:
raise Exception("无矿卡设备可用-矿卡集合读取异常")
except Exception as es:
logger.error(es)
return truck_list
def update_fixdisp_truck():
# 固定派车矿卡集合
fixed_truck_list = []
try:
query = np.array(session_mysql.query(Dispatch).filter_by(isauto=0, isdeleted=0).all())
for item in query:
fixed_truck_list.append(item.truck_id)
if len(fixed_truck_list) < 1:
raise Exception("无固定派车计划可用-固定派车矿卡集合读取异常")
except Exception as es:
logger.error(es)
return fixed_truck_list
truck_set = set(update_total_truck())
fixed_truck_set = set(update_fixdisp_truck())
@@ -323,41 +287,13 @@ dynamic_truck_set = truck_set.difference(fixed_truck_set)
logger.info("可用于动态派车的矿卡:")
logger.info(dynamic_truck_set)
def update_autodisp_excavator():
# 用于动态派车的挖机集合
dynamic_excavator_list = []
try:
for item in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
dynamic_excavator_list.append(item.exactor_id)
if len(dynamic_excavator_list) < 1:
raise Exception("无动态派车计划可用-动态派车挖机/卸点集合读取异常")
except Exception as es:
logger.warning(es)
return dynamic_excavator_list
def update_autodisp_dump():
# 用于动态调度的卸载点集合
dynamic_dump_list = []
try:
for item in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
dynamic_dump_list.append(item.dump_id)
if len(dynamic_dump_list) < 1:
raise Exception("无动态派车计划可用-动态派车挖机/卸点集合读取异常")
except Exception as es:
logger.warning(es)
return dynamic_dump_list
# Excavators and dump sites available for dynamic dispatch
dynamic_excavator_set = set(update_autodisp_excavator())
dynamic_dump_set = set(update_autodisp_dump())
class DeviceMap:
def __init__(self):
self.excavator_uuid_to_index_dict = {}
self.dump_uuid_to_index_dict = {}
self.excavator_index_to_uuid_dict = {}
@@ -389,6 +325,7 @@ class DeviceMap:
self.truck_uuid_to_index_dict = truck_map_dict['truck_uuid_to_index_dict']
self.truck_index_to_uuid_dict = truck_map_dict['truck_index_to_uuid_dict']
class DumpInfo(DeviceMap):
def __init__(self):
super().__init__()
@@ -398,10 +335,10 @@ class DumpInfo(DeviceMap):
self.dump_target_mass = np.zeros(self.dumps)
# Actual measured output
self.cur_dump_real_mass = np.zeros(self.dumps)
# Predicted output (including payloads of trucks already en route)
self.pre_dump_real_mass = copy.deepcopy(self.cur_dump_real_mass)
# Simulated output (so scheduling does not modify the real figures)
self.sim_dump_real_mass = np.zeros(self.dumps)
# # Predicted output (including payloads of trucks already en route)
# self.pre_dump_real_mass = copy.deepcopy(self.cur_dump_real_mass)
# # Simulated output (so scheduling does not modify the real figures)
# self.sim_dump_real_mass = np.zeros(self.dumps)
# # Actual device availability times
# self.cur_dump_ava_time = np.zeros(self.dumps)
# # Simulated device availability times (so scheduling does not modify the real figures)
@@ -413,7 +350,6 @@ class DumpInfo(DeviceMap):
# Unloading time
self.unloading_time = np.zeros(self.dumps)
def update_dump_unloadtime(self):
self.unloading_time = np.zeros(self.dumps)
@@ -449,45 +385,45 @@ class DumpInfo(DeviceMap):
self.cur_dump_real_mass[self.dump_uuid_to_index_dict[dump_id]] = \
self.cur_dump_real_mass[self.dump_uuid_to_index_dict[dump_id]] + query.load_weight
def update_pre_unload_throughout(self):
try:
self.pre_dump_real_mass = copy.deepcopy(self.cur_dump_real_mass)
for i in range(self.trucks):
# task = self.truck_current_stage[i][0]
task = self.truck_current_task[self.truck_index_to_uuid_dict[i]]
end_area_index = self.truck_current_trip[i][1]
# If the truck is travelling normally, count its payload toward the real-time output
if task in heavy_task_set:
self.pre_dump_real_mass[end_area_index] = self.pre_dump_real_mass[end_area_index] + self.payload[i]
else:
pass
except Exception as es:
logger.error("卸点预计装载量计算异常")
logger.error(es)
def update_dump_ava_time(self, dump_ava_ls):
try:
now = float((datetime.now() - self.start_time) / timedelta(hours=0, minutes=1, seconds=0))
for reach_ls in dump_ava_ls:
if len(reach_ls) != 0:
reach_ls = np.array(reach_ls)
tmp = reach_ls[np.lexsort(reach_ls[:, ::-1].T)]
for i in range(len(tmp)):
dump_index = int(tmp[i][2])
self.cur_dump_ava_time[dump_index] = max(tmp[i][0], self.cur_dump_ava_time[dump_index]) + \
self.unloading_time[dump_index]
self.cur_truck_ava_time[int(tmp[i][1])] = self.cur_dump_ava_time[dump_index]
# def update_pre_unload_throughout(self):
# try:
# self.pre_dump_real_mass = copy.deepcopy(self.cur_dump_real_mass)
# for i in range(self.trucktrucks):
# # task = self.truck_current_stage[i][0]
# task = self.truck_current_task[self.truck_index_to_uuid_dict[i]]
# end_area_index = self.truck_current_trip[i][1]
# # 若矿卡正常行驶,需要将该部分载重计入实时产量
# if task in heavy_task_set:
# self.pre_dump_real_mass[end_area_index] = self.pre_dump_real_mass[end_area_index] + self.payload[i]
# else:
# pass
# except Exception as es:
# logger.error("卸点预计装载量计算异常")
# logger.error(es)
# If the dump availability time drifts far from now, correct it
if abs(self.cur_dump_ava_time[dump_index] - now) > 60:
self.cur_dump_ava_time[dump_index] = now
if abs(self.cur_truck_ava_time[int(tmp[i][1])] - now) > 60:
self.cur_truck_ava_time[int(tmp[i][1])] = now
except Exception as es:
logger.error("卸点可用时间计算异常")
logger.error(es)
# def update_dump_ava_time(self, dump_ava_ls):
# try:
#
# now = float((datetime.now() - self.start_time) / timedelta(hours=0, minutes=1, seconds=0))
#
# for reach_ls in dump_ava_ls:
# if len(reach_ls) != 0:
# reach_ls = np.array(reach_ls)
# tmp = reach_ls[np.lexsort(reach_ls[:, ::-1].T)]
# for i in range(len(tmp)):
# dump_index = int(tmp[i][2])
# self.cur_dump_ava_time[dump_index] = max(tmp[i][0], self.cur_dump_ava_time[dump_index]) + \
# self.unloading_time[dump_index]
# self.cur_truck_ava_time[int(tmp[i][1])] = self.cur_dump_ava_time[dump_index]
#
# # 若卸点可用时间严重偏离,进行修正
# if abs(self.cur_dump_ava_time[dump_index] - now) > 60:
# self.cur_dump_ava_time[dump_index] = now
# if abs(self.cur_truck_ava_time[int(tmp[i][1])] - now) > 60:
# self.cur_truck_ava_time[int(tmp[i][1])] = now
# except Exception as es:
# logger.error("卸点可用时间计算异常")
# logger.error(es)
def period_update(self):
@@ -495,10 +431,10 @@ class DumpInfo(DeviceMap):
self.load()
# Initialize dump availability times
self.cur_dump_ava_time = np.full(self.dumps,
(datetime.now() - self.start_time) / timedelta(hours=0, minutes=1,
seconds=0))
# # Initialize dump availability times
# self.cur_dump_ava_time = np.full(self.dumps,
# (datetime.now() - self.start_time) / timedelta(hours=0, minutes=1,
# seconds=0))
# Dump sites available for dynamic dispatch
self.dynamic_dump_set = set(update_autodisp_dump())
@@ -515,8 +451,12 @@ class DumpInfo(DeviceMap):
# Target unloading output
self.dump_target_mass = np.full(self.dumps, dump_target_mass)
# Compute the predicted dump output
self.update_pre_unload_throughout()
# Sync the simulated unloaded mass
self.sim_dump_real_mass = copy.deepcopy(self.cur_dump_real_mass)
# # Compute the predicted dump output
# self.update_pre_unload_throughout()
class ExcavatorInfo(DeviceMap):
def __init__(self):
@@ -527,10 +467,10 @@ class ExcavatorInfo(DeviceMap):
self.shovel_target_mass = np.zeros(self.shovels)
# Actual measured output
self.cur_shovel_real_mass = np.zeros(self.shovels)
# Predicted output (including payloads of trucks already en route)
self.pre_shovel_real_mass = copy.deepcopy(self.cur_shovel_real_mass)
# Simulated output (so scheduling does not modify the real figures)
self.sim_shovel_real_mass = np.zeros(self.shovels)
# # Predicted output (including payloads of trucks already en route)
# self.pre_shovel_real_mass = copy.deepcopy(self.cur_shovel_real_mass)
# # Simulated output (so scheduling does not modify the real figures)
# self.sim_shovel_real_mass = np.zeros(self.shovels)
# # Actual device availability times
# self.cur_shovel_ava_time = np.zeros(self.shovels)
# # Simulated device availability times (so scheduling does not modify the real figures)
@@ -576,47 +516,47 @@ class ExcavatorInfo(DeviceMap):
self.cur_shovel_real_mass[self.excavator_uuid_to_index_dict[excavator_id]] = \
self.cur_shovel_real_mass[self.excavator_uuid_to_index_dict[excavator_id]] + query.load_weight
def update_pre_load_throughout(self):
try:
self.pre_shovel_real_mass = copy.deepcopy(self.cur_shovel_real_mass)
for i in range(self.trucks):
# task = self.truck_current_stage[i][0]
task = self.truck_current_task[self.truck_index_to_uuid_dict[i]]
end_area_index = self.truck_current_trip[i][1]
# If the truck is travelling normally, count its payload toward the real-time output
if task in empty_task_set:
self.pre_shovel_real_mass[end_area_index] = self.pre_shovel_real_mass[end_area_index] + \
self.payload[i]
else:
pass
except Exception as es:
logger.error("挖机/卸点预计装载量计算异常")
logger.error(es)
def update_shovel_ava_time(self, shovel_ava_ls):
try:
now = float((datetime.now() - self.start_time) / timedelta(hours=0, minutes=1, seconds=0))
for reach_ls in shovel_ava_ls:
if len(reach_ls) != 0:
reach_ls = np.array(reach_ls)
tmp = reach_ls[np.lexsort(reach_ls[:, ::-1].T)]
for i in range(len(tmp)):
shovel_index = int(tmp[i][2])
self.cur_shovel_ava_time[shovel_index] = max(tmp[i][0],
self.cur_shovel_ava_time[shovel_index]) + \
self.loading_time[shovel_index]
self.cur_truck_ava_time[int(tmp[i][1])] = self.cur_shovel_ava_time[shovel_index]
# def update_pre_load_throughout(self):
# try:
# self.pre_shovel_real_mass = copy.deepcopy(self.cur_shovel_real_mass)
# for i in range(self.trucks):
# # task = self.truck_current_stage[i][0]
# task = self.truck_current_task[self.truck_index_to_uuid_dict[i]]
# end_area_index = self.truck_current_trip[i][1]
# # 若矿卡正常行驶,需要将该部分载重计入实时产量
# if task in empty_task_set:
# self.pre_shovel_real_mass[end_area_index] = self.pre_shovel_real_mass[end_area_index] + \
# self.payload[i]
# else:
# pass
# except Exception as es:
# logger.error("挖机/卸点预计装载量计算异常")
# logger.error(es)
# If the shovel availability time drifts far from now, correct it
if abs(self.cur_shovel_ava_time[shovel_index] - now) > 60:
self.cur_truck_ava_time[int(tmp[i][1])] = now
if abs(self.cur_shovel_ava_time[shovel_index] - now) > 60:
self.cur_shovel_ava_time[shovel_index] = now
except Exception as es:
logger.error("挖机可用时间计算异常")
logger.error(es)
# def update_shovel_ava_time(self, shovel_ava_ls):
# try:
#
# now = float((datetime.now() - self.start_time) / timedelta(hours=0, minutes=1, seconds=0))
#
# for reach_ls in shovel_ava_ls:
# if len(reach_ls) != 0:
# reach_ls = np.array(reach_ls)
# tmp = reach_ls[np.lexsort(reach_ls[:, ::-1].T)]
# for i in range(len(tmp)):
# shovel_index = int(tmp[i][2])
# self.cur_shovel_ava_time[shovel_index] = max(tmp[i][0],
# self.cur_shovel_ava_time[shovel_index]) + \
# self.loading_time[shovel_index]
# self.cur_truck_ava_time[int(tmp[i][1])] = self.cur_shovel_ava_time[shovel_index]
#
# # 若挖机可用时间严重偏离,进行修正
# if abs(self.cur_shovel_ava_time[shovel_index] - now) > 60:
# self.cur_truck_ava_time[int(tmp[i][1])] = now
# if abs(self.cur_shovel_ava_time[shovel_index] - now) > 60:
# self.cur_shovel_ava_time[shovel_index] = now
# except Exception as es:
# logger.error("挖机可用时间计算异常")
# logger.error(es)
def period_update(self):
@@ -624,10 +564,10 @@ class ExcavatorInfo(DeviceMap):
self.load()
# Initialize shovel availability times
self.cur_shovel_ava_time = np.full(self.shovels,
(datetime.now() - self.start_time) / timedelta(hours=0, minutes=1,
seconds=0))
# # Initialize shovel availability times
# self.cur_shovel_ava_time = np.full(self.shovels,
# (datetime.now() - self.start_time) / timedelta(hours=0, minutes=1,
# seconds=0))
# Excavators available for dynamic dispatch
self.dynamic_excavator_set = set(update_autodisp_excavator())
@@ -644,8 +584,12 @@ class ExcavatorInfo(DeviceMap):
# Target loading output
self.excavator_target_mass = np.full(self.shovels, shovel_target_mass)
# Compute the predicted shovel output
self.update_pre_load_throughout()
# # Sync the simulated shovel loaded mass
# self.sim_shovel_real_mass = copy.deepcopy(self.cur_shovel_real_mass)
# # Compute the predicted shovel output
# self.update_pre_load_throughout()
class TruckInfo(DeviceMap):
def __init__(self):
@@ -871,7 +815,7 @@ class TruckInfo(DeviceMap):
self.update_truck_trip()
class Dispatcher:
class Dispatcher(DeviceMap):
def __init__(self):
# Fields for the three device classes
self.dump = DumpInfo()
@@ -880,20 +824,6 @@ class Dispatcher:
self.truck = TruckInfo()
# # 设备数量
# self.dumps = len(dynamic_dump_set)
# self.shovels = len(dynamic_excavator_set)
# self.trucks = len(dynamic_truck_set)
# # 目标产量
# self.dump_target_mass = np.zeros(self.dumps)
# self.shovel_target_mass = np.zeros(self.shovels)
# # 真实实际产量
# self.cur_dump_real_mass = np.zeros(self.dumps)
# self.cur_shovel_real_mass = np.zeros(self.shovels)
# # 预计产量(包含正在驶往目的地的矿卡载重)
# self.pre_dump_real_mass = copy.deepcopy(self.cur_dump_real_mass)
# self.pre_shovel_real_mass = copy.deepcopy(self.cur_shovel_real_mass)
# Simulated output (so scheduling does not modify the real figures)
self.sim_dump_real_mass = np.zeros(self.dump.dumps)
self.sim_shovel_real_mass = np.zeros(self.excavator.shovels)
# Actual device availability times
@@ -906,20 +836,9 @@ class Dispatcher:
# self.sim_truck_reach_shovel = np.zeros(self.trucks)
self.sim_shovel_ava_time = np.zeros(self.excavator.shovels)
self.sim_dump_ava_time = np.zeros(self.dump.dumps)
# # 用于动态派车的矿卡集合
# self.dynamic_truck_set = []
# # 用于动态派车的挖机集合
# self.dynamic_excavator_set = []
# # 用于动态调度的卸载点集合
# self.dynamic_dump_set = []
#
# # 矿卡装载/卸载时间
# self.last_load_time = {}
# self.last_unload_time = {}
#
# self.relative_last_load_time = {}
# self.relative_last_unload_time = {}
# Predicted shovel / dump output
self.pre_dump_real_mass = np.zeros(self.dump.dumps)
self.pre_shovel_real_mass = np.zeros(self.excavator.shovels)
# # 矿卡阶段
# self.truck_current_task = {}
@@ -944,25 +863,6 @@ class Dispatcher:
self.cur_truck_ava_time = np.zeros(self.truck.trucks)
self.sim_truck_ava_time = np.zeros(self.truck.trucks)
# # 卸载时间
# self.unloading_time = np.zeros(self.dumps)
# # 装载时间
# self.loading_time = np.zeros(self.shovels)
# Mapping relations
self.excavator_uuid_to_index_dict = {}
self.dump_uuid_to_index_dict = {}
self.excavator_index_to_uuid_dict = {}
self.dump_index_to_uuid_dict = {}
self.dump_uuid_to_unload_area_uuid_dict = {}
self.excavator_uuid_to_load_area_uuid_dict = {}
self.excavator_index_to_load_area_index_dict = {}
self.dump_index_to_unload_area_index_dict = {}
self.truck_uuid_to_index_dict = {}
self.truck_index_to_uuid_dict = {}
self.actual_goto_excavator_traffic_flow = np.zeros((self.dump.dumps, self.excavator.shovels))
self.actual_goto_dump_traffic_flow = np.zeros((self.dump.dumps, self.excavator.shovels))
@@ -1045,8 +945,8 @@ class Dispatcher:
def update_walk_time(self):
try:
for i in range(self.dumps):
for j in range(self.shovels):
for i in range(self.dump.dumps):
for j in range(self.excavator.shovels):
self.com_time_eq[i][j] = self.com_time_area[self.dump_index_to_unload_area_index_dict[i]] \
[self.excavator_index_to_load_area_index_dict[j]]
self.go_time_eq[i][j] = self.go_time_area[self.dump_index_to_unload_area_index_dict[i]] \
@@ -1072,263 +972,21 @@ class Dispatcher:
try:
for i in range(park_num):
for j in range(self.shovels):
for j in range(self.excavator.shovels):
self.park_to_load_eq[i][j] = self.park_to_load_area[i][
self.excavator_index_to_load_area_index_dict[j]]
except Exception as es:
logger.error("备停区设备路网信息异常")
logger.error(es)
def update_truck_current_task(self):
self.truck_current_task = {}
device_name_set = redis2.keys()
def update_truck_reach_time(self):
try:
for item in device_name_set:
item = item.decode(encoding='utf-8')
json_value = json.loads(redis2.get(item))
device_type = json_value.get('type')
if device_type == 1:
if truck_name_to_uuid_dict[item] in self.dynamic_truck_set:
currentTask = json_value.get('currentTask')
self.truck_current_task[truck_name_to_uuid_dict[item]] = currentTask
shovels = self.excavator.shovels
dumps = self.dump.dumps
trucks = self.truck.trucks
except Exception as es:
logger.error("读取矿卡任务异常-reids读取异常")
logger.error(es)
logger.info("矿卡当前任务:")
logger.info(self.truck_current_task)
def update_truck_payload(self):
try:
self.payload = np.zeros(self.trucks)
for truck_id in self.dynamic_truck_set:
trcuk_index = self.truck_uuid_to_index_dict[truck_id]
truck_spec = session_mysql.query(Equipment).filter_by(id=truck_id).first().equipment_spec
# truck_spec = query.equipment_spec
self.payload[trcuk_index] = session_mysql.query(EquipmentSpec).filter_by(id=truck_spec).first().capacity
except Exception as es:
logger.error("读取矿卡有效载重异常-矿卡型号信息缺失")
logger.error(es)
def update_truck_last_leave_time(self):
self.last_load_time = {}
self.last_unload_time = {}
self.relative_last_load_time = {}
self.relative_last_unload_time = {}
# for x in redis2.keys():
# print(redis2.get(x))
try:
for item in self.dynamic_truck_set:
json_value = json.loads(redis2.get(truck_uuid_to_name_dict[item]))
device_type = json_value.get('type')
# 判断是否为矿卡
if device_type == 1:
task = self.truck_current_task[item]
if task in heavy_task_set:
last_load_time_tmp = json_value.get('lastLoadTime')
if last_load_time_tmp is not None:
self.last_load_time[item] = datetime.strptime(last_load_time_tmp, \
"%b %d, %Y %I:%M:%S %p")
else:
self.last_load_time[item] = datetime.now()
self.relative_last_load_time[item] = float((self.last_load_time[item] - self.start_time) /
timedelta(hours=0, minutes=1, seconds=0))
# print("相对last_load_time", self.relative_last_load_time[item])
logger.info("相对last_load_time")
logger.info(self.relative_last_load_time[item])
if task in empty_task_set:
last_unload_time_tmp = json_value.get('lastUnloadTime')
if last_unload_time_tmp is not None:
self.last_unload_time[item] = datetime.strptime(last_unload_time_tmp, \
"%b %d, %Y %I:%M:%S %p")
else:
self.last_unload_time[item] = datetime.now()
self.relative_last_unload_time[item] = float((self.last_unload_time[item] - self.start_time) /
timedelta(hours=0, minutes=1, seconds=0))
# print("相对last_unload_time", self.relative_last_unload_time[item])
logger.info("相对last_unload_time")
logger.info(self.relative_last_unload_time[item])
except Exception as es:
logger.error("读取矿卡可用时间异常-redis读取异常")
logger.error(es)
def update_excavator_loadtime(self):
self.loading_time = np.zeros(self.shovels)
for excavator_id in self.excavator_uuid_to_index_dict.keys():
ave_load_time = 0
load_count = 0
try:
for query in session_mysql.query(JobRecord.start_time, JobRecord.end_time). \
join(Equipment, JobRecord.equipment_id == Equipment.equipment_id). \
filter(Equipment.id == excavator_id, JobRecord.end_time != None). \
order_by(JobRecord.start_time.desc()).limit(10):
ave_load_time = ave_load_time + float(
(query.end_time - query.start_time) / timedelta(hours=0, minutes=1, seconds=0))
load_count = load_count + 1
self.loading_time[self.excavator_uuid_to_index_dict[excavator_id]] = ave_load_time / load_count
except Exception as es:
logger.error(f'挖机 {excavator_id} 装载时间信息缺失, 已设为默认值(1min)')
logger.error(es)
self.loading_time[self.excavator_uuid_to_index_dict[excavator_id]] = 1.00
def update_dump_unloadtime(self):
self.unloading_time = np.zeros(self.dumps)
for dump_id in self.dump_uuid_to_index_dict.keys():
ave_unload_time = 0
unload_count = 0
try:
for query in session_mysql.query(JobRecord.start_time, JobRecord.end_time). \
join(Equipment, JobRecord.equipment_id == Equipment.equipment_id). \
filter(Equipment.id == dump_id, JobRecord.end_time != None). \
order_by(JobRecord.start_time.desc()).limit(10):
ave_unload_time = ave_unload_time + float(
(query.end_time - query.start_time) / timedelta(hours=0, minutes=1, seconds=0))
unload_count = unload_count + 1
self.unloading_time[self.dump_uuid_to_index_dict[dump_id]] = ave_unload_time / unload_count
except Exception as es:
logger.error(f'卸载点 {dump_id} 卸载时间信息缺失, 已设为默认值(1min)')
logger.error(es)
self.unloading_time[self.dump_uuid_to_index_dict[dump_id]] = 1.00
# print("average_unload_time: ", self.unloading_time[self.dump_uuid_to_index_dict[dump_id]])
def update_actual_load_throughout(self):
self.cur_shovel_real_mass = np.zeros(self.shovels)
now = datetime.now().strftime('%Y-%m-%d')
for excavator_id in self.excavator_uuid_to_index_dict.keys():
# print(excavator_id)
for query in session_mysql.query(LoadInfo). \
join(Equipment, LoadInfo.dump_id == Equipment.equipment_id). \
filter(Equipment.id == excavator_id, LoadInfo.time > now). \
order_by(LoadInfo.time.desc()).all():
# print("time:", query.time)
# print("load_weight:", )
self.cur_shovel_real_mass[self.excavator_uuid_to_index_dict[excavator_id]] = \
self.cur_shovel_real_mass[self.excavator_uuid_to_index_dict[excavator_id]] + query.load_weight
def update_actual_unload_thoughout(self):
self.cur_dump_real_mass = np.zeros(self.dumps)
now = datetime.now().strftime('%Y-%m-%d')
for dump_id in self.dump_uuid_to_index_dict.keys():
# print(excavator_id)
for query in session_mysql.query(LoadInfo). \
join(Equipment, LoadInfo.dump_id == Equipment.equipment_id). \
filter(Equipment.id == dump_id, LoadInfo.time > now). \
order_by(LoadInfo.time.desc()).all():
# print("time:", query.time)
# print("load_weight:", )
self.cur_dump_real_mass[self.dump_uuid_to_index_dict[dump_id]] = \
self.cur_dump_real_mass[self.dump_uuid_to_index_dict[dump_id]] + query.load_weight
def update_truck_trip(self):
self.truck_current_trip = np.full((self.trucks, 2), -1)
for i in range(self.trucks):
try:
session_mysql.commit()
truck_id = self.truck_index_to_uuid_dict[i]
task = self.truck_current_task[self.truck_index_to_uuid_dict[i]]
# print("truck_task:", truck_id, task)
item = session_mysql.query(EquipmentPair).filter_by(truck_id=truck_id, isdeleted=0).first()
if task in empty_task_set + heavy_task_set and item is None:
raise Exception(f'矿卡 {truck_id} 配对关系异常')
except Exception as es:
logger.warning(es)
try:
# 若矿卡状态为空运
if task in empty_task_set:
last_unload_time = self.relative_last_unload_time[self.truck_index_to_uuid_dict[i]]
# 开始区域id
start_area_id = self.dump_uuid_to_unload_area_uuid_dict[item.dump_id]
# 开始区域序号
start_area_index = unload_area_uuid_to_index_dict[start_area_id]
end_area_id = self.excavator_uuid_to_load_area_uuid_dict[item.exactor_id]
end_area_index = load_area_uuid_to_index_dict[end_area_id]
self.truck_current_trip[i] = [self.dump_uuid_to_index_dict[item.dump_id],
self.excavator_uuid_to_index_dict[item.exactor_id]]
# if truck_uuid_to_name_dict[self.truck_index_to_uuid_dict[i]] in tmp_set:
# print("here")
# self.cur_truck_reach_shovel[i] = last_unload_time + 10 * self.com_time_area[start_area_index][
# end_area_index]
# else:
self.cur_truck_reach_shovel[i] = last_unload_time + self.com_time_area[start_area_index][
end_area_index]
# 若矿卡状态为重载
elif task in heavy_task_set:
# print("读取重载行程")
# print(item.exactor_id, item.dump_id)
last_load_time = self.relative_last_load_time[self.truck_index_to_uuid_dict[i]]
# 开始区域id
start_area_id = self.excavator_uuid_to_load_area_uuid_dict[item.exactor_id]
# 开始区域序号
start_area_index = load_area_uuid_to_index_dict[start_area_id]
# 结束区域id
end_area_id = self.dump_uuid_to_unload_area_uuid_dict[item.dump_id]
# 结束区域序号
end_area_index = unload_area_uuid_to_index_dict[end_area_id]
self.truck_current_trip[i] = [self.excavator_uuid_to_index_dict[item.exactor_id],
self.dump_uuid_to_index_dict[item.dump_id]]
self.cur_truck_reach_dump[i] = last_load_time + self.go_time_area[end_area_index][start_area_index]
# 其他状态,矿卡状态为-2,equipment_pair表不存在该矿卡
else:
pass
except Exception as es:
logger.error("矿卡行程读取异常")
logger.error(es)
self.truck_current_trip.flatten()
# print("当前矿卡行程:")
# print(self.truck_current_trip)
def update_pre_load_throughout(self):
try:
self.pre_shovel_real_mass = copy.deepcopy(self.cur_shovel_real_mass)
for i in range(self.trucks):
# task = self.truck_current_stage[i][0]
task = self.truck_current_task[self.truck_index_to_uuid_dict[i]]
end_area_index = self.truck_current_trip[i][1]
# 若矿卡正常行驶,需要将该部分载重计入实时产量
if task in empty_task_set:
self.pre_shovel_real_mass[end_area_index] = self.pre_shovel_real_mass[end_area_index] + \
self.payload[i]
else:
pass
except Exception as es:
logger.error("挖机/卸点预计装载量计算异常")
logger.error(es)
def update_pre_unload_throughout(self):
try:
self.pre_dump_real_mass = copy.deepcopy(self.cur_dump_real_mass)
for i in range(self.trucks):
# task = self.truck_current_stage[i][0]
task = self.truck_current_task[self.truck_index_to_uuid_dict[i]]
end_area_index = self.truck_current_trip[i][1]
# 若矿卡正常行驶,需要将该部分载重计入实时产量
if task in heavy_task_set:
self.pre_dump_real_mass[end_area_index] = self.pre_dump_real_mass[end_area_index] + self.payload[i]
else:
pass
except Exception as es:
logger.error("卸点预计装载量计算异常")
logger.error(es)
def update_truck_reach_time(self):
try:
shovels = self.excavator.shovels
dumps = self.dump.dumps
trucks = self.truck.trucks
truck_current_task = self.truck.truck_current_task
truck_current_trip = self.truck.truck_current_trip
@@ -1336,7 +994,6 @@ class Dispatcher:
cur_truck_reach_dump = self.truck.cur_truck_reach_dump
shovel_ava_ls = [[] for _ in range(shovels)]
dump_ava_ls = [[] for _ in range(dumps)]
for i in range(trucks):
@@ -1431,135 +1088,61 @@ class Dispatcher:
((datetime.now() - self.start_time) / timedelta(hours=0, minutes=1,
seconds=0))
def update_pre_unload_throughout(self):
try:
self.pre_dump_real_mass = copy.deepcopy(self.dump.cur_dump_real_mass)
for i in range(self.truck.trucks):
# task = self.truck_current_stage[i][0]
task = self.truck.truck_current_task[self.truck_index_to_uuid_dict[i]]
end_area_index = self.truck.truck_current_trip[i][1]
# 若矿卡正常行驶,需要将该部分载重计入实时产量
if task in heavy_task_set:
self.pre_dump_real_mass[end_area_index] = self.pre_dump_real_mass[end_area_index] + \
self.truck.payload[i]
else:
pass
except Exception as es:
logger.error("卸点预计装载量计算异常")
logger.error(es)
def update_pre_load_throughout(self):
try:
self.pre_shovel_real_mass = copy.deepcopy(self.excavator.cur_shovel_real_mass)
for i in range(self.truck.trucks):
# task = self.truck_current_stage[i][0]
task = self.truck.truck_current_task[self.truck_index_to_uuid_dict[i]]
end_area_index = self.truck.truck_current_trip[i][1]
# 若矿卡正常行驶,需要将该部分载重计入实时产量
if task in empty_task_set:
self.pre_shovel_real_mass[end_area_index] = self.pre_shovel_real_mass[end_area_index] + \
self.truck.payload[i]
else:
pass
except Exception as es:
logger.error("挖机/卸点预计装载量计算异常")
logger.error(es)
def period_update(self):
logger.info("#####################################周期更新开始#####################################")
self.load()
self.dump.period_update()
self.excavator.period_update()
self.truck.period_update()
# # 初始化挖机可用时间
# self.cur_shovel_ava_time = np.full(self.shovels,
# (datetime.now() - self.start_time) / timedelta(hours=0, minutes=1,
# seconds=0))
#
# # 初始化卸点可用时间
# self.cur_dump_ava_time = np.full(self.dumps,
# (datetime.now() - self.start_time) / timedelta(hours=0, minutes=1,
# seconds=0))
# # 全部矿卡设备集合
# truck_set = set(update_total_truck())
#
# # 固定派车矿卡集合
# fixed_truck_set = set(update_fixdisp_truck())
#
# # 动态派车矿卡集合
# self.dynamic_truck_set = truck_set.difference(fixed_truck_set)
#
# # 更新矿卡数量
# self.trucks = len(self.dynamic_truck_set)
# # 卡车完成装载及卸载时间
# self.cur_truck_ava_time = np.zeros(self.trucks)
# self.sim_truck_ava_time = np.zeros(self.trucks)
#
# # 真实设备可用时间
# self.cur_truck_reach_dump = np.zeros(self.trucks)
# self.cur_truck_reach_shovel = np.zeros(self.trucks)
#
# # 用于动态调度的挖机及卸载设备数量
# self.dynamic_excavator_set = set(update_autodisp_excavator())
# self.dynamic_dump_set = set(update_autodisp_dump())
# # 更新挖机和卸载设备数量
# self.dumps = len(self.dynamic_dump_set)
# self.shovels = len(self.dynamic_excavator_set)
# print("检测到挖机数量:", self.shovels)
# print(self.dynamic_excavator_set)
# print("检测到卸点数量:", self.dumps)
# Excavator / dump site mappings
# Option 1
# self.update_deveices_map()
# Option 2
device_map_dict = update_deveices_map(unload_area_uuid_to_index_dict, load_area_uuid_to_index_dict)
self.excavator_uuid_to_index_dict = device_map_dict['excavator_uuid_to_index_dict']
self.dump_uuid_to_index_dict = device_map_dict['dump_uuid_to_index_dict']
self.excavator_index_to_uuid_dict = device_map_dict['excavator_index_to_uuid_dict']
self.dump_index_to_uuid_dict = device_map_dict['dump_index_to_uuid_dict']
self.dump_uuid_to_unload_area_uuid_dict = device_map_dict['dump_uuid_to_unload_area_uuid_dict']
self.excavator_uuid_to_load_area_uuid_dict = device_map_dict['excavator_uuid_to_load_area_uuid_dict']
self.excavator_index_to_load_area_index_dict = device_map_dict['excavator_index_to_load_area_index_dict']
self.dump_index_to_unload_area_index_dict = device_map_dict['dump_index_to_unload_area_index_dict']
# Device-to-device travel times (distinct from work-area travel times)
self.update_walk_time()
# Truck mappings
# Option 1
# self.update_truck_uuid_index_map()
# Option 2
truck_map_dict = update_truck_uuid_index_map(dynamic_truck_set)
self.truck_uuid_to_index_dict = truck_map_dict['truck_uuid_to_index_dict']
self.truck_index_to_uuid_dict = truck_map_dict['truck_index_to_uuid_dict']
# # 卡车当前任务
# self.update_truck_current_task()
# # 有效载重
# # self.payload = np.array(session_mysql.query(Truck.payload).all())
# self.update_truck_payload()
# # 卡车最后一次装载/卸载时间
# self.update_truck_last_leave_time()
# # 计算平均装载时间
# self.update_excavator_loadtime()
# # 计算平均卸载时间
# self.update_dump_unloadtime()
# # 读取实时装卸载量
# self.update_actual_unload_thoughout()
# # 读取实时载卸载量
# self.update_actual_load_throughout()
# # 读取卡车当前行程
# self.update_truck_trip()
# # 卸载目标产量
# # self.dump_target_mass = (np.array(session_mysql.query(Dump.target_mass).all())).flatten()
# self.dump_target_mass = np.full(self.dumps, dump_target_mass)
# # 电铲目标产量
# # self.shovel_target_mass = (np.array(session_mysql.query(Excavator.target_mass).all())).flatten()
# self.shovel_target_mass = np.full(self.shovels, shovel_target_mass)
# Update the actual traffic flow
self.update_actual_traffic_flow()
# Compute the ideal traffic flow
self.opt_goto_dump_traffic_flow, self.opt_goto_excavator_traffic_flow = traffic_flow_plan()
# # 计算挖机与卸载点预估产量
# self.update_pre_load_throughout()
#
# self.update_pre_unload_throughout()
# Truck arrival times
shovel_reach_list, dump_reach_list = self.update_truck_reach_time()
@@ -1569,11 +1152,13 @@ class Dispatcher:
# Dump availability times
self.update_dump_ava_time(dump_reach_list)
def sim_para_reset(self):
# Predicted shovel loading
self.update_pre_load_throughout()
# Predicted dump unloading
self.update_pre_unload_throughout()
# Reset truck arrival times
# self.sim_truck_reach_dump = copy.deepcopy(self.cur_truck_reach_dump)
# self.sim_truck_reach_shovel = copy.deepcopy(self.cur_truck_reach_shovel)
def sim_para_reset(self):
# Reset device availability times
self.sim_truck_ava_time = copy.deepcopy(self.cur_truck_ava_time)
@@ -1581,8 +1166,8 @@ class Dispatcher:
self.sim_dump_ava_time = copy.deepcopy(self.cur_dump_ava_time)
# Reset shovel / dump output
self.sim_dump_real_mass = copy.deepcopy(self.dump.pre_dump_real_mass)
self.sim_shovel_real_mass = copy.deepcopy(self.excavator.pre_shovel_real_mass)
self.sim_dump_real_mass = copy.deepcopy(self.pre_dump_real_mass)
self.sim_shovel_real_mass = copy.deepcopy(self.pre_shovel_real_mass)
def truck_schedule(self, truck_id):
@@ -1601,25 +1186,21 @@ class Dispatcher:
target = 0
sim_shovel_real_mass = self.excavator.sim_shovel_real_mass
shovel_target_mass = self.excavator.shovel_target_mass
loading_time = self.excavator.loading_time
sim_dump_real_mass = self.dump.sim_dump_real_mass
dump_target_mass = self.dump.dump_target_mass
unloading_time = self.dump.unloading_time
if task == -2:
logger.info("矿卡状态:矿卡启动或故障恢复")
logger.info("矿卡行程:无")
logger.info(f'涉及电铲:{list(self.excavator_uuid_to_index_dict.keys())}')
logger.info(f'电铲饱和度:{(1 - sim_shovel_real_mass / shovel_target_mass)}')
logger.info(f'电铲饱和度:{(1 - self.sim_shovel_real_mass / shovel_target_mass)}')
logger.info(
f'行程时间:{(np.maximum(self.sim_shovel_ava_time, now + self.park_to_load_eq[0, :]) + loading_time - now)}')
logger.info(f'行驶时间:{self.park_to_load_eq[0, :] + loading_time}')
target = np.argmax(10 * (1 - sim_shovel_real_mass / shovel_target_mass) /
target = np.argmax(10 * (1 - self.sim_shovel_real_mass / shovel_target_mass) /
(np.maximum(self.sim_shovel_ava_time,
now + self.park_to_load_eq[0, :]) + loading_time
- now))
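# --- Editor's illustrative sketch (not part of this commit) -----------------------------------
# The selection rule used in these scheduling branches, isolated with hypothetical numbers:
# each candidate device is scored by its remaining-capacity ratio divided by the time until
# the truck would finish being served there, and the truck is sent to the argmax.
import numpy as np

now = 120.0                                         # minutes since shift start (example)
sim_real_mass = np.array([800.0, 1500.0, 400.0])    # tonnes already credited per device (example)
target_mass = np.array([2000.0, 2000.0, 2000.0])    # shift target per device (example)
device_ava_time = np.array([118.0, 125.0, 121.0])   # when each device is next free (example)
travel_time = np.array([6.0, 3.0, 9.0])             # truck travel time to each device (example)
service_time = np.array([1.2, 1.0, 1.5])            # average loading/unloading time (example)

saturation = 1 - sim_real_mass / target_mass                         # unmet-demand ratio
finish_time = np.maximum(device_ava_time, now + travel_time) + service_time
score = 10 * saturation / (finish_time - now)
target = int(np.argmax(score))                                       # chosen device index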
@@ -1631,13 +1212,13 @@ class Dispatcher:
logger.info("矿卡状态:矿卡空载")
logger.info(f'矿卡行程:{self.dump_index_to_uuid_dict[trip[0]]}-{self.excavator_index_to_uuid_dict[trip[1]]}')
logger.info(f'涉及卸点:{list(self.dump_uuid_to_index_dict.keys())}')
logger.info(f'卸点饱和度:{(1 - sim_dump_real_mass / dump_target_mass)}')
logger.info(f'卸点饱和度:{(1 - self.sim_dump_real_mass / dump_target_mass)}')
logger.info(
f'行程时间:{(np.maximum(self.sim_dump_ava_time, self.sim_truck_ava_time[truck_index] + self.go_time_eq[:, trip[1]]) + unloading_time - self.sim_truck_ava_time[truck_index])}')
logger.info(f'行驶时间:{self.go_time_eq[:, trip[1]] + unloading_time}')
# Truck is empty: choose the next dump site
target = np.argmax(10 * (1 - sim_dump_real_mass / dump_target_mass) /
target = np.argmax(10 * (1 - self.sim_dump_real_mass / dump_target_mass) /
(np.maximum(self.sim_dump_ava_time,
# self.sim_truck_reach_shovel[truck_index] + self.loading_time[trip[1]]
self.sim_truck_ava_time[truck_index]
@@ -1664,13 +1245,13 @@ class Dispatcher:
logger.info("矿卡状态:矿卡重载")
logger.info(f'矿卡行程:{self.excavator_index_to_uuid_dict[trip[0]]}-{self.dump_index_to_uuid_dict[trip[1]]}')
logger.info(f'涉及卸点:{list(self.excavator_uuid_to_index_dict.keys())}')
logger.info(f'卸点饱和度:{(1 - sim_shovel_real_mass / shovel_target_mass)}')
logger.info(f'卸点饱和度:{(1 - self.sim_shovel_real_mass / shovel_target_mass)}')
logger.info(
f'行程时间:{(np.maximum(self.sim_shovel_ava_time, self.sim_truck_ava_time[truck_index] + self.com_time_eq[trip[1], :]) + loading_time - self.sim_truck_ava_time[truck_index])}')
logger.info(f'行驶时间:{self.com_time_eq[trip[1], :] + loading_time}')
# 卡车重载,计算下一次装载点
target = np.argmax(10 * (1 - sim_shovel_real_mass / shovel_target_mass) /
target = np.argmax(10 * (1 - self.sim_shovel_real_mass / shovel_target_mass) /
(np.maximum(self.sim_shovel_ava_time,
self.sim_truck_ava_time[truck_index]
+ self.com_time_eq[trip[1], :]) + loading_time
@@ -1784,7 +1365,7 @@ class Dispatcher:
try:
record = {"truckId": self.truck_index_to_uuid_dict[i]}
task = self.truck_current_task[self.truck_index_to_uuid_dict[i]]
task = self.truck.truck_current_task[self.truck_index_to_uuid_dict[i]]
if task in empty_task_set:
item = session_mysql.query(Dispatch).filter_by(
dump_id=self.dump_index_to_uuid_dict[Seq[i][1]], isauto=1, isdeleted=0).first()
@@ -13,43 +13,18 @@ from tables import *
from urllib.parse import quote
import logging
import logging.handlers
from static_data_process import *
from settings import *
# Logging setup
########################################################################################################################
# Log directory
log_path = 'TFLog'
# Values that must be supplied
# traffic_programme_para.excavator_strength[excavator_index] = 200 # maximum excavator loading capacity, tons/hour
# traffic_programme_para.grade_loading_array[excavator_index] = 100 # grade of the material loaded by the excavator
# traffic_programme_para.excavator_priority_coefficient[excavator_index] = 1 # excavator priority
# traffic_programme_para.dump_strength[dump_index] = 200 # maximum dump unloading capacity, tons/hour
# traffic_programme_para.grade_upper_dump_array[dump_index] = 100 # upper grade limit at the dump
# traffic_programme_para.grade_lower_dump_array[dump_index] = 100 # lower grade limit at the dump
# traffic_programme_para.dump_priority_coefficient[dump_index] = 1 # dump priority
# logging initialization
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
filehandler = logging.handlers.RotatingFileHandler(log_path + "/TFPlanning.log", maxBytes=3 * 1024 * 1024,
backupCount=10)
# Set the rotated-file suffix, using the same format as strftime
filehandler.suffix = "%Y-%m-%d_%H-%M.log"
formatter = logging.Formatter('%(asctime)s - %(name)s: %(levelname)s %(message)s')
filehandler.setFormatter(formatter)
logger.addHandler(filehandler)
# Database setup
########################################################################################################################
# Initialize the database connections:
engine_mysql = create_engine('mysql+mysqlconnector://root:%s@192.168.28.111:3306/waytous' % quote('Huituo@123'))
engine_postgre = create_engine('postgresql://postgres:%s@192.168.28.111:5432/shenbao_2021520' % quote('Huituo@123'))
# Create the DBsession_mysql class:
DBsession_mysql = sessionmaker(bind=engine_mysql)
DBsession_postgre = sessionmaker(bind=engine_postgre)
# Create the session_mysql object:
session_mysql = DBsession_mysql()
session_postgre = DBsession_postgre()
class TrafficProgPara(object):
@@ -92,9 +67,11 @@ class TrafficProgPara(object):
# On each haul (unloading) road: truck-hours needed to move 1 ton = (average truck travel time on that road / average actual truck payload)
self.avg_goto_unload_point_weight = np.zeros((num_of_load_area, num_of_unload_area))
self.avg_goto_unload_point_weight = np.full((num_of_load_area, num_of_unload_area), 1)
self.goto_excavator_distance = np.zeros((num_of_dump, num_of_excavator)) # logical empty-haul route distances
self.goto_dump_distance = np.zeros((num_of_excavator, num_of_dump)) # logical loaded-haul route distances
self.payload = 100 # effective payload (payloads differ across truck models; the fleet is treated as homogeneous for now)
self.min_throughout = 200 # minimum throughput constraint
self.truck_total_num = 0
def extract_excavator_info(traffic_programme_para):
@@ -113,7 +90,7 @@ def extract_excavator_info(traffic_programme_para):
traffic_programme_para.excavator_uuid_to_ref_id_dict[excavator_id]] = \
traffic_programme_para.load_area_uuid_to_ref_id_dict[load_area_id]
traffic_programme_para.excavator_strength[excavator_index] = 200 # maximum excavator loading capacity, tons/hour
traffic_programme_para.excavator_strength[excavator_index] = 300 # maximum excavator loading capacity, tons/hour
traffic_programme_para.grade_loading_array[excavator_index] = 100 # grade of the material loaded by the excavator
traffic_programme_para.excavator_priority_coefficient[excavator_index] = 1 # excavator priority
excavator_index = excavator_index + 1
@@ -135,7 +112,7 @@ def extract_dump_info(traffic_programme_para):
traffic_programme_para.dump_uuid_to_ref_id_dict[dump_id]] = \
traffic_programme_para.unload_area_uuid_to_ref_id_dict[unload_area_id]
traffic_programme_para.dump_strength[dump_index] = 200 # maximum dump unloading capacity, tons/hour
traffic_programme_para.dump_strength[dump_index] = 300 # maximum dump unloading capacity, tons/hour
traffic_programme_para.grade_upper_dump_array[dump_index] = 100 # upper grade limit at the dump
traffic_programme_para.grade_lower_dump_array[dump_index] = 100 # lower grade limit at the dump
traffic_programme_para.dump_priority_coefficient[dump_index] = 1 # dump priority
@@ -179,7 +156,7 @@ def extract_walk_time_info(traffic_programme_para):
# i is the i-th shovel, j is the j-th dump
# walktime_goto_unload_point is in seconds; divide by 3600 to convert to hours
traffic_programme_para.goto_load_area_factor[unload_area_index][load_area_index] = \
(60 / 1000 * walk_time.to_load_distance / traffic_programme_para.empty_speed) / 220
(60 / 1000 * walk_time.to_load_distance / traffic_programme_para.empty_speed) / traffic_programme_para.payload
# / traffic_programme_para.avg_goto_excavator_weight[load_area_index][unload_area_index]
# On each loading road: truck-hours needed to provide 1 ton of loading capacity = (average truck travel time on that road / average truck loading capacity)
@@ -187,7 +164,7 @@ def extract_walk_time_info(traffic_programme_para):
# i is the i-th dump, j is the j-th shovel
# walktime_goto_excavator is in seconds; divide by 3600 to convert to hours
traffic_programme_para.goto_unload_area_factor[load_area_index][unload_area_index] = \
(60 / 1000 * walk_time.to_unload_distance / traffic_programme_para.heavy_speed) / 220
(60 / 1000 * walk_time.to_unload_distance / traffic_programme_para.heavy_speed) / traffic_programme_para.payload
# / traffic_programme_para.avg_goto_excavator_weight[unload_area_index][load_area_index]
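# Illustrative sketch (not part of the original code): a toy reproduction of the
# road-factor expression above, i.e. travel effort on a road divided by the
# truck payload, giving the "truck time per ton" coefficient consumed by the
# traffic-flow LP. The distance, speed and payload are hypothetical, and the
# units follow the original expression at face value.
to_unload_distance = 3500.0     # loaded (haul) direction distance
heavy_speed = 18.0              # loaded travel speed
payload = 100.0                 # tons carried per truck

goto_unload_area_factor = (60 / 1000 * to_unload_distance / heavy_speed) / payload
print(round(goto_unload_area_factor, 5))   # truck time needed per ton hauled on this road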
@@ -202,6 +179,15 @@ def traffic_programme_para_init(num_of_load_area, num_of_unload_area, num_of_exc
extract_dump_info(traffic_programme_para)
# Set of all trucks
truck_set = set(update_total_truck())
# Set of trucks on fixed dispatch
fixed_truck_set = set(update_fixdisp_truck())
# Trucks available for dynamic dispatch
traffic_programme_para.truck_total_num = len(truck_set.difference(fixed_truck_set))
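# Illustrative sketch (not part of the original code): the fleet size passed to
# the LP counts only trucks that are NOT on fixed dispatch, via a set
# difference as above. The UUID-like strings below are made up.
truck_set = {"truck-001", "truck-002", "truck-003", "truck-004"}
fixed_truck_set = {"truck-002"}
truck_total_num = len(truck_set.difference(fixed_truck_set))
print(truck_total_num)   # -> 3 dynamically dispatched trucks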
# Compute the logical road factors
for i in range(num_of_excavator):
for j in range(num_of_dump):
@@ -234,7 +220,7 @@ def traffic_programme_para_init(num_of_load_area, num_of_unload_area, num_of_exc
def transportation_problem_slove(coefficient, w_ij, s_ij, b_excavator,
b_dump, grade_loading_array,
max_unload_weigh_alg_flag, truck_total_num,
goto_excavator_dis, goto_dump_dis,
goto_excavator_dis, goto_dump_dis, min_throughout,
grade_lower_array=None, grade_upper_array=None):
row = len(coefficient) # number of shovels; row i corresponds to shovel i
col = len(coefficient[0]) # number of dumps; column j corresponds to dump j
@@ -261,7 +247,7 @@ def transportation_problem_slove(coefficient, w_ij, s_ij, b_excavator,
# Define the constraints
# Minimum throughput constraint; only applied in cost-minimization mode
if max_unload_weigh_alg_flag == False:
prob += pulp.lpSum(var_x) >= 300
prob += pulp.lpSum(var_x) >= min_throughout
# Fleet-size constraint: the total number of trucks over all roads must not exceed the total truck count
# Element-wise matrix multiplication gives the number of trucks on each haul (unloading) road
@@ -325,146 +311,6 @@ def transportation_problem_slove(coefficient, w_ij, s_ij, b_excavator,
'var_y': [[pulp.value(var_y[i][j]) for j in range(row)] for i in range(col)]}
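# Illustrative sketch (not part of the original code): a stripped-down version
# of the transportation LP solved above, kept to a single flow matrix (the
# original model also plans the empty return flows). It minimises truck usage
# while honouring shovel/dump capacities, a minimum total throughput and the
# fleet-size budget. All coefficients below are hypothetical toy numbers.
import pulp

w = [[0.02, 0.05],        # truck-hours needed per ton on road (shovel i -> dump j)
     [0.04, 0.03]]
shovel_cap = [300, 300]   # tons/hour each shovel can load
dump_cap = [250, 400]     # tons/hour each dump can accept
min_throughput = 200      # tons/hour the plan must move at least
truck_budget = 12         # trucks available

prob = pulp.LpProblem("toy_traffic_flow", pulp.LpMinimize)
x = [[pulp.LpVariable(f"x_{i}_{j}", lowBound=0) for j in range(2)] for i in range(2)]

# objective: total truck usage implied by the planned flows
prob += pulp.lpSum(w[i][j] * x[i][j] for i in range(2) for j in range(2))

for i in range(2):
    prob += pulp.lpSum(x[i][j] for j in range(2)) <= shovel_cap[i]
for j in range(2):
    prob += pulp.lpSum(x[i][j] for i in range(2)) <= dump_cap[j]
prob += pulp.lpSum(x[i][j] for i in range(2) for j in range(2)) >= min_throughput
prob += pulp.lpSum(w[i][j] * x[i][j] for i in range(2) for j in range(2)) <= truck_budget

prob.solve()
print(pulp.LpStatus[prob.status])
print([[pulp.value(x[i][j]) for j in range(2)] for i in range(2)])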
# if __name__ == '__main__':
#
# # Excavator / dump sets
# excavator_set = []
# dump_set = []
# for dispatch in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
# excavator_set.append(dispatch.exactor_id)
# dump_set.append(dispatch.dump_id)
#
# excavator_set = set(excavator_set)
# dump_set = set(dump_set)
#
# # Work area sets
# load_area_set = []
# unload_area_set = []
# for walk_time in session_postgre.query(WalkTime).all():
# load_area_set.append(walk_time.load_area_id)
# unload_area_set.append(walk_time.unload_area_id)
#
# load_area_set = set(load_area_set)
# unload_area_set = set(unload_area_set)
#
# excavator_num = len(excavator_set)
# dump_num = len(dump_set)
#
# unload_area_num = len(unload_area_set)
# load_area_num = len(load_area_set)
#
# print("装载区数量:", load_area_num, "卸载区数量:", unload_area_num, "挖机数量:", excavator_num, "卸点数量:", dump_num)
#
# # 初始化参量
# traffic_programme_para = traffic_programme_para_init(load_area_num, unload_area_num, excavator_num, dump_num)
#
# # 卸载道路的优先系数,等于该条卸载道路对应的电铲和卸载点优先系数的乘积
# priority_coefficient = np.full((excavator_num, dump_num), 1)
#
# # 系统是否以最大化产量为目标
# max_unload_weigh_alg_flag = False
#
# # 矿卡总量
# truck_total_num = 10
#
# # # On each haul (unloading) road: truck-hours needed to move 1 ton = (average truck travel time on that road / average actual truck payload)
# # # unit: truck-hours per ton
# # # i is the i-th shovel, j is the j-th dump
# # unload_weight_ij = np.array(([1, 2, 3], [4, 5, 6]))
# #
# # # On each loading road: truck-hours needed to provide 1 ton of loading capacity = (average truck travel time on that road / average truck loading capacity)
# # # unit: truck-hours per ton
# # # i is the i-th dump, j is the j-th shovel
# # load_weight_ij = np.array(([7, 8], [1, 2], [4, 5]))
# #
# # # Working rate of each shovel, tons/hour
# # strength_excavator = np.array([20, 30])
# #
# # # Working rate of each dump, tons/hour
# # strength_dump = np.array([50, 10, 50])
# #
# # # Ore grade at each shovel, in percent (%)
# # grade_loading_array = np.array([40, 50])
#
# # res = transportation_problem_slove(priority_coefficient, unload_weight_ij, load_weight_ij,
# # strength_excavator, strength_dump, grade_loading_array,
# # max_unload_weigh_alg_flag, truck_total_num, np.array([40, 40, 30]),
# # np.array([40, 50, 40]))
#
# coefficient = traffic_programme_para.priority_coefficient
# w_ij = traffic_programme_para.goto_unload_point_factor
# s_ij = traffic_programme_para.goto_excavator_factor
# b_excavator = traffic_programme_para.excavator_strength
# b_dump = traffic_programme_para.dump_strength
# grade_loading_array = traffic_programme_para.grade_loading_array
# grade_lower_dump_array = traffic_programme_para.grade_lower_dump_array
# grade_upper_dump_array = traffic_programme_para.grade_upper_dump_array
# goto_excavator_distance = traffic_programme_para.goto_excavator_distance
# goto_dump_distance = traffic_programme_para.goto_dump_distance
#
# res = transportation_problem_slove(priority_coefficient, w_ij, s_ij, b_excavator, b_dump,
# grade_loading_array, max_unload_weigh_alg_flag, 10,
# goto_excavator_distance, goto_dump_distance,
# grade_upper_dump_array, grade_lower_dump_array)
#
# if max_unload_weigh_alg_flag:
# print('maximized throughput', res["objective"])
# else:
# print('minimum cost', res["objective"])
# print('variable values:')
# print(np.array(res['var_x']).round(3))
# print(np.array(res['var_y']).round(3))
#
# # Element-wise matrix multiplication gives the number of trucks on each haul (unloading) road
# unload_traffic = res['var_x']
# print((traffic_programme_para.goto_unload_point_factor * unload_traffic).round(3))
# # Element-wise matrix multiplication gives the number of trucks on each loading road
# load_traffic = res['var_y']
# print((traffic_programme_para.goto_excavator_factor * load_traffic).round(3))
# test_grade_array = np.dot(traffic_programme_para.grade_loading_array, unload_traffic)
# print(test_grade_array.round(3))
# output:
# maximum value: 284230.0
# variable values:
# [[0.0, 0.0, 6.0, 39.0, 31.0, 0.0],
# [0.0, 0.0, 0.0, 0.0, 29.0, 59.0],
# [2.0, 56.0, 38.0, 0.0, 0.0, 0.0],
# [40.0, 0.0, 0.0, 0.0, 0.0, 0.0]]
def update_autodisp_excavator():
# Set of excavators
excavator_list = []
for dispatch in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
excavator_list.append(dispatch.exactor_id)
return excavator_list
def update_autodisp_dump():
# Set of dumps
dump_list = []
for dispatch in session_mysql.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
dump_list.append(dispatch.dump_id)
return dump_list
def update_load_area():
load_area_list = []
for walk_time in session_postgre.query(WalkTime).all():
load_area_list.append(walk_time.load_area_id)
return load_area_list
def update_unload_area():
unload_area_list = []
for walk_time in session_postgre.query(WalkTime).all():
unload_area_list.append(walk_time.unload_area_id)
return unload_area_list
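# Illustrative sketch (not part of the original code): the helpers above each
# issue their own query. A possible single-pass variant collects excavator and
# dump ids from one Dispatch query; it assumes the same Dispatch model and the
# session objects already defined in this module, and is not used by the
# original functions.
def update_autodisp_excavator_and_dump(session):
    excavator_list, dump_list = [], []
    for dispatch in session.query(Dispatch).filter_by(isdeleted=0, isauto=1).all():
        excavator_list.append(dispatch.exactor_id)
        dump_list.append(dispatch.dump_id)
    return excavator_list, dump_list

# hypothetical usage: excavators, dumps = update_autodisp_excavator_and_dump(session_mysql)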
def traffic_flow_plan():
excavator_list = update_autodisp_excavator()
@@ -503,9 +349,6 @@ def traffic_flow_plan():
else:
logger.info('Cost-minimization dispatch mode')
# Total number of trucks
truck_total_num = 10
coefficient = traffic_programme_para.priority_coefficient
w_ij = traffic_programme_para.goto_unload_point_factor
s_ij = traffic_programme_para.goto_excavator_factor
@@ -514,12 +357,14 @@ def traffic_flow_plan():
grade_loading_array = traffic_programme_para.grade_loading_array
grade_lower_dump_array = traffic_programme_para.grade_lower_dump_array
grade_upper_dump_array = traffic_programme_para.grade_upper_dump_array
min_throughout = traffic_programme_para.min_throughout
goto_excavator_distance = traffic_programme_para.goto_excavator_distance
goto_dump_distance = traffic_programme_para.goto_dump_distance
truck_total_num = traffic_programme_para.truck_total_num
res = transportation_problem_slove(coefficient, w_ij, s_ij, b_excavator, b_dump,
grade_loading_array, max_unload_weigh_alg_flag, truck_total_num,
goto_excavator_distance, goto_dump_distance,
goto_excavator_distance, goto_dump_distance, min_throughout,
grade_upper_dump_array, grade_lower_dump_array)
if max_unload_weigh_alg_flag:
@@ -545,3 +390,5 @@ def traffic_flow_plan():
return res["var_x"], res["var_y"]
traffic_flow_plan()
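# Illustrative sketch (not part of the original code): the two matrices returned
# by traffic_flow_plan() are stored by the Dispatcher as
# opt_goto_dump_traffic_flow / opt_goto_excavator_traffic_flow. One hedged way
# to read them is to normalise each shovel's planned heavy-haul flow into
# per-dump weights; the matrix below is a made-up example.
import numpy as np

opt_goto_dump_traffic_flow = np.array([[0.0, 6.0, 39.0],
                                       [29.0, 59.0, 0.0]])   # shovels x dumps, planned flow
row_totals = opt_goto_dump_traffic_flow.sum(axis=1, keepdims=True)
dump_weights = np.divide(opt_goto_dump_traffic_flow, row_totals,
                         out=np.zeros_like(opt_goto_dump_traffic_flow),
                         where=row_totals > 0)
print(dump_weights.round(3))   # share of each shovel's planned output sent to each dump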