integrated-scheduling-v3 / Commits / 7d2bf082

Commit 7d2bf082, authored Nov 14, 2022 by 张晓彤
Parent: c6d18b36

    数据库迁移优化 (Database migration optimization)

Showing 5 changed files with 256 additions and 264 deletions (+256 / −264):

    config.json                  +7    −6
    data/dispatchInfo.py         +108  −108
    data/static_data_process.py  +4    −4
    equipment/excavator.py       +1    −1
    equipment/truck.py           +136  −145
config.json — view file @ 7d2bf082

@@ -7,16 +7,16 @@
         "excavator_target_mass": 5000
     },
-    "mysql": {
-        "host": "192.168.9.152",
-        "port": "3306",
-        "user": "root",
+    "mssql": {
+        "host": "192.168.88.52",
+        "port": "1433",
+        "user": "sa",
         "password": "Huituo@123",
         "database": "waytous"
     },
     "postgresql": {
-        "host": "192.168.9.152",
+        "host": "192.168.9.197",
         "port": "5432",
         "user": "postgres",
         "password": "Huituo@123",

@@ -24,7 +24,7 @@
     },
     "redis": {
-        "host": "192.168.9.152",
+        "host": "192.168.9.197",
         "password": "Huituo@123"
     }
 }
 \ No newline at end of file
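The commit swaps the MySQL block for an MSSQL one (192.168.88.52:1433, user "sa") and moves the PostgreSQL and Redis hosts to 192.168.9.197. For reference, a minimal sketch of how such a file could be turned into connection settings — the loader below and the driver name in the URL are assumptions for illustration, not code from this repository (the diffs only show that shared session_mysql, session_postgre, and redis2 handles exist):

    import json

    def load_db_settings(path="config.json"):
        """Hypothetical helper: read the database sections of config.json."""
        with open(path, encoding="utf-8") as fh:
            cfg = json.load(fh)
        # After this commit: MSSQL at 192.168.88.52:1433 (user "sa"),
        # PostgreSQL and Redis both moved to 192.168.9.197.
        mssql = cfg["mssql"]
        # SQLAlchemy-style URL; the pymssql driver name is an assumption.
        mssql_url = "mssql+pymssql://{user}:{password}@{host}:{port}/{database}".format(**mssql)
        return mssql_url, cfg["postgresql"], cfg["redis"]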
data/dispatchInfo.py — view file @ 7d2bf082

@@ -323,116 +323,116 @@ class DispatchInfo:
(Whitespace-only changes hidden: the whole block now sits one indent level deeper inside a real try/except that replaces the previously commented-out one.)
         cls.load_area_uuid_to_index_dict = {}
         cls.unload_area_uuid_to_index_dict = {}
-        # try:
+        try:
         groups = cls.group_excavator_dict.keys()
         for item in groups:
             try:
                 # 每个组的 excavator_id 及对应 load_area_id
                 excavator_ids = cls.group_excavator_dict[item]
                 load_areas = []
                 for excavator_id in excavator_ids:
                     load_areas.append(cls.excavator_load_dict[excavator_id])
                 # 每个组的unload_areas
                 unload_areas = cls.group_unload_area_dict[item]
             except Exception as es:
                 logger.error("装卸载区统计异常")
                 logger.error(es)
             group_excavator_uuid_to_index = {}
             group_dump_uuid_to_index = {}
             group_load_area_uuid_to_index = {}
             group_unload_area_uuid_to_index = {}
             try:
                 # # unload->load distance
                 # unload_load_distance = np.zeros((len(unload_areas), len(load_areas)))
                 # for truck_id in range(len(unload_areas)):
                 #     for j in range(len(load_areas)):
                 #         distance = int(session_postgre.query(WalkTime).filter_by(unload_area_id=unload_areas[truck_id], load_area_id=load_areas[j]).first().to_load_distance)
                 #         unload_load_distance[truck_id][j] = distance
                 # cls.load_distance[item] = unload_load_distance
                 group_excavator_count = 0
                 group_dump_count = 0
                 group_load_area_count = 0
                 group_unload_area_count = 0
                 # unload->load distance
                 unload_load_distance = np.zeros((len(cls.group_dump_dict[item]), len(cls.group_excavator_dict[item])))
                 for excavator_id in cls.group_excavator_dict[item]:
+                    group_dump_count = 0
+                    group_unload_area_count = 0
                     for dump_id in cls.group_dump_dict[item]:
                         unload_area_id = cls.dump_unload_area_dict[dump_id]
                         load_area_id = cls.excavator_load_dict[excavator_id]
                         distance = int(session_postgre.query(WalkTime).filter_by(unload_area_id=unload_area_id, load_area_id=load_area_id).first().to_load_distance)
                         unload_load_distance[group_dump_count][group_excavator_count] = distance
                         if excavator_id not in group_excavator_uuid_to_index:
                             group_excavator_uuid_to_index[excavator_id] = group_excavator_count
                         if dump_id not in group_dump_uuid_to_index:
                             group_dump_uuid_to_index[dump_id] = group_dump_count
                         if load_area_id not in group_load_area_uuid_to_index:
                             group_load_area_uuid_to_index[load_area_id] = group_load_area_count
                         if unload_area_id not in group_unload_area_uuid_to_index:
                             group_unload_area_uuid_to_index[unload_area_id] = group_unload_area_count
+                        group_dump_count += 1
+                        group_unload_area_count += 1
+                    group_excavator_count += 1
+                    group_load_area_count += 1
                 group_excavator_uuid_to_index = bidict(group_excavator_uuid_to_index)
                 group_dump_uuid_to_index = bidict(group_dump_uuid_to_index)
                 group_load_area_uuid_to_index = bidict(group_load_area_uuid_to_index)
                 group_unload_area_uuid_to_index = bidict(group_unload_area_uuid_to_index)
                 cls.excavator_uuid_to_index_dict[item] = group_excavator_uuid_to_index
                 cls.dump_uuid_to_index_dict[item] = group_dump_uuid_to_index
                 cls.load_area_uuid_to_index_dict[item] = group_load_area_uuid_to_index
                 cls.unload_area_uuid_to_index_dict[item] = group_unload_area_uuid_to_index
-                group_dump_count += 1
-                group_unload_area_count += 1
-                group_excavator_count += 1
-                group_load_area_count += 1
                 cls.load_distance[item] = unload_load_distance
             except Exception as es:
                 logger.warning(f'{item} 分组装载路网异常')
                 cls.load_distance[item] = np.full((len(unload_areas), len(load_areas)), 10000)
                 logger.warning(es)
                 session_postgre.rollback()
                 session_mysql.rollback()
             try:
                 # load->unload distance
                 load_unload_distance = np.zeros((len(load_areas), len(unload_areas)))
                 for i in range(len(load_areas)):
                     for j in range(len(unload_areas)):
                         distance = int(session_postgre.query(WalkTime).filter_by(load_area_id=load_areas[i], unload_area_id=unload_areas[j]).first().to_unload_distance)
                         load_unload_distance[i][j] = distance
                 cls.unload_distance[item] = load_unload_distance
             except Exception as es:
                 logger.warning(f'{item} 分组卸载路网异常')
                 cls.unload_distance[item] = np.full((len(load_areas), len(unload_areas)), 10000)
                 logger.warning(es)
                 session_postgre.rollback()
                 session_mysql.rollback()
-        # except Exception as es:
-        #     logger.error("路网距离更新异常-调度部分和路网部分不一致")
-        #     logger.error(es)
+        except Exception as es:
+            logger.error("路网距离更新异常-调度部分和路网部分不一致")
+            logger.error(es)
     # @classmethod
     # def update_device_dict(cls):
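The rewritten block builds, for each dispatch group, a dump × excavator distance matrix from the WalkTime table and records uuid-to-index maps as bidicts so the scheduler can translate in both directions. A stripped-down, self-contained sketch of that pattern (the distance_lookup dict stands in for the session_postgre WalkTime query; apart from numpy and bidict, all names here are illustrative):

    import numpy as np
    from bidict import bidict

    def build_group_distance(excavator_ids, dump_ids, distance_lookup):
        """Return a dump x excavator distance matrix plus uuid -> index bidicts."""
        matrix = np.zeros((len(dump_ids), len(excavator_ids)))
        excavator_index, dump_index = {}, {}
        for col, excavator_id in enumerate(excavator_ids):
            for row, dump_id in enumerate(dump_ids):
                # The real code reads this from
                # session_postgre.query(WalkTime).filter_by(...).first().to_load_distance
                matrix[row][col] = distance_lookup[(dump_id, excavator_id)]
                excavator_index.setdefault(excavator_id, col)
                dump_index.setdefault(dump_id, row)
        # bidict gives uuid -> index, and .inverse gives index -> uuid
        return matrix, bidict(excavator_index), bidict(dump_index)

    m, exc_idx, dump_idx = build_group_distance(
        ["exc-1"], ["dump-1", "dump-2"],
        {("dump-1", "exc-1"): 1200, ("dump-2", "exc-1"): 900},  # fabricated distances
    )
    print(m)                    # [[1200.] [ 900.]]
    print(dump_idx.inverse[1])  # dump-2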
data/static_data_process.py — view file @ 7d2bf082

@@ -86,7 +86,7 @@ def build_truck_uuid_name_map():
     try:
         for item in session_mysql.query(Equipment).filter_by(EQUIPMENT_TYPE_ID=1).all():
-            truck_id = item.EQUIPMENT_ID
+            truck_id = str(item.EQUIPMENT_ID)
             truck_name = item.EQUIPMENT_NAME.encode("latin-1").decode("GBK", "ignore")
             truck_name_to_uuid_dict[truck_name] = truck_id

@@ -108,7 +108,7 @@ def build_equipment_uuid_name_map():
     try:
         for item in session_mysql.query(Equipment).filter_by(EQUIPMENT_TYPE_ID=2).all():
-            truck_id = item.EQUIPMENT_ID
+            truck_id = str(item.EQUIPMENT_ID)
             truck_name = item.EQUIPMENT_NAME
             excavator_uuid_to_name_dict[truck_id] = truck_name

@@ -122,7 +122,7 @@ def build_equipment_uuid_name_map():
     try:
         for item in session_mysql.query(Equipment).filter_by(EQUIPMENT_TYPE_ID=3).all():
-            truck_id = item.EQUIPMENT_ID
+            truck_id = str(item.EQUIPMENT_ID)
             truck_name = item.EQUIPMENT_NAME
             dump_uuid_to_name_dict[truck_id] = truck_name

@@ -245,7 +245,7 @@ def update_total_truck():
     # if is_online:
     #     truck_list.append(item.id)
     for item in query:
-        truck_list.append(item.EQUIPMENT_ID)
+        truck_list.append(str(item.EQUIPMENT_ID))
     if len(truck_list) < 1:
         raise Exception("无矿卡设备可用-矿卡集合读取异常")
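Two small patterns are worth noting here. First, equipment IDs are now held as strings in the uuid dictionaries, which is why the other files in this commit cast them back with int() at the MySQL boundary. Second, Chinese equipment names stored through a latin-1 connection are repaired with an encode/decode round trip. A standalone illustration of the second pattern (the sample name is fabricated for the example):

    # A GBK-encoded name read through a latin-1 connection comes back as mojibake;
    # re-encoding as latin-1 recovers the raw bytes, which then decode correctly as GBK.
    raw = "矿卡01".encode("GBK")       # what is actually stored in MySQL
    mojibake = raw.decode("latin-1")   # what a latin-1 session hands back
    fixed = mojibake.encode("latin-1").decode("GBK", "ignore")
    print(fixed)  # 矿卡01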
equipment/excavator.py — view file @ 7d2bf082

@@ -185,7 +185,7 @@ class ExcavatorInfo(WalkManage):
         try:
             load_ability = session_mysql.query(EquipmentSpec.MINING_ABILILTY). \
                 join(Equipment, Equipment.EQUIPMENT_MODEL_ID == EquipmentSpec.EQUIPMENT_MODEL_ID). \
-                filter(Equipment.EQUIPMENT_ID == self.excavator_index_to_uuid_dict[excavator_index]).first()
+                filter(Equipment.EQUIPMENT_ID == int(self.excavator_index_to_uuid_dict[excavator_index])).first()
             self.excavator_strength[excavator_index] = load_ability.MINING_ABILILTY
             # if load_ability.mining_abililty < 200:
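The single changed line casts the excavator uuid, now stored as a string per the static_data_process.py change in this commit, back to an integer before comparing it with Equipment.EQUIPMENT_ID. A self-contained re-creation of that query shape against an in-memory SQLite database — the model definitions and sample rows below are simplified stand-ins, not the project's real schema:

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.orm import declarative_base, sessionmaker

    Base = declarative_base()

    class Equipment(Base):
        __tablename__ = "equipment"
        EQUIPMENT_ID = Column(Integer, primary_key=True)
        EQUIPMENT_MODEL_ID = Column(Integer)

    class EquipmentSpec(Base):
        __tablename__ = "equipment_spec"
        EQUIPMENT_MODEL_ID = Column(Integer, primary_key=True)
        MINING_ABILILTY = Column(Integer)   # spelling kept from the repository

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add_all([Equipment(EQUIPMENT_ID=3021, EQUIPMENT_MODEL_ID=7),
                     EquipmentSpec(EQUIPMENT_MODEL_ID=7, MINING_ABILILTY=300)])
    session.commit()

    excavator_uuid = "3021"   # uuids are held as strings after this commit
    load_ability = session.query(EquipmentSpec.MINING_ABILILTY). \
        join(Equipment, Equipment.EQUIPMENT_MODEL_ID == EquipmentSpec.EQUIPMENT_MODEL_ID). \
        filter(Equipment.EQUIPMENT_ID == int(excavator_uuid)).first()
    print(load_ability.MINING_ABILILTY)   # 300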
equipment/truck.py — view file @ 7d2bf082

@@ -140,35 +140,24 @@ class TruckInfo(WalkManage):
     def update_truck_current_task(self):
         self.truck_current_task = {}
         self.truck_current_state = {}
-        device_name_set = redis2.keys()
-        truck_name_to_uuid_dict = get_value("truck_name_to_uuid_dict")
-        for item in device_name_set:
-            # try:
-            item = item.decode(encoding="utf-8")
-            key_value_dict = redis2.hgetall(item)
-            # reids str可以自动转为bytes
-            if str_to_byte("type") in key_value_dict:
-                device_type = int(key_value_dict[str_to_byte("type")])
-            else:
-                continue
-            if device_type == 1:
-                if item not in truck_name_to_uuid_dict:
-                    continue
-                else:
-                    if truck_name_to_uuid_dict[item] in self.dynamic_truck_set:
-                        # currentTask = int(key_value_dict[str_to_byte("currentTask")])
-                        currentTask = int(byte_to_str(key_value_dict[str_to_byte("currentTask")]))
-                        self.truck_current_task[truck_name_to_uuid_dict[item]] = currentTask
-                        currentState = int(float(byte_to_str(key_value_dict[str_to_byte("state")])))
-                        self.truck_current_state[truck_name_to_uuid_dict[item]] = currentState
-            # except Exception as es:
-            #     self.logger.error("读取矿卡任务异常-reids读取异常")
-            #     self.logger.error(es)
+        for truck_id in self.dynamic_truck_set:
+            try:
+                # truck_id = truck_id.decode(encoding="utf-8")
+                key_value_dict = redis2.hgetall(truck_id)
+                # reids str可以自动转为bytes
+                device_type = int(key_value_dict[str_to_byte("type")])
+                if device_type == 1:
+                    if truck_id in self.dynamic_truck_set:
+                        # currentTask = int(key_value_dict[str_to_byte("currentTask")])
+                        currentTask = int(byte_to_str(key_value_dict[str_to_byte("currentTask")]))
+                        self.truck_current_task[truck_id] = currentTask
+                        currentState = int(float(byte_to_str(key_value_dict[str_to_byte("state")])))
+                        self.truck_current_state[truck_id] = currentState
+            except Exception as es:
+                self.logger.error("读取矿卡任务异常-reids读取异常")
+                self.logger.error(es)
         self.logger.info("矿卡当前任务:")
         self.logger.info(self.truck_current_task)

@@ -240,80 +229,81 @@ class TruckInfo(WalkManage):
(Whitespace-only changes hidden: the body moves one indent level up now that the surrounding try/except is commented out.)
         self.relative_last_load_time = {}
         self.relative_last_unload_time = {}
-        try:
-            truck_uuid_to_name_dict = get_value("truck_uuid_to_name_dict")
-            for item in self.dynamic_truck_set:
-                key_value_dict = redis2.hgetall(truck_uuid_to_name_dict[item])
+        # try:
+        for truck_id in self.dynamic_truck_set:
+            # truck_name = truck_uuid_to_name_dict[item]
+            key_value_dict = redis2.hgetall(truck_id)
             device_type = int(key_value_dict[str_to_byte("type")])
             # 判断是否为矿卡
             if device_type == 1:
+                print(self.truck_current_task)
-                task = self.truck_current_task[item]
+                task = self.truck_current_task[truck_id]
                 if task in [3, 4, 5]:
                     # 矿卡重载行驶或仍未出场
                     if str_to_byte("lastLoadTime") in key_value_dict.keys():
                         # 若最后装载时间存在
                         last_load_time_tmp = eval(byte_to_str(key_value_dict[str_to_byte("lastLoadTime")]))
                         tmp_time = datetime.strptime(last_load_time_tmp, "%Y-%m-%d %H:%M:%S")
                         if tmp_time > datetime.strptime("2000-01-01 01:01:01", "%Y-%m-%d %H:%M:%S"):
                             # 若最后装载时间异常
-                            self.last_load_time[item] = tmp_time
+                            self.last_load_time[truck_id] = tmp_time
                         else:
-                            self.last_load_time[item] = datetime.now()
+                            self.last_load_time[truck_id] = datetime.now()
                             # redis2.hsetnx(truck_uuid_to_name_dict[item], str_to_byte("lastLoadTime"),
                             #               "\"" + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\"")
                             self.logger.info("lastLoadTime is Error")
                     else:
-                        self.last_load_time[item] = datetime.now()
+                        self.last_load_time[truck_id] = datetime.now()
                        # redis2.hsetnx(truck_uuid_to_name_dict[item], str_to_byte("lastLoadTime"),
                        #               "\"" + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\"")
                         self.logger.info("lastLoadTime is None")
-                    self.relative_last_load_time[item] = float((self.last_load_time[item] - self.start_time) / timedelta(hours=0, minutes=1, seconds=0))
+                    self.relative_last_load_time[truck_id] = float((self.last_load_time[truck_id] - self.start_time) / timedelta(hours=0, minutes=1, seconds=0))
                     # print("相对last_load_time", self.relative_last_load_time[item])
                     self.logger.info("相对last_load_time")
-                    self.logger.info(self.relative_last_load_time[item])
+                    self.logger.info(self.relative_last_load_time[truck_id])
                 if task in [0, 1, 2]:
                     # 矿卡空载行驶或仍未出场
                     if str_to_byte("lastUnloadTime") in key_value_dict.keys():
                         last_unload_time_tmp = eval(key_value_dict[str_to_byte("lastUnloadTime")])
                         tmp_time = datetime.strptime(last_unload_time_tmp, "%Y-%m-%d %H:%M:%S")
                         if tmp_time > datetime.strptime("2000-01-01 01:01:01", "%Y-%m-%d %H:%M:%S"):
-                            self.last_unload_time[item] = tmp_time
+                            self.last_unload_time[truck_id] = tmp_time
                         else:
-                            self.last_unload_time[item] = datetime.now()
+                            self.last_unload_time[truck_id] = datetime.now()
                             # key_value_dict[str_to_byte("lastUnloadTime")] = datetime.now().strftime(
                             #     "%b %d, %Y %I:%M:%S %p"
                             # )
                             # redis2.hsetnx(truck_uuid_to_name_dict[item], str_to_byte("lastUnloadTime"),
                             #               "\"" + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\"")
                             self.logger.info("lastUnloadTime is Error")
                     else:
-                        self.last_unload_time[item] = datetime.now()
+                        self.last_unload_time[truck_id] = datetime.now()
                         # redis2.hsetnx(truck_uuid_to_name_dict[item], str_to_byte("lastUnloadTime"),
                         #               "\"" + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\"")
                         self.logger.info("lastUnloadTime is None")
-                    self.relative_last_unload_time[item] = float((self.last_unload_time[item] - self.start_time) / timedelta(hours=0, minutes=1, seconds=0))
+                    self.relative_last_unload_time[truck_id] = float((self.last_unload_time[truck_id] - self.start_time) / timedelta(hours=0, minutes=1, seconds=0))
                     # print("相对last_unload_time", self.relative_last_unload_time[item])
                     self.logger.info("相对last_unload_time")
-                    self.logger.info(self.relative_last_unload_time[item])
+                    self.logger.info(self.relative_last_unload_time[truck_id])
                 elif task == -2:
                     # print(datetime.now())
-                    self.last_unload_time[item] = datetime.now()
+                    self.last_unload_time[truck_id] = datetime.now()
                     # key_value_dict["lastUnloadTime"] = datetime.now().strftime(
                     #     "%b %d, %Y %I:%M:%S %p")
                     # if str_to_byte("lastUnloadTime") in key_value_dict.keys():
                     #     # redis2.hset(truck_uuid_to_name_dict[item], str_to_byte("lastUnloadTime"),
                     #     #             datetime.now().strftime("%b %d, %Y %I:%M:%S %p"))
                     #     redis2.hset(truck_uuid_to_name_dict[item], str_to_byte("lastUnloadTime"),
                     #                 "\"" + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\"")
                     # else:
                     #     redis2.hsetnx(truck_uuid_to_name_dict[item], str_to_byte("lastUnloadTime"),
                     #                   "\"" + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\"")
                     # # redis2.hsetnx(truck_uuid_to_name_dict[item], str(json.dumps(key_value_dict)))
-        except Exception as es:
-            self.logger.error("读取矿卡可用时间异常-redis读取异常")
-            self.logger.error(es)
+        # except Exception as es:
+        #     self.logger.error("读取矿卡可用时间异常-redis读取异常")
+        #     self.logger.error(es)

     def update_truck_trip(self):

@@ -328,7 +318,7 @@ class TruckInfo(WalkManage):
             session_mysql.commit()
             truck_id = self.truck_index_to_uuid_dict[i]
             task = self.truck_current_task[self.truck_index_to_uuid_dict[i]]
-            # print("truck_task:", truck_id, task)
+            print("truck_task:", truck_id, task)
             item = (session_mysql.query(EquipmentPair).filter_by(truck_id=truck_id, isdeleted=0)

@@ -523,29 +513,28 @@ class TruckInfo(WalkManage):
         :return: truck_locate_dict
         """
-        try:
-            truck_name_to_uuid_dict = get_value("truck_name_to_uuid_dict")
-            self.truck_locate_dict = {}
-            device_name_set = redis2.keys()
-            for item in device_name_set:
-                item = item.decode(encoding='utf-8')
-                key_value_dict = redis2.hgetall(item)
-                device_type = key_value_dict[str_to_byte('type')]
-                is_online = key_value_dict[str_to_byte('online')]
-                key_set = key_value_dict.keys()
-                if (device_type == str_to_byte("1")) \
-                        and (str_to_byte('online') in key_set) \
-                        and (bytes.decode(is_online) in ["true" or "True"]) \
-                        and (str_to_byte('laneId') in key_set):
-                    truck_locate = key_value_dict[str_to_byte('laneId')]
-                    if eval(truck_locate) is not '':
-                        self.truck_locate_dict[truck_name_to_uuid_dict[item]] = eval(truck_locate)
-        except Exception as es:
-            logger.error("车辆所在路段读取异常")
-            logger.error(es)
-            return {}
+        # try:
+        truck_name_to_uuid_dict = get_value("truck_name_to_uuid_dict")
+        self.truck_locate_dict = {}
+        for truck_id in self.dynamic_truck_set:
+            # truck_id = truck_id.decode(encoding='utf-8')
+            key_value_dict = redis2.hgetall(truck_id)
+            device_type = key_value_dict[str_to_byte('type')]
+            is_online = key_value_dict[str_to_byte('online')]
+            key_set = key_value_dict.keys()
+            if (device_type == str_to_byte("1")) \
+                    and (str_to_byte('online') in key_set) \
+                    and (bytes.decode(is_online) in ["true" or "True"]) \
+                    and (str_to_byte('laneId') in key_set):
+                truck_locate = key_value_dict[str_to_byte('laneId')]
+                if eval(truck_locate) is not '':
+                    self.truck_locate_dict[truck_id] = eval(truck_locate)
+        # except Exception as es:
+        #     logger.error("车辆所在路段读取异常")
+        #     logger.error(es)
+        #     return {}
 ################################################ long term update ################################################

@@ -612,10 +601,10 @@ class TruckInfo(WalkManage):
             self.truck_dump_bind = {}
             for dump_area in session_postgre.query(DumpArea).all():
                 if dump_area.BindList is not None:
-                    for truck_name in dump_area.BindList:
-                        self.truck_dump_bind[truck_name_to_uuid_dict[truck_name]] = str(dump_area.Id)
+                    for truck_id in dump_area.BindList:
+                        # print(truck_name_to_uuid_dict)
+                        # truck_id = truck_name_to_uuid_dict[truck_name]
+                        self.truck_dump_bind[truck_id] = str(dump_area.Id)
         except Exception as es:
             self.logger.error("矿卡-卸载区域绑定关系读取异常")
             self.logger.error(es)

@@ -624,22 +613,20 @@ class TruckInfo(WalkManage):
     def update_truck_excavator_bind(self):
         truck_name_to_uuid_dict = get_value("truck_name_to_uuid_dict")
-        try:
-            rule5 = session_mysql.query(DispatchRule).filter_by(id=5).first()
-            if rule5.disabled == 0:
-                self.truck_excavator_bind = {}
-                for excavator_id in get_value("dynamic_excavator_set"):
-                    item = session_mysql.query(Equipment).filter_by(EQUIPMENT_ID=int(excavator_id)).first()
-                    if item.BIND_LIST is not None:
-                        for truck_name in json.loads(item.BIND_LIST):
-                            self.truck_excavator_bind[truck_name_to_uuid_dict[truck_name]] = excavator_id
-        except Exception as es:
-            self.logger.error("矿卡-挖机绑定关系读取异常")
-            self.logger.error(es)
-            session_postgre.rollback()
-            session_mysql.rollback()
+        # try:
+        rule5 = session_mysql.query(DispatchRule).filter_by(id=5).first()
+        if rule5.disabled == 0:
+            self.truck_excavator_bind = {}
+            for excavator_id in get_value("dynamic_excavator_set"):
+                item = session_mysql.query(Equipment).filter_by(EQUIPMENT_ID=int(excavator_id)).first()
+                if item.BIND_LIST is not None:
+                    for truck_id in json.loads(item.BIND_LIST):
+                        self.truck_excavator_bind[truck_id] = excavator_id
+        # except Exception as es:
+        #     self.logger.error("矿卡-挖机绑定关系读取异常")
+        #     self.logger.error(es)
+        #     session_postgre.rollback()
+        #     session_mysql.rollback()

     def update_truck_excavator_exclude(self):

@@ -659,15 +646,11 @@ class TruckInfo(WalkManage):
             session_mysql.rollback()
         if rule5.disabled == 0:
             for excavator_id in get_value("dynamic_excavator_set"):
-                try:
-                    item = (session_mysql.query(Equipment).filter_by(id=excavator_id, only_allowed=1).first())
-                except Exception as es:
-                    session_postgre.rollback()
-                    session_mysql.rollback()
+                item = (session_mysql.query(Equipment).filter_by(EQUIPMENT_ID=int(excavator_id), ONLY_ALLOWED=1).first())
                 if item is not None:
                     for truck_id in self.dynamic_truck_set:
                         if truck_uuid_to_name_dict[truck_id] not in item.bind_list:

@@ -764,7 +747,11 @@ class TruckInfo(WalkManage):
             try:
                 empty_speed = session_mysql.query(EquipmentSpec). \
                     join(Equipment, EquipmentSpec.EQUIPMENT_MODEL_ID == Equipment.EQUIPMENT_MODEL_ID). \
-                    filter(Equipment.EQUIPMENT_ID == truck_id).first().MAX_SPEED
+                    filter(Equipment.EQUIPMENT_ID == int(truck_id)).first().MAX_SPEED
+                if empty_speed is None:
+                    self.empty_speed[truck_id] = 20
+                    continue
                 if (empty_speed <= 1) or (empty_speed >= 50) or (empty_speed is None):
                     self.empty_speed[truck_id] = 20

@@ -773,7 +760,11 @@ class TruckInfo(WalkManage):
                 heavy_speed = session_mysql.query(EquipmentSpec). \
                     join(Equipment, EquipmentSpec.EQUIPMENT_MODEL_ID == Equipment.EQUIPMENT_MODEL_ID). \
-                    filter(Equipment.EQUIPMENT_MODID == truck_id).first().MAX_SPEED
+                    filter(Equipment.EQUIPMENT_ID == int(truck_id)).first().MAX_SPEED
+                if heavy_speed is None:
+                    self.empty_speed[truck_id] = 20
+                    continue
                 if (heavy_speed <= 1) or (heavy_speed >= 50) or (heavy_speed is None):
                     self.heavy_speed[truck_id] = 20

@@ -788,7 +779,7 @@ class TruckInfo(WalkManage):
     def update_truck_disable_list(self) -> List:
         try:
             for item in session_mysql.query(Equipment).filter_by(EQUIPMENT_TYPE_ID=1, DISABLED=1).all():
-                self.truck_disable_list.append(item.EQUIPMENT_ID)
+                self.truck_disable_list.append(str(item.EQUIPMENT_ID))
         except Exception as es:
             self.logger.error("车辆禁止列表更新异常")
             self.logger.error(es)
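Most of the truck.py changes switch the Redis reads from iterating every device-name key to iterating self.dynamic_truck_set directly, with the truck uuid used as the hash key. The read pattern itself is unchanged: hgetall returns a bytes-keyed dict, so lookups go through the project's str_to_byte / byte_to_str helpers. A self-contained sketch of that pattern (the helper implementations and the sample hash are assumptions for illustration; the real code calls redis2.hgetall(truck_id)):

    from datetime import datetime

    def str_to_byte(s):   # assumed to mirror the project's helper
        return s.encode("utf-8")

    def byte_to_str(b):   # assumed to mirror the project's helper
        return b.decode("utf-8")

    # What redis2.hgetall(truck_id) could return for one truck (fabricated values):
    key_value_dict = {
        b"type": b"1",
        b"currentTask": b"3",
        b"state": b"2.0",
        b"lastLoadTime": b'"2022-11-14 09:30:00"',
    }

    device_type = int(key_value_dict[str_to_byte("type")])
    if device_type == 1:  # 1 marks a mining truck
        current_task = int(byte_to_str(key_value_dict[str_to_byte("currentTask")]))
        current_state = int(float(byte_to_str(key_value_dict[str_to_byte("state")])))
        if str_to_byte("lastLoadTime") in key_value_dict:
            # the stored value is a quoted string, hence the eval() in the class
            last_load_time = datetime.strptime(
                eval(byte_to_str(key_value_dict[str_to_byte("lastLoadTime")])),
                "%Y-%m-%d %H:%M:%S",
            )
            print(current_task, current_state, last_load_time)  # 3 2 2022-11-14 09:30:00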