-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathIntervalSolver.py
More file actions
2153 lines (1614 loc) · 104 KB
/
IntervalSolver.py
File metadata and controls
2153 lines (1614 loc) · 104 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
from collections import defaultdict
import itertools
import time
import networkx as nx
import logging
import random
import sys
from gurobipy import Env, GRB, tuplelist, Constr, Var
from Solver import Solver
from operator import itemgetter
from functools import partial
from math import ceil, floor
from tools import partition, TypedDiGraph
from itertools import pairwise
from enum import Enum, IntEnum
from CheckSolution import CheckSolution, SolutionGraphCommodity, SolutionGraphConsolidation, SolutionGraphNode
from DrawLaTeX import DrawLaTeX
from ProblemData import Commodity, NodeInterval, NodeTime, ProblemData, TimedArc
# Module-level bookkeeping shared across iterative solves.
check_count = 0  # number of upper-bound feasibility checks performed
solve_time = 0  # accumulated solver time in seconds (reset per solve())
logging.basicConfig(format='%(message)s',level=logging.DEBUG)
logger = logging.getLogger("IntervalSolver")
# add the handlers to logger (also mirror all output to a log file)
fh = logging.FileHandler('intervalsolver.log')
logger.addHandler(fh)
class preprocessing_option(str, Enum):
    """Granularity at which preprocessing introduces time points."""
    node = "node"
    arc = "arc"
class shortest_path_option(str, Enum):
    """Which shortest-path cache to build for the flat network."""
    commodity = "commodity"
    shared = "shared"
    edges = "edges"
class algorithm_option(IntEnum):
    """Time-point selection strategy; ordered so `>=` comparisons select
    progressively more aggressive variants (see solve())."""
    default = 0
    multiplex = 1
    eclectic = 2
    reduced = 3
    adaptive = 4
# Solver configuration switches (module-wide defaults).
ALGORITHM = algorithm_option.adaptive  # default time-point selection strategy
MIP_GAP = 0.00001  # relative optimality gap for the MIP solver
TIMEOUT = 3600 #float("inf") # seconds
IN_TREE = False  # add in-tree routing constraints/variables
ORIGIN_CUTS = False  # aggregate dispatch cuts at shared origins/destinations
USER_CUTS = True # True
PATH_CUTS = False  # add per-commodity path-length constraints
ALLOW_SPLIT = False  # allow fractional (split) commodity flows
SPLIT_PRECISION = 0.001
HOLDING_COSTS = False  # holding-cost constraints are currently disabled
MAX_WALK_PATHS = 100  # cap on root paths enumerated by walks()
PRECISION = 2 # decimal places
INCUMBENT_CHECK_INTERVAL = float("inf") #50
USE_HEURISTIC_START = False
## useful check for exploring solution graph
def is_node(n: SolutionGraphNode):
    """Tell whether *n* is a commodity node (as opposed to a consolidation
    node) in the solution graph."""
    is_commodity = isinstance(n, SolutionGraphCommodity)
    return is_commodity
# fix gurobi/cplex issue
def quicksum(col):
    """Drop-in replacement for the solver-specific quicksum: plain summation
    of *col*, starting from 0 (so an empty collection yields 0)."""
    total = 0
    for term in col:
        total = total + term
    return total
def round_tuple(t):
    """Return *t* with every float member rounded to PRECISION decimal places.

    Non-tuple inputs and non-float members are returned unchanged.

    Fix: the original used the `cond and a or b` idiom, so a float whose
    rounded value was 0.0 (falsy) fell through the `or` and was returned
    unrounded. A conditional expression avoids that pitfall.
    """
    if not isinstance(t, tuple):
        return t
    return tuple(round(x, PRECISION) if isinstance(x, float) else x for x in t)
class IntervalSolver(object):
"""Solves time discretized service network design problems using an iterative approach"""
__slots__ = ['problem', 'model', 'network', 'commodity_shortest_paths', 'shared_shortest_paths', 'S', 'T', 'x', 'z', 'commodities', 'intervals', 'arcs', 'origin_destination',
'constraint_consolidation', 'constraint_flow', 'constraint_cycle' ,'constraint_path_length', 'solution_paths','consolidations', 'fixed_timepoints_model', 'timepoints',
'incumbent', 'lower_bound', 'shouldEnforceCycles', 'fixed_paths','timed_network','cons_network','suppress_output','GAP', 'incumbent_solution','all_paths', 'edge_shortest_path',
'status','timepoints_per_iteration', 'ALGORITHM', 'constraints_user', 'constraints_origin', 'constraints_dest', 'constraints_intree_path', 'constraints_intree', 'var_intree',
'constraints_holding_offset', 'constraints_holding_enforce', 'constraints_holding_enforce2', 'environment']
def __init__(self, problem: ProblemData, time_points:set[NodeTime]|None=None, full_solve=True, fixed_paths=[], suppress_output=False, gap=MIP_GAP, algorithm=None, full_discretization=False, full_results_log=None, environment=None):
    """Build the interval-based MIP model for *problem*.

    Args:
        problem: flat network, commodities and cost data.
        time_points: optional initial (node, time) points seeding the discretization.
        full_solve: when False together with time_points, a single fixed-timepoint model is solved.
        fixed_paths: optional per-commodity arc lists restricting routing.
            NOTE(review): mutable default argument — only safe while it is never mutated; confirm.
        suppress_output: silence solver/progress output.
        gap: relative MIP gap.
        algorithm: overrides the module-level ALGORITHM when not None.
        full_discretization: build the complete time-expanded model instead of the iterative one.
        full_results_log: not used in this constructor — TODO confirm caller expectations.
        environment: optional pre-started Gurobi Env shared between models.
    """
    self.problem = problem
    # normalise all times and quantities to PRECISION decimal places
    self.commodities = [Commodity(NodeTime(c.a[0], round(c.a[1], PRECISION)), NodeTime(c.b[0], round(c.b[1], PRECISION)), round(c.q, PRECISION)) for c in problem.commodities]
    self.S = min(c.a[1] for c in self.commodities) # time horizon
    self.T = max(c.b[1] for c in self.commodities) + 1 # time horizon
    self.incumbent = None # store the lowest upper bound
    self.incumbent_solution = None
    self.lower_bound = 0.0 # can assume optimal is >= 0
    self.solution_paths = []
    self.shouldEnforceCycles = True
    self.fixed_paths = fixed_paths
    self.suppress_output = suppress_output
    self.GAP = gap
    self.ALGORITHM = algorithm if algorithm is not None else ALGORITHM
    self.status = False
    # build graph
    self.network = TypedDiGraph[int]()
    for a, destinations in problem.network.items():
        # support holding costs (self-loop arc at every node)
        self.network.add_edge(a, a, weight=0, capacity=1, fixed_cost=0, var_cost=1 if HOLDING_COSTS else 0)
        # dispatch arcs
        for b, transit_time in destinations.items():
            self.shouldEnforceCycles = self.shouldEnforceCycles or problem.var_cost[0].get((a,b),0) == 0 # if all arcs have positive costs then we don't need to add extra constraints
            self.network.add_edge(a, b, weight=transit_time, capacity=problem.capacities.get((a,b), 1.0), fixed_cost=problem.fixed_cost.get((a,b), transit_time))#, var_cost=problem.var_cost[0].get((a,b), 0))
    self.create_shortest_paths(shortest_path_option.edges)
    ## testing - gets all valid paths for usable arcs in build network
    #self.all_paths = [set(itertools.chain(*map(pairwise, limit_shortest_paths(self.network, c['a'][0], c['b'][0], 'weight', c['b'][1] - c['a'][1])))) for c in self.problem.commodities]
    # create initial intervals/arcs
    self.timepoints = set(self.trivial_network())
    self.fixed_timepoints_model = time_points is not None and not full_solve
    ##
    ## Construct Gurobi model
    ##
    #model = self.model = Model("IMCFCNF", env=Env(""))
    #model = self.model = Solver(CPLEX, quiet=False)
    if environment is None:
        self.environment = Env("", empty=True)
        self.environment.setParam('OutputFlag', False)
        self.environment.start()
    else:
        self.environment = environment
    model = self.model = Solver(quiet=(not self.fixed_timepoints_model or suppress_output), env=self.environment)
    # the adaptive algorithm starts with a loose 4% gap (tightened later)
    self.model.set_gap(0.04 if self.ALGORITHM >= algorithm_option.adaptive else gap)
    self.model.set_timelimit(TIMEOUT)
    ##model.setParam(GRB.param.TimeLimit, 14400) # 4hr limit
    #self.model.set_threads(1)
    self.timed_network: list[TypedDiGraph[NodeInterval]] = []
    # support full discretization mode (specialized model generation for full discretization because it's really slow o/w)
    if not full_discretization:
        self.build_network()
        # set which interval contains the origin/destination
        self.origin_destination = {k: TimedArc(NodeInterval(c.a[0], self.S, self.T), NodeInterval(c.b[0], self.S, self.T)) for k,c in enumerate(self.commodities)}
        self.intervals = tuplelist(((n, self.S, self.T) for n in self.network.nodes()))
    else:
        self.build_full_network()
        # set which interval contains the origin/destination
        self.origin_destination = {k: TimedArc(NodeInterval(c.a[0], ceil(c.a[1]), ceil(c.a[1])+1), NodeInterval(c.b[0], floor(c.b[1]), floor(c.b[1])+1)) for k,c in enumerate(self.commodities)}
        self.intervals = tuplelist(map(lambda n: (n[0], n[1], n[2]), self.cons_network.nodes()))
    ## in-tree constraint
    Kd = {} # used later if IN_TREE
    self.var_intree = {}
    if IN_TREE:
        # commodities grouped by their ultimate destination node
        Kd = {g: list(map(itemgetter(0), coll)) for g,coll in itertools.groupby(sorted(enumerate(self.commodities), key=lambda t: t[1].b[0]), key=lambda t: t[1].b[0])}
        self.var_intree = {(ult_dest,orig,dest): self.model.addVar(0, 0, 1, self.model.binary(), 'intree') for ult_dest in self.network.nodes() for orig in self.network.nodes() for dest in self.network.nodes() if orig != dest and orig != ult_dest }
    self.model.update()
    ##
    ## Constraints
    ##
    build_time = time.time()
    # flow constraints: inflow - outflow == r(k,n) for every commodity/interval
    self.constraint_flow: dict[tuple[int, NodeInterval], Constr] = {}
    for k,G in enumerate(self.timed_network):
        for n in self.intervals:
            n = NodeInterval(*n)
            i = [d['x'] for a1,a2,d in G.in_edges_data(n) if 'x' in d]
            o = [d['x'] for a1,a2,d in G.out_edges_data(n) if 'x' in d]
            if i or o:
                self.constraint_flow[(k,n)] = self.model.addConstr(quicksum(i) - quicksum(o) == self.r(k,n), 'flow' + str((k,n)))
        # progress indicator, one update per commodity
        sys.stdout.write('{0:5.1f}%, {1:4.0f}s\r'.format(100*k/float(len(self.commodities)), time.time() - build_time))
        sys.stdout.flush()
    # Consolidation constraints: total flow on a timed arc fits in z trailers
    self.constraint_consolidation = {(a1,a2): self.model.addConstr(quicksum(self.timed_network[k].edge_data(a1, a2)['x'] * self.commodities[k].q for k in d['K']) <= d['z'] * self.network.edge_data(a1[0], a2[0])['capacity'], 'cons' + str((a1,a2)))
                                     for a1,a2,d in self.cons_network.edges_data() if d['z'] is not None }
    # Ensure no flat-cycles
    self.constraint_cycle = None
    if self.shouldEnforceCycles:
        self.constraint_cycle = {}
        for k,G in enumerate(self.timed_network):
            for n in self.network.nodes():
                outflow = [d['x'] for a1,a2,d in G.edges_data() if a1[0] == n and a2[0] != n and 'x' in d]
                if outflow:
                    self.constraint_cycle[(k,n)] = self.model.addConstr(quicksum(outflow) <= 1, 'cycle')
    # Ensure path length
    if PATH_CUTS:
        self.constraint_path_length = {}
        for k,G in enumerate(self.timed_network):
            path_length = 0
            for n in self.network.nodes():
                outflow = [d['x']*self.transit(a1[0],a2[0]) for a1,a2,d in G.edges_data() if a1[0] == n and a2[0] != n and 'x' in d]
                if outflow:
                    path_length += quicksum(outflow)
            self.constraint_path_length[k] = model.addConstr(self.commodities[k].a[1] + path_length <= self.commodities[k].b[1], 'path')
    ## in-tree constraint
    if IN_TREE:
        self.constraints_intree_path = {(ult_dest, orig): model.addConstr(quicksum([self.var_intree[ult_dest, orig, dest] for dest in self.network.nodes() if orig != dest and orig != ult_dest]) == 1, 'intree')
                                        for ult_dest in self.network.nodes()
                                        for orig in self.network.nodes() if orig != ult_dest}
        self.constraints_intree = {}
        for ult_dest,coll in Kd.items():
            for k in coll:
                for orig in self.network.nodes():
                    for dest in self.network.nodes():
                        if orig != dest and orig != ult_dest:
                            outflow = [d['x'] for a1,a2,d in self.timed_network[k].edges(data=True) if a1[0] == orig and a2[0] == dest and 'x' in d]
                            if outflow:
                                self.constraints_intree[k,orig,dest] = model.addConstr(quicksum(outflow) <= self.var_intree[ult_dest, orig, dest], 'intree')
    # holding cost support
    self.constraints_holding_offset = {}
    self.constraints_holding_enforce = {}
    self.constraints_holding_enforce2 = {}
    # if HOLDING_COSTS:
    #     # todo remove if holding cost is 0
    #     for k,G in enumerate(self.timed_network):
    #         for n in self.intervals:
    #             i = [d['x'] for a1,a2,d in G.in_edges(n, data=True) if 'x' in d and a1[0] != a2[0]]
    #             o = [(d['x'], d['y']) for a1,a2,d in G.out_edges(n, data=True) if 'x' in d and 'y' in d and a1[0] == a2[0]] # get holding arc out
    #             if i and o:
    #                 self.constraints_holding_offset[k,n] = self.model.addConstr(1 + o[0][1] >= quicksum(i) + o[0][0], 'holding-offset')
    #                 self.constraints_holding_enforce[k,n] = self.model.addConstr(o[0][1] <= o[0][0], 'holding-enforce')
    #                 self.constraints_holding_enforce2[k,a[0]] = self.model.addConstr(o[0][1] <= quicksum(i), 'holding-enforce')
    ## User cuts
    self.constraints_origin = []
    self.constraints_dest = []
    self.constraints_user = []
    self.user_cuts()
    # add timepoints
    self.model.update()
    if time_points is not None:
        self.timepoints.update(time_points)
        self.add_network_timepoints(time_points)
##
## testing user cuts
##
# Consider a location and time in the network (l,t). Furthermore, consider all the commodities that become available at that location and time.
# Suppose the sum of those commodity sizes is q, then I know that the flow out of (l,t) has to be at least the roundup of q (assuming q is specified in terms of trailer size).
# I can add that as a cut. Furthermore, if there is only a single outbound arc at (l,t), then the same argument can be made at the location and time at the end of the arc.
def user_cuts(self):
    """Rebuild the user / origin / destination cut constraints.

    Removes any cuts added by a previous call, then (depending on the
    USER_CUTS / ORIGIN_CUTS switches) adds:
      * per-commodity lower bounds on the consolidation count z ('user'), and
      * aggregate dispatch cuts: commodities sharing an origin (destination)
        must dispatch at least ceil(total quantity / max arc capacity)
        trailers out of (into) that node.

    Fixes: the original removal loops iterated `self.constraints_origin` /
    `self.constraints_dest` directly; after the first call those are dicts
    keyed by commodity, so iteration yielded int keys (not constraints), and
    one shared constraint stored under several keys would be removed twice.
    """
    def remove_all(store):
        # store is a list on the first call (initialised in __init__) and a
        # dict keyed by commodity afterwards; several commodities may share
        # one constraint object, so de-duplicate before removing.
        items = store.values() if isinstance(store, dict) else store
        seen = set()
        for c in items:
            if id(c) not in seen:
                seen.add(id(c))
                self.model.removeCons(c)
    # remove cuts
    remove_all(self.constraints_user)
    remove_all(self.constraints_origin)
    remove_all(self.constraints_dest)
    self.model.update()
    self.constraints_user = []
    self.constraints_origin = {}
    self.constraints_dest = {}
    ## add cuts
    # todo when # outbound edges = 1
    if USER_CUTS:
        for a1,a2,d in self.cons_network.edges_data():
            if 'z' in d and d['z'] is not None:
                for c in d['K']:
                    self.constraints_user.append(self.model.addConstr(d['z'] >= ceil(self.commodities[c].q/self.network.edge_data(a1[0], a2[0])['capacity']) * self.timed_network[c].edge_data(a1, a2)['x'], 'user'))
    ## after the origin timed-node, we must dispatch at some point. Sum all dispatch arcs >= ceil(q/max capacity)
    # each commodity
    if ORIGIN_CUTS:
        def add_coverage_cuts(end_index, timed_edges, store):
            # end_index 0 groups commodities by shared origin (outbound arcs);
            # end_index 1 groups by shared destination (inbound arcs).
            for node, K in itertools.groupby(sorted(map(lambda t: (t[1][end_index][0], t[0]), self.origin_destination.items())), itemgetter(0)):
                K = list(map(itemgetter(1), K))
                q = sum(self.commodities[k].q for k in K)
                dispatch_arcs = set()
                max_capacity = 0
                # get intervals at the shared node
                for i in self.intervals.select(node):
                    for k in K:
                        # all dispatch arcs incident to the node that are valid for k
                        for a1, a2 in timed_edges(k, i):
                            d = self.cons_network.edge_data(a1, a2)
                            if 'z' in d and d['z'] is not None:
                                cap = self.network.edge_data(a1[0], a2[0])['capacity']
                                if not dispatch_arcs or cap > max_capacity:
                                    max_capacity = cap
                                dispatch_arcs.add((a1, a2))
                # NOTE(review): as in the original, this raises ZeroDivisionError
                # when no dispatch arcs were found (max_capacity == 0).
                cons = self.model.addConstr(quicksum(self.cons_network.edge_data(a1, a2)['z'] for a1,a2 in dispatch_arcs) >= ceil(q/max_capacity), 'user')
                for k in K:
                    store[k] = cons
        # get commodities that share origin
        add_coverage_cuts(0, lambda k, i: self.timed_network[k].out_edges(i, data=False), self.constraints_origin)
        # get commodities that share dest
        add_coverage_cuts(1, lambda k, i: self.timed_network[k].in_edges(i, data=False), self.constraints_dest)
def build_network(self):
    """Build the per-commodity timed networks and the shared consolidation
    network for the initial single-interval [S,T) discretization.

    Adds an 'x' (flow) and 'y' (holding offset) variable per commodity arc
    and one integer 'z' (trailer count) variable per consolidation arc.
    """
    all_arcs: list[list[TimedArc]] = []
    for k in range(len(self.commodities)):
        G = TypedDiGraph[NodeInterval]()
        # arc-validity predicate for commodity k
        v = partial(self.V, k)
        #v = lambda a: self.V(k,a) and (a[0][0],a[1][0]) in self.all_paths[k]
        # Add node-intervals
        G.add_nodes_from(NodeInterval(n, self.S, self.T) for n in self.network.nodes())
        # Create timed arcs (restricted to fixed paths when supplied)
        all_arcs.append(list(filter(v, (TimedArc(NodeInterval(e[0], self.S, self.T), NodeInterval(e[1], self.S, self.T))
                                        for e in (self.network.edges() if not self.fixed_paths else self.fixed_paths[k])))))
        # x: flow variable; holding (self-loop) arcs are costed per interval length.
        # y: holding offset variable carrying the negated holding cost.
        G.add_edges_from(((a[0], a[1], {
            'x': self.model.addVar(obj=(self.problem.var_cost[k].get((a[0][0],a[1][0]),0) * self.commodities[k].q if a[0][0] != a[1][0] else self.problem.var_cost[k].get((a[0][0],a[1][0]),0) * self.commodities[k].q * (a[0][2] - a[0][1])), lb=0, ub=1, type=self.model.binary() if not ALLOW_SPLIT else self.model.continuous(), name='x' + str(k) + ',' + str(a)),
            'y': self.model.addVar(obj=(0 if a[0][0] != a[1][0] else -self.problem.var_cost[k].get((a[0][0],a[1][0]),0) * self.commodities[k].q * (a[0][2] - a[0][1])), lb=0, ub=1, type=self.model.binary() if not ALLOW_SPLIT else self.model.continuous(), name='y' + str(k) + ',' + str(a)),
        }) for a in all_arcs[k]))
        self.timed_network.append(G)
    # create consolidation network
    self.cons_network = TypedDiGraph[NodeInterval]()
    self.cons_network.add_nodes_from(NodeInterval(n, self.S, self.T) for n in self.network.nodes())
    # cons = itertools.groupby(sorted(((a.source.T, a.target.T),k) for k,arcs in enumerate(all_arcs) for a in arcs), itemgetter(0))
    # group identical timed arcs across all commodities
    cons = itertools.groupby(sorted((a,k) for k,arcs in enumerate(all_arcs) for a in arcs), itemgetter(0))
    # need to store K in order to use it twice
    def create_data(a,coll):
        K = set(map(itemgetter(1),coll))
        return {'z': self.model.addVar(obj=(self.network.edge_data(a[0][0], a[1][0])['fixed_cost']), lb=0,
                                       ub=self.model.inf(),
                                       name='z' + str(a), type=self.model.integer()),
                'K': K}
    self.cons_network.add_edges_from(((a[0], a[1], create_data(a,K)) for a,K in cons))
##
## Code for generating a full network model - yes it's horrible, but its much faster to run than reusing code
##
def build_full_network(self):
    """Generate the fully time-discretized model (unit intervals over [S,T]).

    Specialized fast path: builds storage and dispatch arcs per commodity
    directly, tracking 'redirected' arcs so that consolidation opportunities
    are shared across commodities. Returns the per-commodity arc lists.
    """
    build_time = time.time()
    # group timepoints by node
    time_points = [NodeTime(n,t) for n in self.network.nodes() for t in range(int(self.S), int(self.T+1), 1)]
    interval_cache = {g: [NodeInterval(g,t1,t2) for t1,t2 in pairwise(sorted(set(map(itemgetter(1), coll)) | set([self.S, self.T])))] for g,coll in itertools.groupby(time_points, itemgetter(0)) }
    redirected_arcs = set()
    arcs: list[list[TimedArc]] = []
    # Create arcs ((n1, t1, t2), (n2, t3, t4)) pairs
    for k,c in enumerate(self.commodities):
        origin,dest = c.a, c.b
        # setup storage arcs (only at nodes reachable from origin and able to reach dest)
        origin_to_arc = self.shortest_paths(k, origin[0])
        arcs.append([TimedArc(i1,i2) for n in self.network.nodes() if n in origin_to_arc and self.shortest_path(k, n, dest[0]) is not None
                     for (i1,i2) in pairwise(interval_cache[n]) if self.is_valid_storage_arc(TimedArc(i1,i2), origin, dest, origin_to_arc.get(n, None), self.shortest_path(k, n, dest[0]))])
        for e in self.network.edges():
            origin_to_arc = self.shortest_path(k, origin[0], e[0])
            arc_to_dest = self.shortest_path(k, e[1], dest[0])
            if origin_to_arc is not None and arc_to_dest is not None:
                arcs[k].extend(self.create_arcs_between_nodes(k, e[0], e[1], origin, dest, interval_cache[e[0]], interval_cache[e[1]], redirected_arcs))
        # progress indicator, one update per commodity
        sys.stdout.write('{0:5.1f}%, {1:4.0f}s\r'.format(100*k/float(len(self.commodities)), time.time() - build_time))
        sys.stdout.flush()
    # add 'redirected' arcs to all commodities so we don't miss consolidation opportunities
    for k,c in enumerate(self.commodities):
        origin,dest = c.a, c.b
        origin_to_arc = self.shortest_paths(k, origin[0])
        missing_arcs = set(a for a in redirected_arcs
                           if a not in arcs[k] and self.is_arc_valid(a, origin, dest, origin_to_arc.get(a[0][0], None), self.transit(a[0][0], a[1][0]), self.shortest_path(k, a[1][0], dest[0])))
        # add missing arcs if valid for k
        arcs[k].extend(missing_arcs)
        G = TypedDiGraph[NodeInterval]()
        G.add_nodes_from(itertools.chain(*interval_cache.values()))
        # added holding cost support (self-loop arcs costed per interval length)
        G.add_edges_from(((a[0], a[1], {'x': self.model.addVar(obj=(self.problem.var_cost[k].get((a[0][0],a[1][0]),0) * self.commodities[k].q * ((a[0][2] - a[0][1]) if a[0][0] == a[1][0] else 1)), lb=0, ub=1, type=self.model.binary() if not ALLOW_SPLIT else self.model.continuous(), name='x' + str(k) + ',' + str(a)) })
                          for a in arcs[k]))
        self.timed_network.append(G)
    # create consolidation network (dispatch arcs only, grouped across commodities)
    self.cons_network = TypedDiGraph[NodeInterval]()
    self.cons_network.add_nodes_from(itertools.chain(*interval_cache.values()))
    cons = itertools.groupby(sorted(((a.source.T, a.target.T), k) for k,k_arcs in enumerate(arcs) for a in k_arcs if a[0][0] != a[1][0]), itemgetter(0))
    self.cons_network.add_edges_from(((a[0],a[1],{'z': self.model.addVar(obj=(self.network.edge_data(a[0][0], a[1][0])['fixed_cost']), lb=0, ub=self.model.inf(), name='z' + str(a), type=self.model.integer()),
                                                  'K': set(map(itemgetter(1),K))})
                                      for a,K in cons))
    return arcs
##
## Creates all arcs for a commodity for all given intervals between two nodes. Also lists any 'redirected' arcs (using shortest paths)
##
def create_arcs_between_nodes(self, k, n1, n2, origin, dest, origin_intervals, destination_intervals, redirected_arcs):
    """Create all valid timed arcs for commodity *k* on flat edge (n1, n2).

    Walks the origin intervals in order, advancing a single pass over the
    destination intervals, and appends each (i1, i2) interval pair that
    satisfies the time-window validity conditions. Arcs whose earliest
    arrival time does not land inside the chosen destination interval are
    also recorded in *redirected_arcs* (mutated in place) so other
    commodities can reuse them.

    Returns the list of new (i1, i2) arcs.
    """
    new_arcs = []
    it = iter(destination_intervals) # for each destination interval
    i2 = next(it)
    transit_time = self.transit(n1,n2)
    origin_to_arc = self.shortest_path(k, origin[0], n1)
    arc_to_dest = self.shortest_path(k, n2, dest[0])
    arc_to_arc = transit_time
    # for each origin interval
    for i1 in origin_intervals:
        time = i1[1] + transit_time  # earliest arrival when leaving at i1's start
        # the first / previously used interval is also valid for this interval
        if time < i2[2] and (i1 is not None and i2 is not None and (i2[0] != origin[0] or i1[0] == origin[0]) and ((i1[0] != dest[0] or i2[0] == dest[0]) and (origin[1] + origin_to_arc < i1[2]) and (i2[1] + arc_to_dest <= dest[1]) and (((dest[1] - arc_to_dest >= origin[1] + origin_to_arc + arc_to_arc) and (origin[1] + origin_to_arc + arc_to_arc < i2[2]) and (i1[1] + arc_to_arc + arc_to_dest <= dest[1]))))):
            new_arcs.append((i1, i2))
        else:
            # skip to correct interval
            while i2 is not None and time >= i2[2]:
                i2 = next(it, None)
            # keep skipping if invalid, up until latest time
            while i2 is not None and i1[2] + transit_time >= i2[2]:
                if (i1 is not None and i2 is not None and (i2[0] != origin[0] or i1[0] == origin[0]) and ((i1[0] != dest[0] or i2[0] == dest[0]) and (origin[1] + origin_to_arc < i1[2]) and (i2[1] + arc_to_dest <= dest[1]) and (((dest[1] - arc_to_dest >= origin[1] + origin_to_arc + arc_to_arc) and (origin[1] + origin_to_arc + arc_to_arc < i2[2]) and (i1[1] + arc_to_arc + arc_to_dest <= dest[1]))))):
                    new_arcs.append((i1,i2))
                    # redirected: earliest arrival misses the chosen destination interval
                    if not (i1[1] + transit_time >= i2[1] and i1[1] + transit_time < i2[2]):
                        redirected_arcs.add((i1,i2))
                    break
                i2 = next(it, None)
            # we are done for this arc
            if i2 is None:
                break
            # possibly the last transit time (from above loop) is valid
            if i1[2] + transit_time < i2[2] and (i1 is not None and i2 is not None and (i2[0] != origin[0] or i1[0] == origin[0]) and ((i1[0] != dest[0] or i2[0] == dest[0]) and (origin[1] + origin_to_arc < i1[2]) and (i2[1] + arc_to_dest <= dest[1]) and (((dest[1] - arc_to_dest >= origin[1] + origin_to_arc + arc_to_arc) and (origin[1] + origin_to_arc + arc_to_arc < i2[2]) and (i1[1] + arc_to_arc + arc_to_dest <= dest[1]))))):
                new_arcs.append((i1,i2))
                # redirected: earliest arrival misses the chosen destination interval
                if not (i1[1] + transit_time >= i2[1] and i1[1] + transit_time < i2[2]):
                    redirected_arcs.add((i1,i2))
    return new_arcs
def r(self, k: int, i: tuple[int, float, float]) -> int:
    """Flow-balance right-hand side for commodity *k* at interval *i*:
    -1 at the commodity's origin interval, +1 at its destination interval,
    0 everywhere else (used with inflow - outflow == r)."""
    od = self.origin_destination[k]
    if od.source == i:
        return -1
    if od.target == i:
        return 1
    return 0
def infeasible(self):
    """Return True when some commodity cannot reach its destination in time.

    With fixed paths, uses the transit time of the assigned path; otherwise
    the commodity's shortest-path time. Uses any() with generator
    expressions instead of materialising intermediate lists and testing
    their length (idiomatic, short-circuits on the first failure).
    """
    if self.fixed_paths:
        return any(c.a[1] + sum(self.transit(*a) for a in self.fixed_paths[k]) > c.b[1]
                   for k, c in enumerate(self.commodities))
    return any(c.a[1] + self.shortest_path(k, c.a[0], c.b[0]) > c.b[1]
               for k, c in enumerate(self.commodities))
# walk up the tree from node t, and return all root nodes that have maximum early times
def walks(self, G: TypedDiGraph[SolutionGraphNode], t: SolutionGraphCommodity, top=False):
    """Walk upstream from node *t* in the solution graph and return
    (root, path) pairs.

    At each step the predecessor with the largest 'early' time is followed
    (ties broken by preferring the current commodity); at consolidation
    nodes all predecessors sharing the maximal early time are branched onto
    a work stack, capped at MAX_WALK_PATHS roots. With top=True only the
    single primary walk is returned (no branching).
    """
    roots = []
    stack: list[tuple[list[SolutionGraphNode], list[SolutionGraphNode]]] = [([t],[])] # tail nodes, current path
    # 'recursive' walk up tree
    while stack:
        tails, path = stack.pop()
        for v in tails:
            edges = [(0.0,v)]  # seed so the loop body runs once for v itself
            k = v[0] if isinstance(v, SolutionGraphCommodity) else 0
            while edges: # choose latest edge with largest commodity and latest time window
                v = edges[0][1]
                path.insert(0, v)
                # choose 'latest' arc, preferring to stay with current commodity if tie
                if isinstance(v, SolutionGraphCommodity):
                    k = v[0]
                edges = sorted([((float(G.node_data(e1)['early']), len(self.commodities) - abs(k-e1[0]) if isinstance(e1, SolutionGraphCommodity) else 0), e1) for e1,_ in G.in_edges(v)], reverse=True)
                # for consolidations, choose all commodities that have the max early time (add to stack)
                if not top and not is_node(v) and len(roots) < MAX_WALK_PATHS:
                    k_,group = next(itertools.groupby(edges, lambda x: x[0][0]), (None,[]))
                    group = list(group)
                    stack.append((list(map(itemgetter(1), [g for g in group if g != edges[0]])), path[:]))
            roots.append((v, path))
    return roots
##
## Iterative Solve
##
def solve(self, draw_filename='', start_time=time.time(), write_filename=''):
global solve_time
global USE_HEURISTIC_START
logger = logging.getLogger("IntervalSolver")
USE_HEURISTIC_START = False
info = []
info_time = time.time()
start_time= time.time()
# feasibility check: shortest path cannot reach destination - gurobi doesn't pick it up, because no arcs exist
if self.infeasible():
if not self.suppress_output:
print("INFEASIBLE")
self.status = False
return info
variable_gap = True
iterations = -1
new_timepoints = set((NodeTime(*tp) for tp in self.timepoints)) # not required, but might be good for testing
len_timepoints = 0 # used to halt processing if haven't added unique timepoints in a while (hack - this shouldn't happen if my code was good)
it_timepoints = 0
self.timepoints_per_iteration = [(0, n,t) for n,t in self.timepoints]
s = CheckSolution(self, self.environment)
solve_time = 0
# output statistics
logger.info('{0:>3}, {1:>10}, {2:>10}, {3:>7}, {4:>6}, {5:>6}, {6}'.format(*'{0}#,LB,UB,Gap,Time,Solver,Type [TP]'.format('G').split(',')))
while True:
iterations += 1
if write_filename != '':
self.model.update()
self.model.write(write_filename + "_" + str(iterations) + ".lp") # debug the model
t0 = time.time()
if USE_HEURISTIC_START:
self.model.update()
relaxed = self.model.model.relax()
t0 = time.time()
relaxed.optimize()
self.lower_bound = float(max(relaxed.objBound, self.lower_bound) if self.lower_bound is not None else relaxed.objBound)
self.status = True if relaxed.status in [GRB.status.TIME_LIMIT, GRB.status.INTERRUPTED] and self.incumbent and (self.incumbent - self.lower_bound) < self.incumbent * self.GAP else (relaxed.status == GRB.status.OPTIMAL)
else:
self.solve_lower_bound(s)
#self.solve_lower_bound() # Solve lower bound problem
solve_time += time.time() - t0
self.model.set_timelimit(max(0, TIMEOUT - solve_time))
# return if infeasible, time out or interupted
if not self.status or solve_time > TIMEOUT:
if self.model.is_abort() or solve_time > TIMEOUT:
### output statistics
self.solution_paths, self.consolidations = self.get_inprogress()
solution, cycle = self.get_network_solution()
# if valid path length (for LP)
if not list(filter(lambda tw: tw[0] > tw[1], [solution.node_data(SolutionGraphCommodity(k,c.b[0]))['tw'] for k,c in enumerate(self.commodities)])):
t0 = time.time()
if s.validate(self.solution_paths, self.consolidations):
solve_time += time.time() - t0
solution_cost = s.get_solution_cost()
self.incumbent = min(self.incumbent, solution_cost) if self.incumbent is not None else solution_cost
logger.info("{0:>3}, {1:10.1f}, {2:10.1f}, {3:7.2%}, {4:6.2f}, {5:6.2f}, ABORTED".format(iterations, self.lower_bound, self.incumbent, ((self.incumbent - self.lower_bound)/self.incumbent), time.time()-start_time, solve_time))
logger.info('\nit: {0}, new tp: {1}, intervals: {2}, vars: {3}, time: {4:.2f} ({5:.2f})\n'.format(iterations, len(new_timepoints), len(self.intervals), len(self.model.getVars()), time.time()-start_time, solve_time))
elif not self.suppress_output:
logger.error("INFEASIBLE")
self.status = False
info.append((self.lower_bound, self.incumbent, time.time()-info_time, solve_time, 0, self.model.NumVars, self.model.NumConstrs, self.model.PresolveNumVars, self.model.PresolveNumConstrs, iterations, ((self.incumbent - self.lower_bound)/self.incumbent) if self.incumbent is not None and self.incumbent > 0 else None)) # track information about the iteration
return info
using_heuristic = USE_HEURISTIC_START
if using_heuristic:
heuristic_objective, heuristic_solution_paths, heuristic_consolidations = self.solve_heuristic_lower_bound(False)
best_obj = heuristic_objective
best_sol = (heuristic_solution_paths, heuristic_consolidations)
for run in range(0):
heuristic_objective, heuristic_solution_paths, heuristic_consolidations = self.solve_heuristic_lower_bound(True)
if best_obj > heuristic_objective:
best_obj = heuristic_objective
best_sol = (heuristic_solution_paths, heuristic_consolidations)
self.solution_paths, self.consolidations = best_sol
if self.incumbent is not None and best_obj > self.incumbent:
USE_HEURISTIC_START = False
else:
self.solution_paths, self.consolidations = self.get_inprogress()
# return if we are only doing one iteration
if self.fixed_timepoints_model:
logger.info("{0:>3}, {1:10.1f}, ".format(iterations, self.lower_bound))
self.status = True
return iterations, len(new_timepoints), solve_time
solution, cycle = self.get_network_solution()
# draw the timepoints if required
portrait = True
if draw_filename != '':
pdf = DrawLaTeX(self.commodities, self.network)
pdf.draw_solution_network(solution, cycle)
pdf.draw_latex(portrait)
pdf.save(draw_filename + str(iterations))
# Checks gap of a lower bound / upper bound solutions - 1% gap
if self.incumbent is not None and (self.incumbent - self.lower_bound) < self.incumbent*self.GAP:
# output statistics
output = "{0:>3}{2}, {1:10.1f}, ".format(iterations, self.lower_bound, "*" if using_heuristic else "")
output += "{0:10.1f}, {1:7.2%}, {2:6.2f}, {3:6.2f},".format(self.incumbent, ((self.incumbent - self.lower_bound)/self.incumbent), time.time()-start_time, solve_time)
logger.info(output + " Optimal Solution")
logger.info('\nit: {0}, new tp: {1}, intervals: {2}, vars: {3}, time: {4:.2f} ({5:.2f})\n'.format(iterations, len(new_timepoints), len(self.intervals), len(self.model.getVars()), time.time()-start_time, solve_time))
self.timepoints.update(new_timepoints)
break
##
## Add time points if needed
##
# switch to default algorithm if we're stuck (hack for bad code)
if self.ALGORITHM >= algorithm_option.eclectic and it_timepoints < 2:
path_failure, path_length_timepoints, window_timepoints, cycle_timepoints, mutual_timepoints = self.find_timepoints_all(solution, cycle)
elif self.ALGORITHM >= algorithm_option.multiplex and it_timepoints < 2:
path_failure, path_length_timepoints, window_timepoints, cycle_timepoints, mutual_timepoints = self.find_timepoints_multiplex(solution, cycle)
else:
path_failure, path_length_timepoints, window_timepoints, cycle_timepoints, mutual_timepoints = self.find_timepoints_default(solution, cycle)
#path_failure, path_length_timepoints, window_timepoints, cycle_timepoints, mutual_timepoints = self.find_timepoints_advanced(solution, cycle)
#path_failure, path_length_timepoints, window_timepoints, cycle_timepoints, mutual_timepoints = self.find_timepoints_simple(solution, cycle)
tp = path_length_timepoints | window_timepoints | cycle_timepoints | mutual_timepoints
# output statistics
output = "{0:>3}{2}, {1:10.1f}, ".format(iterations, self.lower_bound, "*" if using_heuristic else "")
# if invalid path, skip the LP (since it's infeasible)
if path_failure and self.incumbent is None:
output += "{0:>10}, {0:>7}, {1:6.2f}, {2:6.2f}, P[{3}]".format('-', time.time()-start_time, solve_time, len(path_length_timepoints))
else:
# Solve UB LP, check solution costs
t0 = time.time()
if path_failure or s.validate(self.solution_paths, self.consolidations):
solve_time += time.time() - t0
if not path_failure:
solution_cost = s.get_solution_cost()
if self.incumbent is None or solution_cost < self.incumbent:
self.incumbent = solution_cost
self.incumbent_solution = (self.solution_paths, s.get_consolidations(), self.get_in_tree_paths())
assert self.incumbent is not None, "Incumbent should not be None at this point"
output += "{0:10.1f}, {1:7.2%}, {2:6.2f}, {3:6.2f},".format(self.incumbent, ((self.incumbent - self.lower_bound)/self.incumbent), time.time()-start_time, solve_time)
# Checks gap of a lower bound / upper bound solutions - 1% gap
if (self.incumbent - self.lower_bound) < self.incumbent*self.GAP:
logger.info(output + " Optimal Solution")
logger.info('\nit: {0}, new tp: {1}, intervals: {2}, vars: {3}, time: {4:.2f} ({5:.2f})\n'.format(iterations, len(new_timepoints), len(self.intervals), len(self.model.getVars()), time.time()-start_time, solve_time))
self.timepoints.update(new_timepoints)
self.status = True
break
##
## holding cost - should only hit here if cts time feasible, but lowerbound is not tight (due to holding costs)
# if HOLDING_COSTS and not tp:
# # how to choose timepoints for holding arcs? Want to find the arc with the most cost in the solution that isn't accounted for in the lowerbound
# test = [{ha: (self.network[ha[1]][ha[1]]['var_cost'] if self.problem.commodities[k]['b'][0] == ha[1] else self.network[ha[0]][ha[0]]['var_cost']) * self.problem.commodities[k]['q'] * h.X for ha,h in H.items()} for k,H in enumerate(s.h)]
# pass
# variable gap for faster solve (turn off if no new time points found)
if self.ALGORITHM >= algorithm_option.adaptive:
if not tp or variable_gap and (self.incumbent - self.lower_bound)*0.25 < self.incumbent * self.GAP:
self.model.set_gap(self.GAP*0.98)
self.model.set_aggressive_cuts() # focus on proving optimality
variable_gap = False
elif variable_gap:
self.model.set_gap(((self.incumbent - self.lower_bound)/self.incumbent * 0.25))
# Stop endless loop - hack for my bad code
if len(new_timepoints) == len_timepoints:
it_timepoints += 1
USE_HEURISTIC_START = False
else:
len_timepoints = len(new_timepoints)
it_timepoints = 0
if it_timepoints > 5: # arbitrary number
self.write_raw_timepoints("endless.txt")
self.write_raw_solution('endless.sol')
def all_nodes(G, t):
    """Return every node weakly reachable from *t* by first walking up the
    in-edges to the tree roots, then walking down the out-edges from those
    roots.

    G is expected to expose ``in_edges(v)`` / ``out_edges(v)`` returning
    (u, v) pairs (networkx DiGraph style).

    Fix over the original: each walk now tracks its own visited set, so a
    cycle in G no longer causes an endless loop and shared ancestors /
    descendants are expanded only once instead of repeatedly.
    """
    nodes = set([t])
    roots = set()
    seen_up = {t}
    stack = [t]
    # 'recursive' walk up the tree: collect ancestors, remember the roots
    while stack:
        v = stack.pop()
        prev_nodes = set(e[0] for e in G.in_edges(v))
        if not prev_nodes:
            roots.add(v)
        nodes.update(prev_nodes)
        # only queue nodes this walk has not seen yet (cycle/dup guard)
        new_up = prev_nodes - seen_up
        seen_up.update(new_up)
        stack.extend(new_up)
    # 'recursive' walk down the tree from the roots: collect descendants
    seen_down = set(roots)
    stack = list(roots)
    while stack:
        v = stack.pop()
        next_nodes = set(e[1] for e in G.out_edges(v))
        nodes.update(next_nodes)
        # same guard on the downward pass
        new_down = next_nodes - seen_down
        seen_down.update(new_down)
        stack.extend(new_down)
    return nodes
for i,G_ in enumerate(nx.weakly_connected_components(solution.G)):
G = solution.subgraph(G_)
## test code - only return partial network based on invalid root/tails
#K = set(n[0] for n in G if type(n[1]) is not frozenset)
#tails = [last for last in ((k,self.commodities[k]['b'][0]) for k in K) if 'valid' in solution.nodes[last] and not solution.nodes[last]['valid']]
#nodes = set()
#for t in tails[:1]:
# nodes.update(all_nodes(G, t))
#G = nx.subgraph(G, nodes)
if True: #any(True for n in G if not G.nodes[n]['valid']):
pdf = DrawLaTeX(self.commodities, self.network)
pdf.draw_solution_network(G, cycle)
#pdf.draw_timeline(self.intervals, self.arcs, 24, self.get_solution(), [0,2,3])
pdf.draw_latex(False)
pdf.save("endless_" + str(i))
output += " Endless Loop\n"
logger.error(output)
self.status = False
sys.exit()
break
if window_timepoints:
output += " W[{0}]".format(len(window_timepoints-new_timepoints))
if cycle_timepoints:
output += " C[{0}]".format(len(cycle_timepoints-new_timepoints))
if mutual_timepoints:
output += " M[{0}]".format(len(mutual_timepoints-new_timepoints))
if len(path_length_timepoints-new_timepoints) > 0:
output += " P[{0}]".format(len(path_length_timepoints-new_timepoints))
# track information about the iteration
info.append((self.lower_bound, self.incumbent, time.time()-info_time, solve_time, len(tp - new_timepoints), self.model.NumVars, self.model.NumConstrs, None, None, iterations, ((self.incumbent - self.lower_bound)/self.incumbent) if self.incumbent is not None and self.incumbent > 0 else None))
if not self.suppress_output:
logger.info(output)
self.add_network_timepoints(tp)
new_timepoints.update(tp)
self.timepoints_per_iteration.extend((iterations+1, n,t) for n,t in tp)
info.append((self.lower_bound, self.incumbent, time.time()-info_time, solve_time, 0, self.model.NumVars, self.model.NumConstrs, self.model.PresolveNumVars, self.model.PresolveNumConstrs, iterations,((self.incumbent - self.lower_bound)/self.incumbent) if self.incumbent is not None and self.incumbent > 0 else None)) # track information about the iteration
return info
def write_timepoints(self, filename):
    """Serialise the per-iteration timepoints to *filename* as a
    javascript-style array of {i, n, t} records, with times truncated to
    whole units. Writes nothing when the model is infeasible."""
    if self.infeasible():
        return
    records = "".join(
        "{{i:{0},n:{1},t:{2}}},".format(iteration, node, int(t))
        for iteration, node, t in self.timepoints_per_iteration
    )
    with open(filename, "w") as out:
        out.write("data = [" + records + "];")
def write_raw_timepoints(self, filename):
    """Dump every recorded timepoint to *filename* as a flat list of
    (node, time) tuples; the iteration index is discarded and times are
    truncated to ints."""
    pairs = "".join(
        "({0},{1}),".format(node, int(t))
        for _iteration, node, t in self.timepoints_per_iteration
    )
    with open(filename, "w") as out:
        out.write("[" + pairs + "]")
# solve the model
def solve_lower_bound(self, solution_check: CheckSolution):
    """Optimise the lower-bound MIP.

    Installs a Gurobi callback that (a) keeps ``self.lower_bound`` current and
    terminates the solve early once the incumbent is provably within
    ``self.GAP``, and (b) on each new MIP solution periodically tries to
    promote the in-progress solution to a validated incumbent via
    *solution_check*.

    Side effects: updates ``self.lower_bound``, ``self.incumbent``,
    ``self.solution_paths``, ``self.consolidations`` and ``self.status``.
    """
    global check_count
    global last_print
    check_count = time.time()
    last_print = time.time()
    # one-element list: a mutable cell so the nested callback can rebind the
    # timestamp without a nonlocal declaration
    incumbent_check = [time.time()]
    def callback(model, where):
        global check_count
        global last_print
        global solve_time
        if where == GRB.callback.MIP:
            objbst = model.cbGet(GRB.callback.MIP_OBJBST)
            objbnd = model.cbGet(GRB.callback.MIP_OBJBND)
            # the lower bound only ever improves (monotone max over bounds)
            self.lower_bound = max(objbnd, self.lower_bound) if self.lower_bound is not None else objbnd
            # stop as soon as the incumbent is within the target gap
            if self.incumbent is not None and (self.incumbent - self.lower_bound) < self.incumbent * self.GAP:
                model.terminate()
                return # terminate
            # throttled progress line on stdout: at most once per second
            if time.time() > last_print + 1:
                last_print = time.time()
                if self.incumbent is not None:
                    sys.stdout.write('{0:7.2%}, {1:7.2%} {2:7.1f}s\r'.format((objbst-objbnd)/objbst, (self.incumbent - self.lower_bound) / self.incumbent, solve_time + time.time() - check_count))
                else:
                    sys.stdout.write('{0:7.2%} {1:7.1f}s\r'.format((objbst-objbnd)/objbst, solve_time + time.time() - check_count))
                sys.stdout.flush()
        elif where == GRB.callback.MIPSOL:
            # new MIP solution found: at most every INCUMBENT_CHECK_INTERVAL
            # seconds, and only if it could beat the incumbent, try to lift it
            # into a validated upper-bound solution
            if time.time() > incumbent_check[0] + INCUMBENT_CHECK_INTERVAL and (self.incumbent is None or model.cbGet(GRB.callback.MIPSOL_OBJBST) < self.incumbent):
                incumbent_check[0] = time.time()
                # try to get solution
                solution_paths, consolidations = self.get_inprogress(True)
                cons = [SolutionGraphConsolidation(c, frozenset(k)) for c,K in consolidations.items() for k in K if len(k) > 1]
                solution, _ = self.get_network_solution(solution_paths, cons)
                # proceed only when no commodity-origin time window is
                # inverted (tw start > tw end)
                if not list(filter(lambda tw: tw[0] > tw[1], [solution.node_data(SolutionGraphCommodity(k,c.b[0]))['tw'] for k,c in enumerate(self.commodities)])):
                    if solution_check.validate(solution_paths, consolidations):
                        solution_cost = solution_check.get_solution_cost()
                        if self.incumbent is None or solution_cost < self.incumbent:
                            self.incumbent = solution_cost
                            self.solution_paths = solution_paths
                            self.consolidations = consolidations
    self.model.update()
    #self.model.write('test.lp')
    if self.suppress_output:
        self.model.optimize()
    else:
        self.model.optimize(callback)
    # fold the final solver bound into the running lower bound
    self.lower_bound = max(self.model.objBound(), self.lower_bound) if self.lower_bound is not None else self.model.objBound()
    # success = solved to optimality, or aborted early with the gap closed
    self.status = True if self.model.is_abort() and self.incumbent and (self.incumbent - self.lower_bound) < self.incumbent * self.GAP else self.model.is_optimal()
def solve_heuristic_lower_bound(self, randomize):
# process commodities in random order
K = list(range(0,len(self.commodities)))
if randomize:
random.shuffle(K)
else:
K.sort(key=lambda k: self.commodities[k].q, reverse=True)
consolidation_network = self.cons_network.copy()
# create a path-graph for each commodity - this will simply be a path if freight does not allow splitting
path_graphs = [nx.DiGraph() for k in self.timed_network]
for k in K:
commodity_network = self.timed_network[k].copy()
for a1,a2,d in commodity_network.edges_data():
# holding arcs
if a1[0] == a2[0]:
d['weight'] = 0
continue
# dispatch arcs
if 'SOLUTION_K' not in consolidation_network.edge_data(a1, a2):
consolidation_network.edge_data(a1, a2)['SOLUTION_K'] = set()
remaining_quantity = sum([self.commodities[sk].q for sk in consolidation_network.edge_data(a1, a2)['SOLUTION_K']]) % self.network.edge_data(a1[0], a2[0])['capacity']
consolidation_quantity = (self.commodities[k].q + remaining_quantity) / self.network.edge_data(a1[0], a2[0])['capacity']
consolidation_cost = self.network.edge_data(a1[0], a2[0])['fixed_cost'] * (ceil(consolidation_quantity) - (1 if remaining_quantity > 0 else 0))
d['weight'] = self.problem.var_cost[k].get((a1[0],a2[0]), 0.0) * self.commodities[k].q + consolidation_cost + 0.001 # small constant to prevent cycles
## update consolidation network cost
#for a1,a2,d in consolidation_network.edges(data=True):
# if 'SOLUTION_K' not in d:
# d['SOLUTION_K'] = set()
# remaining_quantity = sum([self.commodities[sk]['q'] for sk in d['SOLUTION_K']]) % self.network[a1[0]][a2[0]]['capacity']
# consolidation_quantity = (self.commodities[k]['q'] + remaining_quantity) / self.network[a1[0]][a2[0]]['capacity']
# consolidation_cost = self.network[a1[0]][a2[0]]['fixed_cost'] * (ceil(consolidation_quantity) - (1 if remaining_quantity > 0 else 0))
# d['weight'] = self.network[a1[0]][a2[0]]['var_cost'] * self.commodities[k]['q'] + consolidation_cost
# find cheapest path
path = nx.shortest_path(commodity_network.G, source=self.origin_destination[k].source, target=self.origin_destination[k].target, weight='weight')
# update consolidation network solution
for arc in pairwise(path):
if arc[0][0] != arc[1][0]:
consolidation_network.edge_data(arc[0], arc[1])['SOLUTION_K'].add(k)
path_graphs[k].add_edge(arc[0][0], arc[1][0])