@@ -304,7 +304,7 @@ def test_page_rank():
         "7": 0.14074777909144864,
         "8": 0.11786468661230831,
     }
-    assert actual.get_all_with_names() == expected
+    assert actual == expected


 def test_temporal_reachability():
@@ -326,7 +326,7 @@ def test_temporal_reachability():
         "8": [],
     }

-    assert actual.get_all_with_names() == expected
+    assert actual == expected


 def test_degree_centrality():
@@ -339,7 +339,7 @@ def test_degree_centrality():
     g.add_edge(0, 1, 4, {})
     g.add_edge(0, 2, 3, {})
     g.add_edge(0, 2, 4, {})
-    assert degree_centrality(g).get_all_with_names() == {
+    assert degree_centrality(g) == {
         "1": 1.0,
         "2": 1.0,
         "3": 2 / 3,
@@ -374,17 +374,15 @@ def test_single_source_shortest_path():
     g.add_edge(0, 2, 4, {})
     res_one = single_source_shortest_path(g, 1, 1)
     res_two = single_source_shortest_path(g, 1, 2)
-    assert res_one.get_all_with_names() == {
+    assert res_one == {
         "1": ["1"],
         "2": ["1", "2"],
         "4": ["1", "4"],
     }
     assert (
-        res_two.get_all_with_names()
-        == {"1": ["1"], "2": ["1", "2"], "3": ["1", "2", "3"], "4": ["1", "4"]}
+        res_two == {"1": ["1"], "2": ["1", "2"], "3": ["1", "2", "3"], "4": ["1", "4"]}
     ) or (
-        res_two.get_all_with_names()
-        == {"1": ["1"], "3": ["1", "4", "3"], "2": ["1", "2"], "4": ["1", "4"]}
+        res_two == {"1": ["1"], "3": ["1", "4", "3"], "2": ["1", "2"], "4": ["1", "4"]}
     )

@@ -404,19 +402,19 @@ def test_dijsktra_shortest_paths():
     res_one = dijkstra_single_source_shortest_paths(g, "A", ["F"])
     res_two = dijkstra_single_source_shortest_paths(g, "B", ["D", "E", "F"])
     assert res_one.get("F")[0] == 8.0
-    assert res_one.get("F")[1] == ["A", "C", "E", "F"]
+    assert res_one.get("F")[1].name == ["A", "C", "E", "F"]
     assert res_two.get("D")[0] == 5.0
     assert res_two.get("F")[0] == 6.0
-    assert res_two.get("D")[1] == ["B", "C", "D"]
-    assert res_two.get("F")[1] == ["B", "C", "E", "F"]
+    assert res_two.get("D")[1].name == ["B", "C", "D"]
+    assert res_two.get("F")[1].name == ["B", "C", "E", "F"]

-    with pytest.raises(ValueError) as excinfo:
+    with pytest.raises(Exception) as excinfo:
         dijkstra_single_source_shortest_paths(g, "HH", ["F"])
-    assert "Source node not found" in str(excinfo.value)
+    assert "Node HH does not exist" in str(excinfo.value)

-    with pytest.raises(ValueError) as excinfo:
+    with pytest.raises(Exception) as excinfo:
         dijkstra_single_source_shortest_paths(g, "A", ["F"], weight="NO")
-    assert "Weight property not found on edges" in str(excinfo.value)
+    assert "Property NO does not exist" in str(excinfo.value)


 def test_betweenness_centrality():
@@ -442,7 +440,7 @@ def test_betweenness_centrality():
         g.add_edge(0, e[0], e[1], {})

     res = betweenness_centrality(g, normalized=False)
-    assert res.get_all_with_names() == {
+    assert res == {
         "0": 0.0,
         "1": 1.0,
         "2": 4.0,
@@ -452,7 +450,7 @@ def test_betweenness_centrality():
     }

     res = betweenness_centrality(g, normalized=True)
-    assert res.get_all_with_names() == {
+    assert res == {
         "0": 0.0,
         "1": 0.05,
         "2": 0.2,
@@ -484,13 +482,13 @@ def test_balance_algorithm():
     ]
     for src, dst, val, time in edges_str:
         g.add_edge(time, src, dst, {"value_dec": val})
-    result = algorithms.balance(g, "value_dec", "both", None).get_all_with_names()
+    result = algorithms.balance(g, "value_dec", "both")
     assert result == {"1": -26.0, "2": 7.0, "3": 12.0, "4": 5.0, "5": 2.0}

-    result = algorithms.balance(g, "value_dec", "in", None).get_all_with_names()
+    result = algorithms.balance(g, "value_dec", "in")
     assert result == {"1": 6.0, "2": 12.0, "3": 15.0, "4": 20.0, "5": 2.0}

-    result = algorithms.balance(g, "value_dec", "out", None).get_all_with_names()
+    result = algorithms.balance(g, "value_dec", "out")
     assert result == {"1": -32.0, "2": -5.0, "3": -3.0, "4": -15.0, "5": 0.0}

@@ -530,13 +528,11 @@ def test_temporal_SEIR():
     g.add_edge(4, 4, 5)
     # Should be seeded with 2 vertices
     res = algorithms.temporal_SEIR(g, 2, 1.0, 0, rng_seed=1)
-    seeded = [v for v in res.get_all_values() if v.infected == 0]
+    seeded = [v for v in res.values() if v.infected == 0]
    assert len(seeded) == 2

-    res = algorithms.temporal_SEIR(g, [1], 1.0, 0, rng_seed=1).sort_by_value(
-        reverse=False
-    )
-    for i, (n, v) in enumerate(res):
+    res = algorithms.temporal_SEIR(g, [1], 1.0, 0, rng_seed=1).sorted(reverse=False)
+    for i, (n, v) in enumerate(res.items()):
         assert n == g.node(i + 1)
         assert v.infected == i

@@ -584,7 +580,7 @@ def test_fast_rp():
     for src, dst, ts in edges:
         g.add_edge(ts, src, dst)

-    result = algorithms.fast_rp(g, 16, 1.0, [1.0, 1.0], 42).get_all_with_names()
+    result = algorithms.fast_rp(g, 16, 1.0, [1.0, 1.0], 42)
     baseline = {
         "7": [
             0.0,