"""Central configuration for the graph-analysis pipeline.

Defines the Redis key layout, the registries mapping metric / normalization /
score names to their implementing callables, and the Redis connection
settings. This module has no logic of its own; other components import it to
discover what to compute and where to store results.
"""

import metrics
import normalizations
import advancedscores

# Redis keys for indexes and values.
node_index_key = 'all_nodes'
metric_index_key = 'all_metrics'
score_index_key = 'all_scores'

node_neighbors_prefix = 'node_neighbors:'
node_prefix = 'node_metrics:'
metric_prefix = 'metric:'
score_prefix = 'score:'
statistics_prefix = 'statistics:'

# Appended to a metric name to form the Redis field for its normalized value.
normalization_suffix = '_normalized'

# Definition of all base metrics for which absolute values will be calculated
# for each node in the first step.
# Key is the name of the metric and value is the implemented method which
# exposes the required interface.
# Interface: each method takes the node as the single parameter, performs the
# necessary calculation and returns a float containing the value for the
# specified node.
base_metrics = {
    'clustering_coefficient': metrics.clustering_coefficient,
    'degree': metrics.degree,
    'average_neighbor_degree': metrics.average_neighbor_degree,
    'iterated_average_neighbor_degree': metrics.iterated_average_neighbor_degree,
    'betweenness_centrality': metrics.betweenness_centrality,
    'eccentricity': metrics.eccentricity,
    'average_shortest_path_length': metrics.average_shortest_path_length,
}

# Some metrics might require corrections or post-processing which relies on
# the value of other metrics or normalizations.
# Key is the metric name and value the method for correction.
advanced_metrics = {
    'corrected_clustering_coefficient': metrics.correct_clustering_coefficient,
    'corrected_average_neighbor_degree': metrics.correct_average_neighbor_degree,
    'corrected_iterated_average_neighbor_degree': metrics.correct_iterated_average_neighbor_degree,
}

# For every metric, a normalization method has to be specified.
# Key is the name of the metric and value is the normalization method which
# also has to expose the required interface.
# Interface: normalization methods take the name of the (absolute) metric as
# the single argument; no return value is required.
# The method itself shall access the data which is required for normalization
# from the Redis instance and the corresponding keys/values for the specified
# metric. It shall then loop over all nodes and calculate the normalized value
# for the node and the metric. Afterwards it should save the result to Redis
# using "metric_name_normalized" as the key; the result is stored inside the
# node's hash for metrics.
#
# This mapping also needs to include corrected metrics with their respective
# names.
normalization_methods = {
    'clustering_coefficient': normalizations.min_max,
    'corrected_clustering_coefficient': normalizations.min_max,
    'degree': normalizations.min_max,
    'average_neighbor_degree': normalizations.min_max,
    'corrected_average_neighbor_degree': normalizations.min_max,
    'iterated_average_neighbor_degree': normalizations.min_max,
    'corrected_iterated_average_neighbor_degree': normalizations.min_max,
    'betweenness_centrality': normalizations.min_max,
    # For eccentricity and path length, smaller is better, so the inverted
    # normalization is used.
    'eccentricity': normalizations.max_min,
    'average_shortest_path_length': normalizations.max_min,
}

# The easiest case for a score is a combination of normalized metric values
# with weights which add up to 1; such scores can easily be defined here.
# Note: names are not methods but Redis keys.
scores = {
    'unified_risk_score': {
        'degree': 0.25,
        'corrected_average_neighbor_degree': 0.15,
        'corrected_iterated_average_neighbor_degree': 0.1,
        'betweenness_centrality': 0.25,
        'eccentricity': 0.125,
        'average_shortest_path_length': 0.125,
    },
}

# Other scores might require a more sophisticated algorithm to be calculated.
# Such scores need to be added here and implemented like the example below.
advanced_scores = {'advanced_unified_risk_score': advancedscores.adv_unified_risk_score}

# Redis connection settings.
REDIS_PORT = 6379
REDIS_HOST = 'redis'