# config.py (blob b160ebc40fe35a499a9335db937985b806035b46, 4854 bytes, mode 100644)

#config.py
import metrics
import normalizations
import advancedscores

# Redis key layout used throughout the application.

# Index sets listing every known node, metric and score name.
node_index_key = 'all_nodes'
metric_index_key = 'all_metrics'
score_index_key = 'all_scores'

# Prefixes for per-node and per-metric hashes.
node_neighbors_prefix = 'node_neighbors:'
node_prefix = 'node_metrics:'
metric_prefix = 'metric:'
score_prefix = 'score:'
statistics_prefix = 'statistics:'

# Appended to a metric name to form the key of its normalized variant.
normalization_suffix = '_normalized'

# definition of all base metrics for which absolute values will be calculated for each node in the first step
# key is the name of the metric and value is the implemented method which exposes the required interface
# interface: each method takes the node as the single parameter, performs the necessary calculation and
# returns a float containing the value for the specified node

# Maps each base metric name to the function that computes it.
# Each function takes a node as its single argument and returns a float.
base_metrics = dict(
    clustering_coefficient=metrics.clustering_coefficient,
    degree=metrics.degree,
    average_neighbor_degree=metrics.average_neighbor_degree,
    iterated_average_neighbor_degree=metrics.iterated_average_neighbor_degree,
    # betweenness_centrality=metrics.betweenness_centrality,
    betweenness_centrality_gt=metrics.betweenness_centrality_gt,
    # eccentricity=metrics.eccentricity,
    average_shortest_path_length=metrics.average_shortest_path_length,
)


# some metrics might require some corrections or post processing which relies on the value of other metrics or normalizations
# key is the metric name and value the method for correction

# Post-processing corrections keyed by the corrected metric's name;
# each value is the correction function from the metrics module.
advanced_metrics = dict(
    corrected_clustering_coefficient=metrics.correct_clustering_coefficient,
    corrected_average_neighbor_degree=metrics.correct_average_neighbor_degree,
    corrected_iterated_average_neighbor_degree=metrics.correct_iterated_average_neighbor_degree,
)


# for every metric, a normalization method has to be specified
# key is the name of the metric and value is the normalization method which also has to expose the required interface
# interface: normalization methods take the name of the (absolute) metric as the single argument; no return value is required
# the method itself shall access the data which is required for normalization from the redis instance
# and the corresponding keys/values for the specified metric
# it shall then loop over all nodes and calculate the normalized value for the node and the metric
# afterwards it should save the result to redis using "metric_name_normalized" as the key
# the result is stored inside the node's hash for metrics

# corrected metrics must also be listed here under their respective (corrected) names
# Normalization method per metric (including the corrected variants).
# min_max maps larger raw values to larger scores; max_min inverts the scale.
normalization_methods = dict(
    clustering_coefficient=normalizations.min_max,
    corrected_clustering_coefficient=normalizations.min_max,
    degree=normalizations.min_max,
    average_neighbor_degree=normalizations.min_max,
    corrected_average_neighbor_degree=normalizations.min_max,
    iterated_average_neighbor_degree=normalizations.min_max,
    corrected_iterated_average_neighbor_degree=normalizations.min_max,
    # betweenness_centrality=normalizations.min_max,
    betweenness_centrality_gt=normalizations.min_max,
    # eccentricity=normalizations.max_min,
    average_shortest_path_length=normalizations.max_min,
)


# the easiest case for a score is a combination of normalized metric values with a weight which adds up to 1
# such scores can easily be defined here
# note: names are not methods but redis keys

# Simple weighted scores: each score maps normalized-metric redis keys to
# weights; the weights of the active entries sum to 1.
scores = {
    'unified_risk_score': {
        'degree': 0.25,
        'corrected_average_neighbor_degree': 0.15,
        'corrected_iterated_average_neighbor_degree': 0.1,
        'betweenness_centrality_gt': 0.25,
        # 'eccentricity': 0.125,
        'average_shortest_path_length': 0.25,
    },
}


# other scores might require a more sophisticated algorithm to be calculated
# such scores need to be added here and implemented like the example below

# Scores too complex for a weighted sum; value is the function implementing the score.
advanced_scores = {'advanced_unified_risk_score': advancedscores.adv_unified_risk_score}

# Redis connection settings.
REDIS_HOST = 'redis'  # hostname of the redis service
REDIS_PORT = 6379     # standard redis port
REDIS_DB = 1          # logical database index used by this application


Mode Type Size Ref File
100644 blob 6 0d20b6487c61e7d1bde93acf4a14b7a89083a16d .gitignore
100644 blob 103 924a1df9f7338af770d3cf3d4b0ce2673f10d1b0 README.md
100644 blob 0 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 __init__.py
100644 blob 1256 489636a0073e3dfe2bfd04ee893d609d304a8490 advancedscores.py
100644 blob 4854 b160ebc40fe35a499a9335db937985b806035b46 config.py
040000 tree - 1eae5e19b1eff05e464e361e3f50f3df23f1b754 data
100644 blob 4728 68b7ae967b6cb349e54a32d6d00528321f1825b3 file_importer.py
100644 blob 2132 b09bec79c503afa1126d86335d1372443e207773 graph tool test.py
100644 blob 716 359eb7179fa58d67044228556f7d9c38b5caec85 indexing.py
100644 blob 33232205 7ca601d1ca32abf3244359ce3ad85ea6a1b60010 log
100644 blob 6238 ae4072595be1e113bc28964ec19c6758b1b3ce20 metric_calculator.py
100644 blob 8308 6673fc89a71f9a87f57997fbd96c7ed3c36fb7ff metrics.py
100644 blob 1665 a959a8cc528f486a80a84e2ab233457870d255a1 normalizations.py
100644 blob 1565 32abe33200f0e8dd3bf4973e5956c7ab8545ca4b pearson.py
100644 blob 1696 26df05e3ec9f549013f400a6f5f5df7fdb617c2e start.py
100644 blob 2144 fb03eaa1cd8eb0d6c17b2019fe4c877a32bb7059 statistics.py
Hints:
Before first commit, do not forget to setup your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/coria/coria-backend

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/coria/coria-backend

Clone this repository using git:
git clone git://git.rocketgit.com/user/coria/coria-backend

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main