# metric_calculator.py

import networkx as nx
import graph_tool.all as gt
import redis as rd
import numpy as np
import indexing
import statistics
import normalizations
import config
import datetime as dt


class MetricCalculator(object):
  def __init__(self, graph, graph_gt):
    # class constructor
    # define required class variables such as the graph to work on, the redis connection and the nodes of the graph

    print ('Starting metric_calculator!')
    self.graph                = graph
    self.graph_gt             = graph_gt
    self.redis                = rd.StrictRedis(host=config.REDIS_HOST, port=config.REDIS_PORT, db=1)
    self.nodes                = nx.nodes(graph)


    # configuration variables are read from the config file and are also saved to class variables for easy access
    self.node_index_key       = config.node_index_key
    self.metric_index_key     = config.metric_index_key
    self.score_index_key      = config.score_index_key
    
    self.node_neighbors_prefix = config.node_neighbors_prefix
    self.node_prefix           = config.node_prefix
    self.metric_prefix         = config.metric_prefix
    self.score_prefix          = config.score_prefix
    self.statistics_prefix     = config.statistics_prefix

    self.normalization_suffix  = config.normalization_suffix

    self.base_metrics          = config.base_metrics
    self.advanced_metrics      = config.advanced_metrics

    self.normalization_methods = config.normalization_methods

    self.scores                = config.scores
    self.advanced_scores       = config.advanced_scores
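
    # Illustrative assumption of what these config-driven dictionaries contain
    # (the authoritative definitions live in config.py):
    #   base_metrics / advanced_metrics : {metric_name: calculation_function(self, node)}
    #   normalization_methods           : {metric_name: normalization_function(self, metric_name)}
    #   scores                          : {score_name: {metric_name: weight}}
    #   advanced_scores                 : {score_name: calculation_function(self)}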


    
  def start(self):
    #clean all data in Redis
    self.redis.flushdb()
    
    #index creation
    self.create_indexes()
    

    #main calculations
    self.calculate_metrics()
    self.calculate_advanced_metrics()
    self.normalize_metrics()
    self.calculate_scores()
    self.calculate_advanced_scores()

    #statistics
    self.calculate_statistics()

##################
#### INDEXING ####
##################
  def create_indexes(self):
    #call methods defined in indexing.py
    indexing.index_nodes(self)
    indexing.index_neighbors(self)
    indexing.index_metrics(self)
    indexing.index_scores(self)

###########################
#### CALCULATION LOOPS ####
###########################
  
  def calculate_metrics(self):
    # loop through all defined metrics and call specified calculation method for each node
    print ('Starting calculate_metrics')
    for metric_name in self.base_metrics:
      metric_method = self.base_metrics[metric_name]

      # loop through all nodes
      for node in self.nodes:
        # call calculation method of supplied metric for current node
        node = int(node)
        value = float(metric_method(self, node))

        # store result in node values
        self.redis.hset(self.node_prefix+str(node), metric_name, value)

        # also store result to metric set
        self.redis.zadd(self.metric_prefix+metric_name, value, str(node))
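
  # Resulting Redis layout after calculate_metrics (sketch; assuming the
  # configured prefixes are e.g. 'node:' and 'metric:'):
  #   node:<id>     -> hash of raw metric values per node
  #   metric:<name> -> sorted set ranking all node ids by that metric
  # Note: zadd is called with the redis-py 2.x StrictRedis argument order
  # (score first, then member); redis-py >= 3.0 expects a {member: score} mapping.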

  
  def calculate_advanced_metrics(self):
    # loop through all defined advanced metrics and call the specified calculation method for each node
    print ('Starting calculate_advanced_metrics')
    for advanced_metric_name in self.advanced_metrics:
      metric_method = self.advanced_metrics[advanced_metric_name]

      # loop through all nodes
      for node in self.nodes:
        node = int(node)
        value = float(metric_method(self, node))

        #store result in node values
        self.redis.hset(self.node_prefix+str(node), advanced_metric_name, value)

        #also store result to metric set
        self.redis.zadd(self.metric_prefix+advanced_metric_name, value, str(node))


  # loop through all defined normalizations and call the respective normalization method
  # metrics not listed in the "normalization_methods" hash fall back to min-max normalization
  def normalize_metrics(self):
    print ('Starting normalize_metrics')
    all_metrics = dict(self.base_metrics)
    all_metrics.update(self.advanced_metrics)

    for metric_name in all_metrics:
      if metric_name in self.normalization_methods:
        normalization_method = self.normalization_methods[metric_name]
      else:
        # fallback normalization is min-max
        normalization_method = normalizations.min_max
      normalization_method(self, metric_name)
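
  # The min-max fallback presumably rescales each metric into the [0, 1] range:
  #   normalized = (value - min) / (max - min)
  # and stores the result under <metric_name> + normalization_suffix, which is
  # what calculate_scores below reads back; see normalizations.py for the
  # actual implementation.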
    

  def calculate_scores(self):
    print ('Starting calculate_scores')  
    for score_name in self.scores:
      metrics_with_weights = self.scores[score_name]

      for node in self.nodes:
        score_value = 0.0

        # get normalized values
        for metric in metrics_with_weights:
          weight = self.scores[score_name][metric]
          value = float(self.redis.hget(self.node_prefix+str(node), metric+self.normalization_suffix))
          score_value += weight * value

        self.redis.hset(self.node_prefix+str(node), score_name, score_value)
        self.redis.zadd(self.score_prefix+score_name, score_value, str(node))
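
  # Worked example (hypothetical names, weights and values): with
  #   scores['example_score'] = {'degree': 0.5, 'betweenness': 0.5}
  # and normalized values 0.8 (degree) and 0.4 (betweenness) for a node, the
  # stored score is 0.5 * 0.8 + 0.5 * 0.4 = 0.6.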

  def calculate_advanced_scores(self):
    print ('Starting calculate_advanced_scores')       
    for advanced_score in self.advanced_scores:
      self.advanced_scores[advanced_score](self)   


####################
#### STATISTICS ####
####################
  
  def calculate_statistics(self):
    print ('Starting calculate_statistics')
    for metric in self.base_metrics:
      #absolute and normalized
      statistics.calculate_statistics(self, metric, self.metric_prefix+metric)
      statistics.calculate_statistics(self, metric+self.normalization_suffix, self.metric_prefix+metric+self.normalization_suffix)

    for advanced_metric in self.advanced_metrics:
      #absolute and normalized
      statistics.calculate_statistics(self, advanced_metric, self.metric_prefix+advanced_metric)
      statistics.calculate_statistics(self, advanced_metric+self.normalization_suffix, self.metric_prefix+advanced_metric+self.normalization_suffix)

    for score in self.scores:
      statistics.calculate_statistics(self, score, self.score_prefix+score)

    for advanced_score in self.advanced_scores:
      statistics.calculate_statistics(self, advanced_score, self.score_prefix+advanced_score)

    statistics.calculate_correlations(self)
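

# -----------------------------------------------------------------------------
# Usage sketch (illustrative only; the project's actual entry point is start.py
# and the graphs are normally built by file_importer.py). Running it requires a
# reachable Redis instance as configured in config.py, and note that start()
# flushes the selected Redis database. The empty graph_tool graph below is a
# placeholder that is only adequate for metrics which ignore the graph_tool
# representation.
# -----------------------------------------------------------------------------
# if __name__ == '__main__':
#   toy_graph    = nx.barabasi_albert_graph(100, 2)  # small random test graph
#   toy_graph_gt = gt.Graph()                        # placeholder graph_tool graph
#   MetricCalculator(toy_graph, toy_graph_gt).start()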


