# -*- coding: utf-8 -*-
# This file is part of Xpra.
# Copyright (C) 2012-2021 Antoine Martin <antoine@xpra.org>
# Xpra is released under the terms of the GNU GPL v2, or, at your option, any
# later version. See the file COPYING for details.

#cython: boundscheck=False, wraparound=False

from time import monotonic
from math import sqrt

cdef extern from "math.h":
    double log(double x)

#1.4426950408889634 is 1/ln(2), so this computes log2(1+x):
#a slow-growing curve used as a smoothing function
def logp(double x):
    return log(1.0+x)*1.4426950408889634

#C-level inline version of logp, without the Python call overhead:
cdef inline double clogp(double x):
    return log(1.0+x)*1.4426950408889634
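
#For example (illustrative values only):
#  logp(0.0)==0.0, logp(1.0)==1.0, logp(3.0)==2.0
#so a "neutral" input of 1.0 maps to exactly 1.0,
#and larger deviations are compressed logarithmically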

#pretty-print the smoothing function names for "info" dictionaries:
SMOOTHING_NAMES = {sqrt: "sqrt", logp: "logp"}
def smn(fn):
    return str(SMOOTHING_NAMES.get(fn, fn))


def calculate_time_weighted_average(data):
    """
        Given a list of items of the form [(event_time, value)],
        this method calculates a time-weighted average where
        recent values matter a lot more than older ones.
        Returns a tuple of two averages:
        the first decays slowly with age, the second one quickly.
    """
    assert len(data)>0
    cdef double now = monotonic()
    cdef double tv = 0.0
    cdef double tw = 0.0
    cdef double rv = 0.0
    cdef double rw = 0.0
    cdef double event_time
    cdef double value
    cdef double delta
    cdef double w
    for event_time, value in data:
        #newer values matter more:
        delta = now-event_time
        w = 1.0/(1.0+delta)
        tv += value*w
        tw += w
        #the "recent" average decays with the square of the age:
        w = 1.0/(0.1+delta**2)
        rv += value*w
        rw += w
    return tv / tw, rv / rw
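
#Illustrative usage (hypothetical sample data):
#  now = monotonic()
#  data = [(now-10, 100), (now-1, 10)]
#  avg, recent = calculate_time_weighted_average(data)
#  #both averages favour the newer sample,
#  #but "recent" does so far more aggressively than "avg"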

def time_weighted_average(data, double min_offset=0.1, double rpow=2.0):
    """
        Given a list of items of the form [(event_time, value)],
        this method calculates a time-weighted average where
        recent values matter a lot more than older ones.
        The time offset is raised to the power "rpow"
        (which defaults to 2, meaning we square it):
        the higher the power, the faster older values fade out.
    """
    assert len(data)>0
    cdef double now = monotonic()
    cdef double tv = 0.0
    cdef double tw = 0.0
    cdef double w
    cdef double delta
    for event_time, value in data:
        delta = now-event_time
        assert delta>=0, "invalid event_time=%s, now=%s, delta=%s" % (event_time, now, delta)
        w = 1.0/(min_offset+delta**rpow)
        tv += value*w
        tw += w
    return tv / tw
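
#Illustrative usage (hypothetical sample data):
#  now = monotonic()
#  data = [(now-5, 100), (now-1, 50)]
#  time_weighted_average(data)            #squares the age: result stays close to 50
#  time_weighted_average(data, rpow=1.0)  #linear decay: the older sample weighs more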

def calculate_timesize_weighted_average_score(data):
    """
        This is a time-weighted average where the size
        of each record also gives it a weight boost.
        This is to prevent small packets from skewing the average.
        Data format: (event_time, size, value)
    """
    cdef double size_avg = sum(x for _, x, _ in data)/len(data)
    cdef double now = monotonic()
    cdef double tv = 0.0
    cdef double tw = 0.0
    cdef double rv = 0.0
    cdef double rw = 0.0
    cdef double event_time
    cdef int size
    cdef int value
    cdef double pw
    cdef double w
    cdef double delta
    for event_time, size, value in data:
        if value<0:
            continue        #invalid record
        delta = now-event_time
        #give records with an above-average size a logarithmic weight boost:
        pw = clogp(size/size_avg)
        w = pw/(1.0+delta)*size
        tv += w*value
        tw += w
        w = pw/(0.1+delta**2)*size
        rv += w*value
        rw += w
    return int(tv / tw), int(rv / rw)
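
#Illustrative usage (hypothetical (event_time, size, value) records):
#  now = monotonic()
#  data = [(now-2, 100000, 10), (now-1, 1000, 50)]
#  calculate_timesize_weighted_average_score(data)
#  #the large 100000-byte record dominates both averages,
#  #despite being older than the small one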

def calculate_timesize_weighted_average(data, float unit=1.0):
    #the value of each record is an elapsed time,
    #so we invert it (scaled by "unit") to average a rate instead:
    recs = tuple((a,b,unit/c) for a,b,c in data)
    return calculate_size_weighted_average(recs)
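
#Illustrative usage (hypothetical (event_time, size, elapsed_time) records):
#  now = monotonic()
#  data = [(now-1, 1000, 0.1), (now-2, 2000, 0.5)]
#  calculate_timesize_weighted_average(data)
#  #averages the inverted values 1/0.1=10 and 1/0.5=2,
#  #weighted by both recency and record size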

def calculate_size_weighted_average(data):
    """
        This is a time-weighted average where the size
        of each record also gives it a weight boost.
        This is to prevent small packets from skewing the average.
        Data format: (event_time, size, value)
    """
    cdef double size_avg = sum(x for _, x, _ in data)/len(data)
    if size_avg<=0:
        size_avg = 1
    cdef double now = monotonic()
    cdef double tv = 0.0
    cdef double tw = 0.0
    cdef double rv = 0.0
    cdef double rw = 0.0
    cdef double event_time
    cdef double size
    cdef double size_ps
    cdef double value
    cdef double pw
    cdef double w
    cdef double delta
    for event_time, size, value in data:
        if value<=0:
            continue        #invalid record
        delta = now-event_time
        #give records with an above-average size a logarithmic weight boost:
        pw = clogp(size/size_avg)
        size_ps = max(1, size*value)
        w = pw/(1.0+delta)
        tv += w*size_ps
        tw += w*size
        w = pw/(0.1+delta**2)
        rv += w*size_ps
        rw += w*size
    if tw<=0:
        tw = 1
    if rw<=0:
        rw = 1
    return tv / tw, rv / rw
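
#Illustrative usage (hypothetical (event_time, size, value) records):
#  now = monotonic()
#  data = [(now-3, 100, 5.0), (now-1, 10000, 8.0)]
#  avg, recent = calculate_size_weighted_average(data)
#  #both results are pulled towards 8.0 by the much larger record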

def calculate_for_target(metric, float target_value, float avg_value, float recent_value,
                         float aim=0.5, float div=1.0, float slope=0.1,
                         smoothing=logp, float weight_multiplier=1.0):
    """
        Calculates the factor and weight used to try to bring us closer to 'target_value'.

        The factor is a function of how far the 'recent_value' is from the target,
        and of how things are progressing (better or worse than average).
        'aim' controls the proportion of each: at 0.5 it is an average of both,
        the closer to 0 the more the target matters,
        the closer to 1.0 the more the average matters.
    """
    assert aim>0.0 and aim<1.0
    #target factor: how far are we from the 'target':
    cdef double d = div
    cdef double target_factor = (recent_value/d)/(slope+target_value/d)
    #average factor: how far are we from the 'average':
    cdef double avg_factor = (recent_value/d)/(slope+avg_value/d)
    #aimed average: combine the two factors above using the 'aim' weight distribution:
    cdef double aimed_average = target_factor*(1.0-aim) + avg_factor*aim
    factor = smoothing(aimed_average)
    #the weight grows with the distance of the factor from 1.0 (the "no change" point):
    weight = smoothing(max(0.0, 1.0-factor, factor-1.0)) * weight_multiplier
    info = {"avg"       : int(1000.0*avg_value),
            "recent"    : int(1000.0*recent_value),
            "target"    : int(1000.0*target_value),
            "aim"       : int(1000.0*aim),
            "aimed_avg" : int(1000.0*aimed_average),
            "div"       : int(1000.0*div),
            "smoothing" : smn(smoothing),
            "weight_multiplier" : int(1000.0*weight_multiplier),
            }
    return metric, info, factor, weight
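
#Illustrative usage (hypothetical latency metric, values in seconds):
#  metric, info, factor, weight = calculate_for_target("latency",
#      target_value=0.1, avg_value=0.5, recent_value=1.0)
#  #the recent value is well above both the target and the average,
#  #so factor>1 and the weight is non-negligible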

def calculate_for_average(metric, float avg_value, float recent_value,
                          float div=1.0, float weight_offset=0.5, float weight_div=1.0):
    """
        Calculates the factor and weight based on how far we are from the average value.
        This is used for metrics where we do not know the optimal target value.
    """
    cdef double avg = avg_value/div
    cdef double recent = recent_value/div
    cdef double factor = clogp(recent/avg)
    #the weight measures how far the factor strays from 1.0, in either direction:
    cdef double weight = max(0, max(factor, 1.0/factor)-1.0+weight_offset)/weight_div
    info = {"avg"   : int(1000.0*avg),
            "recent": int(1000.0*recent)}
    return metric, info, float(factor), float(weight)
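
#Illustrative usage (hypothetical metric name and values):
#  metric, info, factor, weight = calculate_for_average("damage-latency", 0.5, 2.0)
#  #the recent value is 4 times the average:
#  #factor = log2(1+4) > 1, and the weight reflects that deviation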

def queue_inspect(metric, time_values, float target=1.0, float div=1.0, smoothing=logp):
    """
        Given a historical list of (event_time, value) pairs for a queue size,
        figure out if things are getting better or worse than before.
    """
    if len(time_values)==0:
        return metric, {}, 1.0, 0.0
    avg, recent = calculate_time_weighted_average(tuple(time_values))
    weight_multiplier = sqrt(max(avg, recent) / div / target)
    return calculate_for_target(metric, target, avg, recent, aim=0.25, div=div,
                                slope=1.0, smoothing=smoothing, weight_multiplier=weight_multiplier)
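
#Illustrative usage (hypothetical queue size history):
#  now = monotonic()
#  history = [(now-2, 5), (now-1, 10)]
#  queue_inspect("damage-packet-queue", history, target=1.0)
#  #the queue is growing well past the target size of 1,
#  #so the returned factor is above 1.0 with a boosted weight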