|
24 | 24 | -->
|
25 | 25 | <!DOCTYPE html>
|
26 | 26 | <html>
|
27 |
| -<body> |
28 | 27 |
|
| 28 | +<style> |
| 29 | +.frac { |
| 30 | + display: inline-block; |
| 31 | + position: relative; |
| 32 | + vertical-align: middle; |
| 33 | + letter-spacing: 0.001em; |
| 34 | + text-align: center; |
| 35 | +} |
| 36 | +.frac > span { |
| 37 | + display: block; |
| 38 | + padding-top: 0.01em; |
| 39 | + padding-bottom: 0.01em; |
| 40 | +} |
| 41 | +.frac span.bottom { |
| 42 | + border-top: thin solid white; |
| 43 | + padding-top: 0.4em; |
| 44 | + padding-bottom: 0.3em; |
| 45 | +} |
| 46 | +.frac span.symbol { display: none; } |
| 47 | +</style> |
| 48 | + |
| 49 | +<body> |
29 | 50 |
|
30 | 51 | <table align="center" border="1">
|
31 | 52 | <tr bgcolor="lightblue">
|
|
36 | 57 | <PRE><B>
|
37 | 58 | enum class loss_function_type : unsigned char {
|
38 | 59 | // P = Probability(Actual), Q = Probability(Model)
|
39 |
| - kullback_leibler = 1, // L = ∑[P(x) * log(P(x) / Q(x))] |
| 60 | + kullback_leibler = 1, // L = ∑[P(x) * log <div class="frac"> <span>P(x)</span> <span class="symbol">/</span> <span class="bottom">Q(x)</span></div>] |
40 | 61 |
|
41 | 62 | // y = Actual, ŷ = Model
|
42 |
| - mean_abs_error = 2, // L = ∑[|y<sub>i</sub> - ŷ<sub>i</sub>|] / N |
| 63 | + mean_abs_error = 2, // L = <div class="frac"> <span>∑|y<sub>i</sub> - ŷ<sub>i</sub>|</span> <span class="symbol">/</span> <span class="bottom">N</span> </div> |
43 | 64 |
|
44 | 65 | // y = Actual, ŷ = Model
|
45 |
| - mean_sqr_error = 3, // L = ∑[(y<sub>i</sub> - ŷ<sub>i</sub>)<sup>2</sup>] / N |
| 66 | + mean_sqr_error = 3, // L = <div class="frac"> <span>∑(y<sub>i</sub> - ŷ<sub>i</sub>)<sup>2</sup></span> <span class="symbol">/</span> <span class="bottom">N</span> </div> |
46 | 67 |
|
47 | 68 | // y = Actual, ŷ = Model
|
48 |
| - mean_sqr_log_error = 4, // L = ∑[(log(1 + y<sub>i</sub>) - log(1 + ŷ<sub>i</sub>))<sup>2</sup>] / N |
| 69 | + mean_sqr_log_error = 4, // L = <div class="frac"> <span>∑[(log(1 + y<sub>i</sub>) - log(1 + ŷ<sub>i</sub>))<sup>2</sup>]</span> <span class="symbol">/</span> <span class="bottom">N</span> </div> |
49 | 70 |
|
50 | 71 | // y = Actual, P(y<sub>i</sub>) = Model probability prediction
|
51 |
| - cross_entropy = 5, // L = -∑[y<ub>i</sub> * log(P(y<sub>i</sub>))] / N |
| 72 | + cross_entropy = 5, // L = <div class="frac"> <span>-∑[y<sub>i</sub> * log(P(y<sub>i</sub>))]</span> <span class="symbol">/</span> <span class="bottom">N</span> </div> |
52 | 73 |
|
53 | 74 | // y = Actual binary (0/1), P(y<sub>i</sub>) = Model probability prediction
|
54 |
| - binary_cross_entropy = 6, // L = ∑[-(y<ub>i</sub> * log(P(y<sub>i</sub>))) + (1 - y<sub>i</sub>) * log(1 - P(y<sub>i</sub>))] / N |
| 75 | + binary_cross_entropy = 6, // L = <div class="frac"> <span>-∑[(y<sub>i</sub> * log(P(y<sub>i</sub>))) + (1 - y<sub>i</sub>) * log(1 - P(y<sub>i</sub>))]</span> <span class="symbol">/</span> <span class="bottom">N</span> </div> |
55 | 76 |
|
56 | 77 | // y = Actual, ŷ = Model
|
57 | 78 | categorical_hinge = 7, // L = max[∑[(1 - y<sub>i</sub>) * ŷ<sub>i</sub>] - ∑[y<sub>i</sub> * ŷ<sub>i</sub>] + 1, 0]
|
58 | 79 |
|
59 | 80 | // Y = Actual, Ŷ = Model
|
60 |
| - cosine_similarity = 8, // L = (Y . Ŷ) / (||Y|| * ||Ŷ||) |
| 81 | + cosine_similarity = 8, // L = <div class="frac"> <span>Y . Ŷ</span> <span class="symbol">/</span> <span class="bottom">||Y|| * ||Ŷ||</span> </div> |
61 | 82 |
|
62 | 83 | // y = Actual, ŷ = Model
|
63 |
| - log_cosh = 9, // L = ∑[log(cosh(ŷ<sub>i</sub> - y<sub>i</sub>))] / N |
| 84 | + log_cosh = 9, // L = <div class="frac"> <span>∑log(cosh(ŷ<sub>i</sub> - y<sub>i</sub>))</span> <span class="symbol">/</span> <span class="bottom">N</span> </div> |
64 | 85 | };</B></PRE> </font>
|
65 | 86 | </td>
|
66 | 87 | <td>
|
|
0 commit comments