diff --git a/experiments/test_1/moboqm9_results.csv b/experiments/test_1/moboqm9_results.csv new file mode 100644 index 0000000..7eacac0 --- /dev/null +++ b/experiments/test_1/moboqm9_results.csv @@ -0,0 +1,953 @@ +,iteration_qEHVI,iteration_qNEHVI,iteration_random,gap,mu +0,41.0,,,0.3175,1.4629 +1,0.0,0.0,0.0,0.256,3.2802 +2,,,,0.2505,2.4907 +3,,,,0.2583,2.2947 +4,8.0,,,0.1668,3.0301 +5,,,,0.2752,3.8744 +6,0.0,0.0,0.0,0.2075,2.5868 +7,,,,0.3213,0.618 +8,0.0,0.0,0.0,0.2984,1.9793 +9,,,,0.2004,2.9097 +10,,,,0.2922,1.5842 +11,,,,0.2096,2.8857 +12,,,,0.2334,4.6484 +13,,,,0.1544,6.7416 +14,,,,0.2697,1.3868 +15,,34.0,,0.339,1.339 +16,,10.0,,0.3314,0.2013 +17,,,,0.1753,1.7848 +18,,,,0.1947,1.7661 +19,,,,0.2697,5.2064 +20,,,,0.2015,2.0846 +21,,,,0.2346,2.746 +22,,,,0.2627,3.2337 +23,,,,0.211,3.8613 +24,0.0,0.0,0.0,0.1976,3.6251 +25,,,,0.2439,3.4985 +26,,,,0.269,4.9165 +27,27.0,,,0.3051,1.4563 +28,,,,0.2768,3.2857 +29,,,,0.204,3.4526 +30,,,,0.2657,2.3216 +31,,,,0.2176,1.9071 +32,,,,0.2964,1.5365 +33,,,,0.219,1.4697 +34,0.0,0.0,0.0,0.2464,4.4529 +35,,,,0.2544,1.1823 +36,,,,0.2082,1.3742 +37,0.0,0.0,0.0,0.2479,0.9256 +38,,,,0.2061,2.0972 +39,,,,0.2564,1.0469 +40,,,,0.3293,2.1866 +41,,,,0.2777,3.3684 +42,,,,0.2092,4.3917 +43,,,,0.2295,4.5968 +44,0.0,0.0,0.0,0.2371,1.6613 +45,,,,0.2976,4.129 +46,,,,0.2415,1.7361 +47,,,8.0,0.2234,4.4805 +48,,,,0.2473,1.1852 +49,,,,0.2077,1.3077 +50,,,,0.3369,2.6126 +51,,,,0.2444,3.6032 +52,,,,0.3191,2.5543 +53,,,,0.3101,1.4048 +54,,,,0.2489,3.0214 +55,,,,0.2997,3.0931 +56,,,,0.3235,0.8875 +57,,,,0.2857,3.9892 +58,,,,0.1983,1.8925 +59,,,,0.1584,0.8374 +60,,,,0.2034,1.7235 +61,,3.0,,0.1701,4.6182 +62,,,,0.2798,1.5233 +63,,,,0.232,1.279 +64,,,,0.1952,1.0141 +65,,,,0.2562,1.6793 +66,,,2.0,0.2256,2.2826 +67,,,,0.2216,3.0122 +68,,,,0.2315,3.2703 +69,,,,0.1857,5.2631 +70,,,,0.2066,3.36 +71,,,,0.2077,1.9299 +72,,,,0.227,1.7274 +73,,,,0.2323,4.5593 +74,,,,0.2232,3.8768 +75,31.0,,,0.3291,1.6227 +76,,,,0.2872,5.9647 +77,,,,0.2288,5.0172 +78,0.0,0.0,0.0,0.2478,1.175 +79,,,,0.2241,5.9092 +80,,,,0.2366,2.5612 +81,,,,0.1658,3.665 +82,,31.0,,0.2076,2.791 +83,,,,0.2067,4.7563 +84,,,,0.2504,3.3721 +85,,,,0.2289,1.8629 +86,,,49.0,0.2778,4.5518 +87,,,,0.1825,4.4764 +88,,,,0.2915,2.5313 +89,0.0,0.0,0.0,0.2405,1.4019 +90,,,,0.2182,4.5426 +91,,,,0.222,2.9243 +92,,23.0,,0.2767,1.1837 +93,,,,0.2092,3.3371 +94,,,,0.1917,3.8885 +95,,,,0.2289,3.8914 +96,,,,0.2848,2.0902 +97,,46.0,,0.2582,4.8278 +98,,,,0.1763,2.2535 +99,,,,0.2556,0.2195 +100,,,,0.2677,3.06 +101,,,,0.2535,0.2401 +102,40.0,,,0.338,1.035 +103,,,,0.2071,3.5075 +104,0.0,0.0,0.0,0.2477,3.8106 +105,,,,0.2211,4.2313 +106,,,,0.2327,1.4619 +107,0.0,0.0,0.0,0.2425,4.4519 +108,,,,0.2702,4.257 +109,,,,0.2097,3.0758 +110,,,,0.25,1.3211 +111,,,,0.2361,1.48 +112,,2.0,,0.2731,3.8591 +113,,,,0.2089,0.5948 +114,,,,0.2447,5.7818 +115,29.0,,,0.3409,1.7539 +116,,,,0.278,4.9417 +117,,19.0,,0.2263,3.8287 +118,,,,0.2281,0.2081 +119,,,,0.2664,2.245 +120,0.0,0.0,0.0,0.2469,3.5751 +121,,,,0.2291,2.7579 +122,,,,0.1875,2.1267 +123,0.0,0.0,0.0,0.364,0.04 +124,,,5.0,0.2641,4.6565 +125,,,,0.3211,0.1249 +126,,,,0.2364,1.149 +127,,7.0,,0.2029,8.0221 +128,,,,0.2818,3.3596 +129,0.0,0.0,0.0,0.2278,4.2017 +130,,,,0.1903,2.742 +131,,,,0.2286,2.1272 +132,,,,0.1989,2.8042 +133,,,29.0,0.1811,3.1691 +134,,,23.0,0.3115,1.9354 +135,,,,0.3113,2.6337 +136,,,,0.2535,0.5569 +137,,,,0.2103,5.2904 +138,,,44.0,0.3107,2.207 +139,,,,0.2672,3.5974 +140,,,,0.1539,1.296 +141,,,,0.2647,3.4199 +142,0.0,0.0,0.0,0.1728,2.9112 +143,,,,0.2549,4.5187 +144,,26.0,,0.2324,2.618 
+145,,,,0.2184,2.9623 +146,,,,0.2615,3.6395 +147,,,,0.2961,2.1126 +148,0.0,0.0,0.0,0.2298,3.4733 +149,,,,0.2535,4.1008 +150,34.0,,,0.3511,0.0942 +151,,,,0.2551,3.2983 +152,,,,0.3212,1.773 +153,,,,0.1736,2.7214 +154,,,16.0,0.1906,4.077 +155,,,,0.3065,0.1706 +156,18.0,,,0.3045,2.4422 +157,24.0,,,0.3135,0.8964 +158,,,,0.2396,4.7323 +159,,,,0.2546,0.2274 +160,,,,0.3171,1.3804 +161,,22.0,,0.3247,2.212 +162,,8.0,,0.3183,0.3909 +163,,,,0.207,2.0756 +164,,,,0.2879,1.2029 +165,,,,0.2308,2.3427 +166,,,,0.1678,0.6529 +167,,,,0.2218,3.7599 +168,0.0,0.0,0.0,0.3558,0.0893 +169,,,,0.2097,2.938 +170,,,,0.2231,2.893 +171,,,,0.2204,2.1673 +172,0.0,0.0,0.0,0.3147,2.8333 +173,0.0,0.0,0.0,0.231,0.134 +174,,,,0.2823,0.6517 +175,,,,0.2108,1.844 +176,32.0,,,0.2385,1.6377 +177,,,28.0,0.3175,1.4129 +178,,,,0.3015,4.7434 +179,,,,0.2507,3.286 +180,,,,0.2236,2.0273 +181,,,,0.1948,3.3885 +182,,39.0,,0.1789,2.2726 +183,,,,0.3133,2.1136 +184,,,,0.1675,2.3233 +185,0.0,0.0,0.0,0.3218,1.9931 +186,0.0,0.0,0.0,0.3091,0.705 +187,,,,0.1859,4.0974 +188,,,,0.2214,2.2555 +189,,,,0.2408,1.7805 +190,,,,0.2139,4.9643 +191,,,,0.2293,1.1135 +192,,,,0.3105,5.8609 +193,,,,0.284,3.4788 +194,,,,0.2233,1.2105 +195,,,,0.2355,1.3996 +196,,,,0.2762,1.7563 +197,,,,0.3117,3.1666 +198,,,,0.2225,3.7357 +199,0.0,0.0,0.0,0.2141,2.2343 +200,,,,0.1871,2.9361 +201,,,,0.2286,3.2016 +202,,,,0.2432,2.8546 +203,,,,0.2972,0.9917 +204,,,,0.2614,3.5214 +205,,,,0.2805,3.5214 +206,,27.0,,0.3207,0.9939 +207,,,,0.2239,3.055 +208,,,32.0,0.2207,2.725 +209,2.0,,,0.3124,2.3293 +210,,,,0.314,2.4604 +211,,,,0.2161,2.8215 +212,,,,0.2797,4.1681 +213,,,,0.273,3.0878 +214,,,,0.1941,2.0677 +215,,,,0.2045,4.1624 +216,,36.0,,0.179,1.6742 +217,,,,0.2679,2.7755 +218,,,,0.2674,2.595 +219,,,,0.1835,3.1792 +220,0.0,0.0,0.0,0.1866,1.7883 +221,,,,0.2571,3.8421 +222,,,,0.2591,4.5935 +223,,,,0.2205,3.1413 +224,,,38.0,0.2695,3.5054 +225,,,,0.2011,4.886 +226,,,,0.2361,3.6047 +227,,,,0.2181,2.2806 +228,,,,0.2473,3.5412 +229,14.0,,,0.3086,2.0179 +230,,,,0.3027,2.7393 +231,,,,0.316,1.1771 +232,,,,0.2732,5.7085 +233,,,,0.341,2.0991 +234,,,,0.3255,3.279 +235,,,,0.2433,4.1157 +236,,,,0.3262,1.328 +237,,,,0.3122,1.5652 +238,,,,0.2965,2.9997 +239,,,,0.244,1.2331 +240,0.0,0.0,0.0,0.2003,2.8905 +241,,,,0.2412,3.7834 +242,,,,0.1659,1.0488 +243,,,,0.2469,2.266 +244,,13.0,,0.2852,2.863 +245,,,,0.2016,7.4632 +246,,,,0.2428,1.4407 +247,,,,0.2171,2.9374 +248,,,,0.3081,1.9417 +249,,,,0.2551,2.2998 +250,,,,0.2143,3.0826 +251,,43.0,,0.2514,1.6352 +252,,,,0.3374,1.7459 +253,,,,0.169,3.6673 +254,,30.0,,0.2238,1.4944 +255,,,,0.2858,4.1443 +256,,,,0.2883,1.8868 +257,,,,0.2087,2.4939 +258,46.0,,,0.3331,1.3506 +259,,,,0.2824,1.9348 +260,,,,0.2544,4.6464 +261,,,,0.2599,1.5222 +262,,,,0.2264,2.6641 +263,,,,0.2385,1.297 +264,,,,0.2834,4.1328 +265,,,,0.233,3.899 +266,,,,0.1781,4.8684 +267,,,,0.2089,5.2172 +268,,,,0.3004,1.9227 +269,,,,0.3168,1.1793 +270,,,50.0,0.2606,2.7318 +271,,,,0.2394,1.6632 +272,36.0,,,0.309,1.4167 +273,,,,0.2776,1.6877 +274,,,35.0,0.3701,0.1024 +275,,,,0.2289,1.3646 +276,,,,0.3111,1.9371 +277,,,,0.2188,2.5193 +278,,,,0.2664,6.3538 +279,,,,0.1869,1.2751 +280,,,,0.2795,2.1095 +281,,,,0.2992,2.7603 +282,,,,0.2861,0.8399 +283,,4.0,,0.2886,2.6206 +284,,,,0.165,4.4762 +285,,,,0.2198,5.5693 +286,,,,0.2253,2.5128 +287,6.0,,,0.2691,2.8181 +288,,,,0.263,2.8467 +289,37.0,,,0.3129,2.8241 +290,,,,0.3309,1.1386 +291,0.0,0.0,0.0,0.2177,1.5816 +292,,,,0.274,3.328 +293,49.0,,,0.268,0.9121 +294,,,,0.2349,3.4809 +295,,,,0.1607,2.3612 +296,,37.0,,0.2292,2.2093 +297,,16.0,,0.2594,3.0106 +298,,,,0.2073,1.2474 
+299,,,,0.2882,1.2166 +300,,,,0.2294,2.4039 +301,,,,0.215,3.0695 +302,,,,0.213,4.7993 +303,,,,0.3317,1.3639 +304,,,47.0,0.223,3.9358 +305,0.0,0.0,0.0,0.2173,3.1173 +306,,,,0.1789,0.6333 +307,,,,0.2029,3.8398 +308,0.0,0.0,0.0,0.2297,3.4826 +309,,,,0.2611,2.9943 +310,,,,0.2368,1.3102 +311,,21.0,,0.264,4.6502 +312,,,,0.1885,1.6304 +313,,,,0.2044,3.3462 +314,,,,0.2167,0.8164 +315,,,,0.2975,0.6806 +316,,,,0.2348,2.0956 +317,,,,0.3045,1.747 +318,,,,0.161,4.7159 +319,,,,0.2569,0.2046 +320,,,,0.2217,3.57 +321,0.0,0.0,0.0,0.1534,5.0863 +322,42.0,,,0.3288,1.3323 +323,,,,0.2749,4.6763 +324,0.0,0.0,0.0,0.1884,5.2756 +325,,,,0.2897,0.8911 +326,,,45.0,0.307,1.4712 +327,,,,0.1694,1.4531 +328,,,,0.2256,4.733 +329,25.0,,,0.2602,0.1796 +330,,,43.0,0.249,3.5698 +331,,,15.0,0.3009,1.127 +332,,,,0.2579,0.632 +333,,,,0.2758,2.7726 +334,,,,0.2248,6.2972 +335,,,,0.1746,6.154 +336,,32.0,,0.2927,2.1379 +337,0.0,0.0,0.0,0.1988,1.9825 +338,,,,0.1865,3.2496 +339,0.0,0.0,0.0,0.2211,1.0083 +340,,,,0.3087,1.4789 +341,39.0,,,0.2102,2.9431 +342,,,,0.2112,3.0528 +343,,,,0.2281,1.8066 +344,,,,0.1879,3.337 +345,,,,0.2231,4.8992 +346,,,,0.2832,2.6137 +347,,42.0,,0.2084,2.973 +348,,,,0.2608,2.2093 +349,0.0,0.0,0.0,0.2594,1.7875 +350,,,,0.2056,2.9546 +351,,,,0.1575,3.2123 +352,,,,0.2087,3.3448 +353,,,,0.2347,1.8943 +354,,,10.0,0.2214,3.1919 +355,0.0,0.0,0.0,0.2287,1.8442 +356,,,,0.1932,1.5198 +357,0.0,0.0,0.0,0.2186,2.7464 +358,,,,0.218,3.6964 +359,,,,0.2098,1.5933 +360,,,,0.2164,1.7166 +361,,,,0.2849,4.5218 +362,,47.0,,0.1771,3.9739 +363,,,37.0,0.2789,2.6101 +364,,,,0.1909,1.4227 +365,,,,0.269,1.887 +366,,,7.0,0.2467,3.0074 +367,,,,0.2604,3.2694 +368,0.0,0.0,0.0,0.2049,4.5693 +369,,,,0.2405,1.5497 +370,,,,0.2817,3.1926 +371,,,,0.2413,3.1466 +372,50.0,,,0.3241,2.6014 +373,,,,0.2489,1.2303 +374,,,33.0,0.3163,1.7406 +375,,,,0.3448,3.8627 +376,0.0,0.0,0.0,0.2677,3.7205 +377,,,,0.309,1.5942 +378,,,,0.2483,0.2434 +379,,,,0.2087,4.3287 +380,,,,0.2197,2.6039 +381,45.0,,,0.2515,1.6978 +382,,,,0.1453,2.4406 +383,,,,0.3431,1.4985 +384,,,,0.2606,3.0525 +385,0.0,0.0,0.0,0.2999,2.3575 +386,,,,0.221,1.4507 +387,,,,0.163,1.6452 +388,,15.0,,0.3288,1.3969 +389,,,,0.2801,4.7947 +390,,,,0.1795,2.3846 +391,0.0,0.0,0.0,0.2877,2.6055 +392,,,,0.2999,1.4527 +393,,,,0.3428,1.758 +394,,,,0.2805,2.7191 +395,,,,0.2217,1.774 +396,,,,0.2732,2.3066 +397,,,,0.2155,2.3557 +398,,,,0.187,2.7489 +399,,,,0.2538,1.6792 +400,0.0,0.0,0.0,0.321,2.4049 +401,0.0,0.0,0.0,0.3158,2.3452 +402,,,,0.2591,2.3534 +403,,,39.0,0.1391,4.7451 +404,0.0,0.0,0.0,0.2472,0.408 +405,,,,0.2366,2.0732 +406,,40.0,,0.2238,1.4333 +407,,,,0.3191,1.1223 +408,,,,0.152,2.1982 +409,,,,0.1439,3.0861 +410,,,,0.1885,4.2544 +411,,,,0.2456,2.7097 +412,,35.0,,0.2363,2.6876 +413,,,,0.1466,3.58 +414,,,,0.2474,2.8993 +415,,,,0.2585,1.7789 +416,,,,0.1887,1.7889 +417,44.0,,,0.2979,1.3995 +418,0.0,0.0,0.0,0.3103,1.9981 +419,,,,0.2408,2.2452 +420,,,,0.2279,4.4264 +421,48.0,,,0.3156,1.1041 +422,,,,0.2505,3.227 +423,,,,0.2922,1.9289 +424,,,,0.2304,0.5637 +425,,,,0.2074,2.9711 +426,,,,0.2257,1.3313 +427,,,,0.2628,2.6476 +428,,,,0.2064,2.4875 +429,,,,0.2222,2.0378 +430,,,,0.2422,1.1163 +431,,,,0.3282,4.0434 +432,,,,0.2432,3.084 +433,,,,0.2832,1.5886 +434,0.0,0.0,0.0,0.2579,3.2364 +435,,,,0.2144,6.1594 +436,,,,0.2324,0.9312 +437,,,,0.2229,5.0602 +438,,,,0.1783,4.9663 +439,,,,0.1882,3.5531 +440,,,,0.2919,0.1843 +441,,,,0.2791,3.8464 +442,,,,0.2505,2.266 +443,,,,0.2085,3.5004 +444,,,,0.2166,4.2214 +445,,,,0.3121,4.0956 +446,,,,0.2059,4.0625 +447,,,,0.3127,1.8614 +448,,,,0.2669,1.8819 +449,,,,0.2642,5.1864 
+450,,,,0.2405,3.7364 +451,0.0,0.0,0.0,0.2849,0.9899 +452,,,,0.2333,1.8348 +453,,,41.0,0.1704,1.3946 +454,0.0,0.0,0.0,0.2531,1.4869 +455,0.0,0.0,0.0,0.2793,4.4077 +456,,,,0.2729,3.8863 +457,,,,0.2681,2.8891 +458,,,,0.2275,1.6909 +459,,,17.0,0.2479,3.7225 +460,,,,0.3242,2.4383 +461,,,,0.2346,1.7544 +462,,,,0.2573,1.6631 +463,,,,0.205,2.1278 +464,,,,0.3231,2.8173 +465,0.0,0.0,0.0,0.1819,3.8244 +466,,,,0.3032,2.6925 +467,,,,0.3227,1.4351 +468,0.0,0.0,0.0,0.2108,0.0004 +469,,17.0,,0.1751,4.2539 +470,,,,0.22,1.318 +471,,,,0.212,2.0696 +472,,,,0.2661,3.1987 +473,,,,0.2147,1.3683 +474,,,,0.3281,2.3981 +475,0.0,0.0,0.0,0.306,2.6323 +476,,,,0.2208,1.8844 +477,,,,0.3428,1.0363 +478,,,,0.2708,3.0662 +479,,,,0.2562,4.4873 +480,,45.0,,0.2458,1.5763 +481,0.0,0.0,0.0,0.2193,1.0849 +482,0.0,0.0,0.0,0.3132,1.7925 +483,,,,0.2222,3.1775 +484,,,,0.2767,4.9695 +485,,,,0.3205,2.5876 +486,,,,0.2215,1.4109 +487,0.0,0.0,0.0,0.2956,1.177 +488,,,,0.2553,3.8474 +489,,49.0,,0.2929,2.3061 +490,,,,0.2326,2.9401 +491,,,,0.3055,1.1814 +492,0.0,0.0,0.0,0.3149,2.5581 +493,,,22.0,0.2225,3.9821 +494,,,,0.1713,2.9872 +495,33.0,,,0.3006,2.7524 +496,,38.0,,0.2815,4.212 +497,17.0,,,0.2578,2.085 +498,,,,0.2478,3.7892 +499,,,,0.2764,3.6093 +500,7.0,,,0.3035,2.8605 +501,,,,0.3192,2.0731 +502,,,,0.2516,2.8192 +503,,,,0.2783,1.8737 +504,,,,0.2581,4.9281 +505,,,,0.2271,2.9503 +506,0.0,0.0,0.0,0.2733,2.6405 +507,,,,0.212,5.2768 +508,0.0,0.0,0.0,0.2103,3.9562 +509,,,,0.2138,0.3291 +510,,,11.0,0.2035,2.1619 +511,,,,0.2673,6.1135 +512,0.0,0.0,0.0,0.3255,1.2172 +513,0.0,0.0,0.0,0.2977,2.396 +514,23.0,,,0.261,0.31 +515,,,46.0,0.2707,3.3498 +516,,,,0.2287,3.2583 +517,1.0,,,0.3145,1.3399 +518,,,,0.3264,1.1722 +519,,,,0.2729,3.7707 +520,,,,0.3074,0.9597 +521,,,,0.2311,2.9004 +522,12.0,,,0.3042,3.3424 +523,,,,0.283,1.0965 +524,,41.0,,0.2711,1.5817 +525,,,,0.21,2.7943 +526,,,,0.2732,3.5571 +527,,,,0.242,2.5694 +528,,,,0.2329,1.7287 +529,,,,0.224,3.9778 +530,,,,0.2849,1.8335 +531,,,,0.2669,5.1074 +532,,,,0.334,1.229 +533,,,,0.2365,4.786 +534,,,,0.3377,1.7192 +535,,,,0.2255,2.8832 +536,,,,0.2127,2.1659 +537,,,,0.2217,2.3278 +538,,14.0,,0.1467,0.6492 +539,,,,0.306,4.1317 +540,,,,0.3262,1.6491 +541,,,,0.2794,1.6337 +542,,,,0.3037,1.2903 +543,0.0,0.0,0.0,0.178,2.9816 +544,,,,0.3114,2.3562 +545,,,,0.2706,0.7448 +546,,,30.0,0.1896,5.7504 +547,,,,0.2253,3.2656 +548,,,36.0,0.2866,1.6028 +549,,,,0.2914,4.0736 +550,,,,0.2804,2.4929 +551,,,,0.25,1.855 +552,0.0,0.0,0.0,0.1258,2.5965 +553,,,,0.2168,2.5511 +554,,,,0.1983,3.2955 +555,19.0,,,0.2776,4.1255 +556,,,,0.2697,2.4766 +557,,,,0.2174,2.7093 +558,,,,0.2753,3.504 +559,,,,0.2874,0.7 +560,,,,0.2631,1.2552 +561,,50.0,,0.2143,5.3939 +562,,,,0.2258,2.6203 +563,,,,0.3421,4.1251 +564,,,,0.2583,3.6164 +565,,,,0.2098,3.2775 +566,,,,0.2599,5.2758 +567,,,,0.2767,0.8243 +568,,,,0.2796,4.1598 +569,,,,0.305,2.4561 +570,,,,0.2283,3.2465 +571,,,,0.2971,0.4763 +572,,,,0.2192,2.2759 +573,,,,0.185,3.7118 +574,,,,0.1785,2.4173 +575,,,,0.2079,1.5551 +576,,12.0,,0.3225,0.9057 +577,,33.0,,0.2975,2.2176 +578,,,,0.3183,1.7085 +579,,,,0.2898,0.1873 +580,47.0,,,0.3362,1.8294 +581,0.0,0.0,0.0,0.3245,0.8824 +582,,,,0.3157,1.3786 +583,,,4.0,0.1186,1.4394 +584,,,1.0,0.217,3.7026 +585,0.0,0.0,0.0,0.2285,2.3868 +586,21.0,,,0.3136,2.0946 +587,,,,0.2226,2.2734 +588,,,,0.3071,0.8436 +589,,,,0.3246,2.9806 +590,,,,0.2087,4.6876 +591,,,21.0,0.3324,0.2662 +592,0.0,0.0,0.0,0.3149,4.8979 +593,,,,0.1727,5.1346 +594,,,,0.2005,3.4861 +595,,,,0.2316,1.9079 +596,0.0,0.0,0.0,0.277,2.9076 +597,,,,0.2747,3.9471 +598,,,,0.2695,4.4856 +599,,,,0.1525,3.5189 
+600,,,,0.2,1.3688 +601,,,,0.3054,1.3755 +602,,,,0.192,3.86 +603,,,18.0,0.201,2.0559 +604,,,,0.2649,4.6556 +605,,,,0.1976,6.1151 +606,,,,0.3177,0.995 +607,,,,0.3171,1.566 +608,,,,0.156,6.4154 +609,,,,0.2647,4.8804 +610,,,,0.2416,3.8425 +611,0.0,0.0,0.0,0.2091,4.5963 +612,,,,0.2142,9.4561 +613,,,,0.2596,2.4202 +614,,,,0.2479,1.485 +615,0.0,0.0,0.0,0.2198,2.7536 +616,26.0,,,0.306,1.7203 +617,,,,0.1971,3.9244 +618,,,,0.2373,4.2441 +619,,,,0.2982,4.4289 +620,,,,0.2221,2.1892 +621,0.0,0.0,0.0,0.2429,0.2616 +622,43.0,,,0.2484,1.8072 +623,,,,0.3217,0.5711 +624,0.0,0.0,0.0,0.2303,4.3875 +625,,,,0.2966,2.9549 +626,,,27.0,0.1879,3.0803 +627,,,,0.2683,1.1859 +628,,,,0.1921,4.636 +629,0.0,0.0,0.0,0.2422,0.1781 +630,0.0,0.0,0.0,0.2245,2.7334 +631,,,,0.296,1.2861 +632,,,,0.3127,0.9582 +633,,,,0.2445,0.7578 +634,0.0,0.0,0.0,0.3349,0.4531 +635,,,,0.2078,3.0028 +636,,,,0.2384,3.1011 +637,,,,0.2294,1.1701 +638,,,,0.3085,1.8511 +639,,,,0.1827,6.7784 +640,,,20.0,0.2725,3.7305 +641,16.0,,,0.3244,2.1081 +642,,,,0.1942,2.517 +643,,,,0.1968,2.5386 +644,,,,0.2068,1.7391 +645,,,,0.3237,1.5143 +646,,,9.0,0.2088,2.9236 +647,,,,0.2182,4.3902 +648,,,,0.2827,1.1406 +649,,,,0.2967,0.435 +650,,,,0.3248,1.9646 +651,,,,0.3212,2.3789 +652,,,,0.2,3.6688 +653,,,,0.2705,5.2388 +654,,,,0.2221,3.3457 +655,,,,0.2803,5.2281 +656,,,,0.2191,3.2013 +657,,,,0.2173,4.1664 +658,,,,0.2408,1.7062 +659,,,,0.2095,3.9167 +660,,,,0.2514,6.0516 +661,0.0,0.0,0.0,0.157,2.8373 +662,,6.0,,0.295,3.4358 +663,,,,0.2139,4.3001 +664,,,,0.3512,0.0886 +665,,,,0.3274,1.5259 +666,,,,0.3149,3.3521 +667,,,,0.2785,3.9549 +668,,,,0.3209,1.7655 +669,,,,0.3019,1.5872 +670,,,34.0,0.2625,1.2591 +671,,,,0.2171,1.492 +672,,,,0.312,1.0297 +673,,,,0.2874,0.7874 +674,,,,0.296,1.3839 +675,,,,0.3189,1.5101 +676,,,,0.1968,3.5802 +677,,,,0.1998,2.7685 +678,,,48.0,0.2414,1.4565 +679,,,,0.2598,1.4744 +680,,,,0.2265,1.605 +681,,,,0.2206,3.8322 +682,,48.0,,0.3001,4.3797 +683,,,,0.2181,3.5195 +684,,,,0.2873,5.3318 +685,,,,0.226,5.789 +686,,,,0.2652,4.0976 +687,35.0,,,0.3,2.645 +688,,,,0.2112,1.6874 +689,,,,0.2077,1.8805 +690,,,,0.2508,1.5998 +691,,,,0.263,4.1882 +692,,,,0.2458,4.5978 +693,,,,0.2237,2.4324 +694,10.0,,,0.3724,0.0303 +695,,,,0.2766,1.4475 +696,,,,0.1694,0.5719 +697,,,12.0,0.2593,4.0931 +698,,,,0.2362,1.1058 +699,,,,0.2853,5.1713 +700,,,,0.1767,2.7167 +701,,,,0.2099,2.904 +702,,,,0.3027,4.912 +703,,,,0.2773,5.8584 +704,,,,0.3294,0.9964 +705,,,,0.2556,3.1594 +706,,28.0,,0.227,5.1026 +707,,,,0.1898,4.2346 +708,,,,0.3237,1.0447 +709,,,,0.2807,1.1504 +710,,,,0.2894,0.9072 +711,,,,0.2234,1.1688 +712,,,,0.271,4.2162 +713,,,,0.219,1.467 +714,,,,0.2318,2.3393 +715,,,3.0,0.1688,2.7918 +716,,,,0.3196,4.8429 +717,,,,0.2531,3.6373 +718,,,,0.2266,3.0249 +719,,,,0.1751,0.9691 +720,,,,0.1812,5.6546 +721,,,,0.3012,1.4615 +722,,,,0.209,2.8027 +723,,,,0.3286,1.4304 +724,,,,0.1499,4.8855 +725,,,,0.2074,2.0659 +726,0.0,0.0,0.0,0.1992,4.1084 +727,,,,0.2211,3.0691 +728,,,,0.3351,1.5924 +729,,,,0.235,2.2374 +730,,,,0.3167,2.0476 +731,,,,0.1901,2.5084 +732,38.0,,,0.3305,1.4249 +733,,,,0.223,3.6035 +734,0.0,0.0,0.0,0.2165,1.367 +735,,,,0.233,2.9282 +736,0.0,0.0,0.0,0.2321,2.9884 +737,,,,0.2208,3.4898 +738,,,,0.1826,7.2508 +739,,,,0.2396,1.4064 +740,,,,0.2156,4.8437 +741,,,,0.215,2.3456 +742,,,,0.2557,1.1442 +743,,,,0.3132,1.1691 +744,,,,0.2444,2.6073 +745,,,,0.2336,3.5151 +746,22.0,,,0.248,1.5886 +747,,,,0.267,2.9201 +748,,,,0.3036,1.1871 +749,,,,0.2783,2.6451 +750,3.0,,,0.2752,1.7382 +751,,,,0.1345,2.1903 +752,,,,0.169,2.5034 +753,,,,0.2984,2.3912 +754,,,,0.2138,2.5168 
+755,,,,0.2886,2.9435 +756,,,,0.3331,2.5828 +757,,,,0.2131,2.9267 +758,,,,0.2841,2.6444 +759,,,,0.3216,2.4624 +760,,,,0.2806,3.662 +761,,24.0,,0.2015,4.3572 +762,,,,0.2656,3.4208 +763,,,,0.1787,0.961 +764,,,,0.23,3.419 +765,,,,0.3113,1.6339 +766,28.0,,,0.3268,1.9935 +767,0.0,0.0,0.0,0.3149,2.6206 +768,,,,0.2212,2.6652 +769,,,,0.2431,5.175 +770,,,,0.2984,2.1246 +771,,,,0.2802,1.5201 +772,,,14.0,0.2559,2.1901 +773,,,,0.335,0.1108 +774,,,,0.1954,1.8674 +775,0.0,0.0,0.0,0.2557,3.4005 +776,,,,0.2661,1.3128 +777,,9.0,,0.2146,0.3674 +778,,,,0.2042,3.4843 +779,,,,0.241,1.0998 +780,,,,0.2236,3.4444 +781,,,,0.2652,3.443 +782,,,,0.2516,3.5755 +783,,,,0.2103,0.7582 +784,,,,0.3475,3.9382 +785,,,,0.2187,3.6446 +786,13.0,,,0.2575,0.2678 +787,,,,0.2568,0.309 +788,,,,0.2797,1.8349 +789,,,,0.2758,4.0301 +790,,,,0.3184,1.0813 +791,,,,0.1983,3.3211 +792,,,,0.2557,4.6058 +793,,,,0.1805,3.991 +794,,,,0.2616,1.6585 +795,,25.0,,0.2571,3.2131 +796,,,,0.2016,7.4632 +797,,,,0.1804,2.9989 +798,,,,0.1826,2.6282 +799,,,,0.2606,1.841 +800,,,,0.2026,2.8711 +801,0.0,0.0,0.0,0.3316,1.8694 +802,,,,0.2992,2.427 +803,11.0,,,0.2232,3.7292 +804,,,,0.326,1.4937 +805,,,,0.2755,2.1064 +806,,,24.0,0.2213,1.8504 +807,,,,0.2675,2.4906 +808,,,,0.3215,1.6951 +809,,,,0.2677,3.7185 +810,,,,0.2074,1.2557 +811,,,25.0,0.2045,2.0094 +812,0.0,0.0,0.0,0.1784,6.0575 +813,,,,0.2454,0.991 +814,,,,0.2363,3.1611 +815,,,,0.2165,2.2866 +816,,,,0.2216,2.1211 +817,,,,0.2671,5.3967 +818,,,,0.1824,3.6918 +819,,,,0.1639,5.7845 +820,,,,0.2096,3.1117 +821,,,,0.2848,1.7139 +822,,,,0.2406,2.0414 +823,,,,0.2021,2.88 +824,,,,0.2676,2.7126 +825,,,,0.1656,2.5298 +826,,,6.0,0.2199,3.1605 +827,,,,0.3036,1.6839 +828,15.0,,,0.3225,2.099 +829,,,,0.2667,2.8915 +830,,,,0.2195,4.1963 +831,,,,0.2192,2.5304 +832,,,,0.2346,0.7271 +833,,,,0.2688,3.1217 +834,,,,0.2609,1.4192 +835,,,,0.275,3.3791 +836,,,,0.1936,3.6583 +837,,,,0.3107,3.6692 +838,,,,0.2606,1.6698 +839,,,,0.2208,2.5795 +840,,,,0.2204,3.2913 +841,,,,0.2837,4.0326 +842,,,,0.2219,3.1486 +843,,,,0.262,1.0008 +844,,,,0.2168,2.7292 +845,0.0,0.0,0.0,0.2672,4.0766 +846,,,,0.2748,4.3959 +847,,,,0.3196,0.3628 +848,,,,0.2006,0.875 +849,,,19.0,0.2914,1.7348 +850,,,,0.2741,2.7756 +851,,,,0.2308,1.9387 +852,,,,0.2347,1.8422 +853,,,26.0,0.2549,2.1063 +854,,,,0.2117,2.0325 +855,0.0,0.0,0.0,0.2105,4.8282 +856,,,,0.3651,0.0511 +857,,,,0.2636,4.1759 +858,,,,0.2564,1.2223 +859,,,,0.2689,4.4179 +860,,,,0.2673,2.8486 +861,,,13.0,0.2132,2.1444 +862,,,,0.3093,1.3947 +863,5.0,,,0.2941,1.3946 +864,,,,0.3117,3.1848 +865,,,,0.2201,2.9885 +866,,20.0,,0.2725,3.0627 +867,,44.0,,0.1623,9.1712 +868,,,,0.173,3.3136 +869,9.0,,,0.3169,0.9967 +870,,,,0.2431,1.4092 +871,,,,0.1804,4.2213 +872,,29.0,,0.3014,2.2795 +873,,11.0,,0.3392,1.4945 +874,,,,0.1915,3.0103 +875,,1.0,,0.2994,5.1864 +876,,,,0.2406,1.0206 +877,,,,0.1751,6.3635 +878,,,,0.227,2.1015 +879,,,,0.2232,3.8203 +880,,,,0.2249,2.5578 +881,,,,0.2177,3.029 +882,,,,0.2644,4.6158 +883,,,,0.2103,3.3257 +884,,,,0.2261,1.7875 +885,0.0,0.0,0.0,0.2833,5.2996 +886,,,,0.2209,1.3809 +887,0.0,0.0,0.0,0.2686,1.0801 +888,,,,0.2327,3.3473 +889,20.0,,,0.2812,1.665 +890,,,,0.1877,4.1743 +891,,,,0.2861,5.3166 +892,,,,0.245,0.6396 +893,,,,0.2651,0.7465 +894,,,,0.2848,3.3455 +895,0.0,0.0,0.0,0.2313,3.2749 +896,,,40.0,0.3021,1.1884 +897,,,,0.2674,4.6216 +898,,,,0.2317,0.6438 +899,,,,0.2225,2.4606 +900,,,42.0,0.1722,3.7285 +901,,,,0.352,1.9443 +902,,,,0.2979,2.1453 +903,,,,0.3901,0.0898 +904,,,,0.2254,2.804 +905,,,,0.2603,4.817 +906,,,,0.285,0.8935 +907,,,,0.1907,4.0409 +908,,,,0.2183,5.6256 +909,,,,0.3132,3.0238 
+910,0.0,0.0,0.0,0.2107,3.0129 +911,,,,0.2553,5.3849 +912,,,,0.2422,3.2974 +913,,,,0.2432,4.7113 +914,,,,0.2087,3.8539 +915,,,,0.3345,0.7438 +916,,,,0.1613,4.5832 +917,0.0,0.0,0.0,0.1711,1.3808 +918,4.0,,,0.3315,2.4828 +919,,,,0.1913,5.0855 +920,,,,0.191,2.1746 +921,,,,0.2275,3.1277 +922,,,,0.2122,3.0752 +923,0.0,0.0,0.0,0.2602,0.9294 +924,0.0,0.0,0.0,0.2716,2.9896 +925,,,31.0,0.2476,2.382 +926,0.0,0.0,0.0,0.2326,2.4246 +927,,,,0.1476,2.9258 +928,,18.0,,0.3036,1.2762 +929,,,,0.2419,0.19 +930,,,,0.2949,3.9285 +931,,,,0.2434,2.224 +932,,,,0.2081,4.2489 +933,,,,0.2341,2.4354 +934,,,,0.2239,3.0992 +935,,,,0.2386,0.7508 +936,,5.0,,0.1828,8.2984 +937,0.0,0.0,0.0,0.1555,3.9292 +938,,,,0.245,2.9042 +939,,,,0.3068,2.1489 +940,,,,0.2063,1.9512 +941,,,,0.199,1.359 +942,,,,0.335,0.9394 +943,0.0,0.0,0.0,0.2138,6.4388 +944,,,,0.3061,2.1492 +945,,,,0.3001,1.8487 +946,0.0,0.0,0.0,0.2314,4.9054 +947,,,,0.2411,1.9818 +948,,,,0.2024,2.3348 +949,30.0,,,0.3361,0.9057 +950,,,,0.2587,4.0378 +951,,,,0.2868,5.636 diff --git a/experiments/test_1/moboqm9_results.png b/experiments/test_1/moboqm9_results.png new file mode 100644 index 0000000..1eb0dde Binary files /dev/null and b/experiments/test_1/moboqm9_results.png differ diff --git a/experiments/test_1/script.py b/experiments/test_1/script.py index a43c8b9..e4bb158 100644 --- a/experiments/test_1/script.py +++ b/experiments/test_1/script.py @@ -13,22 +13,16 @@ surrogate_model="GaussianProcess", targets=["gap", "mu"], target_bools=[True, True], - num_total_points=100, - num_seed_points=10, - n_iters=100, - num_candidates=10) + num_total_points=1000, + num_seed_points=100, + n_iters=50, + num_candidates=1) moboqm9 = MOBOQM9(params) moboqm9.run_optimization() fig = plot_results(moboqm9.dataframe, [True, True]) fig.tight_layout() -# plt.savefig("figures/moboqm9_results.png") +plt.savefig("moboqm9_results.png") plt.show() - -""" -panel1 => iteration vs hv [qEHVI qNEHVI random] -panel2 => pareto front for qEHVI -panel3 => pareto front for qNEHVI -panel4 => pareto front for random -""" +moboqm9.dataframe.to_csv("moboqm9_results.csv") diff --git a/src/acquisition_functions.py b/src/acquisition_functions.py index 5ac56bd..466ef1a 100644 --- a/src/acquisition_functions.py +++ b/src/acquisition_functions.py @@ -39,7 +39,7 @@ def optimize_qEHVI(model, reference, y_train, x_test, n_candidates): q=n_candidates, unique=True ) - return torch.tensor(candidates) + return candidates def optimize_qNEHVI(model, reference, x_train, x_test, n_candidates): """ @@ -69,4 +69,4 @@ def optimize_qNEHVI(model, reference, x_train, x_test, n_candidates): q=n_candidates, unique=True ) - return torch.tensor(candidates) \ No newline at end of file + return candidates \ No newline at end of file diff --git a/src/data/cm_featurizer.py b/src/data/cm_featurizer.py index d45d0e0..16ff4e4 100644 --- a/src/data/cm_featurizer.py +++ b/src/data/cm_featurizer.py @@ -1,6 +1,7 @@ from dscribe.descriptors import CoulombMatrix from ase import db - +from pathlib import Path +import numpy as np def get_max_number_of_atoms(indices): """ @@ -13,7 +14,7 @@ def get_max_number_of_atoms(indices): int: Maximum number of atoms for the MOBOQM9 model. 
""" max_number_of_atoms = 0 - with db.connect("QM9_data.db") as qm9: + with db.connect(str(Path(__file__).parent / "QM9_data.db")) as qm9: for i, row in enumerate(qm9.select()): if i in indices: atoms = row.toatoms() @@ -36,15 +37,23 @@ def get_coulomb_matrix(indices, targets): max_number_of_atoms = get_max_number_of_atoms(indices) cm = CoulombMatrix(n_atoms_max=max_number_of_atoms) - atoms_list, targets = [], [] - with db.connect("QM9_data.db") as qm9: + atoms_list, computed_targets = [], [] + with db.connect(str(Path(__file__).parent / "QM9_data.db")) as qm9: for i, row in enumerate(qm9.select()): if i in indices: - atoms_list.append(row.toatoms()) - target_list = [] + is_OK = True for target in targets: - target_list.append(row[target]) - targets.append(target_list) + try: + row[target] + except AttributeError: + is_OK = False + break + if is_OK: + atoms_list.append(row.toatoms()) + target_list = [] + for target in targets: + target_list.append(row[target]) + computed_targets.append(target_list) features = cm.create(atoms_list, n_jobs=4) - return features, targets \ No newline at end of file + return features, np.array(computed_targets) \ No newline at end of file diff --git a/src/mobo_qm9.py b/src/mobo_qm9.py index 7711229..7b05ad6 100644 --- a/src/mobo_qm9.py +++ b/src/mobo_qm9.py @@ -62,7 +62,7 @@ def __init__(self, params: MOBOQM9Parameters): self.dataframe = pd.DataFrame.from_dict(self.from_target_dict()) self.acq_met = {"qEHVI": False, "qNEHVI": False, "random": False} - def form_target_dict(self): + def from_target_dict(self): """ Forms the target dictionary for the MOBOQM9 model. @@ -70,16 +70,17 @@ def form_target_dict(self): target_dict: Target dictionary for the MOBOQM9 model. """ target_dict = {} + target_dict["iteration_qEHVI"] = [None] * len(self.targets) + target_dict["iteration_qNEHVI"] = [None] * len(self.targets) + target_dict["iteration_random"] = [None] * len(self.targets) for itarg, target in enumerate(self.params.targets): target_dict[target] = self.targets[:, itarg] - target_dict["iteration_qEHVI"] = [None] * len(self.params.targets) - target_dict["iteration_qNEHVI"] = [None] * len(self.params.targets) - target_dict["iteration_random"] = [None] * len(self.params.targets) - for itrain, train_mask in enumerate(self.train_indices["qEHVI"]): - if train_mask: - target_dict["iteration_qeHVI"] = 0 - target_dict["iteration_qNEHVI"] = 0 - target_dict["iteration_random"] = 0 + + for itrain, train_mask in enumerate(self.train_indices["qEHVI"]): + if train_mask: + target_dict["iteration_qEHVI"][itrain] = 0 + target_dict["iteration_qNEHVI"][itrain] = 0 + target_dict["iteration_random"][itrain] = 0 return target_dict def get_features_and_targets(self): @@ -107,9 +108,9 @@ def get_surrogate_model(self, acq): returns: model: Surrogate model for the MOBOQM9 model. 
""" - features = torch.tensor(self.features[self.train_indices["acq"]], + features = torch.tensor(self.features[self.train_indices[acq]], dtype=torch.double) - targets = torch.tensor(self.correct_sign(self.targets[self.train_indices["acq"]]), + targets = torch.tensor(self.correct_sign(self.targets[self.train_indices[acq]]), dtype=torch.double) var = torch.full_like(targets, 1e-6) @@ -122,7 +123,7 @@ def get_surrogate_model(self, acq): models = [SingleTaskGP(features, targets[:, i].unsqueeze(-1), - noise=var[:, i].unsqueeze(-1), + var[:, i].unsqueeze(-1), input_transform=Normalize(d=features.shape[-1]), outcome_transform=Standardize(m=1), likelihood=gpytorch.likelihoods.GaussianLikelihood(), @@ -154,11 +155,11 @@ def optimize_acquisition_function(self, model, acq): returns: candidates: Candidates for the MOBOQM9 model. """ - y_train = self.correct_sign(self.targets[self.train_indices["acq"]]) + y_train = self.correct_sign(self.targets[self.train_indices[acq]]) y_train = torch.tensor(y_train, dtype=torch.double) - x_train = torch.tensor(self.features[self.train_indices["acq"]], dtype=torch.double) - x_test = torch.tensor(self.features[~self.train_indices["acq"]], dtype=torch.double) - reference = y_train.mean(0)[0] + x_train = torch.tensor(self.features[self.train_indices[acq]], dtype=torch.double) + x_test = torch.tensor(self.features[~self.train_indices[acq]], dtype=torch.double) + reference = y_train.min(0)[0] if acq == "qEHVI": return optimize_qEHVI(model=model, reference=reference, @@ -186,11 +187,11 @@ def run_optimization(self): model = self.get_surrogate_model(acq) if acq == "random": for _ in range(self.params.num_candidates): - idx = np.random.choice(np.where(~self.train_indices)[0]) + idx = np.random.choice(np.where(~self.train_indices[acq])[0]) self.train_indices[acq][idx] = True self.dataframe.at[idx, "iteration_random"] = iter + 1 else: - candidates = self.optimize_acquisition_function(model) + candidates = self.optimize_acquisition_function(model, acq) self.update_train_indices(candidates, acq, iter) self.stopping_criteria_met(acq) @@ -204,9 +205,9 @@ def get_train_indices(self): train_indices: Train indices for the MOBOQM9 model. 
""" # add latin hypercube sampling if time permits - temp_indices = np.random.randint(0, self.params.num_total_points, + temp_indices = np.random.randint(0, len(self.targets), self.params.num_seed_points) - mask = np.zeros(len(self.total_indices), dtype=bool) + mask = np.zeros(len(self.targets), dtype=bool) mask[temp_indices] = True return {"qEHVI": mask, "qNEHVI": mask, "random": mask} diff --git a/src/utils.py b/src/utils.py index 00ffccb..52bea09 100644 --- a/src/utils.py +++ b/src/utils.py @@ -1,4 +1,3 @@ -import pandas as pd from botorch.utils.multi_objective.box_decompositions import DominatedPartitioning import matplotlib.pyplot as plt import torch @@ -32,9 +31,10 @@ def plot_results(df, target_bool): global_hv = hv.compute_hypervolume().item() hv_iterations = {"qEHVI": [], "qNEHVI": [], "random": []} - max_iterations = df['iteration'].max() - for iter_no in range(max_iterations + 1): - for acq in ["qEHVI", "qNEHVI", "random"]: + + for acq in ["qEHVI", "qNEHVI", "random"]: + max_iterations = int(df[f'iteration_{acq}'].max()) + for iter_no in range(max_iterations + 1): current_data = df[df[f'iteration_{acq}'] <= iter_no] local_values = torch.tensor(current_data[targets].values) for mask in target_bool: @@ -42,51 +42,60 @@ def plot_results(df, target_bool): local_values[:, mask] *= -1 hv = DominatedPartitioning(ref_point=ref_point, Y=local_values) hv_iterations[acq].append(hv.compute_hypervolume().item()) - ax[0].plot(np.array(hv_iterations["qEHVI"])/global_hv * 100, marker='o', label="qEHVI") - ax[0].plot(np.array(hv_iterations["qNEHVI"])/ global_hv * 100, marker='*', label="qNEHVI") - ax[0].plot(np.array(hv_iterations["random"]) / global_hv, marker='s', label="random") + ax[0].plot(np.array(hv_iterations["qEHVI"]) / global_hv * 100, marker='o', label="qEHVI") + ax[0].plot(np.array(hv_iterations["qNEHVI"]) / global_hv * 100, marker='*', label="qNEHVI") + ax[0].plot(np.array(hv_iterations["random"]) / global_hv * 100, marker='s', label="random") ax[0].set_xlabel('Iteration') ax[0].set_ylabel('Hypervolume') ax[0].set_title('Hypervolume per Iteration') ax[0].legend(loc="center right") # Plot qEHVI + max_iterations = int(df['iteration_qEHVI'].max()) current_data = df[df['iteration_qEHVI'] <= max_iterations] qEHVI_values = torch.tensor(current_data[targets].values) qEHVI_pareto_idx = is_non_dominated(qEHVI_values) qEHVI_pareto_front = qEHVI_values[qEHVI_pareto_idx] - ax[1].scatter(df[targets[0]], df[targets[1]], alpha=0.5) - ax[1].scatter(global_pareto_front[:, 0], global_pareto_front[:, 1], - color='blue', label="Global Pareto Front", marker="*") ax[1].scatter(qEHVI_pareto_front[:, 0], qEHVI_pareto_front[:, 1], - color='red', label="qEHVI Pareto Front", marker="s") + color='red', label="qEHVI Pareto Front", marker="*", s=100) + ax[1].scatter(df[targets[0]], df[targets[1]], alpha=0.3) + ax[1].scatter(global_pareto_front[:, 0], global_pareto_front[:, 1], + color='green', label="Global Pareto Front", marker="s") ax[1].set_xlabel(targets[0]) ax[1].set_ylabel(targets[1]) + ax[1].set_title('Pareto Front for qEHVI') + ax[1].legend(loc="upper right") # Plot qNEHVI + max_iterations = int(df['iteration_qNEHVI'].max()) current_data = df[df['iteration_qNEHVI'] <= max_iterations] qNEHVI_values = torch.tensor(current_data[targets].values) qNEHVI_pareto_idx = is_non_dominated(qNEHVI_values) qNEHVI_pareto_front = qNEHVI_values[qNEHVI_pareto_idx] - ax[2].scatter(df[targets[0]], df[targets[1]], alpha=0.5) - ax[2].scatter(global_pareto_front[:, 0], global_pareto_front[:, 1], - color='blue', label="Global 
Pareto Front", marker="*") ax[2].scatter(qNEHVI_pareto_front[:, 0], qNEHVI_pareto_front[:, 1], - color='red', label="qNEHVI Pareto Front", marker="s") + color='red', label="qNEHVI Pareto Front", marker="*", s=100) + ax[2].scatter(df[targets[0]], df[targets[1]], alpha=0.3) + ax[2].scatter(global_pareto_front[:, 0], global_pareto_front[:, 1], + color='green', label="Global Pareto Front", marker="s") ax[2].set_xlabel(targets[0]) ax[2].set_ylabel(targets[1]) + ax[2].set_title('Pareto Front for qNEHVI') + ax[2].legend(loc="upper right") - # Plot qEHVI + # Plot random + max_iterations = int(df['iteration_random'].max()) current_data = df[df['iteration_random'] <= max_iterations] random_values = torch.tensor(current_data[targets].values) random_pareto_idx = is_non_dominated(random_values) random_pareto_front = random_values[random_pareto_idx] - ax[3].scatter(df[targets[0]], df[targets[1]], alpha=0.5) - ax[3].scatter(global_pareto_front[:, 0], global_pareto_front[:, 1], - color='blue', label="Global Pareto Front", marker="*") ax[3].scatter(random_pareto_front[:, 0], random_pareto_front[:, 1], - color='red', label="Random Pareto Front", marker="s") + color='red', label="Random Pareto Front", marker="*", s=100) + ax[3].scatter(df[targets[0]], df[targets[1]], alpha=0.3) + ax[3].scatter(global_pareto_front[:, 0], global_pareto_front[:, 1], + color='green', label="Global Pareto Front", marker="s") ax[3].set_xlabel(targets[0]) ax[3].set_ylabel(targets[1]) + ax[3].set_title('Pareto Front for random') + ax[3].legend(loc="upper right") return fig \ No newline at end of file