From 802f9b16a14744175a4b5ce2f97ff39eb0581775 Mon Sep 17 00:00:00 2001 From: ckrew <153777116+ckrew@users.noreply.github.com> Date: Thu, 18 Jan 2024 08:53:23 -0600 Subject: [PATCH] Added Tests 93% Python Coverage (#12) * added function docstrings to tested functions * updated some function names for clarity * updated get_head_and_tag_names to use remote repo instead of local * updated get_workflow_job_url to use all python apis instead of request library * updated some variable names to be more clear * created helper functions to remove duplicated code * move some helper functions into resource helpers * linted all code with flake8 * finished tests for most python files * fixed minor mamba command issue --- .coverage | Bin 86016 -> 53248 bytes .../tests/.coveragerc => .coveragerc | 3 +- .flake8 | 2 + .gitignore | 1 + pytest.ini | 4 + .../app_store/application_files/__init__.py | 0 tethysapp/app_store/begin_install.py | 126 ++- tethysapp/app_store/controllers.py | 49 +- tethysapp/app_store/git_install_handlers.py | 3 +- tethysapp/app_store/helpers.py | 296 ++--- tethysapp/app_store/installation_handlers.py | 110 +- tethysapp/app_store/notifications.py | 50 +- tethysapp/app_store/public/js/main.js | 6 +- tethysapp/app_store/public/js/utilities.js | 2 +- tethysapp/app_store/resource_helpers.py | 578 +++++++--- tethysapp/app_store/scaffold_handler.py | 2 +- .../{conda_install.sh => mamba_install.sh} | 0 tethysapp/app_store/submission_handlers.py | 956 +++++++++------- tethysapp/app_store/tests/conftest.py | 280 +++++ tethysapp/app_store/tests/files/bad_setup.py | 34 + .../app_store/tests/files/basic_install.yml | 13 + .../app_store/tests/files/basic_meta.yaml | 31 + .../app_store/tests/files/complex_install.yml | 16 + .../app_store/tests/files/complex_meta.yaml | 32 + .../app_store/tests/files/install_pip.sh | 2 + .../app_store/tests/files/recipe_meta.yaml | 28 + tethysapp/app_store/tests/files/setup.py | 33 + .../integrated_tests/test_controllers.py | 105 ++ tethysapp/app_store/tests/pytest.ini | 5 - tethysapp/app_store/tests/test_settings.py | 21 - tethysapp/app_store/tests/test_submission.py | 101 -- tethysapp/app_store/tests/tests.py | 168 --- .../tests/unit_tests/test_begin_install.py | 346 ++++++ .../tests/unit_tests/test_helpers.py | 171 +++ .../unit_tests/test_installation_handlers.py | 357 ++++++ .../tests/unit_tests/test_notifications.py | 116 ++ .../tests/unit_tests/test_resource_helpers.py | 1006 +++++++++++++++++ .../unit_tests/test_submission_handlers.py | 538 +++++++++ .../unit_tests/test_uninstall_handlers.py | 193 ++++ .../tests/unit_tests/test_update_handlers.py | 114 ++ .../tests/unit_tests/test_utilities.py | 12 + tethysapp/app_store/uninstall_handlers.py | 23 +- tethysapp/app_store/update_handlers.py | 47 +- tethysapp/app_store/utilities.py | 12 - 44 files changed, 4748 insertions(+), 1244 deletions(-) rename tethysapp/app_store/tests/.coveragerc => .coveragerc (76%) create mode 100644 pytest.ini create mode 100644 tethysapp/app_store/application_files/__init__.py rename tethysapp/app_store/scripts/{conda_install.sh => mamba_install.sh} (100%) create mode 100644 tethysapp/app_store/tests/conftest.py create mode 100644 tethysapp/app_store/tests/files/bad_setup.py create mode 100644 tethysapp/app_store/tests/files/basic_install.yml create mode 100644 tethysapp/app_store/tests/files/basic_meta.yaml create mode 100644 tethysapp/app_store/tests/files/complex_install.yml create mode 100644 tethysapp/app_store/tests/files/complex_meta.yaml create mode 100644 
tethysapp/app_store/tests/files/install_pip.sh create mode 100644 tethysapp/app_store/tests/files/recipe_meta.yaml create mode 100644 tethysapp/app_store/tests/files/setup.py create mode 100644 tethysapp/app_store/tests/integrated_tests/test_controllers.py delete mode 100644 tethysapp/app_store/tests/pytest.ini delete mode 100644 tethysapp/app_store/tests/test_settings.py delete mode 100644 tethysapp/app_store/tests/test_submission.py delete mode 100755 tethysapp/app_store/tests/tests.py create mode 100644 tethysapp/app_store/tests/unit_tests/test_begin_install.py create mode 100644 tethysapp/app_store/tests/unit_tests/test_helpers.py create mode 100644 tethysapp/app_store/tests/unit_tests/test_installation_handlers.py create mode 100644 tethysapp/app_store/tests/unit_tests/test_notifications.py create mode 100644 tethysapp/app_store/tests/unit_tests/test_resource_helpers.py create mode 100644 tethysapp/app_store/tests/unit_tests/test_submission_handlers.py create mode 100644 tethysapp/app_store/tests/unit_tests/test_uninstall_handlers.py create mode 100644 tethysapp/app_store/tests/unit_tests/test_update_handlers.py create mode 100644 tethysapp/app_store/tests/unit_tests/test_utilities.py diff --git a/.coverage b/.coverage index 3ba5259e9203507f3064c6e389d03cf6bc623e38..64577b693ed5707b38758b9acac0e86b2e5c9391 100644 GIT binary patch delta 1622 zcmb`H&ui0Q7{}ivZ<1Dd^TZ7VsYpa+UTik1$7$WM!-GA{gO;qiN(=o(lOBqPG#>P- zcv9FL{RdR4J2=6kf*|xDdKrQTf7KlX8OmVud6Nw@SHX76;d!6ueUi_U_sRE3b6#%F zOSfWjln}yh8)wjo*IJKe?wV2}o@kBg`x$0bHcOxl)4;R`+FAC6&9?XHH<_DKDe(@N z`AR8gIyoobus4=$wR$4hu+X(^r&8Oz17q{gUOrc>=4y7l+L%(1HP#o)wYS3T`sGr= zwhNWAmCu%EiUGJNBRJC+_%E)LvmM*4mkW*+xZGNH-R)KG62)@kyISr_rF@l_#4eqe zkUp|OcL!>DaD*^Z3^Dsbx}i_YJ7ie}-Xja&b5cNNY$sg^!S?tPoFnj<&9SeL1x-r@^u&)w9zoT1+M?v@J) zg{G-D(u?lm9RADSXk_KyHDd03F9H@b@!AaqNj%X_{qX z%bUGzW|o!<_`a1^mRP>MWr}E)TB&HH?A-rz@121e#5%zH`TYLx+ntZ{oO|wlzUQ3p zdCq#zId|Tic!O21PBxj-byjs4az_FIidCx-LUQ=;1^*pCF*t;d3y=aQ_x+r56twcR zl3IWiqK$}JuAD^;SL{>{kbk8}m#&xZ66>UiaD)Hx4Dbx_4Dby6wHdI@QHZ^Ld;|{_ zSak_$daGHNs5f(el@sPp8aIEEdj7ca@sre?pL&o?4Zq>T)#KD=Q;s@AZ&oK8()4PB zG0Bjqvl@&kYHO5}?t*oCHcdKn zBK(uPqy{`%lMsnHnXo7g=TDjDkP3gf_lRC{{WRq#M z7_0`9QLWF_CuUjoN$nS8o4-uE#b=KlTdhAAbkM#7Y$v^2FTnJqGmwwlWv zux95n6YL;EXJ!hT+GKwaV)}n-ZA@e#4(fz7Q-Z6lu@+;|Lji%h!0}jemNAhjDi$UU ziKcA5S(l;@vft&Zh5u_q0@DeRio7QU+|P7cOh6mb;*{BZL}G7WU%^9C){N}AWfx)> z6LREaJJqP;R=Qqi$uc`Cl0T{>9U85*q@#@3??7^fi$&h!eD9YW`*Ye5yF+3V3&q}E zUIH6eBA9xK`S0eE2*#(i2()Pk@S~rhv!<%=jXUEF5g+I#@{aPlU&NUxXhWF4qP0s1 zHp=UKSoYJQ(r5mYE>UKDTBAH&pEo>Pmj<;-hR$GyA`5je*8-@S2a|O0ur9%rWo7M` zxn#|gX?t4Z%K@}rJGh6Z2D6Vt@t$q6SPamtGR>AH(&4Kfn5wfdwUA|?I>BU0)9Z{} z+W;DI76{W|QL}Nlw^h=#Vp#GRjFjwwY@8g83scR{heGElU*Jej*BO)S?G@7ubEr`F zGli20hpR+DQ9Td}r3H#;8%zhN{Vr2uIq-urfD;BaU6;facP3Mu5OY%X_rP(j(L|k9 zZ#AUr)yyMoWLU$>(OJ}qWlEkGdl9y(ND%d!_n zuAb3apixiPIihX9BwUQ7&COPOnFSRt^md%>gyGJK4(5^zfvMcM98BO?a6D!2>&ONx zA(jHF0l`21$1}h)z%#%zz%#%zz%#%zz%#%zz%#%zz%#%z@W00ZDIf%5X8cc3mk?D2 zfA}BI0M7u=0M7u=0M7u=0M7u=0M7u=0M7u=0MEdGCj$zC>`ghAc#NO~q_4Mq5kPoQ zXi&HeQbkbL5Os~Z^WT|3KGr+~JOexfJOexfJOexfJOexfJOexfJOexfJOd5}6l8CK zV-|prk-k#oQ0M7u=0M7u=0M7u=0M7u=0M7u=0M7u= zz<)Ud-EtrS?lA=XC8%^JF<#UW&cR&pKb`@e0iFS#0iFS#0iFS#0iFS#0iFS#0iFS# zfzC5PO%vL@1;MGNbbWA&!4wRq4yNW=lJwbjmtAO646D;k7I*}N)|9~>RT>Ou!6xao zmQ=mo3P&9#KrB=mp-q4RmzZVI7?)rjC9|Kl0p8Q>Y<8Q>Y<8Q>Y<8Q>Y< z8Q>Y<8Q>Y<8TcP$fFzYtX8%9){Xh71=f45?KbXmUl6VGq26zT|26zT|26zT|26zT| z26zT|26zULM<(6Zhw`O7DJ3OR(W+Y2ZPhQT%c}FLZ&asLCsfB& zAFAF}y`g$V^@3`fYP0Gwl})u;Wl^Q6^s42mg{s-AIMsMnlxn1ExN3kZK-E*#O(j#2 z%0}fK# 
zqXd<3j$LRM=Xe3Vz&UoJot)!&^gQQy4n4;?cAy=c<5~19=h%+6bB=9j8|QciJ;OP+ zqOF``3);dto<>h|j;GL5oa0IKB>?&QS>P1cmzGdIUYt2bB?uWE$4U$J;XU|$i_K}QL#uUkP3>5SW#HW zih=@GtXac~{CrldUd@WUJXYl9vLYvk71`OW$jV}c)yfKsg%xHqD>5@#k&(d)lZh2Z zBP-I=S&^2;idCyvv2rCV3g%!!ktkCOOk(9)W#6(skB(OrKW5tRUtk7y% zv3xlzmMvq&(xt3evV;|j7qjBQ2U)Rb5i1rhWW|C7ta#u7R?MHzih1)`F?TL2=FDNm z?AffCHH#H9XR>0(3|7R)vts&mR!p15inusdOr6S#DN|T6c`_>|O=88wiL96~ffeJ& zvtryhR>a1#LZe|tObjc=j%7u3G%KQ_SP>b?iiikSj2Xj<@Nia)9?gnTqgXL=BrC$g zSP>e^ijWXij2OX+;9yn+1+ilIa8?W(#)_dsSutb?D+Ui{#h^i~7&wp>0|v07e}7i= z>&J?|eOb|`4=Vx#S)o?5A|QYj{{F1!-J2DDeys5IWks)Etnl$+MbDnB@b+d!j~=Y> z@?wRjCo8&lXN89cE4p=Kg}XZ|D2f#-6)TiVRwxv#kjq&ild(c7Wrak-3bB|KA`vUx z+*lzLvVtU8K@dWrR4Qfm|0DOmZbi+foM(V%fM^$SBhLWO0M7u=0M7u=0M7u=0M7u=0M7u=z+agG{{8=7IrsUr^9=9| z@C@(_@C@(_@C@(_@C@(_@C@(_{8bo`34|yTJ&CB#sAs4Ls6MJIs@GMisv*kDu+QG8 z3{w28cu}!T(MSH1{AKw{d5Ek|c0jgL<|REQHA{O*ev-U~J|5}wjMn8>s1PySGL;LPnp^e0i znj;jeF2xee{DELHPC|o4ivn%}Gx~ixq;JkPB(}z!(GToM+7natNm*%nvvUO0 z9f?4qE;-qhmgKA-5KR_fatB3Sm!fx8_75WsxF(ZjPGb^`9>uqlG=qzs^v2KTG=p)K zb42~dl0}%QZ1YW9CeHowqs$a9C-d^bXE(`|xXLwadyXNqFe9Cm;2f$qz9QJQHdUXN z!L?jH!buBeh>T)O;rE&#v`H})ncCjb_Bcn{a};U9gm9|0Jn&-7R$5tBLz=;A&@)*o z#LE~~55941H6&`24MuwdziK3zjuBN>Io*_`XAOVELZK}LTZ=7f>3X9z*kMhM4>>dZ zwS0LQQnGS}(AK*vxUE$xpc&>1Z9}_)mSJXUh_p0)qLt$fKvTO4Y6r|QnO9jdbWnu? zXv%b<&8I6db+V%5@j{!p6If0V`VJUks?MAZO`zTk$q?a{Ro0gDxruu2%iW;p)&a#N zeWJ;%gJzQ%KqAs5;=&{TCa7gBY`BvW!Ss46B4ZB&OCOwKYU zG9R4Rf+d-CiC~8Sqr_{HTjS1{qM4~#813mDF=Cef1k);gs+LIxhu|JX=65I+5ECtf z1ffehs_T^J;!x6n8!nT02yS3U%!G-iy$=J;mdObEi5(aO>yn^j4?2@albN{DbV?m5 zLl;mO+K$LLk|Mq%ULy2FU%^EsMfFmZ!#RCH$_hoK{E~cwe3JMnS+n?i$xzv0S&rhQ zVm;a}ixK@QlPY&8lf;4Kqter&_XRuM3IvM;Dz|XqHQ{r@Swb;+RFn=Q|FhCZq!E%E zZl6f@iV7sT;zgoye>k+mv{`6uGqS9DZYm%+NoQH5g+HkYCY?D6NXd5l1daH^aN69& z9HOTfR;QaR!K~1lxGFxFGgxL)0zzJnS@pG##98g!j$yQ0_2*%>lnx@f?oTk38|Xo; zG$lY%Mq?J;HV{rm7J}FQH~=76Xh4{;3sZAgF#DNyORyKg!Fa{N5u~FOO~I?69coT# z9E6!h7!zkh=bfot0R@uD*lG9CqzPkO205H;NNJh(La%hiT!Cea)~ZWLV=Blcn5}HP z=Bl@t4fYm)#WbPK8>1R(7bl6Cmdh|O!zzf<9L#?5{h)Mg+O+oasGTXa1#~uI(33J` zr87x%!09k2&)SV#LYCFa*7Nha!n%{~%@!|0o^5I}G*xKp-q{3aq?xRaq%Q4BmZn1= zl$nD_Fy%s)F6v5po!DQDS0&iVca-foJS}@6b7)P)tw`9S*)n>ZB*{e)ZLx@t+ENKRqpa87-T`6KrSxw7xl-6Z+Uy-Zj#9i^G=M=GDG*|G*gb&2oqUgThWVzwy=(u+WUYqb%Gsu>@1-z z@IM6%5sku~S$FC3JJFZbL7QqZ1gFlXwR5$voh_%xgi*;z;i$SgRJ544Vn9bOc52v$ z2V!vVyiZQ&ngG|lZ=neyHFB=dHn6LCo{`sLqyQO-+0FYyqX$T?B#yzXN&gUygNnec z;QtWHKK~F7mhhaz8vg&1%=-Tr^ak_(|4HgWDo}Mz^^VG_8l$XMzM)*D90BkD_bSYa z5%N3oGWkRD7`aIHrEIHgp{%#`qV#XlmC_NCYRP_ySrRJ#O}tN>BaRT&iH?glh!%)? 
zyIphJ>z3sf=|&1q2{#DigoxZrt|A8!=ZQ_k1i>xAZozc)8w8H`n-n8NHVWglslA49 zuC1Ml=EAW66V_oE*HY0^%{tYH^P&NG=zj{>Ud_&o0?@$!5Ga(lIgx-ju#0%EMK~@3 zAicW?$rZZzF#y%Oi%>1K!L)Ed^XVd*y$FvR4Orq1V>wTxj2{IM(hftobb4lu1dP5N z#&9)W=PG?l7yx$fETB`d8W-9rf}GXB403h)TLcL~jy-q;z}W;slRA69+A z7);ycFcH4ks%;lcMfJi;Y=+5f)uja|=`-}OYQmVvt-2WFgPqtm3u&TyVg<`2_TGgw z;of-F4x{X{kS4qbwj#40rgb6BXfG03@JxbHY*|P%-V;m_r%8vXLcP=OLYk=V*i_y7 z=3t};UPTMxJ5`qv-LUM5tuC4VPjKry{aANMF-9BRs@wKOZWM);-c|tT)d*u$SQgo; z-0}u}k`f}0F?wiKZTB^JgrbAgwZ4dtk>k}{tub(2Juq1Y`rZEAN?Y~YUkWin3OMr4 z;5eC3q=ZEI9l?NDIbO#{iaV(56z*sdR!LzeTI0B5@3aASL7NtP-Djb5$JD-`jASad{tkoHmM#^`73`> z?pGElrzk1KSBhugt9X9$i}GjXGvrd)N3tB*Q0XPC%OZth=iZ_eL!MOi* zkyaGoR^j%X+e~4d@Dsg%}|C4fY9S3PzxxAxQk+_#CiUoNAWq zy+VKV-pQ~@*?@sj{vEwbc6Y(OOA0hu0E5&0y5BVAcq?vNxTOs(10<(u$|ws~73xR| zGe^<-LOs+>qE(nB*RjPylZlsV)|ws5)txs#Ow9lQFU(L~0NQSTm|y}V+}g_p$!YV$ zL?eJGI};s9Vaw)+3F)1Uic^7)Ndpj!afl1GPPF=@M$EiM7{G3Ex_f91-&Ak|t@^?K%w$P67o z1^l_KwhMLd5{}6$01l%!)h?XVl9zZb;9!_~yEw4q#dV#^)a8JNQBSsu=CT4UZW$nT zmU-8g%2SpC6viB3yODBUOEPsy7g7hUSnGnN$%_H0v$^NAlw|6IU5Js(3ZuA1fYe#O zUDwG>UkFgXowRhTDJ3NhN==TznwSd3X6k~jq>x!l?pX3L?Eyf-sNC8$gEq@2rp@mH zR-5G$6XyXIMq}P?yjm7xjh_oB7=6t4QMeVGadQBpvo_$m{B81VfaD8Of?2_-8nGgqz%HKuVDeUkWSYsKs0dv{%5E+jZ>6RIJ=W$o`bgX$oj~S>phx z!V7(wqpc70Mj1mkG_zOG) z64VGuM5;`oKu)QzLePp_jtXluPGpOxS-awT!3Jhywrsq<&^4f}Jh z*jg-)b-giMch3V{05~GX+A{{Z`m;6#_*pvQItmt^I!_& z`Ol3{$6MZt+_*-R41TI)LZoM;o=_x#?3F~s$JUq35XchYsz)O6V|cmPJptT(5{lVH zM|ZwDn<`wYa}!8yV|4JEvP(bp9a>D6UF!J|{1dQ9YQK7c-wFsXa>X{zr&wA4XkzLZ z1yYZM&?I7CE!-WWC6wR>ZK9(NYOwL%sng#^5+=6=V1qj%H3n=bVvx-Vc57B~=M z>xxd(0-pz%=N}-JI`~HDCwg~t!GsADJm`Rw) z>~3D$>^YZFoJ%Bm&TUE`*^BvwjwgKPFd@w$;vFGvqv^HHbhzI~0_kkH1*6XGr>nSlL(vva^ei)`a)6F5IYIS4({8(E$Uxyy)t;)t)8b| zpBT?%ktCitUm7Syw=SeNy&n4Mj;vSrPrPHAhW_)8fN9jH{|XY%FW|_V9-d1ts(kd4=G8tEl;_Ci?S$>b^auL2`no5q>`kwuu)N z)`f^xD2_cFLW$y-~>@3%4a(7+56tUYPET;E2eKX?xl`vCud3EB_n(3uYt8X5;QIRvc`aq0EQ*-z5 zmo;(!YN)*%Qg?Qjrdq6=1n7Q~h$K_#vh4w6&_uXgK9Ojg@WW%Or>?o57;&^>_ z+!f>DUa#@HhTlyHF6}8A2Un_blU1=G{bPyJ4Za^NDE~$ftbywl8se_$_*+F+j);sG z_sZV%+A=Y4-YQ+_TX&{vv`;TMR_Ya;G{^JWOOrCI#G)7mC`KS33$o`}V)W&kr-UBS za4{g7IJBYoP!nCzL{}O~uPE>v7DX5bCR+0=j^B8{a#CecM3F}%^JpZIe7^R>{F(LA6&TSOELmw*hu30~EbZLNWG(6H{G!c3r-FL_+a4#4|T$p}bhHlp7B0ULzKTfm0PGkcKi|p+xd~zVZ-o_6#Ae zkL_Mf*+ND@=0B7$f@lnA9#tGjA6b&tBlld>n#!7j%A(DITa1$%M!)l25|p#u8QDL) z_!(!87aL5U+-SJ@M1JbIWX5OF`O<%m-Ab+y9a9whz&1pL-Wj`(Iv0x2it3N+$G%yS z&A6Za&i??^R3r9%`q2TV%DwgROHYpaG31KB zmwyQleHR^46*KH|(ZQQH&sH=Y)&4kYtT-4F;2uo;C=OyAL4-s!92~0Q0?{x=4kN@v znHxjli6M+*2q6&Ccn~WB=yf zwu=4@caIiS*8E=g<5+sTVAWgihpu?*{RLkfDeSpC1vSo^nRzGRKoeXCe4ZaOQ&ZWicRz?lbicwgwWUW7oTslBrIjSnm1i6N{mqS+8*+oY^@ZF0 z`aYL^^k_lfiIv|UxFP7(2fRl2A(n1Gf6!ERyri=6xALl@!sfj{-KLfWGRcSxG?tus zci)oilg6{#Hr^O`uB@bd`;DrrCxXw-y4!Hzy4vtVohJijKOwA%FE{-(mdU(^SL2O=c3 zls){y>{kyi{jDja>Gr#(#)8Tl4Kamad$mP}uKxS;=H`sUWjpJB|Hj83)JOUgugxmE zkyqJJmp3{7&`QHw@zeKSG^hr%X(Qk(ftxG|*S!p!L@&6)v6UblJmWD4bwoz zf7rZ-{<(2%$>*2io2!p#18RScq2;f?o_XqG-p+{XHCO7EG>@g}`6r6Bzg5*~zd0Up zb?2GtgZ*av3>2&)7Db*4{dDR}M+el83X~egS3cY*Dd=B+_U`^KbI$xq=hx8RR5m?R zTYmeDg?@GEX6mC&o11D+y2W`j3HIG-!xp!GdT(E$wlv`?2ng zFC48S(VL$rnrrB)MiN~sDHGn38vTn8_r0}QdaMK$)HiOSFMIX?j)(LpRSN2+e|qHi zlULVN{#q&U@&eysUQji}7B<&i4Lk4xy0~M9Pn0Kk=X%BnhYeqHU`P6nN(eTwtYlw$ z-kOrz2O9zoYnrNZYrfr6H9Y6A;#iTHZr*=>$*X%!dzyc)^AdLlW%ur1hf*HkPy5BQFJ>=PUwLj&&(*Baf-|M+Mt}6aDG2%!rz-YE^=E3goFRgpAg^RigTEK6 z=1Ww7>Y+M5S*%hr_)1ZI-uAEdXOiMR3b>xEAddK`m&T*_-a@~NP+@&ZxZsQBNyWM5 zgda8>7kkU$nGtdVwm~PPFg5IB>CeO8l@U@I-0+lr{^c_QkrbRNslb*dfe5c%BtgxO zR1Ljc718^{hJT}Cy5XQ3dMES1n|GsofGR@8hsurL^*WdK=Du^+=X?6hu`YbG`eZ{G 
zi5j1H=`g`Qf*czXneWRLgCFxhmgzFhhX34V#r5C*bMBryetFORQhNT2tzW-+_Jglr zGe%wae3 z%66P@NPHLkPLDl{P-;#&bkyOw_ZBa(T>1N=-{_&i^vRU3zBGlShMp^mbz(6j+g(i5 zhKj(EEF#XItkn*CyX@lX#pe&~`7V3?sMwFTdVTQ8fGIIQ53C+|WcQ#M12olPB@u8X zmbrmW4>#ggXjs^CA(PM1LZUH88m~U1sgv$5)Epc-XRjeYC6DM?4BdcNc0`_w5L_$k zUwmOpAJlYjTLtr zKJyimOhie-eJa5OmFbsboC{|tBvDWst zA(bdr^5gk`>##^ztzo3W* zjp4sb-EAtkp+=jMryu;s1yL_l_k!4_1TPiZNXTspaP>eNh@DICkW!l*t~}5a*)dYN zO~&3?UwHibDe?mWCcCnrmWW zVepD#PpzTpB3f8X)WJac+8!Gnk+EwXF{l{+31hDuS+lFEX=^pDX$~$YZoP3-c#N*? zxd*B0%3~IwW{=_&8GWTZySi~tEM2X+9`@_NTamv+{(QIo$a0PBSc%2be6l3sM)q;D zjj)jr^g{$%yIfg}B*pL#Gjk>zcyXZSp*3uM=^v~1unTs^dy#3jozGO*E16|Y$rgVzw5)pyu>XIfw zaZ^g87!^03t0u)Z0=QA3hhShU0G9`{5hFhEu_2bRVq&pR!Bc1Y(NDhelGm|(foCz{ zE-Hf2u&PK9RtPe+@a{?a%iHwQqWYir-mT6kqsKNjR5vv*YA7!$`mq{jqiY_$P*Uk} z^^3HUFSK`QQ30b|K#0~bat$#xpB+)>6OENO*6b_VTVFK!YQuu#TlO_BoqTH1@kz^` z-CG-KG|f$Z_plFeV1nuB>`>#dZ>rxoeE37JXK&ur_yr`RYhgc0MXMRa)r2Guq(>g% zt{9pNu0^@RXIF$uPbGOAkosQxSKgH!?C+{2bbCrr`Oawai{iLrF!QF!froqL5d8+6 zJ1}FjDjTl+vrmOpS5#DdMRylv!L=$&;B5st!b zqUo|9$9}Md<0i9o4?3|}Yxf*FusNRloC~|x$Z6^61n2}~*-Z;lf({OzG$vVsO-6m1 zDTO^$R|EZKtleJipscd$AVssGyNzu9|9ma?y$Gp!NoJD;_AtVsm~awAirJLK9u_(c zdhJ;2u>TM;LC?l-5_J2qmJ$9D7^HI+3=*&wOSXe!(Zc6JtWbdAq;C73Y(N|jqYH$y zAp2fK-Rdl}UdJ4QI2FbvSoX%CBg># ztuao=u?LlINP=VDbD*w+@W$DMKoQ2yH^=!n+^z*?R}bd{7~^duU|?+LaA0uf1O&T) z#KPDVYyVsu5GNyzw86G)jD7MBgqG#v4Dl0T6pXdA#i7j+VQ*JCPk_-B0SZRd;fmsv zh$)2tfU%<9-jCs8VZU(MwKb^#fH2mqw+G}DpO`fOgRw8g1;$B#T0ZFGZ)9@mW7x^% zpn=>;t2>k=7c8eJjn4xZ40E+7solMrVYyhHKl^SH`#F;``E@i3ZA0>xnmICwpi9*WfWeM{v_NZT&W5Z4wwF&yiKZ! z3&cxOqja={Cf^`d5MF}QfCTtbynxsu~lkg7uYiDbR%6~{mb_FLWe2j@)o z$2buE-uC8xohI}woL=nHRY-fM32lditf?-7f$gAebKs2T&f&D*X+qDyXHKM@LTv3c zp{;O=rdQ`c+H{)G7WjAo**Pe#(}bRmBJ(f+Tv}>-rwKiU8M=$IbEgSC8A;}JG<2@h zgf_zgxd9lu_Zo4#ohGyi^Q(dP;yKkbXk#bnw0D}&hG;S$BS9`~Iyz10iB2dxI!$Oj z91bk#XwXibCiHlR^3l1|gdW5DID%N* z92o6&fY4evW819*5wU|f8{9*9`R<5@O9u$q;ACxO2f}Z^1B8k(&z5#Xf`f5NMG@w? 
z&@Md=`-Mvns1Wn0@b-Y5;!}Y6g&`LhC;c^;ha21VU3x8TdqDXeOOgwgQGF3L_}&%~qGuI_Xu6)}p;#OT!86#60wPK&6 zK`)LX(=qhy2Jg_?-Jz}&*OYRvCr}AqfcBE@2yoXJ(t;7qmgKQPGQ+DE;QI%G9no}Y ro9=x*fp$V!?A~cWr@96`-|5c0)9VTJ9On3@g__ov9Ng;(wB!E)x)ztN diff --git a/tethysapp/app_store/tests/.coveragerc b/.coveragerc similarity index 76% rename from tethysapp/app_store/tests/.coveragerc rename to .coveragerc index adbd054..4ab39a0 100644 --- a/tethysapp/app_store/tests/.coveragerc +++ b/.coveragerc @@ -6,4 +6,5 @@ omit = *tests/*, *urls.py, *wsgi.py, - manage.py \ No newline at end of file + manage.py, + *workspaces/* \ No newline at end of file diff --git a/.flake8 b/.flake8 index 6deafc2..8c76cbb 100644 --- a/.flake8 +++ b/.flake8 @@ -1,2 +1,4 @@ [flake8] max-line-length = 120 +exclude = tethysapp/app_store/workspaces/ + diff --git a/.gitignore b/.gitignore index 1bcb323..95ecbd1 100755 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,7 @@ tethysapp/app_store/workspaces/app_workspace/gitsubmission/ tethysapp/app_store/workspaces/app_workspace/install_status/ tethysapp/app_store/workspaces/app_workspace/logs/ tethysapp/app_store/workspaces/app_workspace/develop/ +tethysapp/app_store/model.py docs/_build .vscode/ diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..4e843d1 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,4 @@ +[pytest] +DJANGO_SETTINGS_MODULE=tethys_portal.settings +python_files = test_*.py *_tests.py +addopts = --ignore-glob=*workspaces/* --cov --cov-report term-missing --disable-warnings \ No newline at end of file diff --git a/tethysapp/app_store/application_files/__init__.py b/tethysapp/app_store/application_files/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tethysapp/app_store/begin_install.py b/tethysapp/app_store/begin_install.py index 72105ea..dbc75cf 100755 --- a/tethysapp/app_store/begin_install.py +++ b/tethysapp/app_store/begin_install.py @@ -2,80 +2,79 @@ import time import importlib import subprocess +import tethysapp +import site from django.core.cache import cache from subprocess import call -from .helpers import check_all_present, get_app_instance_from_path, logger, send_notification -from .resource_helpers import get_resource +from .helpers import check_all_present, logger, send_notification +from .resource_helpers import get_resource, get_app_instance_from_path def handle_property_not_present(prop): + """Handles any issues if certain properties/metadata are not present + + Args: + prop (dict): application metadata + """ # TODO: Generate an error message that metadata is incorrect for this application pass -def process_post_install_scripts(path): - # Check if scripts directory exists - scripts_dir = os.path.join(path, 'scripts') +def process_post_install_scripts(scripts_dir): + """Process any post installation scripts from the installed application + + Args: + path (str): Path to the application base directory + """ if os.path.exists(scripts_dir): - logger.info("TODO: Process scripts dir.") # Currently only processing the pip install script, but need to add ability to process post scripts as well + pass def detect_app_dependencies(app_name, channel_layer, notification_method=send_notification): - """ - Method goes through the app.py and determines the following: - 1.) Any services required - 2.) Thredds? - 3.) Geoserver Requirement? - 4.) Custom Settings required for installation? 
- """ + """Check the application for pip (via a pip_install.sh) and custom setting dependencies - # Get Conda Packages location - # Tried using conda_prefix from env as well as conda_info but both of them are not reliable - # Best method is to import the module and try and get the location from that path - # @TODO : Ensure that this works through multiple runs - import tethysapp - # store_pkg = importlib.import_module(app_channel) + Args: + app_name (str): Name of the application being installed + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + notification_method (Object, optional): Method of how to send notifications. Defaults to send_notification + which is a WebSocket. + """ + logger.info("Running a DB sync") call(['tethys', 'db', 'sync']) cache.clear() - # clear_url_caches() + # After install we need to update the sys.path variable so we can see the new apps that are installed. # We need to do a reload here of the sys.path and then reload the tethysapp # https://stackoverflow.com/questions/25384922/how-to-refresh-sys-path - import site importlib.reload(site) importlib.reload(tethysapp) - # importlib.reload(store_pkg) - # paths = list() - # paths = list(filter(lambda x: app_name in x, store_pkg.__path__)) - paths = list(filter(lambda x: app_name in x, tethysapp.__path__)) + installed_app_paths = [path for path in tethysapp.__path__ if app_name in path] - if len(paths) < 1: + if len(installed_app_paths) < 1: logger.error("Can't find the installed app location.") return - # Check for any pre install script to install pip dependencies - - app_folders = next(os.walk(paths[0]))[1] - app_scripts_path = os.path.join(paths[0], app_folders[0], 'scripts') + installed_app_path = installed_app_paths[0] + app_folders = next(os.walk(installed_app_path))[1] + app_scripts_path = os.path.join(installed_app_path, app_folders[0], 'scripts') pip_install_script_path = os.path.join(app_scripts_path, 'install_pip.sh') if os.path.exists(pip_install_script_path): logger.info("PIP dependencies found. Running Pip install script") notification_method("Running PIP install....", channel_layer) - p = subprocess.Popen(['sh', pip_install_script_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + process = subprocess.Popen(['sh', pip_install_script_path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) while True: - output = p.stdout.readline() + output = process.stdout.readline() if output == '': break if output: - # Checkpoints for the output str_output = str(output.strip()) logger.info(str_output) if (check_all_present(str_output, ['PIP Install Complete'])): @@ -84,8 +83,9 @@ def detect_app_dependencies(app_name, channel_layer, notification_method=send_no notification_method("PIP install completed", channel_layer) # @TODO: Add support for post installation scripts as well. 
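The pip handling above streams the install script's output line by line and watches for checkpoint strings with check_all_present. Below is a minimal, self-contained sketch of that pattern; the echo command and the stream_and_watch helper are hypothetical stand-ins for the real ['sh', pip_install_script_path] call, not code from this patch.

import subprocess


def check_all_present(string, substrings):
    # True only when every expected substring occurs in the given line
    return all(substring in string for substring in substrings)


def stream_and_watch(command, checkpoint):
    # Run the command, log each output line, and flag the checkpoint string when it appears
    process = subprocess.Popen(command, stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT, text=True)
    for line in process.stdout:
        line = line.strip()
        print(line)
        if check_all_present(line, [checkpoint]):
            print(f"Checkpoint reached: {checkpoint}")
    return process.wait()


stream_and_watch(["echo", "PIP Install Complete"], "PIP Install Complete")
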
+    process_post_install_scripts(app_scripts_path)
 
-    app_instance = get_app_instance_from_path(paths)
+    app_instance = get_app_instance_from_path(installed_app_paths)
 
     custom_settings_json = []
     custom_settings = app_instance.custom_settings()
@@ -102,15 +102,23 @@ def detect_app_dependencies(app_name, channel_layer, notification_method=send_no
         "data": custom_settings_json,
         "returnMethod": "set_custom_settings",
         "jsHelperFunction": "processCustomSettings",
-        "app_py_path": str(paths[0])
+        "app_py_path": str(installed_app_path)
     }
     notification_method(get_data_json, channel_layer)
 
     return
 
 
-def conda_install(app_metadata, app_channel, app_label, app_version, channel_layer):
+def mamba_install(app_metadata, app_channel, app_label, app_version, channel_layer):
+    """Run a conda install with an application using the anaconda package
+
+    Args:
+        app_metadata (dict): Dictionary representing an app and its conda metadata
+        app_channel (str): Conda channel to use for the app install
+        app_label (str): Conda label to use for the app install
+        app_version (str): App version to use for app install
+        channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer
+    """
     start_time = time.time()
     send_notification("Mamba install may take a couple minutes to complete depending on how complicated the "
                       "environment is. Please wait....", channel_layer)
@@ -121,7 +129,7 @@ def conda_install(app_metadata, app_channel, app_label, app_version, channel_lay
 
     # Running the conda install as a subprocess to get more visibility into the running process
     dir_path = os.path.dirname(os.path.realpath(__file__))
-    script_path = os.path.join(dir_path, "scripts", "conda_install.sh")
+    script_path = os.path.join(dir_path, "scripts", "mamba_install.sh")
 
     app_name = app_metadata['name'] + "=" + app_version
 
@@ -133,9 +141,9 @@ def conda_install(app_metadata, app_channel, app_label, app_version, channel_lay
     install_command = [script_path, app_name, label_channel]
 
     # Running this sub process, in case the library isn't installed, triggers a restart.
     p = subprocess.Popen(install_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-
+    success = True
     while True:
         output = p.stdout.readline()
         if output == '':
             break
@@ -153,31 +161,46 @@ def conda_install(app_metadata, app_channel, app_label, app_version, channel_lay
         if (check_all_present(str_output, ['All requested packages already installed.'])):
            send_notification("Application package is already installed in this conda environment.",
                              channel_layer)
-        if (check_all_present(str_output, ['Mamba Install Complete'])):
-            break
+        if (check_all_present(str_output, ['libmamba Could not solve for environment specs', 'critical'])):
+            success = False
+            send_notification("Failed to resolve environment specs when installing.",
+                              channel_layer)
 
         if (check_all_present(str_output, ['Found conflicts!'])):
-            send_notification("Mamba install found conflicts."
-                              "Please try running the following command in your terminal's"
+            success = False
+            send_notification("Mamba install found conflicts. "
+                              "Please try running the following command in your terminal's "
                               "conda environment to attempt a manual installation : "
-                              "mamba install -c " + label_channel + " " + app_name,
+                              f"mamba install -c {label_channel} {app_name}",
                               channel_layer)
 
+        if (check_all_present(str_output, ['Mamba Install Complete'])):
+            break
+
     send_notification("Mamba install completed in %.2f seconds."
% (time.time() - start_time), channel_layer) + return success + def begin_install(installData, channel_layer, app_workspace): + """Using the install data, this function will retrieve a specific app resource and install the application as well + as update any app dependencies + Args: + installData (dict): User provided information about the application that should be installed + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + app_workspace (str): Path pointing to the app workspace within the app store + """ resource = get_resource(installData["name"], installData['channel'], installData['label'], app_workspace) + if not resource: + send_notification(f"Failed to get the {installData['name']} resource", channel_layer) + return - send_notification("Starting installation of app: " + resource['name'] + " from store " + installData['channel'] + - " with label " + installData['label'], channel_layer) - send_notification("Installing Version: " + installData["version"], channel_layer) + send_notification(f"Starting installation of app: {resource['name']} from store {installData['channel']} " + f"with label {installData['label']}", channel_layer) + send_notification(f"Installing Version: {installData['version']}", channel_layer) - try: - conda_install(resource, installData['channel'], installData['label'], installData["version"], channel_layer) - except Exception as e: - logger.error("Error while running conda install") - logger.error(e) + successful_install = mamba_install(resource, installData['channel'], installData['label'], installData["version"], + channel_layer) + if not successful_install: send_notification("Error while Installing Conda package. Please check logs for details", channel_layer) return diff --git a/tethysapp/app_store/controllers.py b/tethysapp/app_store/controllers.py index 48e8497..09135d4 100755 --- a/tethysapp/app_store/controllers.py +++ b/tethysapp/app_store/controllers.py @@ -5,9 +5,7 @@ from tethys_sdk.routing import controller from .resource_helpers import get_stores_reformatted - -from .app import AppStore as app -from .utilities import decrypt +from .helpers import get_conda_stores ALL_RESOURCES = [] CACHE_KEY = "warehouse_app_resources" @@ -15,18 +13,22 @@ @controller( name='home', url='app-store', - permissions_required='use_app_store', - app_workspace=True, + permissions_required='use_app_store' ) -def home(request, app_workspace): - available_stores_data_dict = app.get_custom_setting("stores_settings")['stores'] - encryption_key = app.get_custom_setting("encryption_key") - for store in available_stores_data_dict: - store['github_token'] = decrypt(store['github_token'], encryption_key) +def home(request): + """Created the context for the home page of the app store + + Args: + request (Django Request): Django request object containing information about the user and user request + + Returns: + object: Rendered html Django object + """ + available_stores = get_conda_stores() context = { - 'storesData': available_stores_data_dict, - 'show_stores': True if len(available_stores_data_dict) > 0 else False + 'storesData': available_stores, + 'show_stores': True if len(available_stores) > 0 else False } return render(request, 'app_store/home.html', context) @@ -35,17 +37,20 @@ def home(request, app_workspace): @controller( name='get_available_stores', url='app-store/get_available_stores', - permissions_required='use_app_store', - app_workspace=True, + permissions_required='use_app_store' ) -def 
get_available_stores(request, app_workspace): +def get_available_stores(request): + """Retrieves the available stores through an ajax request - available_stores_data_dict = app.get_custom_setting("stores_settings") - encryption_key = app.get_custom_setting("encryption_key") - for store in available_stores_data_dict['stores']: - store['github_token'] = decrypt(store['github_token'], encryption_key) + Args: + request (Django Request): Django request object containing information about the user and user request - return JsonResponse(available_stores_data_dict) + Returns: + JsonResponse: A json reponse of the available conda stores + """ + available_stores = get_conda_stores() + available_stores_dict = {"stores": available_stores} + return JsonResponse(available_stores_dict) @controller( @@ -54,12 +59,12 @@ def get_available_stores(request, app_workspace): permissions_required='use_app_store', app_workspace=True, ) -def get_resources_multiple_stores(request, app_workspace): +def get_merged_resources(request, app_workspace): stores_active = request.GET.get('active_store') object_stores_formatted_by_label_and_channel = get_stores_reformatted(app_workspace, refresh=False, - stores=stores_active) + conda_channels=stores_active) tethys_version_regex = re.search(r'([\d.]+[\d])', tethys_version).group(1) object_stores_formatted_by_label_and_channel['tethysVersion'] = tethys_version_regex diff --git a/tethysapp/app_store/git_install_handlers.py b/tethysapp/app_store/git_install_handlers.py index ff9e7ff..a486ba0 100644 --- a/tethysapp/app_store/git_install_handlers.py +++ b/tethysapp/app_store/git_install_handlers.py @@ -22,8 +22,9 @@ from subprocess import (Popen, PIPE, STDOUT) from datetime import datetime +from conda.cli.python_api import run_command as conda_run, Commands from .app import AppStore as app -from .helpers import Commands, conda_run, get_override_key, logger +from .helpers import get_override_key, logger from .installation_handlers import restart_server FNULL = open(os.devnull, 'w') diff --git a/tethysapp/app_store/helpers.py b/tethysapp/app_store/helpers.py index 4813d5d..01abf4b 100644 --- a/tethysapp/app_store/helpers.py +++ b/tethysapp/app_store/helpers.py @@ -1,22 +1,14 @@ -import pkgutil -import inspect -import sys -import importlib import logging -import json -import shutil import os -import re -from tethys_apps.base import TethysAppBase from django.conf import settings -from django.urls.base import clear_url_caches from django.core.cache import cache from asgiref.sync import async_to_sync -from conda.cli.python_api import run_command as conda_run, Commands from string import Template from subprocess import run +from .utilities import decrypt +from .app import AppStore as app logger = logging.getLogger('tethys.apps.app_store') # Ensure that this logger is putting everything out. @@ -35,6 +27,15 @@ def get_override_key(): def check_all_present(string, substrings): + """Checks to see if all substrings are contained within a string + + Args: + string (str): The string to check + substrings (list): List of strings that should be within the main string + + Returns: + bool: True if all substrings are in the string. 
False if any substrings are not in the string + """ result = True for substring in substrings: if substring not in string: @@ -44,114 +45,24 @@ def check_all_present(string, substrings): def run_process(args): + """Run a subprocess with the given arguments and log any errors + + Args: + args (list): List of arguemtns to use for the subprocess + """ result = run(args, capture_output=True) logger.info(result.stdout) if result.returncode != 0: logger.error(result.stderr) -def check_if_app_installed(app_name): - return_obj = {} - try: - [resp, err, code] = conda_run( - Commands.LIST, ["-f", "--json", app_name]) - if code != 0: - # In here maybe we just try re running the install - logger.error( - "ERROR: Couldn't get list of installed apps to verify if the conda install was successfull") - else: - conda_search_result = json.loads(resp) - if len(conda_search_result) > 0: - # return conda_search_result[0]["version"] - return_obj['isInstalled'] = True - return_obj['channel'] = conda_search_result[0]["channel"] - return_obj['version'] = conda_search_result[0]["version"] - return return_obj - - else: - return_obj['isInstalled'] = False - return return_obj - except RuntimeError: - err_string = str(err) - if "Path not found" in err_string and "tethysapp_warehouse" in err_string: - # Old instance of warehouse files present. Need to cleanup - err_path = err_string.split(": ")[1] - if "EGG-INFO" in err_path: - err_path = err_path.replace("EGG-INFO", '') - - if os.path.exists(err_path): - shutil.rmtree(err_path) - - logger.info("Cleaning up: " + err_path) - return check_if_app_installed(app_name) - - -def add_if_exists(a, b, keys): - if not a: - return b - for key in keys: - if key in a: - b[key] = a[key] - return b - - -def add_if_exists_keys(a, final_a, keys, channel, label): - if not a: - return final_a - for key in keys: - if key not in final_a: - final_a[key] = {} - if channel not in final_a[key]: - final_a[key][channel] = {} - if label not in final_a[key][channel] and key in a: - final_a[key][channel][label] = a[key] - - return final_a - - -def get_app_instance_from_path(paths): - app_instance = None - for _, modname, ispkg in pkgutil.iter_modules(paths): - if ispkg: - app_module = __import__('tethysapp.{}'.format( - modname) + ".app", fromlist=['']) - for name, obj in inspect.getmembers(app_module): - # Retrieve the members of the app_module and iterate through - # them to find the the class that inherits from AppBase. - try: - # issubclass() will fail if obj is not a class - if (issubclass(obj, TethysAppBase)) and (obj is not TethysAppBase): - # Assign a handle to the class - AppClass = getattr(app_module, name) - # Instantiate app - app_instance = AppClass() - app_instance.sync_with_tethys_db() - # We found the app class so we're done - break - except TypeError: - continue - return app_instance - - -def reload_urlconf(urlconf=None): - if urlconf is None: - urlconf = settings.ROOT_URLCONF - if urlconf in sys.modules: - importlib.reload(sys.modules[urlconf]) - clear_url_caches() - - -def send_notif(msg, channel_layer): - return channel_layer.group_send( - "notifications", - { - "type": "install_notifications", - "message": msg - } - ) - - def send_notification(msg, channel_layer): + """Send a message using the django channel layers. 
Handles the async and sync functionalities and compatibilities + + Args: + msg (str): Message to send to the django channel layer + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + """ async_to_sync(channel_layer.group_send)( "notifications", { "type": "install_notifications", @@ -160,17 +71,31 @@ def send_notification(msg, channel_layer): ) -# Template Generator - def apply_template(template_location, data, output_location): - filein = open(template_location) - src = Template(filein.read()) - result = src.substitute(data) + """Apply data to a template file and save it in the designated location + + Args: + template_location (str): path to the template that will be used + data (dict): Dictionary containing information on what keys to look for and what to replace it with + output_location (str): path to newly created file with the applied data + """ + with open(template_location) as filein: + src = Template(filein.read()) + result = src.substitute(data) + with open(output_location, "w") as f: f.write(result) def parse_setup_py(file_location): + """Parses a setup.py file to get the app metadata + + Args: + file_location (str): Path to the setup.py file to parse + + Returns: + dict: A dictionary of key value pairs of application metadata + """ params = {} found_setup = False with open(file_location, "r") as f: @@ -196,86 +121,79 @@ def parse_setup_py(file_location): params[parts[0].strip()] = value return params -# Get apps that might have been installed via GitHub install process - -def find_string_in_line(line): - # try singleQuotes First - matches = re.findall("'([^']*)'", line) - if len(matches) > 0: - return matches[0] - else: - # try double quotes - matches = re.findall('"([^"]*)"', line) - if len(matches) > 0: - return matches[0] +def get_github_install_metadata(app_workspace): + """Get resource metadata for all applications already installed. 
+ + Args: + app_workspace (str): Path pointing to the app workspace within the app store + + Returns: + list: List of resources found in the installed directory + """ + cached_app = cache.get(CACHE_KEY) + if cached_app: + return cached_app + + logger.info("GitHub Apps list cache miss") + workspace_directory = app_workspace.path + workspace_apps_path = os.path.join( + workspace_directory, 'apps', 'installed') + if (not os.path.exists(workspace_apps_path)): + cache.set(CACHE_KEY, []) + return [] + + possible_apps = [f.path for f in os.scandir( + workspace_apps_path) if f.is_dir()] + github_installed_apps_list = [] + for possible_app in possible_apps: + installed_app = { + 'name': '', + 'installed': True, + 'metadata': + { + 'channel': 'tethysapp', + 'license': 'BSD 3-Clause License', + }, + 'installedVersion': '', + 'path': possible_app + } + setup_path = os.path.join(possible_app, 'setup.py') + setup_py_data = parse_setup_py(setup_path) + installed_app["name"] = setup_py_data.get('name') + installed_app["installedVersion"] = setup_py_data.get('version') + installed_app["metadata"]["description"] = setup_py_data.get('description') + installed_app["author"] = setup_py_data.get('author') + installed_app["dev_url"] = setup_py_data.get('url') + github_installed_apps_list.append(installed_app) + cache.set(CACHE_KEY, github_installed_apps_list) + return github_installed_apps_list -def get_github_install_metadata(app_workspace): - if (cache.get(CACHE_KEY) is None): - logger.info("GitHub Apps list cache miss") - workspace_directory = app_workspace.path - workspace_apps_path = os.path.join( - workspace_directory, 'apps', 'installed') - if (not os.path.exists(workspace_apps_path)): - cache.set(CACHE_KEY, []) - return [] - - possible_apps = [f.path for f in os.scandir( - workspace_apps_path) if f.is_dir()] - github_installed_apps_list = [] - for possible_app in possible_apps: - installed_app = { - 'name': '', - 'installed': True, - 'metadata': - { - 'channel': 'tethysapp', - 'license': 'BSD 3-Clause License', - }, - 'installedVersion': '', - 'path': possible_app - } - setup_path = os.path.join(possible_app, 'setup.py') - with open(setup_path, 'rt') as myfile: - for myline in myfile: - if 'app_package' in myline and 'find_resource_files' not in myline and 'release_package' not in myline: # noqa e501 - installed_app["name"] = find_string_in_line(myline) - continue - if 'version' in myline: - installed_app["installedVersion"] = find_string_in_line( - myline) - continue - if 'description' in myline: - installed_app["metadata"]["description"] = find_string_in_line( - myline) - continue - if 'author' in myline: - installed_app["author"] = find_string_in_line(myline) - continue - if 'description' in myline: - installed_app["installedVersion"] = find_string_in_line( - myline) - continue - if 'url' in myline: - installed_app["dev_url"] = find_string_in_line( - myline) - continue - github_installed_apps_list.append(installed_app) - cache.set(CACHE_KEY, github_installed_apps_list) - return github_installed_apps_list - else: - return cache.get(CACHE_KEY) +def get_conda_stores(active_only=False, conda_channels="all"): + """Get the conda stores from the custom settings and decrypt tokens as well + Args: + active_only (bool, optional): Option to only retrieve the active stores. Defaults to False. + conda_channels (str, optional): Option to only retrieve certain stores based on the conda channel name. + Defaults to "all". 
-def check_github_install(app_name, app_workspace): - possible_apps = get_github_install_metadata(app_workspace) - print(possible_apps) + Returns: + list: List of stores to use for retrieving resources + """ + available_stores = app.get_custom_setting("stores_settings")['stores'] + encryption_key = app.get_custom_setting("encryption_key") + if active_only: + available_stores = [store for store in available_stores if store['active']] -def get_github_installed_apps(): + if conda_channels != "all": + if isinstance(conda_channels, str): + conda_channels = conda_channels.split(",") + available_stores = [store for store in available_stores if store['conda_channel'] in conda_channels] - # print(possible_apps) + for store in available_stores: + store['github_token'] = decrypt(store['github_token'], encryption_key) - return "" + return available_stores diff --git a/tethysapp/app_store/installation_handlers.py b/tethysapp/app_store/installation_handlers.py index a8179e3..aa7b7da 100644 --- a/tethysapp/app_store/installation_handlers.py +++ b/tethysapp/app_store/installation_handlers.py @@ -1,4 +1,3 @@ -import json import os import sys import subprocess @@ -7,7 +6,6 @@ from django.core.exceptions import ObjectDoesNotExist from pathlib import Path -from conda.cli.python_api import run_command as conda_run, Commands from tethys_apps.models import CustomSetting, TethysApp from tethys_apps.utilities import (get_app_settings, link_service_to_app_setting) from tethys_cli.cli_helpers import get_manage_path @@ -16,12 +14,21 @@ from .app import AppStore as app from .begin_install import detect_app_dependencies -from .helpers import get_app_instance_from_path, logger, run_process, send_notification +from .resource_helpers import get_app_instance_from_path, check_if_app_installed +from .helpers import logger, run_process, send_notification from .model import * # noqa: F401, F403 def get_service_options(service_type): - # # List existing services + """Use the service list command line command to get available tethys services for spatial, persistent, wps, or + datasets + + Args: + service_type (str): tethys service type. Can be 'spatial', 'persistent', 'wps', or 'dataset' + + Returns: + list: List of tethys services for the specified service type + """ args = Namespace() for conf in ['spatial', 'persistent', 'wps', 'dataset']: @@ -38,11 +45,20 @@ def get_service_options(service_type): "name": service.name, "id": service.id }) + return existing_services def restart_server(data, channel_layer, app_workspace, run_collect_all=True): - + """Runs some tethys commands after an application is installed. Once finished, try to restart the server to get + the changes made + + Args: + data (dict): Dictionary of data with app information and restart type + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + app_workspace (str): Path pointing to the app workspace within the app store + run_collect_all (bool, optional): Detemines if collect all needs to be ran. Defaults to True. 
+ """ # Check if Install Running file is present and delete it workspace_directory = app_workspace.path install_running_path = os.path.join(workspace_directory, 'install_status', 'installRunning') @@ -59,7 +75,7 @@ def restart_server(data, channel_layer, app_workspace, run_collect_all=True): # Run SyncStores logger.info("Running Syncstores for app: " + data["name"]) send_notification("Running Syncstores for app: " + data["name"], channel_layer) - intermediate_process = ['python', manage_path, 'syncstores', data["name"], '-f'] + intermediate_process = ['python', manage_path, 'syncstores', data["name"], '-f'] run_process(intermediate_process) if 'runserver' in sys.argv: @@ -81,7 +97,7 @@ def restart_server(data, channel_layer, app_workspace, run_collect_all=True): intermediate_process = ['python', manage_path, 'collectstatic', '--noinput'] run_process(intermediate_process) # Run collectworkspaces command - intermediate_process = ['python', manage_path, 'collectworkspaces', '--force'] + intermediate_process = ['python', manage_path, 'collectworkspaces', '--force'] run_process(intermediate_process) try: @@ -106,44 +122,42 @@ def restart_server(data, channel_layer, app_workspace, run_collect_all=True): def continueAfterInstall(installData, channel_layer): + """If install is still running, check if the app is installed and check that the correct version is installed - # Check if app is installed - [resp, err, code] = conda_run(Commands.LIST, [installData['name'], "--json"]) - # logger.info(resp, err, code) - if code != 0: - # In here maybe we just try re running the install - logger.error("ERROR: Couldn't get list of installed apps to verify if the conda install was successfull") - else: - conda_search_result = json.loads(resp) - # Check if matching version found - for package in conda_search_result: - if package["version"] == installData['version']: - send_notification("Resuming processing...", channel_layer) - # detect_app_dependencies(installData['name'], installData['version'], channel_layer) - detect_app_dependencies(installData['name'], channel_layer) - - break - else: - send_notification( - "Server error while processing this installation. Please check your logs", channel_layer) - logger.error("ERROR: ContinueAfterInstall: Correct version is not installed of this package.") + Args: + installData (dict): User provided information about the application that should be installed + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + """ + app_data = check_if_app_installed(installData['name']) + + if app_data['isInstalled']: + if app_data["version"] == installData['version']: + send_notification("Resuming processing...", channel_layer) + detect_app_dependencies(installData['name'], channel_layer) + else: + send_notification( + "Server error while processing this installation. 
Please check your logs", channel_layer)
+            logger.error("ERROR: ContinueAfterInstall: Correct version is not installed of this package.")
 
 
 def set_custom_settings(custom_settings_data, channel_layer):
+    """Get custom settings from the app and set the actual value in tethys using the custom settings data
+
+    Args:
+        custom_settings_data (dict): Dictionary containing information about the custom settings of the app
+        channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer
+    """
     current_app = get_app_instance_from_path([custom_settings_data['app_py_path']])
 
-    if "skip" in custom_settings_data:
-        if (custom_settings_data["skip"]):
-            logger.info("Skip/NoneFound option called.")
+    if custom_settings_data.get("skip"):
+        logger.info("Skip/NoneFound option called.")
 
-            msg = "Custom Setting Configuration Skipped"
-            if "noneFound" in custom_settings_data:
-                if custom_settings_data["noneFound"]:
-                    msg = "No Custom Settings Found to process."
-            send_notification(msg, channel_layer)
-            process_settings(current_app, custom_settings_data['app_py_path'], channel_layer)
-            return
+        msg = "Custom Setting Configuration Skipped"
+        if custom_settings_data.get("noneFound"):
+            msg = "No Custom Settings Found to process."
+        send_notification(msg, channel_layer)
+        process_settings(current_app, custom_settings_data['app_py_path'], channel_layer)
+        return
 
     current_app_name = current_app.name
     custom_settings = current_app.custom_settings()
@@ -174,6 +188,14 @@ def set_custom_settings(custom_settings_data, channel_layer):
 
 
 def process_settings(app_instance, app_py_path, channel_layer):
+    """Retrieves the app settings and processes unlinked, non-custom settings. Also gets potential existing service
+    options that can be used later for linking
+
+    Args:
+        app_instance (TethysAppBase Instance): Tethys app instance for the installed application
+        app_py_path (str): Path to the app.py file for the application
+        channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer
+    """
     app_settings = get_app_settings(app_instance.package)
 
     # In the case the app isn't installed, has no settings, or it is an extension,
@@ -185,7 +207,7 @@ def process_settings(app_instance, app_py_path, channel_layer):
         services = []
 
     for setting in unlinked_settings:
-        if setting.__class__.__name__ == "CustomSetting":
+        if "CustomSetting" in setting.__class__.__name__:
             continue
         service_type = get_service_type_from_setting(setting)
         newSetting = {
@@ -209,6 +231,12 @@ def process_settings(app_instance, app_py_path, channel_layer):
 
 
 def configure_services(services_data, channel_layer):
+    """Link applications to the specified services
+
+    Args:
+        services_data (dict): Contains information about a service for linking and the application it is for
+        channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer
+    """
    try:
        link_service_to_app_setting(services_data['service_type'],
                                    services_data['service_id'],
@@ -228,6 +256,12 @@ def configure_services(services_data, channel_layer):
 
 
 def getServiceList(data, channel_layer):
+    """Get the available services for the specified setting type and send them to the channel group for linking
+
+    Args:
+        data (dict): Contains the type of setting to be used to get available services
+        channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer
+    """
     get_data_json = {
         "data": {"settingType": data['settingType'],
                  "newOptions": get_service_options(data['settingType'])},
diff --git a/tethysapp/app_store/notifications.py b/tethysapp/app_store/notifications.py
index
3f6f646..c48613b 100644 --- a/tethysapp/app_store/notifications.py +++ b/tethysapp/app_store/notifications.py @@ -4,8 +4,8 @@ from .uninstall_handlers import uninstall_app # noqa: F401 from .git_install_handlers import get_log_file # noqa: F401 from .update_handlers import update_app # noqa: F401 -from .resource_helpers import clear_cache # noqa: F401 -from .submission_handlers import process_branch, pull_git_repo_all # noqa: F401 +from .resource_helpers import clear_conda_channel_cache # noqa: F401 +from .submission_handlers import process_branch, initialize_local_repo_for_active_stores # noqa: F401 # called with threading.Thread from .begin_install import begin_install # noqa: F401 from tethys_sdk.routing import consumer @@ -24,35 +24,49 @@ ) class notificationsConsumer(AsyncWebsocketConsumer): async def connect(self): - + """Connects to the websocket consumer and adds a notifications group to the channel + """ await self.accept() await self.channel_layer.group_add("notifications", self.channel_name) logger.info(f"Added {self.channel_name} channel to notifications") - async def disconnect(self, close_code): + async def disconnect(self, _): + """Disconnects from the websocket consumer and removes a notifications group from the channel + """ await self.channel_layer.group_discard("notifications", self.channel_name) logger.info(f"Removed {self.channel_name} channel from notifications") async def install_notifications(self, event): + """Sends a notification to the notifications group channel + + Args: + event (dict): event dictionary containing the message that will be sent to the channel group + """ message = event['message'] - logger.info(f"print message {message} at {self.channel_name}") await self.send(text_data=json.dumps({'message': message, })) - logger.info(f"Got message {event} at {self.channel_name}") + logger.info(f"Sent message {message} at {self.channel_name}") async def receive(self, text_data): - logger.info(f"Received message {text_data} at {self.channel_name}") + """Receives information from the user and runs the specified functions and arguments + Args: + text_data (str): Json string of information on what function the server should run + """ + logger.info(f"Received message {text_data} at {self.channel_name}") text_data_json = json.loads(text_data) - function_name = text_data_json['type'] + function_name = text_data_json.get('type') + if not function_name: + logger.info("Can't redirect incoming message.") + return + module_name = sys.modules[__name__] args = [text_data_json['data'], self.channel_layer] - app_workspace = await sync_to_async(get_app_workspace, thread_sensitive=True)(app) - # app_workspace = get_app_workspace(app) - if "type" in text_data_json: - if text_data_json['type'] in ['begin_install', 'restart_server', 'get_log_file', 'pull_git_repo_all', - 'update_app', 'uninstall_app']: - args.append(app_workspace) - thread = threading.Thread(target=getattr(module_name, function_name), args=args) - thread.start() - else: - logger.info("Can't redirect incoming message.") + + app_workspace_functions = ['begin_install', 'restart_server', 'get_log_file', + 'initialize_local_repo_for_active_stores', 'update_app', 'uninstall_app'] + if function_name in app_workspace_functions: + app_workspace = await sync_to_async(get_app_workspace, thread_sensitive=True)(app) + args.append(app_workspace) + + thread = threading.Thread(target=getattr(module_name, function_name), args=args) + thread.start() diff --git a/tethysapp/app_store/public/js/main.js 
b/tethysapp/app_store/public/js/main.js index c4a703b..ad814ab 100755 --- a/tethysapp/app_store/public/js/main.js +++ b/tethysapp/app_store/public/js/main.js @@ -357,8 +357,8 @@ const startInstall = (appName,channel_app,label_app,current_version) => { JSON.stringify({ data: { name: appName, - channel:channel_app, - label:label_app, + channel: channel_app, + label: label_app, version: current_version }, type: `begin_install` @@ -420,7 +420,7 @@ const getRepoForAdd = () => { url: githubURL, stores: active_stores }, - type: `pull_git_repo_all` + type: `initialize_local_repo_for_active_stores` }) ) } else { diff --git a/tethysapp/app_store/public/js/utilities.js b/tethysapp/app_store/public/js/utilities.js index 39a2a4b..0865017 100644 --- a/tethysapp/app_store/public/js/utilities.js +++ b/tethysapp/app_store/public/js/utilities.js @@ -70,7 +70,7 @@ const reloadCacheRefresh = () => { notification_ws.send( JSON.stringify({ data: {}, - type: `clear_cache` + type: `clear_conda_channel_cache` }) ) // Refresh Page diff --git a/tethysapp/app_store/resource_helpers.py b/tethysapp/app_store/resource_helpers.py index e1ad3f5..994115e 100644 --- a/tethysapp/app_store/resource_helpers.py +++ b/tethysapp/app_store/resource_helpers.py @@ -3,8 +3,11 @@ import ast import re import semver +from tethys_apps.base import TethysAppBase from tethys_portal import __version__ as tethys_version import copy +import pkgutil +import inspect import os import json @@ -12,83 +15,188 @@ import shutil from pkg_resources import parse_version import yaml -from .utilities import get_available_stores_values -from .helpers import check_if_app_installed, add_if_exists_keys, logger +from .helpers import logger, get_conda_stores from conda.cli.python_api import run_command as conda_run, Commands -CACHE_KEY = "" +def clear_conda_channel_cache(data, channel_layer): + """Clears Django cache for all the conda stores -def clear_cache(data, channel_layer): - available_stores_data_dict = get_available_stores_values("all") + Args: + data (dict): Data to use for clearing cache + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + """ + available_stores_data_dict = get_conda_stores() for store in available_stores_data_dict: store_name = store['conda_channel'] for conda_label in store['conda_labels']: cache_key = f'{store_name}_{conda_label}_app_resources' cache.delete(cache_key) - return +def create_pre_multiple_stores_labels_obj(app_workspace, refresh=False, conda_channels='all'): + """Creates a dictionary of resources based on conda channels and conda labels -def create_pre_multiple_stores_labels_obj(app_workspace, refresh=False, stores='all'): - available_stores_data_dict = get_available_stores_values(stores) + Args: + app_workspace (str): Path pointing to the app workspace within the app store + refresh (bool, optional): Indicates whether resources should be refreshed or use a cache. Defaults to False. + conda_channels (str/list, optional): Name of the conda channel to use for app discovery. Defaults to 'all'. + + Returns: + dict: A reformatted app resource dictionary based solely on the conda channel See the example below. 
+ + { + 'conda_channel1': { + 'conda_label1': { + 'availableApps': {'app1_name': }, + 'installedApps': {'app1_name': }, + 'incompatibleApps': {} + }, + 'conda_label2': { + 'availableApps': {'app2_name': }, + 'installedApps': {}, + 'incompatibleApps': {'app3_name': } + } + } + } + """ + available_stores_data_dict = get_conda_stores(conda_channels=conda_channels) object_stores = {} - # fetch resources for each store and label for store in available_stores_data_dict: - store_name = store['conda_channel'] - object_stores[store_name] = {} + conda_channel = store['conda_channel'] + object_stores[conda_channel] = {} for conda_label in store['conda_labels']: - cache_key = f'{store_name}_{conda_label}_app_resources' - object_stores[store_name][conda_label] = get_resources_single_store(app_workspace, refresh, - store_name, conda_label, - cache_key=cache_key) + cache_key = f'{conda_channel}_{conda_label}_app_resources' + object_stores[conda_channel][conda_label] = get_resources_single_store(app_workspace, refresh, + conda_channel, conda_label, + cache_key=cache_key) return object_stores def get_new_stores_reformated_by_labels(object_stores): + """Merge all app resources in a given conda channel into channel based dictionaries of availableApps, installedApps, + and incompatibleApps. + + Args: + object_stores (dict): A dictionary of app resources based on conda channel and then conda label + + Returns: + dict: A reformatted app resource dictionary based solely on the conda channel See the example below. + + { + 'conda_channel1': { + 'availableApps': {'app1_name': , 'app2_name': }, + 'installedApps': {'app1_name': }, + 'incompatibleApps': {'app3_name': } + }, + 'conda_channel2': { + 'availableApps': {'app4_name': , + 'installedApps': {}, + 'incompatibleApps': {'app5_name': } + } + } + """ new_store_reformatted = {} - for store in object_stores: - new_store_reformatted[store] = {} - list_labels_store = list(object_stores[store].keys()) - list_type_apps = list(object_stores[store][list_labels_store[0]].keys()) + for conda_channel in object_stores: + new_store_reformatted[conda_channel] = {} + list_labels_store = list(object_stores[conda_channel].keys()) + list_type_apps = list(object_stores[conda_channel][list_labels_store[0]].keys()) for type_app in list_type_apps: if type_app != 'tethysVersion': - new_store_reformatted[store][type_app] = merge_labels_single_store(object_stores[store], store, type_app) # noqa: E501 + new_store_reformatted[conda_channel][type_app] = merge_labels_single_store( + object_stores[conda_channel], conda_channel, type_app) + return new_store_reformatted -def get_stores_reformatted(app_workspace, refresh=False, stores='all'): +def get_stores_reformatted(app_workspace, refresh=False, conda_channels='all'): + """Retrieve a dictionary of app resources and metadata from the conda channels. Reformat the dictionary to + provide a list of available apps, installed apps, and incompatible apps + + Args: + app_workspace (str): Path pointing to the app workspace within the app store + refresh (bool, optional): Indicates whether resources should be refreshed or use a cache. Defaults to False. + conda_channels (str/list, optional): Name of the conda channel to use for app discovery. Defaults to 'all'. 
- object_stores_raw = create_pre_multiple_stores_labels_obj(app_workspace, refresh, stores) + Returns: + dict: list of available apps, installed apps, and incompatible apps across all specified channels + """ + object_stores_raw = create_pre_multiple_stores_labels_obj(app_workspace, refresh, conda_channels) object_stores_formatted_by_label = get_new_stores_reformated_by_labels(object_stores_raw) object_stores_formatted_by_channel = get_stores_reformated_by_channel(object_stores_formatted_by_label) - list_stores_formatted_by_channel = reduce_level_obj(object_stores_formatted_by_channel) + list_stores_formatted_by_channel = { + 'availableApps': [metadata for _, metadata in object_stores_formatted_by_channel['availableApps'].items()], + 'installedApps': [metadata for _, metadata in object_stores_formatted_by_channel['installedApps'].items()], + 'incompatibleApps': [metadata for _, metadata in object_stores_formatted_by_channel['incompatibleApps'].items()] + } return list_stores_formatted_by_channel -def object_to_list(obj_con): - new_list = [] - for key in obj_con: - new_list.append(obj_con[key]) - return new_list - +def get_stores_reformated_by_channel(stores): + """Reformats a dictionary of conda channel based resources into a status based dictionary -def reduce_level_obj(complex_obj): - for key in complex_obj: - if type(complex_obj[key]) is dict: - complex_obj[key] = object_to_list(complex_obj[key]) - return complex_obj + Args: + stores (dict): Dictionary of apps based on conda channels + Returns: + dict: Dictionary of apps based on status, i.e. availableApps, installedApps, and incompatibleApps. See the + example below. -def get_stores_reformated_by_channel(stores): + { + 'availableApps': {'app1_name': , 'app2_name': }, + 'installedApps': {'app1_name': }, + 'incompatibleApps': {'app3_name': } + } + """ app_channel_obj = get_app_channel_for_stores(stores) merged_channels_app = merge_channels_of_apps(app_channel_obj, stores) return merged_channels_app def merge_channels_of_apps(app_channel_obj, stores): + """Merge resource information for apps that have the same name across conda channels + + Args: + app_channel_obj (dict): Dictionary with information about apps and in what conda channels they can be found. + See get_app_channel_for_stores return information + stores (dict): Dictionary of app information based on conda channels + + Returns: + dict: Dictionary of merged apps across multiple channels based on status, i.e. availableApps, installedApps, and + incompatibleApps. See the example below. + + { + 'availableApps': {'app1_name': , 'app2_name': }, + 'installedApps': {'app1_name': }, + 'incompatibleApps': {'app3_name': } + } + + app1_metadata_dict would contain information about the app across multiple channels. See the example resource + metadata below. 
+ + { + 'name': 'app_name, + 'installed': {'conda_channel1_name': {'main': False}, 'conda_channel2_name': {'dev': False}}, + 'installedVersion': {'conda_channel1_name': {'main': "1.0"}, 'conda_channel2_name': {'dev': "1.0"}}, + 'latestVersion': {'conda_channel1_name': {'main': "1.0"}, 'conda_channel2_name': {'dev': "1.0"}}, + 'versions': {'conda_channel1_name': {'main': []}, 'conda_channel2_name': {'dev': []}}, + 'versionURLs': {'conda_channel1_name': {'main': []}, 'conda_channel2_name': {'dev': []}}, + 'channels_and_labels': {'conda_channel1_name': {'main': []}, 'conda_channel2_name': {'dev': []}}, + 'timestamp': {'conda_channel1_name': {'main': "timestamp"}, 'conda_channel2_name': {'dev': "timestamp"}}, + 'compatibility': {'conda_channel1_name': {'main': {}}, 'conda_channel2_name': {'dev': {}}}, + 'license': {'conda_channel1_name': {'main': None}, 'conda_channel2_name': {'dev': None}}, + 'licenses': {'conda_channel1_name': {'main': []}, 'conda_channel2_name': {'dev': []}}, + 'author': {'conda_channel1_name': {'main': 'author'}, 'conda_channel2_name': {'dev': 'author'}}, + 'description': {'conda_channel1_name': {'main': 'description'}, + 'conda_channel2_name': {'dev': 'description'}}, + 'author_email': {'conda_channel1_name': {'main': 'author_email'}, + 'conda_channel2_name': {'dev': 'author_email'}}, + 'keywords': {'conda_channel1_name': {'main': 'keywords'}, 'conda_channel2_name': {'dev': 'keywords'}}, + 'dev_url': {'conda_channel1_name': {'main': 'dev_url'}, 'conda_channel2_name': {'dev': 'dev_url'}} + } + """ merged_channels_app = {} for channel in stores: for type_app in stores[channel]: @@ -112,6 +220,21 @@ def merge_channels_of_apps(app_channel_obj, stores): def get_app_channel_for_stores(stores): + """Parses a dictionary of resources based on conda channels and provides a summary of apps shared across channels + + Args: + stores (dict): Dictionary of apps based on conda channels + + Returns: + dict: Summary of apps and channels based on status, i.e. availableApps, installedApps, and incompatibleApps. 
+ See the example below + + { + 'availableApps': {'app1_name': ['conda_channel1', 'conda_channel2'], 'app2_name': ['conda_channel1']}, + 'installedApps': {'app1_name': ['conda_channel1']}, + 'incompatibleApps': {'app3_name': ['conda_channel1', 'conda_channel2']} + } + """ app_channel_obj = {} for channel in stores: for type_apps in stores[channel]: @@ -127,41 +250,21 @@ def get_app_channel_for_stores(stores): return app_channel_obj -def get_app_level_for_store(store, type_apps): - - apps_levels = {} - levels = list(store.keys()) - for level in levels: - apps = list(store[level][type_apps].keys()) - for app in apps: - if app in apps_levels: - apps_levels[app].append(level) - else: - apps_levels[app] = [] - apps_levels[app].append(level) - return apps_levels - +def get_app_label_obj_for_store(store, type_apps): + """Parse the app resources to get a dictionary of all apps and any labels that the app uses -def merge_levels_for_app_in_store(apps_channels, store, channel, type_apps): - new_store_label_obj = {} - for app in apps_channels: - if app not in new_store_label_obj: - new_store_label_obj[app] = {} - for label in store: - if label not in apps_channels[app]: - continue - for key in store[label][type_apps][app]: - if key != 'name': - if key not in new_store_label_obj[app]: - new_store_label_obj[app][key] = { - channel: {} - } - for label_app in store[label][type_apps][app][key][channel]: - new_store_label_obj[app][key][channel][label_app] = store[label][type_apps][app][key][channel][label_app] # noqa: E501 - return new_store_label_obj + Args: + store (dict): Apps that are found from the conda channel + type_apps (str): availableApps, installedApps, or incompatibleApps + Returns: + dict: Dictionary containing all the apps and any labels that the app can be found in. 
See the example below. + + { + 'app_name': ['main'], + 'app2_name': ['main', 'dev'] + } + """ apps_label = {} labels = list(store.keys()) for label in labels: @@ -172,10 +275,22 @@ def get_app_label_obj_for_store(store, type_apps): else: apps_label[app] = [] apps_label[app].append(label) + return apps_label -def merge_labels_for_app_in_store(apps_label, store, channel, type_apps): +def merge_labels_for_app_in_store(apps_label, store, conda_channel, type_apps): + """Merge labels in the app resource metadata + + Args: + apps_label (dict): Dictionary containing all the apps and any labels that the app can be found in + store (dict): Apps that are found from the conda channel + conda_channel (str): Name of the conda channel to use for app discovery + type_apps (str): availableApps, installedApps, or incompatibleApps + + Returns: + dict: Merged app resource information for each label in the conda channel + """ new_store_label_obj = {} for app in apps_label: if app not in new_store_label_obj: @@ -183,54 +298,79 @@ for label in store: if label not in apps_label[app]: continue - for key in store[label][type_apps][app]: + for key in store[label][type_apps].get(app, []): if key != 'name': if key not in new_store_label_obj[app]: new_store_label_obj[app][key] = { - channel: {} + conda_channel: {} } - for label_app in store[label][type_apps][app][key][channel]: - new_store_label_obj[app][key][channel][label_app] = store[label][type_apps][app][key][channel][label_app] # noqa: E501 + for label_app in store[label][type_apps][app][key][conda_channel]: + new_store_label_obj[app][key][conda_channel][label_app] = store[label][type_apps][app][key][conda_channel][label_app] # noqa: E501 else: new_store_label_obj[app][key] = store[label][type_apps][app][key] + return new_store_label_obj def merge_labels_single_store(store, channel, type_apps): + """Merges all resources from all the labels for a specific conda channel + + Args: + store (dict): Apps that are found from the conda channel + channel (str): Name of the conda channel to use for app discovery + type_apps (str): availableApps, installedApps, or incompatibleApps + + Returns: + dict: Merged resource dictionary for all apps within the conda channel + """ apps_labels = get_app_label_obj_for_store(store, type_apps) merged_label_store = merge_labels_for_app_in_store(apps_labels, store, channel, type_apps) + return merged_label_store -def get_resources_single_store(app_workspace, require_refresh, conda_package, conda_label, cache_key): +def get_resources_single_store(app_workspace, require_refresh, conda_channel, conda_label, cache_key): + """Get all the resources for a specific conda channel and conda label. Once resources have been retrieved, check + each resource if it is installed. Once that is checked, loop through each version in the metadata. For each version + we are checking the compatibility map to see if the compatible tethys version will work with this portal setup.
+ + Args: + app_workspace (str): Path pointing to the app workspace within the app store + require_refresh (bool): Indicates whether resources should be refreshed or use a cache + conda_channel (str): Name of the conda channel to use for app discovery + conda_label (str): Name of the conda label to use for app discovery + cache_key (str): Key to be used for caching strategy + + Returns: + Dict: A dictionary that contains resource info for availableApps, installedApps, incompatibleApps, and + current tethysVersion + """ installed_apps = {} available_apps = {} incompatible_apps = {} - all_resources = fetch_resources(app_workspace, require_refresh, conda_package, conda_label, cache_key) + all_resources = fetch_resources(app_workspace, conda_channel, conda_label=conda_label, cache_key=cache_key, + refresh=require_refresh) tethys_version_regex = re.search(r'([\d.]+[\d])', tethys_version).group(1) for resource in all_resources: - if resource["installed"][conda_package][conda_label]: + if resource["installed"][conda_channel][conda_label]: installed_apps[resource['name']] = resource - tethys_version_regex = re.search(r'([\d.]+[\d])', tethys_version).group(1) - add_compatible = False add_incompatible = False new_compatible_app = copy.deepcopy(resource) - new_compatible_app['versions'][conda_package][conda_label] = [] + new_compatible_app['versions'][conda_channel][conda_label] = [] new_incompatible_app = copy.deepcopy(new_compatible_app) - for version in resource['versions'][conda_package][conda_label]: + for version in resource['versions'][conda_channel][conda_label]: # Assume if not found, that it is compatible with Tethys Platform 3.4.4 compatible_tethys_version = "<=3.4.4" - if version in resource['compatibility'][conda_package][conda_label]: - compatible_tethys_version = resource['compatibility'][conda_package][conda_label][version] + if version in resource['compatibility'][conda_channel][conda_label]: + compatible_tethys_version = resource['compatibility'][conda_channel][conda_label][version] if semver.match(tethys_version_regex, compatible_tethys_version): add_compatible = True - new_compatible_app['versions'][conda_package][conda_label].append(version) + new_compatible_app['versions'][conda_channel][conda_label].append(version) else: add_incompatible = True - new_incompatible_app['versions'][conda_package][conda_label].append(version) + new_incompatible_app['versions'][conda_channel][conda_label].append(version) if add_compatible: available_apps[resource['name']] = new_compatible_app @@ -247,140 +387,191 @@ def get_resources_single_store(app_workspace, require_refresh, conda_package, co return return_object -def fetch_resources(app_workspace, refresh=False, conda_package="tethysapp", conda_label="main", cache_key=None): +def check_if_app_installed(app_name): + """Check if the app is installed with conda. 
If so, return additional information about the resource + + Args: + app_name (str): name of the potentially installed app + + Returns: + dict: Dictionary containing additional information about the application + """ + return_obj = {'isInstalled': False} + [resp, err, code] = conda_run(Commands.LIST, ["-f", "--json", app_name]) + if code != 0: + # In here maybe we just try re running the install + logger.error( + "ERROR: Couldn't get list of installed apps to verify if the conda install was successful") + else: + conda_search_result = json.loads(resp) + if len(conda_search_result) > 0: + return_obj['isInstalled'] = True + return_obj['channel'] = conda_search_result[0]["channel"] + return_obj['version'] = conda_search_result[0]["version"] + return return_obj + + return return_obj - CHANNEL_NAME = conda_package +def fetch_resources(app_workspace, conda_channel, conda_label="main", cache_key=None, refresh=False): + """Perform a conda search with the given channel and label to get all the available resources for potential + installation + + Args: + app_workspace (str): Path pointing to the app workspace within the app store + conda_channel (str): Name of the conda channel to use for app discovery + conda_label (str, optional): Name of the conda label to use for app discovery. Defaults to "main". + cache_key (str, optional): Key to be used for caching strategy. Defaults to None. + refresh (bool, optional): Indicates whether resources should be refreshed or use a cache. Defaults to False. + + Raises: + Exception: Error searching for apps in the conda channel + + Returns: + dict: Dictionary representing all the conda channel applications and metadata + """ + if not cache_key: + cache_key = conda_channel + + conda_search_channel = conda_channel if conda_label != 'main': - CHANNEL_NAME = f'{conda_package}/label/{conda_label}' + conda_search_channel = f'{conda_channel}/label/{conda_label}' - CACHE_KEY = cache_key - refresh = True + cached_resources = cache.get(cache_key) - if (cache.get(CACHE_KEY) is None) or refresh: + if not cached_resources or refresh: # Look for packages: logger.info("Refreshing list of apps cache") - [resp, err, code] = conda_run(Commands.SEARCH, ["-c", CHANNEL_NAME, "--override-channels", "-i", "--json"]) + [resp, err, code] = conda_run(Commands.SEARCH, + ["-c", conda_search_channel, "--override-channels", "-i", "--json"]) if code != 0: # In here maybe we just try re running the install - raise Exception(f"ERROR: Couldn't search packages in the {CHANNEL_NAME} channel") + raise Exception(f"ERROR: Couldn't search packages in the {conda_search_channel} channel") conda_search_result = json.loads(resp) resource_metadata = [] logger.info("Total Apps Found:" + str(len(conda_search_result))) - if 'error' in conda_search_result and 'The following packages are not available from current channels' in conda_search_result['error']: # noqa: E501 - logger.info(f'no packages found with the label {conda_label} in channel {CHANNEL_NAME}') + if 'The following packages are not available from current channels' in conda_search_result.get('error', ""): + logger.info(f'no packages found with the label {conda_label} in channel {conda_channel}') return resource_metadata for app_package in conda_search_result: - installed_version = check_if_app_installed(app_package) newPackage = { 'name': app_package, 'installed': { - conda_package: { + conda_channel: { conda_label: False } }, 'versions': { - conda_package: { + conda_channel: { conda_label: [] } }, 'versionURLs': { - conda_package: { + conda_channel: { 
conda_label: [] } }, 'channels_and_labels': { - conda_package: { + conda_channel: { conda_label: [] } }, 'timestamp': { - conda_package: { + conda_channel: { conda_label: conda_search_result[app_package][-1]["timestamp"] } }, 'compatibility': { - conda_package: { + conda_channel: { conda_label: {} } }, 'license': { - conda_package: { + conda_channel: { conda_label: None } }, 'licenses': { - conda_package: { + conda_channel: { conda_label: [] } } } if "license" in conda_search_result[app_package][-1]: - newPackage["license"][conda_package][conda_label] = conda_search_result[app_package][-1]["license"] + newPackage["license"][conda_channel][conda_label] = conda_search_result[app_package][-1]["license"] if installed_version['isInstalled']: - if CHANNEL_NAME == installed_version['channel']: - newPackage["installed"][conda_package][conda_label] = True - newPackage["installedVersion"] = { - conda_package: {} - } - newPackage["installedVersion"][conda_package][conda_label] = installed_version['version'] + if conda_channel == installed_version['channel']: + newPackage["installed"][conda_channel][conda_label] = True + newPackage["installedVersion"] = {conda_channel: {}} + newPackage["installedVersion"][conda_channel][conda_label] = installed_version['version'] + for conda_version in conda_search_result[app_package]: - newPackage["versions"][conda_package][conda_label].append(conda_version.get('version')) - newPackage["versionURLs"][conda_package][conda_label].append(conda_version.get('url')) - newPackage["licenses"][conda_package][conda_label].append(conda_version.get('license')) + newPackage["versions"][conda_channel][conda_label].append(conda_version.get('version')) + newPackage["versionURLs"][conda_channel][conda_label].append(conda_version.get('url')) + newPackage["licenses"][conda_channel][conda_label].append(conda_version.get('license')) if "license" in conda_version: try: license_json = json.loads(conda_version['license'].replace("', '", '", "') .replace("': '", '": "').replace("'}", '"}').replace("{'", '{"')) if 'tethys_version' in license_json: - newPackage["compatibility"][conda_package][conda_label][conda_version['version']] = license_json.get('tethys_version') # noqa: E501 + newPackage["compatibility"][conda_channel][conda_label][conda_version['version']] = license_json.get('tethys_version') # noqa: E501 except (ValueError, TypeError): pass resource_metadata.append(newPackage) - resource_metadata = process_resources(resource_metadata, app_workspace, conda_package, conda_label) + resource_metadata = process_resources(resource_metadata, app_workspace, conda_channel, conda_label) - cache.set(CACHE_KEY, resource_metadata) + cache.set(cache_key, resource_metadata) return resource_metadata else: logger.info("Found in cache") - return cache.get(CACHE_KEY) + return cached_resources def process_resources(resources, app_workspace, conda_channel, conda_label): + """Process resources based on the metadata given. Check compatibility with the current app store, add additional + metadata to the resources for licenses, versions, and urls. If the licensing information can't be found in the conda + metadata then use the versionurl to download a file and try to extract the information + + Args: + resources (list): List of resources to process + app_workspace (str): Path pointing to the app workspace within the app store + conda_channel (str): Name of the conda channel to use for app discovery + conda_label (str, optional): Name of the conda label to use for app discovery. 
+ + Returns: + (list): List of updated resources + """ for app in resources: workspace_folder = os.path.join(app_workspace.path, 'apps') if not os.path.exists(workspace_folder): os.makedirs(workspace_folder) tethys_version_regex = re.search(r'([\d.]+[\d])', tethys_version).group(1) - # Set Latest Version - app["latestVersion"] = { - conda_channel: {} - } + app["latestVersion"] = {conda_channel: {}} - app["latestVersion"][conda_channel][conda_label] = app.get("versions").get(conda_channel).get(conda_label)[-1] + app["latestVersion"][conda_channel][conda_label] = app["versions"][conda_channel][conda_label][-1] + license = app["license"][conda_channel][conda_label] - # Check if latest version is compatible. If not, append an asterisk - license = app.get("license").get(f"{conda_channel}").get(f"{conda_label}") comp_dict = None compatible = None try: comp_dict = ast.literal_eval(license) except Exception: pass + if comp_dict and 'tethys_version' in comp_dict: compatible = comp_dict['tethys_version'] @@ -390,36 +581,21 @@ def process_resources(resources, app_workspace, conda_channel, conda_label): if not semver.match(tethys_version_regex, compatible): app["latestVersion"][conda_channel][conda_label] = app["latestVersion"][conda_channel][conda_label] + "*" - if (app['installed']): + if app['installed'][conda_channel][conda_label]: + app["updateAvailable"] = {conda_channel: {conda_label: False}} if 'installedVersion' in app: latestVersion = app["latestVersion"][conda_channel][conda_label] installedVersion = app["installedVersion"][conda_channel][conda_label] - if (latestVersion.find("*") is False): + if "*" not in latestVersion: if parse_version(latestVersion) > parse_version(installedVersion): - app["updateAvailable"] = { - conda_channel: { - conda_label: True - } - } - else: - app["updateAvailable"] = { - conda_channel: { - conda_label: False - } - } - else: - app["updateAvailable"] = { - conda_channel: { - conda_label: False - } - } - latest_version_url = app.get("versionURLs").get(f"{conda_channel}").get(f"{conda_label}")[-1] + app["updateAvailable"] = {conda_channel: {conda_label: True}} + + latest_version_url = app.get("versionURLs")[conda_channel][conda_label][-1] file_name = latest_version_url.split('/') folder_name = app.get("name") # Check for metadata in the Search Description # That path will work for newly submitted apps with warehouse ver>0.25 - try: if "license" not in app or app['license'][conda_channel][conda_label] is None: raise ValueError @@ -428,24 +604,13 @@ def process_resources(resources, app_workspace, conda_channel, conda_label): .replace("'}", '"}').replace("{'", '{"')) # create new one - app = add_if_exists_keys(license_metadata, app, [ + app = add_keys_to_app_metadata(license_metadata, app, [ 'author', 'description', 'license', 'author_email', 'keywords'], conda_channel, conda_label) if "url" in license_metadata: - app['dev_url'] = { - conda_channel: { - conda_label: '' - } - } - app['dev_url'][conda_channel][conda_label] = license_metadata["url"] - + app['dev_url'] = {conda_channel: {conda_label: license_metadata["url"]}} else: - app['dev_url'] = { - conda_channel: { - conda_label: '' - } - } - app['dev_url'][conda_channel][conda_label] = '' + app['dev_url'] = {conda_channel: {conda_label: ''}} except (ValueError, TypeError): # There wasn't json found in license. 
Get Metadata from downloading the file @@ -464,11 +629,7 @@ def process_resources(resources, app_workspace, conda_channel, conda_label): shutil.unpack_archive(download_path, output_path) - app["filepath"] = { - conda_channel: { - conda_label: output_path - } - } + app["filepath"] = {conda_channel: {conda_label: output_path}} # Get Meta.Yaml for this file try: @@ -478,18 +639,18 @@ def process_resources(resources, app_workspace, conda_channel, conda_label): meta_yaml = yaml.safe_load(f) # Add metadata to the resources object. - attr_about = ['author', 'description', 'dev_url', 'license'] + attr_about = ['author', 'description', 'license'] attr_extra = ['author_email', 'keywords'] - app = add_if_exists_keys(meta_yaml.get('about'), app, attr_about, conda_channel, conda_label) - app = add_if_exists_keys(meta_yaml.get('extra'), app, attr_extra, conda_channel, conda_label) + app = add_keys_to_app_metadata(meta_yaml.get('about'), app, attr_about, conda_channel, + conda_label) + app = add_keys_to_app_metadata(meta_yaml.get('extra'), app, attr_extra, conda_channel, + conda_label) + if 'dev_url' not in app: - app['dev_url'] = { - conda_channel: { - conda_label: '' - } - } - app['dev_url'][conda_channel][conda_label] = '' + app['dev_url'] = {conda_channel: {conda_label: ''}} + else: + logger.info("No yaml file available to retrieve metadata") except Exception as e: logger.info("Error happened while downloading package for metadata") logger.error(e) @@ -497,8 +658,19 @@ def process_resources(resources, app_workspace, conda_channel, conda_label): return resources -def get_resource(resource_name, channel, label, app_workspace): - all_resources = fetch_resources(app_workspace=app_workspace, conda_package=channel, conda_label=label) +def get_resource(resource_name, conda_channel, conda_label, app_workspace): + """Get a specific resource based on channel, label, and app name + + Args: + resource_name (str): Name of the app resource + conda_channel (str): Name of the conda channel to use for app discovery + conda_label (str): Name of the conda label to use for app discovery + app_workspace (str): Path pointing to the app workspace within the app store + + Returns: + dict: Dictionary representing the desired resource and metadata + """ + all_resources = fetch_resources(app_workspace, conda_channel, conda_label=conda_label) resource = [x for x in all_resources if x['name'] == resource_name] @@ -506,3 +678,61 @@ def get_resource(resource_name, channel, label, app_workspace): return resource[0] else: return None + + +def add_keys_to_app_metadata(additional_metadata, app_metadata, keys_to_add, conda_channel, conda_label): + """Update an apps metadata (based on conda channels and labels) from a dictionary of additional metadata and a list + of keys to add + + Args: + additional_metadata (dict): Dictionary contianing addition information to add to the app metadata + app_metadata (dict): Dictionary representing an application and all the metadata needed to install it + keys_to_add (list): List of keys to add to the app_metadata from the additional metadata + conda_channel (str): Name of the conda channel to use for app discovery + conda_label (str): Name of the conda label to use for app discovery + + Returns: + dict: A new app metadata dictionary with the added keys and information + """ + if not additional_metadata: + return app_metadata + for key in keys_to_add: + if key not in app_metadata: + app_metadata[key] = {} + if conda_channel not in app_metadata[key]: + app_metadata[key][conda_channel] = {} + if 
conda_label not in app_metadata[key][conda_channel] and key in additional_metadata: + app_metadata[key][conda_channel][conda_label] = additional_metadata[key] + + return app_metadata + + +def get_app_instance_from_path(paths): + """Dynamically import and instantiate an app instance for a tethysapp based on the python path to the application + + Args: + paths (str): Python path to the installed application + + Returns: + Instantiated TethysApp: A tethyspp instance for the installed application + """ + app_instance = None + for _, modname, ispkg in pkgutil.iter_modules(paths): + if ispkg: + app_module = __import__(f'tethysapp.{modname}.app', fromlist=['']) + for name, obj in inspect.getmembers(app_module): + # Retrieve the members of the app_module and iterate through + # them to find the the class that inherits from AppBase. + try: + # issubclass() will fail if obj is not a class + if (issubclass(obj, TethysAppBase)) and (obj is not TethysAppBase): + # Assign a handle to the class + AppClass = getattr(app_module, name) + # Instantiate app + app_instance = AppClass() + app_instance.sync_with_tethys_db() + # We found the app class so we're done + break + except TypeError: + continue + return app_instance diff --git a/tethysapp/app_store/scaffold_handler.py b/tethysapp/app_store/scaffold_handler.py index 0a2d1fa..83bf4a5 100644 --- a/tethysapp/app_store/scaffold_handler.py +++ b/tethysapp/app_store/scaffold_handler.py @@ -36,7 +36,7 @@ def install_app(app_path): intermediate_process = ['python', manage_path, 'collectstatic', '--noinput'] run_process(intermediate_process) # Run collectworkspaces command - intermediate_process = ['python', manage_path, 'collectworkspaces', '--force'] + intermediate_process = ['python', manage_path, 'collectworkspaces', '--force'] run_process(intermediate_process) diff --git a/tethysapp/app_store/scripts/conda_install.sh b/tethysapp/app_store/scripts/mamba_install.sh similarity index 100% rename from tethysapp/app_store/scripts/conda_install.sh rename to tethysapp/app_store/scripts/mamba_install.sh diff --git a/tethysapp/app_store/submission_handlers.py b/tethysapp/app_store/submission_handlers.py index 9f09b79..fc87cd3 100644 --- a/tethysapp/app_store/submission_handlers.py +++ b/tethysapp/app_store/submission_handlers.py @@ -7,27 +7,32 @@ import stat import json import time -import requests -import ast -from requests.exceptions import HTTPError +import re +from github.GithubException import UnknownObjectException from pathlib import Path from .helpers import logger, send_notification, apply_template, parse_setup_py -LOCAL_DEBUG_MODE = False CHANNEL_NAME = 'tethysapp' -def update_dependencies(github_dir, recipe_path, source_files_path, keywords=None, email=""): - if not keywords: - keywords = [] +def update_anaconda_dependencies(github_dir, recipe_path, source_files_path, keywords=None, email=""): + """Updates the anaconda package dependencies for the submitted github application. This file will be used in the + github actions to build the anaconda package for the application. + + Args: + github_dir (str): The directory path that contains the cloned github repository + recipe_path (str): The directory path that contains necessary files for building the anaconda package + source_files_path (str): The directory path that contains additional templates needed for anaconda recipes + keywords (list, optional): Keywords in the extra section of the anaconda packages meta yaml. Defaults to None. 
+ email (str, optional): Author email in the extra section of the anaconda packages meta yaml. Defaults to "". + """ install_yml = os.path.join(github_dir, 'install.yml') + app_files_dir = os.path.join(github_dir, 'tethysapp') meta_yaml = os.path.join(source_files_path, 'meta_reqs.yaml') meta_extras = os.path.join(source_files_path, 'meta_extras.yaml') - app_files_dir = os.path.join(recipe_path, '../tethysapp') - app_folders = next(os.walk(app_files_dir))[1] app_scripts_path = os.path.join(app_files_dir, app_folders[0], 'scripts') @@ -42,13 +47,15 @@ def update_dependencies(github_dir, recipe_path, source_files_path, keywords=Non with open(meta_extras) as f: meta_extras_file = yaml.safe_load(f) + if not keywords: + keywords = [] + meta_extras_file['extra']['author_email'] = email meta_extras_file['extra']['keywords'] = keywords meta_yaml_file['requirements']['run'] = install_yml_file['requirements']['conda']['packages'] - # Check if any pip dependencies are present - + # Dynamically create an bash install script for pip install dependency if ("pip" in install_yml_file['requirements']): pip_deps = install_yml_file['requirements']["pip"] if pip_deps is not None: @@ -61,131 +68,91 @@ def update_dependencies(github_dir, recipe_path, source_files_path, keywords=Non st = os.stat(pre_link) os.chmod(pre_link, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) + # Add additional package metadata to meta.yml for anaconda packaging with open(os.path.join(recipe_path, 'meta.yaml'), 'a') as f: yaml.safe_dump(meta_extras_file, f, default_flow_style=False) f.write("\n") yaml.safe_dump(meta_yaml_file, f, default_flow_style=False) -def repo_exists(repo_name, organization): - - try: - organization.get_repo(repo_name) - logger.info("Repo Exists. Will have to delete") - return True - except Exception: - logger.info("Repo doesn't exist") - return False - - -# def validate_git_repo(install_data, channel_layer): - -# github_url = install_data.get("url") -# repo_name = github_url.split("/")[-1].replace(".git", "") -# user = github_url.split("/")[-2] - -# # Here check if it a fork :P -# get_data_json = validation_is_a_fork(user, repo_name, json_response) -# if bool(get_data_json): -# send_notification(get_data_json, channel_layer) - -# # validate if it is a valid setup.py -# branch = "main" -# get_data_json = validation_is_setup_complete(user, repo_name, branch, json_response) -# if bool(get_data_json): -# send_notification(get_data_json, channel_layer) - -# # get the app_package_name and version from the setup.py -# app_package_name, version_setup = get_app_name_and_version(user, repo_name, branch) - -# json_response = {} -# mssge_string = '' -# json_response['submission_github_url'] = github_url - -# conda_search_result = subprocess.run(['conda', 'search', "-c", CHANNEL_NAME, "--override-channels", "-i", "--json"], # noqa: E501 -# stdout=subprocess.PIPE) - -# conda_search_result = json.loads(conda_search_result.stdout) -# json_response["isNewApplication"] = True - -# for conda_package in conda_search_result: -# if app_package_name in conda_package: -# json_response["isNewApplication"] = False -# if "license" in conda_search_result[conda_package][-1]: - -# conda_search_result_package = conda_search_result[conda_package] - -# # Check if it is a new version -# get_data_json = validation_is_new_version(conda_search_result_package, version_setup, json_response) +def get_github_repo(repo_name, organization): + """Retrieve the github repository. 
If the repository exists, use the existing repository, otherwise create a new one -# if bool(get_data_json): -# send_notification(get_data_json, channel_layer) + Args: + repo_name (str): Name of the github repository to check + organization (github.Github.Organization): github organization that hosts the repositories -# # Check if if it the app_package name is already in the conda channel. -# # check if the submission url is the same as the dev url -# # check if the app_package name is the same as an already submitted application. -# # This mean they are different apps with the same package name -# get_data_json = validation_is_new_app(github_url, app_package_name, json_response, channel_layer) -# send_notification(get_data_json, channel_layer) + Returns: + tethysapp_repo (github.Github.Repository): returns a repository object, whether an existing repo or a newly + created one + """ + try: + tethysapp_repo = organization.get_repo(repo_name) + logger.info(f"{organization.login}/{repo_name} Exists. Will have to delete") + return tethysapp_repo -# json_response['next_move'] = True -# mssge_string = f'

The application {repo_name} is a new application, the version {version_setup} will be ' \ -# 'submitted to the app store' -# get_data_json = { -# "data": { -# "mssge_string": mssge_string, -# "metadata": json_response -# }, -# "jsHelperFunction": "validationResults", -# "helper": "addModalHelper" -# } -# send_notification(get_data_json, channel_layer) + except UnknownObjectException as e: + logger.info(f"Received a {e.status} error when checking {organization.login}/{repo_name}. Error: {e.message}") + logger.info(f"Creating a new repository at {organization.login}/{repo_name}") + tethysapp_repo = organization.create_repo( + repo_name, + allow_rebase_merge=True, + auto_init=False, + description="For Tethys App Store Purposes", + has_issues=False, + has_projects=False, + has_wiki=False, + private=False, + ) + return tethysapp_repo -def pull_git_repo_all(install_data, channel_layer, app_workspace): +def initialize_local_repo_for_active_stores(install_data, channel_layer, app_workspace): + """Loop through all stores and initialize a local github repo for each active store within the app workspace + Args: + install_data (dict): Dictionary containing installation information such as the github url and a list of stores + and associated metadata + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + app_workspace (str): Path pointing to the app workspace within the app store + """ github_url = install_data.get("url") - active_stores = install_data.get("stores") - for store_name in active_stores: - if (active_stores[store_name]['active']): - pull_git_repo(github_url, active_stores[store_name], channel_layer, app_workspace) - - -def pull_git_repo(github_url, active_store, channel_layer, app_workspace): - - # This function does the following: - # 1 Check if the the directory is a current repository or initialize, and then select or create the remote origin - # 2 Fetch the data from the origin remote - # 3 Checkout the master/main branch depending on the repository - # 4 Pull the changes if any - # 5 Get the references to get the branches - - # github_url = install_data.get("url") + stores = install_data.get("stores") + for store_name in stores: + if (stores[store_name]['active']): + initialize_local_repo(github_url, stores[store_name], channel_layer, app_workspace) + + +def initialize_local_repo(github_url, active_store, channel_layer, app_workspace): + """Create and initialize a local github repo with a path for a specific conda channel. Once a repo is initialized, + get a list of branches and send back the information to the application submission modal. 
+ + Args: + github_url (str): Url for the github repo that will be submitted to the app store + active_store (str): Name of the store that will be used for creating github files and app submission + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + app_workspace (str): Path pointing to the app workspace within the app store + """ + # Create/Refresh github directories within the app workspace for the given channel app_name = github_url.split("/")[-1].replace(".git", "") github_dir = os.path.join(app_workspace.path, 'gitsubmission', active_store['conda_channel']) + app_github_dir = os.path.join(github_dir, app_name) - # create if github Dir does not exist if not os.path.exists(github_dir): os.makedirs(github_dir) - app_github_dir = os.path.join(github_dir, app_name) - - # 1 Check if the the directory is a current repository or initialize, and then select or create the remote origin if os.path.exists(app_github_dir): shutil.rmtree(app_github_dir) + # Initialize the github repo and fetch repo = git.Repo.init(app_github_dir) origin = repo.create_remote('origin', github_url) - - # 2 Fetch the data from the origin remote origin.fetch() - # 3 Get the references to get the branches - remote_refs = repo.remote().refs - branches = [] - for refs in remote_refs: - branches.append(refs.name.replace("origin/", "")) + # Get remote branches and get list of branch names + branches = [refs.name.replace("origin/", "") for refs in repo.remote().refs] + # Send notification back to websocket about available branches and other store information get_data_json = { "data": { "branches": branches, @@ -201,185 +168,33 @@ def pull_git_repo(github_url, active_store, channel_layer, app_workspace): send_notification(get_data_json, channel_layer) -def apply_setup_template(template_path, setup_path, setup_data): - # reading from file1 and writing to file2 - # open the file using read only mode - handle = open(template_path, "r") - - # reading the file and storing the data in content - content = handle.read() - # replacing the data using replace() - for key in setup_data.keys(): - if f'replace_{key}' in content: - content = content.replace(f'replace_{key}', setup_data[key]) - # content = content.replace("File", "Data") - - # close the file - handle.close() - - handle = open(setup_path, "w") - handle.write(content) - handle.close() - - -def get_app_name_and_version(user, repo_name, branch): - github_object_api = github.Github() - github_submit_repo = github_object_api.get_repo(f'{user}/{repo_name}') - setup_content_object = github_submit_repo.get_contents('setup.py', ref=branch) - setup_content = setup_content_object.decoded_content.decode('utf-8') - app_package_name = '' - version_setup = '' - - left0 = 'version' - right0 = 'description' - susbstring0 = setup_content[setup_content.index(left0)+len(left0):setup_content.index(right0)] - version_setup = susbstring0.strip().replace("'", "").replace(",", "").split('=')[1] - - left = 'app_package' - right = 'release_package' - susbstring = setup_content[setup_content.index(left)+len(left):setup_content.index(right)] - app_package_name = susbstring.strip().replace("'", "").split('=')[1].strip(' ') - - return app_package_name, version_setup - - -def validation_is_setup_complete(user, repo_name, branch, json_response): - github_object_api = github.Github() - github_submit_repo = github_object_api.get_repo(f'{user}/{repo_name}') - setup_content_object = github_submit_repo.get_contents('setup.py', ref=branch) - setup_content = 
setup_content_object.decoded_content.decode() - - prejson_string = setup_content.split("setup(")[-1].replace("\n", "").replace(", ", ",").replace("dependencies,)", "dependencies").strip().split(",") # noqa: E501 - # json_dict = {} - array_emptyness = [] - string_fields = ' ' - get_data_json = {} - - for line in prejson_string: - property_name = line.split("=")[0].strip() - property_value = line.split("=")[1].strip().replace("'", "") - if property_value == '': - array_emptyness.append(property_name) - string_fields += f' • {property_name} • ' - # json_dict[property_name] = property_value - - string_fields += ' ' - if array_emptyness: - mssge_string = f' The setup.py of your repository contain the following fields empty: {string_fields}
' - json_response['next_move'] = False - get_data_json = { - "data": { - "mssge_string": mssge_string, - "metadata": json_response - }, - "jsHelperFunction": "validationResults", - "helper": "addModalHelper" - } - - return get_data_json - - -def validation_is_a_fork(user, repo_name, json_response): - get_data_json = {} - github_object_api = github.Github() - github_submit_repo = github_object_api.get_repo(f'{user}/{repo_name}') - if github_submit_repo.fork: - parent_repo = github_submit_repo.parent.html_url - mssge_string = f' Your repository is a fork, Please submit a pull request to the original app repository ' \ - f'Here, and ask the owner to submit the app to the app store ' \ - 'later.
' - json_response['next_move'] = False - get_data_json = { - "data": { - "mssge_string": mssge_string, - "metadata": json_response - }, - "jsHelperFunction": "validationResults", - "helper": "addModalHelper" - } - # send_notification(get_data_json, channel_layer) - return get_data_json - - -def validation_is_new_app(github_url, app_package_name, json_response): - get_data_json = {} - if json_response["latest_github_url"] == github_url.replace(".git", ""): - mssge_string = " The submitted Github url is an update of an existing application, The app store will " \ - "proceed to pull the repository
" - json_response['next_move'] = True - get_data_json = { - "data": { - "mssge_string": mssge_string, - "metadata": json_response - }, - "jsHelperFunction": "validationResults", - "helper": "addModalHelper" - } - - else: - mssge_string = f'

The app_package name {app_package_name} of the submitted GitHub url was found at an already submitted ' \ - 'application. • If the application is the same, please open a pull ' \ - 'request • If the application is not the same, please change the name of the ' \ - 'app_package found at the setup.py, app.py and other files
' - json_response['next_move'] = False - get_data_json = { - "data": { - "mssge_string": mssge_string, - "metadata": json_response - }, - "jsHelperFunction": "validationResults", - "helper": "addModalHelper" - } - return get_data_json - - -def validation_is_new_version(conda_search_result_package, version_setup, json_response): - get_data_json = {} - json_response["latest_github_url"] = ast.literal_eval(conda_search_result_package[-1]['license'])['dev_url'] - - # json_response["github_urls"] = [] - json_response["versions"] = [] - - string_versions = '
    ' - for conda_version in conda_search_result_package: - json_response.get("versions").append(conda_version.get('version')) - # json_response.get("metadata").get("license").get('url').append(conda_version.get('version')) - # json_response.get("github_urls").append(ast.literal_eval(conda_version.get('license')).get('dev_url')) - string_versions += f' • {conda_version.get("version")} • ' - - string_versions += ' ' - # CHECK if it is a new version or not - if version_setup in json_response["versions"]: - mssge_string = f' The current version of your application is {version_setup}, and it was already ' \ - f'submitted. Current versions of your application are: {string_versions} ' \ - ' Please use a new version in the setup.py and install.yml files
' - json_response['next_move'] = False - - get_data_json = { - "data": { - "mssge_string": mssge_string, - "metadata": json_response - }, - "jsHelperFunction": "validationResults", - "helper": "addModalHelper" - } - - return get_data_json - - -# some ideas of how to refactor the code here for testing def generate_label_strings(conda_labels): + """Creates a string of labels for the anaconda upload + + Args: + conda_labels (list): List of conda labels for the package + + Returns: + str: A string of the conda label with any additional labels prefixed by --label, e.g. + 'main --label dev' + """ labels_string = '' for i in range(len(conda_labels)): if i < 1: labels_string += conda_labels[i] else: labels_string += f' --label {conda_labels[i]}' + return labels_string def create_tethysapp_warehouse_release(repo, branch): + """Creates the tethysapp_warehouse_release branch on the local repository or merges into the existing branch + + Args: + repo (git.Github.Repository): git repository class for the local application + branch (str): name of the existing local branch + """ if 'tethysapp_warehouse_release' not in repo.heads: repo.create_head('tethysapp_warehouse_release') else: @@ -388,11 +203,25 @@ def generate_current_version(setup_py_data): + """Get the app version from the setup.py data + + Args: + setup_py_data (dict): App metadata from setup.py + + Returns: + current_version (str): App version from the setup.py data + """ current_version = setup_py_data["version"] + return current_version def reset_folder(file_path): + """Deletes a folder and recreates it + + Args: + file_path (str): path that will be recreated + """ if os.path.exists(file_path): shutil.rmtree(file_path) @@ -400,13 +229,31 @@ def copy_files_for_recipe(source, destination, files_changed): + """Copy files to a location and return a boolean indicating if files were moved + + Args: + source (str): Path for the source file + destination (str): Path for the destination file + files_changed (bool): Whether any files have already been changed; set to True if this copy occurs + + Returns: + boolean: True if files were moved. False if files were not moved + """ if not os.path.exists(destination): files_changed = True shutil.copyfile(source, destination) + return files_changed def create_upload_command(labels_string, source_files_path, recipe_path): + """Copy the conda upload file and use it as a template with the passed data + + Args: + labels_string (str): A string of labels to be used. e.g.
"main --label dev" or "main" + source_files_path (str): Path to the source files in the app store repo + recipe_path (str): Path to the conda recipes in the cloned application + """ label = {'label_string': labels_string} if os.path.exists(os.path.join(recipe_path, 'upload_command.txt')): os.remove(os.path.join(recipe_path, 'upload_command.txt')) @@ -418,27 +265,39 @@ def create_upload_command(labels_string, source_files_path, recipe_path): label, os.path.join(recipe_path, 'upload_command.txt')) -def drop_keywords(setup_py_data): - # setup_py_data = parse_setup_py(filename) - keywords = [] - email = "" - try: - keywords = setup_py_data.pop('keywords', None) - email = setup_py_data["author_email"] - - # Clean up keywords - keywords = keywords.replace('"', '').replace("'", '') - if ',' in keywords: - keywords = keywords.split(',') - keywords = list(map(lambda x: x.strip(), keywords)) - - except Exception as err: - logger.error("Error ocurred while formatting keywords from setup.py") - logger.error(err) +def get_keywords_and_email(setup_py_data): + """Parses the setup.py dictionary to extract the keywords and the email + + Args: + setup_py_data (dict): Application metadata derived from setup.py + + Returns: + [keywords(list), email(str)]: A list of keywords and the author email + """ + keywords = setup_py_data.get("keywords") + if keywords: + keywords = keywords.replace(' ', '').replace('"', '').replace("'", '').split(',') + else: + keywords = [] + logger.warning("No keywords found in setup.py") + + email = setup_py_data.get("author_email", "") + if not email: + logger.warning("No author email found in setup.py") + return keywords, email def create_template_data_for_install(install_data, setup_py_data): + """Join the install_data information with the setup_py information to create template data for conda install + + Args: + install_data (dict): Data from the application submission form by the user + setup_py_data (dict): Application metadata from the cloned repository's setup.py + + Returns: + dict: master dictionary use for templates, specifically for conda install + """ install_yml = os.path.join(install_data['github_dir'], 'install.yml') with open(install_yml) as f: install_yml_file = yaml.safe_load(f) @@ -451,42 +310,55 @@ def create_template_data_for_install(install_data, setup_py_data): return template_data -def fix_setup(filename): +def fix_setup(setup_py): + """Update the setup.py file and fix any old/bad code that won't work with the app store + + Args: + setup_py (str): Path to the setup.py file + + Returns: + str: Name of the app package in setup.py + """ rel_package = "" - with fileinput.FileInput(filename, inplace=True) as f: + with fileinput.FileInput(setup_py, inplace=True) as f: for line in f: # logger.info(line) if "import find_all_resource_files" in line or "import find_resource_files" in line: print("from setup_helper import find_all_resource_files", end='\n') + elif "namespace =" in line: + new_replace_line = line.replace("TethysAppBase.package_namespace", "namespace") + print(new_replace_line, end='') + elif ("setup(" in line): print(line, end='') - elif "namespace =" in line: - print('', end='\n') + elif ("app_package = " in line): - rel_package = line + rel_package = re.findall("app_package = ['\"](.*)['\"]", line)[0] print("namespace = 'tethysapp'") print(line, end='') elif "from tethys_apps.base.app_base import TethysAppBase" in line: - print('', end='\n') - - elif "TethysAppBase.package_namespace" in line: - new_replace_line = 
line.replace("TethysAppBase.package_namespace", "namespace") - print(new_replace_line, end='\n') + print('', end='') elif "resource_files = find_resource_files" in line: print("resource_files = find_all_resource_files(app_package, namespace)", end='\n') elif "resource_files += find_resource_files" in line: - print('', end='\n') + print('', end='') + else: print(line, end='') return rel_package def remove_init_file(install_data): + """Deletes the init file from the local github repository + + Args: + install_data (dict): Data from the application submission form by the user + """ init_path = os.path.join(install_data['github_dir'], '__init__.py') if os.path.exists(init_path): @@ -494,6 +366,14 @@ def remove_init_file(install_data): def apply_main_yml_template(source_files_path, workflows_path, rel_package, install_data): + """Creates a new main.yaml from the main_template.yaml and install data information + + Args: + source_files_path (str): The directory path that contains additional templates needed for anaconda recipes + workflows_path (str): The directory path that contains necessary files for github workflows + rel_package (str): The name of the application packge + install_data (dict): Data from the application submission form by the user + """ source = os.path.join(source_files_path, 'main_template.yaml') destination = os.path.join(workflows_path, 'main.yaml') app_name = rel_package.replace("app_package", '').replace("=", '').replace("'", "").strip() @@ -509,138 +389,155 @@ def apply_main_yml_template(source_files_path, workflows_path, rel_package, inst apply_template(source, template_data, destination) -def check_repo_exists_remote(repo_name, organization): - if repo_exists(repo_name, organization): - tethysapp_repo = organization.get_repo(repo_name) +def get_head_and_tag_names(tethysapp_remote): + """Use the github repository object to get a list of tags, heads, and remote references - if not repo_exists(repo_name, organization): - # Create the required repo: - tethysapp_repo = organization.create_repo( - repo_name, - allow_rebase_merge=True, - auto_init=False, - description="For Tethys App Store Purposes", - has_issues=False, - has_projects=False, - has_wiki=False, - private=False, - ) - return tethysapp_repo + Args: + repo (github.Github.Repository): github repository class for the organization + Returns: + list: list of tags, heads, and remote references for the repository + """ + return [ref.ref for ref in tethysapp_remote.get_git_refs()] -def get_head_names(repo): - heads_names_list = [] - for ref in repo.references: - heads_names_list.append(ref.name) - return heads_names_list +def create_current_tag_version(current_version, heads_names_list): + """Creates a new tag to use based on app version, date, and tag creation attempt + Args: + current_version (str): Version of the submitted application + heads_names_list (list): List of the existing tags and heads for the repository -def create_current_tag_version(current_version, heads_names_list): - current_tag_name = '' + Returns: + str: The new tag name to use for the git push + """ today = time.strftime("%Y_%m_%d") + valid_tag = False dev_attempt = 0 - current_tag_name = "v" + str(current_version) + "_" + str(dev_attempt) + "_" + today + while not valid_tag: + current_tag_name = "v" + str(current_version) + "_" + str(dev_attempt) + "_" + today - if current_tag_name in heads_names_list: - dev_attempt += 1 + if [head for head in heads_names_list if current_tag_name in head]: + dev_attempt += 1 + else: + valid_tag = True - 
current_tag_name = "v" + str(current_version) + "_" + str(dev_attempt) + "_" + today return current_tag_name def check_if_organization_in_remote(repo, github_organization, remote_url): + """Check if the organization is in the repo remotes - # if 'tethysapp' in repo.remotes: + Args: + repo (git.Github.Repository): git repository class for the local application + github_organization (str): Name of the organization + remote_url (str): Url for the github repository + + Returns: + git.Github.Repository: local github repository class for the specified organization + """ if github_organization in repo.remotes: logger.info("Remote already exists") tethysapp_remote = repo.remotes[github_organization] - # tethysapp_remote = repo.remotes.tethysapp tethysapp_remote.set_url(remote_url) else: - # tethysapp_remote = repo.create_remote('tethysapp', remote_url) tethysapp_remote = repo.create_remote(github_organization, remote_url) + return tethysapp_remote -def add_and_commit_if_files_changed(repo, files_changed, current_tag_name): +def push_to_warehouse_release_remote_branch(repo, tethysapp_remote, current_tag_name, files_changed): + """Perform an add and commit on the local repo if files change + + Args: + repo (git.Github.Repository): git repository class for the local application + tethysapp_remote (git.Github.Repository): git repository class for the remote repo + current_tag_name (str): tag name to use for the git commit + files_changed (bool): True if files have changes since last commit/clone + """ if files_changed: repo.git.add(A=True) repo.git.commit(m=f'tag version {current_tag_name}') - - -def push_to_warehouse_release_remote_branch(repo, tethysapp_remote, current_tag_name, files_changed): - add_and_commit_if_files_changed(repo, files_changed, current_tag_name) - tethysapp_remote.push('tethysapp_warehouse_release') + tethysapp_remote.push('tethysapp_warehouse_release', force=True) def create_head_current_version(repo, current_tag_name, heads_names_list, tethysapp_remote): + """Push the current code to the remote repo + + Args: + repo (git.Github.Repository): git repository class for the local application + current_tag_name (str): tag name to use for the git commit + heads_names_list (list): List of the existing tags and heads for the repository + tethysapp_remote (git.Github.Repository): git repository class for the remote repo + """ if current_tag_name not in heads_names_list: - new_release_branch = repo.create_head(current_tag_name) - repo.git.checkout(current_tag_name) - # push the new branch in remote - tethysapp_remote.push(new_release_branch) + release_branch = repo.create_head(current_tag_name) else: - repo.git.checkout(current_tag_name) - # push the new branch in remote - tethysapp_remote.push(current_tag_name) + release_branch = current_tag_name + + repo.git.checkout(current_tag_name) + tethysapp_remote.push(release_branch) def create_tags_for_current_version(repo, current_tag_name, heads_names_list, tethysapp_remote): + """Create/Replace tags for the release + + Args: + repo (git.Github.Repository): git repository class for the local application + current_tag_name (str): tag name to use for the git commit + heads_names_list (list): List of the existing tags and heads for the repository + tethysapp_remote (git.Github.Repository): git repository class for the remote repo + """ tag_name = current_tag_name + "_release" - if tag_name not in heads_names_list: - - # Create tag - new_tag = repo.create_tag( - tag_name, - ref=repo.heads["tethysapp_warehouse_release"], - message=f'This is a 
tag-object pointing to tethysapp_warehouse_release branch with release version {current_tag_name}', # noqa: E501 - ) - tethysapp_remote.push(new_tag) - - else: + if tag_name in heads_names_list: repo.git.tag('-d', tag_name) # remove locally - tethysapp_remote.push(refspec=(':%s' % (tag_name))) # remove from remote - new_tag = repo.create_tag( - tag_name, - ref=repo.heads["tethysapp_warehouse_release"], - message=f'This is a tag-object pointing to tethysapp_warehouse_release branch with release version {current_tag_name}', # noqa: E501 - ) - tethysapp_remote.push(new_tag) - - -def get_workflow_job_url(tethysapp_repo, github_organization, key): - workflowFound = False - - # Sometimes due to weird conda versioning issues the get_workflow_runs is not found - # In that case return no value for the job_url and handle it in JS - try: - while not workflowFound: - time.sleep(4) - if tethysapp_repo.get_workflow_runs().totalCount > 0: - logger.info("Obtained Workflow for Submission. Getting Job URL") - - try: - # response = requests.get(tethysapp_repo.get_workflow_runs()[0].jobs_url, auth=('tethysapp', key)) - response = requests.get(tethysapp_repo.get_workflow_runs()[0].jobs_url, - auth=(github_organization, key)) - - response.raise_for_status() - jsonResponse = response.json() - workflowFound = jsonResponse["total_count"] > 0 - - except HTTPError as http_err: - logger.error(f'HTTP error occurred while getting Jobs from GITHUB API: {http_err}') - except Exception as err: - logger.error(f'Other error occurred while getting jobs from GITHUB API: {err}') - - if workflowFound: - job_url = jsonResponse["jobs"][0]["html_url"] - - logger.info("Obtained Job URL: " + job_url) - except AttributeError: - logger.info("Unable to obtain Workflow Run") - job_url = None + tethysapp_remote.push(refspec=(f':{tag_name}')) # remove from remote + + new_tag = repo.create_tag( + tag_name, + ref=repo.heads["tethysapp_warehouse_release"], + message=f'This is a tag-object pointing to tethysapp_warehouse_release branch with release version {current_tag_name}', # noqa: E501 + ) + tethysapp_remote.push(new_tag) + + +def get_workflow_job_url(repo, tethysapp_repo, current_tag_name): + """Uses information from the local code repository and the remote github repository to get the workflow job from + the tethysapp_warehouse_release push + + Args: + repo (git.Github.Repository): git repository class for the local application + tethysapp_remote (git.Github.Repository): git repository class for the remote repo + current_tag_name (str): tag name to use for the git commit + + Returns: + str: HTML url for the workflow job from the tethysapp_warehouse_release push + """ + job_found = False + job_url = None + elapsed_time = 0 + timeout = 60 + latest_head_sha = repo.head.object.hexsha + + while not job_found and elapsed_time <= timeout: + time.sleep(4) + elapsed_time += 4 + + workflow_runs = tethysapp_repo.get_workflow_runs() + workflow = [workflow for workflow in workflow_runs if current_tag_name in workflow.display_title] + if workflow: + workflow = workflow[0] + logger.info("Obtained Workflow for Submission. 
Getting Job URL") + + job = [job for job in workflow.jobs() if job.head_sha == latest_head_sha] + job_found = True if job else False + + if job_found: + job_url = job[0].html_url + logger.info("Obtained Job URL: " + job_url) + + if not job_found: + logger.error(f"Failed to get the job url within {timeout} seconds") return job_url @@ -648,10 +545,10 @@ def get_workflow_job_url(tethysapp_repo, github_organization, key): def process_branch(install_data, channel_layer): # 1. Get Variables github_organization = install_data["github_organization"] - key = install_data["github_token"] - g = github.Github(key) + github_token = install_data["github_token"] + g = github.Github(github_token) repo = git.Repo(install_data['github_dir']) - filename = os.path.join(install_data['github_dir'], 'setup.py') + setup_py = os.path.join(install_data['github_dir'], 'setup.py') conda_labels = install_data["conda_labels"] labels_string = generate_label_strings(conda_labels) files_changed = False @@ -660,7 +557,7 @@ def process_branch(install_data, channel_layer): origin = repo.remote(name='origin') repo.git.checkout(install_data['branch']) origin.pull() - setup_py_data = parse_setup_py(filename) + setup_py_data = parse_setup_py(setup_py) current_version = generate_current_version(setup_py_data) # 3. create head tethysapp_warehouse_release and checkout the head @@ -689,7 +586,7 @@ def process_branch(install_data, channel_layer): create_upload_command(labels_string, source_files_path, recipe_path) # 8. Drop keywords from setup.py - keywords, email = drop_keywords(setup_py_data) + keywords, email = get_keywords_and_email(setup_py_data) # 9 get the data from the install.yml and create a metadata dict template_data = create_template_data_for_install(install_data, setup_py_data) @@ -702,10 +599,10 @@ def process_branch(install_data, channel_layer): files_changed = copy_files_for_recipe(source, destination, files_changed) # 11. Fix setup.py file to remove dependency on tethys - rel_package = fix_setup(filename) + rel_package = fix_setup(setup_py) # 12. Update the dependencies of the package - update_dependencies(install_data['github_dir'], recipe_path, source_files_path, keywords, email) + update_anaconda_dependencies(install_data['github_dir'], recipe_path, source_files_path, keywords, email) # 13. apply data to the main.yml for the github action apply_main_yml_template(source_files_path, workflows_path, rel_package, install_data) @@ -713,18 +610,14 @@ def process_branch(install_data, channel_layer): # 14. remove __init__.py file if present at top level remove_init_file(install_data) - if LOCAL_DEBUG_MODE: - logger.info("Completed Local Debug Processing for Git Repo") - return - # 15. Check if this repo already exists on our remote: repo_name = install_data['github_dir'].split('/')[-1] organization = g.get_organization(github_organization) - tethysapp_repo = check_repo_exists_remote(repo_name, organization) + tethysapp_repo = get_github_repo(repo_name, organization) - heads_names_list = get_head_names(repo) + heads_names_list = get_head_and_tag_names(tethysapp_repo) current_tag_name = create_current_tag_version(current_version, heads_names_list) - remote_url = tethysapp_repo.git_url.replace("git://", "https://" + key + ":x-oauth-basic@") + remote_url = tethysapp_repo.git_url.replace("git://", "https://" + github_token + ":x-oauth-basic@") tethysapp_remote = check_if_organization_in_remote(repo, github_organization, remote_url) # 16. 
add, commit, and push to the tethysapp_warehouse_release remote branch @@ -737,7 +630,7 @@ def process_branch(install_data, channel_layer): create_tags_for_current_version(repo, current_tag_name, heads_names_list, tethysapp_remote) # 19. return workflow job url - job_url = get_workflow_job_url(tethysapp_repo, github_organization, key) + job_url = get_workflow_job_url(repo, tethysapp_repo, current_tag_name) get_data_json = { "data": { @@ -749,3 +642,236 @@ def process_branch(install_data, channel_layer): "helper": "addModalHelper" } send_notification(get_data_json, channel_layer) + + +# The functions below are not being used but may want to be implemented in the future + +# def validate_git_repo(install_data, channel_layer): + +# github_url = install_data.get("url") +# repo_name = github_url.split("/")[-1].replace(".git", "") +# user = github_url.split("/")[-2] +# json_response = {} + +# # Here check if it a fork :P +# get_data_json = validation_is_a_fork(user, repo_name, json_response) +# if bool(get_data_json): +# send_notification(get_data_json, channel_layer) + +# # validate if it is a valid setup.py +# branch = "main" +# get_data_json = validation_is_setup_complete(user, repo_name, branch, json_response) +# if bool(get_data_json): +# send_notification(get_data_json, channel_layer) + +# # get the app_package_name and version from the setup.py +# app_package_name, version_setup = get_app_name_and_version(user, repo_name, branch) + +# json_response = {} +# mssge_string = '' +# json_response['submission_github_url'] = github_url + +# conda_search_result = subprocess.run( +# ['conda', 'search', "-c", CHANNEL_NAME, "--override-channels", "-i", "--json"], stdout=subprocess.PIPE) + +# conda_search_result = json.loads(conda_search_result.stdout) +# json_response["isNewApplication"] = True + +# for conda_package in conda_search_result: +# if app_package_name in conda_package: +# json_response["isNewApplication"] = False +# if "license" in conda_search_result[conda_package][-1]: +# conda_search_result_package = conda_search_result[conda_package] + +# # Check if it is a new version +# get_data_json = validation_is_new_version(conda_search_result_package, version_setup, json_response) + +# if bool(get_data_json): +# send_notification(get_data_json, channel_layer) + +# # Check if if it the app_package name is already in the conda channel. +# # check if the submission url is the same as the dev url +# # check if the app_package name is the same as an already submitted application. +# # This mean they are different apps with the same package name +# get_data_json = validation_is_new_app(github_url, app_package_name, json_response, channel_layer) +# send_notification(get_data_json, channel_layer) + +# json_response['next_move'] = True +# mssge_string = f'

The application {repo_name} is a new application, the version {version_setup} will be ' \ +# 'submitted to the app store' +# get_data_json = { +# "data": { +# "mssge_string": mssge_string, +# "metadata": json_response +# }, +# "jsHelperFunction": "validationResults", +# "helper": "addModalHelper" +# } +# send_notification(get_data_json, channel_layer) + + +# def apply_setup_template(template_path, setup_path, setup_data): +# # reading from file1 and writing to file2 +# # open the file using read only mode +# handle = open(template_path, "r") + +# # reading the file and storing the data in content +# content = handle.read() +# # replacing the data using replace() +# for key in setup_data.keys(): +# if f'replace_{key}' in content: +# content = content.replace(f'replace_{key}', setup_data[key]) +# # content = content.replace("File", "Data") + +# # close the file +# handle.close() + +# handle = open(setup_path, "w") +# handle.write(content) +# handle.close() + + +# def get_app_name_and_version(user, repo_name, branch): +# github_object_api = github.Github() +# github_submit_repo = github_object_api.get_repo(f'{user}/{repo_name}') +# setup_content_object = github_submit_repo.get_contents('setup.py', ref=branch) +# setup_content = setup_content_object.decoded_content.decode('utf-8') +# app_package_name = '' +# version_setup = '' + +# left0 = 'version' +# right0 = 'description' +# susbstring0 = setup_content[setup_content.index(left0) + len(left0):setup_content.index(right0)] +# version_setup = susbstring0.strip().replace("'", "").replace(",", "").split('=')[1] + +# left = 'app_package' +# right = 'release_package' +# susbstring = setup_content[setup_content.index(left) + len(left):setup_content.index(right)] +# app_package_name = susbstring.strip().replace("'", "").split('=')[1].strip(' ') + +# return app_package_name, version_setup + + +# def validation_is_setup_complete(user, repo_name, branch, json_response): +# github_object_api = github.Github() +# github_submit_repo = github_object_api.get_repo(f'{user}/{repo_name}') +# setup_content_object = github_submit_repo.get_contents('setup.py', ref=branch) +# setup_content = setup_content_object.decoded_content.decode() + +# prejson_string = setup_content.split("setup(")[-1].replace("\n", "").replace(", ", ",").replace("dependencies,)", "dependencies").strip().split(",") # noqa: E501 +# # json_dict = {} +# array_emptyness = [] +# string_fields = '

    ' +# get_data_json = {} + +# for line in prejson_string: +# property_name = line.split("=")[0].strip() +# property_value = line.split("=")[1].strip().replace("'", "") +# if property_value == '': +# array_emptyness.append(property_name) +# string_fields += f'
  • {property_name}
  • ' +# # json_dict[property_name] = property_value + +# string_fields += '
' +# if array_emptyness: +# mssge_string = f'

The setup.py of your repository contains the following empty fields: {string_fields}

' +# json_response['next_move'] = False +# get_data_json = { +# "data": { +# "mssge_string": mssge_string, +# "metadata": json_response +# }, +# "jsHelperFunction": "validationResults", +# "helper": "addModalHelper" +# } + +# return get_data_json + + +# def validation_is_a_fork(user, repo_name, json_response): +# get_data_json = {} +# github_object_api = github.Github() +# github_submit_repo = github_object_api.get_repo(f'{user}/{repo_name}') +# if github_submit_repo.fork: +# parent_repo = github_submit_repo.parent.html_url +# mssge_string = f'

Your repository is a fork. Please submit a pull request to the original app repository ' \
+#                                  f'Here, and ask the owner to submit the app to the app store ' \
+#                                  'later.

' +# json_response['next_move'] = False +# get_data_json = { +# "data": { +# "mssge_string": mssge_string, +# "metadata": json_response +# }, +# "jsHelperFunction": "validationResults", +# "helper": "addModalHelper" +# } +# # send_notification(get_data_json, channel_layer) +# return get_data_json + + +# def validation_is_new_app(github_url, app_package_name, json_response): +# get_data_json = {} +# if json_response["latest_github_url"] == github_url.replace(".git", ""): +# mssge_string = "

The submitted GitHub url is an update of an existing application. The app store will " \
+#                        "proceed to pull the repository

" +# json_response['next_move'] = True +# get_data_json = { +# "data": { +# "mssge_string": mssge_string, +# "metadata": json_response +# }, +# "jsHelperFunction": "validationResults", +# "helper": "addModalHelper" +# } + +# else: +# mssge_string = f'

The app_package name {app_package_name} of the submitted GitHub url was found at an already submitted ' \ +# 'application.

  • If the application is the same, please open a pull ' \ +# 'request
  • If the application is not the same, please change the name of the ' \ +# 'app_package found at the setup.py, app.py and other files
' +# json_response['next_move'] = False +# get_data_json = { +# "data": { +# "mssge_string": mssge_string, +# "metadata": json_response +# }, +# "jsHelperFunction": "validationResults", +# "helper": "addModalHelper" +# } +# return get_data_json + + +# def validation_is_new_version(conda_search_result_package, version_setup, json_response): +# get_data_json = {} +# json_response["latest_github_url"] = ast.literal_eval(conda_search_result_package[-1]['license'])['dev_url'] + +# # json_response["github_urls"] = [] +# json_response["versions"] = [] + +# string_versions = '
    ' +# for conda_version in conda_search_result_package: +# json_response.get("versions").append(conda_version.get('version')) +# # json_response.get("metadata").get("license").get('url').append(conda_version.get('version')) +# # json_response.get("github_urls").append(ast.literal_eval(conda_version.get('license')).get('dev_url')) +# string_versions += f'
  • {conda_version.get("version")}
  • ' + +# string_versions += '
' +# # CHECK if it is a new version or not +# if version_setup in json_response["versions"]: +# mssge_string = f'

The current version of your application is {version_setup}, and it was already ' \ +# f'submitted.

Current versions of your application are: {string_versions}

' \ +# '

Please use a new version in the setup.py and install.yml files

' +# json_response['next_move'] = False + +# get_data_json = { +# "data": { +# "mssge_string": mssge_string, +# "metadata": json_response +# }, +# "jsHelperFunction": "validationResults", +# "helper": "addModalHelper" +# } + +# return get_data_json diff --git a/tethysapp/app_store/tests/conftest.py b/tethysapp/app_store/tests/conftest.py new file mode 100644 index 0000000..8c8905f --- /dev/null +++ b/tethysapp/app_store/tests/conftest.py @@ -0,0 +1,280 @@ +import pytest +from unittest.mock import MagicMock +import shutil +from pathlib import Path +from tethys_apps.base import TethysAppBase + + +class TestApp(TethysAppBase): + name = 'Test App' + init_ran = False + package = 'test_app' + + def __init__(self): + self.init_ran = True + + def custom_settings(self): + mock_setting = MagicMock() + mock_setting.name = "mock_setting" + return [mock_setting] + + +@pytest.fixture() +def tethysapp(): + return TestApp + + +@pytest.fixture() +def app_store_dir(): + app_store_dir = Path(__file__).parent.parent + + return app_store_dir + + +@pytest.fixture() +def app_files_dir(app_store_dir): + app_files_dir = app_store_dir / "application_files" + + return app_files_dir + + +@pytest.fixture() +def test_files_dir(): + current_dir = Path(__file__).parent + app_files_dir = current_dir / "files" + + return app_files_dir + + +@pytest.fixture +def store(): + def _store(id, default=True, active=True, conda_labels=None): + if not conda_labels: + conda_labels = ['main'] + + return { + 'default': default, + 'conda_labels': conda_labels, + 'github_token': f'fake_token_{id}', + 'conda_channel': f'conda_channel_{id}', + 'github_organization': f'org_{id}', + 'conda_style': 'blue', + 'active': active + } + return _store + + +@pytest.fixture +def all_active_stores(store): + return { + "active_default": store("active_default"), + "active_not_default": store("active_not_default", default=False) + } + + +@pytest.fixture +def mix_active_inactive_stores(store): + return { + "active_default": store("active_default"), + "inactive_not_default": store("inactive_not_default", default=False, active=False) + } + + +@pytest.fixture +def all_inactive_stores(store): + return { + "inactive_default": store("inactive_default", active=False), + "inactive_not_default": store("inactive_not_default", default=False, active=False) + } + + +@pytest.fixture +def fresh_resource(): + def _fresh_resource(app_name, conda_channel, conda_label): + return { + 'name': app_name, + 'installed': {conda_channel: {conda_label: False}}, + 'versions': {conda_channel: {conda_label: ["1.0"]}}, + 'versionURLs': {conda_channel: {conda_label: ["versionURL"]}}, + 'channels_and_labels': {conda_channel: {conda_label: []}}, + 'timestamp': {conda_channel: {conda_label: "timestamp"}}, + 'compatibility': {conda_channel: {conda_label: {}}}, + 'license': {conda_channel: {conda_label: None}}, + 'licenses': {conda_channel: {conda_label: []}} + } + return _fresh_resource + + +@pytest.fixture +def resource(): + def _resource(app_name, conda_channel, conda_label): + return { + 'name': app_name, + 'installed': {conda_channel: {conda_label: False}}, + 'installedVersion': {conda_channel: {conda_label: "1.0"}}, + 'latestVersion': {conda_channel: {conda_label: "1.0"}}, + 'versions': {conda_channel: {conda_label: ["1.0"]}}, + 'versionURLs': {conda_channel: {conda_label: ["versionURL"]}}, + 'channels_and_labels': {conda_channel: {conda_label: []}}, + 'timestamp': {conda_channel: {conda_label: "timestamp"}}, + 'compatibility': {conda_channel: {conda_label: {}}}, + 'license': 
{conda_channel: {conda_label: None}}, + 'licenses': {conda_channel: {conda_label: []}}, + 'author': {conda_channel: {conda_label: 'author'}}, + 'description': {conda_channel: {conda_label: 'description'}}, + 'author_email': {conda_channel: {conda_label: 'author_email'}}, + 'keywords': {conda_channel: {conda_label: 'keywords'}}, + 'dev_url': {conda_channel: {conda_label: 'url'}} + } + return _resource + + +@pytest.fixture() +def store_with_resources(resource, store): + def _store_with_resources(store_name, conda_labels, available_apps_label=None, available_apps_name="", + installed_apps_label=None, installed_apps_name="", incompatible_apps_label=None, + incompatible_apps_name=""): + active_store = store(store_name, conda_labels=conda_labels) + available_app = {} + installed_app = {} + incompatible_app = {} + + if available_apps_label: + if available_apps_name: + app_name = available_apps_name + else: + app_name = f"{store_name}_available_app_{available_apps_label}" + available_app = {app_name: resource(app_name, active_store['conda_channel'], available_apps_label)} + + if installed_apps_label: + if installed_apps_name: + app_name = installed_apps_name + else: + app_name = f"{store_name}_installed_app_{installed_apps_label}" + installed_app = {app_name: resource(app_name, active_store['conda_channel'], installed_apps_label)} + + if incompatible_apps_label: + if incompatible_apps_name: + app_name = incompatible_apps_name + else: + app_name = f"{store_name}_incompatible_app_{incompatible_apps_label}" + incompatible_app = {app_name: resource(app_name, active_store['conda_channel'], incompatible_apps_label)} + + resources = { + 'availableApps': available_app, + 'installedApps': installed_app, + 'incompatibleApps': incompatible_app + } + + return (active_store, resources) + + return _store_with_resources + + +@pytest.fixture() +def tethysapp_base(tmp_path): + tethysapp_base_dir = tmp_path / "tethysapp-test_app" + tethysapp_base_dir.mkdir() + + tethysapp_dir = tethysapp_base_dir / "tethysapp" + tethysapp_dir.mkdir() + + app_dir = tethysapp_dir / "test_app" + app_dir.mkdir() + + return tethysapp_base_dir + + +@pytest.fixture() +def tethysapp_base_with_application_files(tethysapp_base, app_files_dir, test_files_dir): + + conda_recipes_dir = tethysapp_base / "conda.recipes" + conda_recipes_dir.mkdir() + + meta_template = app_files_dir / "meta_template.yaml" + tethysapp_meta_template = conda_recipes_dir / "meta.yaml" + shutil.copy(meta_template, tethysapp_meta_template) + + getChannels = app_files_dir / "getChannels.py" + tethysapp_getChannels = tethysapp_base / "getChannels.py" + shutil.copy(getChannels, tethysapp_getChannels) + + setup_helper = app_files_dir / "setup_helper.py" + tethysapp_setup_helper = tethysapp_base / "setup_helper.py" + shutil.copy(setup_helper, tethysapp_setup_helper) + + setup_helper = test_files_dir / "setup.py" + tethysapp_setup_helper = tethysapp_base / "setup.py" + shutil.copy(setup_helper, tethysapp_setup_helper) + + setup_helper = test_files_dir / "install_pip.sh" + tethysapp_scripts = tethysapp_base / "tethysapp" / "test_app" / "scripts" + tethysapp_scripts.mkdir(parents=True) + tethysapp_setup_helper = tethysapp_scripts / "install_pip.sh" + shutil.copy(setup_helper, tethysapp_setup_helper) + + setup_helper = app_files_dir / "__init__.py" + tethysapp_setup_helper = tethysapp_base / "__init__.py" + shutil.copy(setup_helper, tethysapp_setup_helper) + + return tethysapp_base + + +@pytest.fixture() +def basic_tethysapp(tethysapp_base_with_application_files, test_files_dir): 
+ test_install_yaml = test_files_dir / "basic_install.yml" + tethysapp_install_yaml = tethysapp_base_with_application_files / "install.yml" + shutil.copy(test_install_yaml, tethysapp_install_yaml) + + return tethysapp_base_with_application_files + + +@pytest.fixture() +def complex_tethysapp(tethysapp_base_with_application_files, test_files_dir): + test_install_yaml = test_files_dir / "complex_install.yml" + tethysapp_install_yaml = tethysapp_base_with_application_files / "install.yml" + shutil.copy(test_install_yaml, tethysapp_install_yaml) + + return tethysapp_base_with_application_files + + +@pytest.fixture() +def basic_meta_yaml(test_files_dir): + basic_meta_yaml = test_files_dir / "basic_meta.yaml" + + return basic_meta_yaml + + +@pytest.fixture() +def complex_meta_yaml(test_files_dir): + complex_meta_yaml = test_files_dir / "complex_meta.yaml" + + return complex_meta_yaml + + +@pytest.fixture() +def install_pip_bash(test_files_dir): + install_pip_bash = test_files_dir / "install_pip.sh" + + return install_pip_bash + + +@pytest.fixture() +def mock_admin_request(rf, admin_user): + def _mock_admin_request(url, request_body=None, headers=None): + request = rf.get(url, request_body, headers) + request.user = admin_user + return request + + return _mock_admin_request + + +@pytest.fixture() +def mock_no_permission_request(rf, django_user_model): + def _mock_no_permission_request(url, request_body=None, headers=None): + request = rf.get(url, request_body, headers) + new_user = django_user_model.objects.create(username="someone", password="something") + request.user = new_user + return request + + return _mock_no_permission_request diff --git a/tethysapp/app_store/tests/files/bad_setup.py b/tethysapp/app_store/tests/files/bad_setup.py new file mode 100644 index 0000000..382ce41 --- /dev/null +++ b/tethysapp/app_store/tests/files/bad_setup.py @@ -0,0 +1,34 @@ +from setuptools import setup, find_namespace_packages +from setup_helper import find_resource_files +from tethys_apps.base.app_base import TethysAppBase + +# -- Apps Definition -- # +TethysAppBase.package_namespace = 'tethysapp' +app_package = 'test_app' +release_package = 'tethysapp-' + app_package + +# -- Python Dependencies -- # +dependencies = [] + +# -- Get Resource File -- # +resource_files = find_resource_files(app_package, TethysAppBase.package_namespace) +resource_files += find_resource_files(app_package, TethysAppBase.package_namespace) + + +setup( + False, + name=release_package, + version='0.0.1', + description='example', + long_description='This is just an example for testing', + keywords='example,test', + author='Tester', + author_email='tester@email.com', + url='', + license='BSD-3', + packages=find_namespace_packages(), + package_data={'': resource_files}, + include_package_data=True, + zip_safe=False, + install_requires=dependencies, +) diff --git a/tethysapp/app_store/tests/files/basic_install.yml b/tethysapp/app_store/tests/files/basic_install.yml new file mode 100644 index 0000000..18a32fb --- /dev/null +++ b/tethysapp/app_store/tests/files/basic_install.yml @@ -0,0 +1,13 @@ +# This file should be committed to your app code. +version: 1.0 +# This should match the app - package name in your setup.py +name: tethysapp-test_app + +requirements: + # Putting in a skip true param will skip the entire section. 
Ignoring the option will assume it be set to False + skip: false + conda: + channels: + packages: + pip: +post: \ No newline at end of file diff --git a/tethysapp/app_store/tests/files/basic_meta.yaml b/tethysapp/app_store/tests/files/basic_meta.yaml new file mode 100644 index 0000000..c62c453 --- /dev/null +++ b/tethysapp/app_store/tests/files/basic_meta.yaml @@ -0,0 +1,31 @@ +{% set data = load_setup_py_data(setup_file='../setup.py', from_recipe_dir=True) %} +{% set name = data.get('name')|replace("tethysapp-", "")|lower %} + +package: + name: {{ name }} + version: {{ data.get('version') }} + +about: + author: {{ data.get('author')}} + description: {{ data.get('description')}} + license: "$metadataObj" + dev_url: {{ data.get('dev_url') }} + +outputs: + - name: {{ name }} +extra: + author_email: '' + keywords: [] + recipe-maintainers: + - TethysAppWarehouse + +build: + noarch: python + preserve_egg_dir: true + script: python setup.py install --record=record.txt +requirements: + build: + - python + run: null +source: + path: .. diff --git a/tethysapp/app_store/tests/files/complex_install.yml b/tethysapp/app_store/tests/files/complex_install.yml new file mode 100644 index 0000000..d24f60a --- /dev/null +++ b/tethysapp/app_store/tests/files/complex_install.yml @@ -0,0 +1,16 @@ +# This file should be committed to your app code. +version: 1.0 +tethys_version: ">=4.0" +# This should match the app - package name in your setup.py +name: test_app + +requirements: + # Putting in a skip true param will skip the entire section. Ignoring the option will assume it be set to False + skip: false + conda: + channels: conda-forge + packages: + - numpy + pip: + - requests +post: \ No newline at end of file diff --git a/tethysapp/app_store/tests/files/complex_meta.yaml b/tethysapp/app_store/tests/files/complex_meta.yaml new file mode 100644 index 0000000..4b6cdcd --- /dev/null +++ b/tethysapp/app_store/tests/files/complex_meta.yaml @@ -0,0 +1,32 @@ +{% set data = load_setup_py_data(setup_file='../setup.py', from_recipe_dir=True) %} +{% set name = data.get('name')|replace("tethysapp-", "")|lower %} + +package: + name: {{ name }} + version: {{ data.get('version') }} + +about: + author: {{ data.get('author')}} + description: {{ data.get('description')}} + license: "$metadataObj" + dev_url: {{ data.get('dev_url') }} + +outputs: + - name: {{ name }} +extra: + author_email: '' + keywords: [] + recipe-maintainers: + - TethysAppWarehouse + +build: + noarch: python + preserve_egg_dir: true + script: python setup.py install --record=record.txt +requirements: + build: + - python + run: + - numpy +source: + path: .. 
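The install.yml test fixtures above note that adding a skip true param under requirements skips the entire section, and that omitting the option is assumed to mean skip: false. A minimal sketch of what such a skipped variant might look like (hypothetical file name and contents, shown only for illustration and not part of this change set):

# hypothetical skipped_install.yml: the requirements section is bypassed entirely
version: 1.0
name: tethysapp-test_app

requirements:
  # skip: true tells the installer to ignore the conda and pip lists below
  skip: true
  conda:
    channels:
    packages:
  pip:
post: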
diff --git a/tethysapp/app_store/tests/files/install_pip.sh b/tethysapp/app_store/tests/files/install_pip.sh new file mode 100644 index 0000000..825f597 --- /dev/null +++ b/tethysapp/app_store/tests/files/install_pip.sh @@ -0,0 +1,2 @@ +pip install requests +echo "PIP Install Complete" \ No newline at end of file diff --git a/tethysapp/app_store/tests/files/recipe_meta.yaml b/tethysapp/app_store/tests/files/recipe_meta.yaml new file mode 100644 index 0000000..e0fbe56 --- /dev/null +++ b/tethysapp/app_store/tests/files/recipe_meta.yaml @@ -0,0 +1,28 @@ +package: + name: test_app + version: 1.0 + +about: + author: author + description: description + license: "{}" + +outputs: + - name: test_app + +extra: + author_email: author_email + keywords: keywords + recipe-maintainers: + - TethysAppWarehouse + +build: + noarch: python + preserve_egg_dir: true + script: python setup.py install --record=record.txt +requirements: + build: + - python + run: null +source: + path: .. diff --git a/tethysapp/app_store/tests/files/setup.py b/tethysapp/app_store/tests/files/setup.py new file mode 100644 index 0000000..868dfa3 --- /dev/null +++ b/tethysapp/app_store/tests/files/setup.py @@ -0,0 +1,33 @@ +from setuptools import setup, find_namespace_packages +from setup_helper import find_all_resource_files + +# -- Apps Definition -- # +namespace = 'tethysapp' +namespace = 'tethysapp' +app_package = 'test_app' +release_package = 'tethysapp-' + app_package + +# -- Python Dependencies -- # +dependencies = [] + +# -- Get Resource File -- # +resource_files = find_all_resource_files(app_package, namespace) + + +setup( + False, + name=release_package, + version='0.0.1', + description='example', + long_description='This is just an example for testing', + keywords='example,test', + author='Tester', + author_email='tester@email.com', + url='', + license='BSD-3', + packages=find_namespace_packages(), + package_data={'': resource_files}, + include_package_data=True, + zip_safe=False, + install_requires=dependencies, +) diff --git a/tethysapp/app_store/tests/integrated_tests/test_controllers.py b/tethysapp/app_store/tests/integrated_tests/test_controllers.py new file mode 100644 index 0000000..d5fd09d --- /dev/null +++ b/tethysapp/app_store/tests/integrated_tests/test_controllers.py @@ -0,0 +1,105 @@ +from tethysapp.app_store.controllers import (home, get_available_stores, get_merged_resources) +from unittest.mock import call, MagicMock +import json + + +def test_home_stores(mocker, tmp_path, store, mock_admin_request): + request = mock_admin_request('/apps/app-store') + active_store = store('active_default') + mocker.patch('tethys_apps.base.workspace.get_app_workspace', return_value=str(tmp_path)) + mocker.patch('tethys_apps.utilities.get_active_app') + mocker.patch('tethysapp.app_store.controllers.get_conda_stores', return_value=[active_store]) + mock_render = mocker.patch('tethysapp.app_store.controllers.render') + + home(request) + + expected_context = { + 'storesData': [active_store], + 'show_stores': True + } + mock_render.assert_has_calls([ + call(request, 'app_store/home.html', expected_context) + ]) + + +def test_home_no_stores(mocker, tmp_path, mock_admin_request): + request = mock_admin_request('/apps/app-store') + mocker.patch('tethys_apps.base.workspace.get_app_workspace', return_value=str(tmp_path)) + mocker.patch('tethys_apps.utilities.get_active_app') + mocker.patch('tethysapp.app_store.controllers.get_conda_stores', return_value=[]) + mock_render = mocker.patch('tethysapp.app_store.controllers.render') + + 
home(request) + + expected_context = { + 'storesData': [], + 'show_stores': False + } + mock_render.assert_has_calls([ + call(request, 'app_store/home.html', expected_context) + ]) + + +def test_home_no_access(mocker, mock_no_permission_request): + mock_messages = MagicMock() + request = mock_no_permission_request('/apps/app-store') + request._messages = mock_messages + mocker.patch('tethys_apps.utilities.get_active_app') + mock_render = mocker.patch('tethysapp.app_store.controllers.render') + + home(request) + + mock_render.assert_not_called() + mock_messages.add.assert_called_with(30, "We're sorry, but the operation you requested cannot be found.", '') + + +def test_get_available_stores(mocker, tmp_path, store, mock_admin_request): + request = mock_admin_request('/app-store/get_available_stores') + active_store = store('active_default') + mocker.patch('tethys_apps.base.workspace.get_app_workspace', return_value=str(tmp_path)) + mocker.patch('tethys_apps.utilities.get_active_app') + mocker.patch('tethysapp.app_store.controllers.get_conda_stores', return_value=[active_store]) + + stores = get_available_stores(request) + expected_stores = {"stores": [active_store]} + assert json.loads(stores.content) == expected_stores + + +def test_get_available_stores_no_access(mocker, mock_no_permission_request): + mock_messages = MagicMock() + request = mock_no_permission_request('/app-store/get_available_stores') + request._messages = mock_messages + mocker.patch('tethys_apps.utilities.get_active_app') + + get_available_stores(request) + + mock_messages.add.assert_called_with(30, "We're sorry, but the operation you requested cannot be found.", '') + + +def test_get_merged_resources(store, resource, mocker, mock_admin_request, tmp_path): + request = mock_admin_request('/app-store/get_merged_resources') + active_store = store('active_default', conda_labels=['main', 'dev']) + mocker.patch('tethys_apps.base.workspace.get_app_workspace', return_value=str(tmp_path)) + mocker.patch('tethys_apps.utilities.get_active_app') + request.active_store = active_store + app_resource_main = resource("test_app", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource2_main = resource("test_app2", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource2_dev = resource("test_app2", active_store['conda_channel'], active_store['conda_labels'][1]) + mocker.patch('tethysapp.app_store.controllers.tethys_version', "4.0.0") + + list_stores = { + 'availableApps': [app_resource_main], + 'installedApps': [app_resource_main], + 'incompatibleApps': [app_resource2_main, app_resource2_dev] + } + mocker.patch('tethysapp.app_store.controllers.get_stores_reformatted', return_value=list_stores) + + object_stores = get_merged_resources(request) + + expected_list_stores = { + 'availableApps': [app_resource_main], + 'installedApps': [app_resource_main], + 'incompatibleApps': [app_resource2_main, app_resource2_dev], + 'tethysVersion': "4.0.0" + } + assert json.loads(object_stores.content) == expected_list_stores diff --git a/tethysapp/app_store/tests/pytest.ini b/tethysapp/app_store/tests/pytest.ini deleted file mode 100644 index ed6a436..0000000 --- a/tethysapp/app_store/tests/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -DJANGO_SETTINGS_MODULE=tethysapp.app_store.tests.test_settings -python_files = test_*.py *_tests.py - -; addopts = --nomigrations --cov=./ --cov-report=html \ No newline at end of file diff --git a/tethysapp/app_store/tests/test_settings.py 
b/tethysapp/app_store/tests/test_settings.py deleted file mode 100644 index 1fd0dbb..0000000 --- a/tethysapp/app_store/tests/test_settings.py +++ /dev/null @@ -1,21 +0,0 @@ -import sys - -sys.path.append('/home/gio/tethysdev/tethys/tethys_portal') - -# DATABASES = { -# "default": { -# "ENGINE": "django.db.backends.sqlite3", -# "NAME": "memory", -# } -# } - -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'tethys_platform', - 'USER': 'tethys_super', - 'PASSWORD': 'pass', - 'HOST': 'localhost', - 'PORT': '5436' - } -} diff --git a/tethysapp/app_store/tests/test_submission.py b/tethysapp/app_store/tests/test_submission.py deleted file mode 100644 index c9061b0..0000000 --- a/tethysapp/app_store/tests/test_submission.py +++ /dev/null @@ -1,101 +0,0 @@ - -# from tethysapp.app_store.app import AppStore as app_store -from tethysapp.app_store.submission_handlers import pull_git_repo -# from tethysapp.app_store.utilities import decrypt -# from dotenv import load_dotenv,dotenv_values - -# from tethys_sdk.testing import TethysTestCase - -# from tethysapp.app_store.helpers import send_notification -from unittest import mock # because unittest's mock works great with pytest -# import shutil -import os -# from django.test import Client -# from django.test import TestCase -import github - -import pytest -# import django -# django.setup() -github_repos = [ - "https://github.com/BYU-Hydroinformatics/tethysapp-hydrafloods.git", # hydrafloods app - "https://github.com/BYU-Hydroinformatics/Water-Data-Explorer.git" # water_data_explorer app -] - -# load_dotenv(dotenv_path=os.path.join(os.getcwd(),"app_store/tests/.env")) -# config = dotenv_values() - -stores = [ - { - "default": True, - "conda_labels": [ - "main", - "dev" - ], - "github_token": "gAAAAABj-CqGQzM2cZga_ISIPgyOFcR-uxHxtfqnqSs96XN210wAP4dPgKHWueKphMu3cXqMaIOpx49VzEblsJVkwhgUbphZ9WaSIKqEmqk2Bi_mf_hLgDX4EnvIpKqs5iDxOqo25vXi", # noqa: E501 - "conda_channel": "tethysapp", - "github_organization": "tethysapp" - }, - { - "default": False, - "conda_labels": [ - "main", - "dev" - ], - "github_token": "gAAAAABj_jTAaNDK1OAxfswFwQwpoYDd4NZzVgpV1H4ol2Zh2zrg9hPXtGHkKPfjHBRayw9wEkb3ByJwbOgk3Xj7iPJl_QC_VPxtoRuPIESl_jYuohoGlBbIdaU9tEH347Wqp9-7fg4i", # noqa: E501 - "conda_channel": "elkingio", - "github_organization": "lost-melancholic-tribe" - } -] - - -class TestSubmissionHandlers: - # @pytest.mark.parametrize("mock_os_exits_results",[False,False,False,True]) - # @mock.patch("tethysapp.app_store.submission_handlers.os.path.exists", autospec=True) - @pytest.mark.django_db - @pytest.mark.parametrize("github_url", github_repos) - @pytest.mark.parametrize("active_store", stores) - @mock.patch("tethysapp.app_store.submission_handlers.send_notification") - def test_pull_git_repo(self, mock_send_notification, active_store, github_url): - fake_app_workspace_path = '/home/gio/tethysdev/applications/tethysapp-tethys_app_store/tethysapp/' \ - 'app_store/tests/fake_app_workspace' - mock_send_notification.return_value = "Fake notification sent" - - app_workspace = mock.MagicMock() - app_workspace.path = fake_app_workspace_path - # def side_os_exists_effect(args): - # return mock_os_exits_results - - # mock_exists.side_effect = side_os_exists_effect - - channel_layer = mock.MagicMock() - - # get the branches manually to use assert ## - repo_name = github_url.split("/")[-1].replace(".git", "") - user = github_url.split("/")[-2] - github_object_api = github.Github() - repo = github_object_api.get_repo(f'{user}/{repo_name}') - branches = 
list(repo.get_branches()) - list_branch = [] - for branch in branches: - list_branch.append(branch.name) - - # get the github dir path - app_name = github_url.split("/")[-1].replace(".git", "") - fake_app_path = os.path.join(fake_app_workspace_path, "gitsubmission", active_store['conda_channel'], app_name) - get_data_json = { - "data": { - "branches": list_branch, - "github_dir": fake_app_path, - "conda_channel": active_store['conda_channel'], - "github_token": active_store['github_token'], - "conda_labels": active_store['conda_labels'], - "github_organization": active_store['github_organization'] - }, - "jsHelperFunction": "showBranches", - "helper": "addModalHelper" - } - - pull_git_repo(github_url, active_store, channel_layer, app_workspace) - - mock_send_notification.assert_called_with(get_data_json, channel_layer) diff --git a/tethysapp/app_store/tests/tests.py b/tethysapp/app_store/tests/tests.py deleted file mode 100755 index 880b31f..0000000 --- a/tethysapp/app_store/tests/tests.py +++ /dev/null @@ -1,168 +0,0 @@ -# Most of your test classes should inherit from TethysTestCase -from tethys_sdk.testing import TethysTestCase - -# Use if your app has persistent stores that will be tested against. -# Your app class from app.py must be passed as an argument to the TethysTestCase functions to both -# create and destroy the temporary persistent stores for your app used during testing -# from ..app import Warehouse - -# Use if you'd like a simplified way to test rendered HTML templates. -# You likely need to install BeautifulSoup, as it is not included by default in Tethys Platform -# 1. Open a terminal -# 2. Enter command ". /usr/lib/tethys/bin/activate" to activate the Tethys python environment -# 3. Enter command "pip install beautifulsoup4" -# For help, see https://www.crummy.com/software/BeautifulSoup/bs4/doc/ -# from bs4 import BeautifulSoup - -""" -To run any tests: - 1. Open a terminal - 2. Enter command ". /usr/lib/tethys/bin/activate" to activate the Tethys python environment - 3. In settings.py make sure that the tethys_default database user is set to tethys_super - DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'NAME': 'tethys_default', - 'USER': 'tethys_super', - 'PASSWORD': 'pass', - 'HOST': '127.0.0.1', - 'PORT': '5435' - } - } - 4. Enter tethys test command. - The general form is: "tethys test -f tethys_apps.tethysapp..... - " - See below for specific examples - - To run all tests across this app: - Test command: "tethys test -f tethys_apps.tethysapp.warehouse" - - To run all tests in this file: - Test command: "tethys test -f tethys_apps.tethysapp.warehouse.tests.tests" - - To run tests in the WarehouseTestCase class: - Test command: "tethys test -f tethys_apps.tethysapp.warehouse.tests.tests.WarehouseTestCase" - - To run only the test_if_tethys_platform_is_great function in the WarehouseTestCase class: - Test command: "tethys test -f tethys_apps.tethysapp.warehouse.tests.tests.WarehouseTestCase. - test_if_tethys_platform_is_great" - -To learn more about writing tests, see: - https://docs.djangoproject.com/en/1.9/topics/testing/overview/#writing-tests - https://docs.python.org/2.7/library/unittest.html#module-unittest -""" - - -class WarehouseTestCase(TethysTestCase): - """ - In this class you may define as many functions as you'd like to test different aspects of your app. - Each function must start with the word "test" for it to be recognized and executed during testing. 
- You could also create multiple TethysTestCase classes within this or other python files to organize your tests. - """ - - def set_up(self): - """ - This function is not required, but can be used if any environmental setup needs to take place before - execution of each test function. Thus, if you have multiple test that require the same setup to run, - place that code here. For example, if you are testing against any persistent stores, you should call the - test database creation function here, like so: - - self.create_test_persistent_stores_for_app(Warehouse) - - If you are testing against a controller that check for certain user info, you can create a fake test user and - get a test client, like so: - - #The test client simulates a browser that can navigate your app's url endpoints - self.c = self.get_test_client() - self.user = self.create_test_user(username="joe", password="secret", email="joe@some_site.com") - # To create a super_user, use "self.create_test_superuser(*params)" with the same params - - # To force a login for the test user - self.c.force_login(self.user) - - # If for some reason you do not want to force a login, you can use the following: - login_success = self.c.login(username="joe", password="secret") - - NOTE: You do not have place these functions here, but if they are not placed here and are needed - then they must be placed at the beginning of your individual test functions. Also, if a certain - setup does not apply to all of your functions, you should either place it directly in each - function it applies to, or maybe consider creating a new test file or test class to group similar - tests. - """ - pass - - def tear_down(self): - """ - This function is not required, but should be used if you need to tear down any environmental setup - that took place before execution of the test functions. If you are testing against any persistent - stores, you should call the test database destruction function from here, like so: - - self.destroy_test_persistent_stores_for_app(Warehouse) - - NOTE: You do not have to set these functions up here, but if they are not placed here and are needed - then they must be placed at the very end of your individual test functions. Also, if certain - tearDown code does not apply to all of your functions, you should either place it directly in each - function it applies to, or maybe consider creating a new test file or test class to group similar - tests. - """ - pass - - def is_tethys_platform_great(self): - return True - - def test_if_tethys_platform_is_great(self): - """ - This is an example test function that can be modified to test a specific aspect of your app. - It is required that the function name begins with the word "test" or it will not be executed. - Generally, the code written here will consist of many assert methods. 
- A list of assert methods is included here for reference or to get you started: - assertEqual(a, b) a == b - assertNotEqual(a, b) a != b - assertTrue(x) bool(x) is True - assertFalse(x) bool(x) is False - assertIs(a, b) a is b - assertIsNot(a, b) a is not b - assertIsNone(x) x is None - assertIsNotNone(x) x is not None - assertIn(a, b) a in b - assertNotIn(a, b) a not in b - assertIsInstance(a, b) isinstance(a, b) - assertNotIsInstance(a, b) !isinstance(a, b) - Learn more about assert methods here: - https://docs.python.org/2.7/library/unittest.html#assert-methods - """ - - self.assertEqual(self.is_tethys_platform_great(), True) - self.assertNotEqual(self.is_tethys_platform_great(), False) - self.assertTrue(self.is_tethys_platform_great()) - self.assertFalse(not self.is_tethys_platform_great()) - self.assertIs(self.is_tethys_platform_great(), True) - self.assertIsNot(self.is_tethys_platform_great(), False) - - def test_home_controller(self): - """ - This is an example test function of how you might test a controller that returns an HTML template rendered - with context variables. - """ - - # If all test functions were testing controllers or required a test client for another reason, the following - # 3 lines of code could be placed once in the set_up function. Note that in that case, each variable should be - # prepended with "self." (i.e. self.c = ...) to make those variables "global" to this test class and able to be - # used in each separate test function. - c = self.get_test_client() - user = self.create_test_user(username="joe", password="secret", email="joe@some_site.com") - c.force_login(user) - - # Have the test client "browse" to your home page - response = c.get('/apps/warehouse/') # The final '/' is essential for all pages/controllers - - # Test that the request processed correctly (with a 200 status code) - self.assertEqual(response.status_code, 200) - - ''' - NOTE: Next, you would likely test that your context variables returned as expected. 
That would look - something like the following: - - context = response.context - self.assertEqual(context['my_integer'], 10) - ''' diff --git a/tethysapp/app_store/tests/unit_tests/test_begin_install.py b/tethysapp/app_store/tests/unit_tests/test_begin_install.py new file mode 100644 index 0000000..d21bee6 --- /dev/null +++ b/tethysapp/app_store/tests/unit_tests/test_begin_install.py @@ -0,0 +1,346 @@ +from unittest.mock import call, MagicMock +from tethysapp.app_store.begin_install import (handle_property_not_present, process_post_install_scripts, + detect_app_dependencies, mamba_install, begin_install) + + +def test_handle_property_not_present(): + handle_property_not_present("") + + +def test_process_post_install_scripts(tmp_path): + scripts_dir = tmp_path / "scripts" + scripts_dir.mkdir() + + process_post_install_scripts(tmp_path) + + +def test_detect_app_dependencies_pip_no_settings(mocker, tethysapp_base_with_application_files): + app_name = "test_app" + channel_layer = MagicMock() + mock_ws = MagicMock() + mocker.patch('tethysapp.app_store.begin_install.call') + mocker.patch('tethysapp.app_store.begin_install.cache') + mocker.patch('tethysapp.app_store.begin_install.importlib') + mock_subprocess = mocker.patch('tethysapp.app_store.begin_install.subprocess') + mock_subprocess.Popen().stdout.readline.side_effect = ["still_running", "PIP Install Complete"] + mock_tethysapp = mocker.patch('tethysapp.app_store.begin_install.tethysapp') + mock_app = MagicMock() + mock_app.custom_settings.return_value = [] + mocker.patch('tethysapp.app_store.begin_install.get_app_instance_from_path', return_value=mock_app) + + mock_tethysapp.__path__ = [str(tethysapp_base_with_application_files / "tethysapp")] + + detect_app_dependencies(app_name, channel_layer, mock_ws) + + expected_data_json = { + "data": [], + "returnMethod": "set_custom_settings", + "jsHelperFunction": "processCustomSettings", + "app_py_path": str(tethysapp_base_with_application_files / "tethysapp") + } + mock_ws.assert_has_calls([ + call("Running PIP install....", channel_layer), + call("PIP install completed", channel_layer), + call(expected_data_json, channel_layer) + ]) + assert mock_subprocess.Popen().stdout.readline.call_count == 2 + + +def test_detect_app_dependencies_pip_settings(mocker, tethysapp_base_with_application_files): + app_name = "test_app" + channel_layer = MagicMock() + mock_ws = MagicMock() + mocker.patch('tethysapp.app_store.begin_install.call') + mocker.patch('tethysapp.app_store.begin_install.cache') + mocker.patch('tethysapp.app_store.begin_install.importlib') + mock_subprocess = mocker.patch('tethysapp.app_store.begin_install.subprocess') + mock_subprocess.Popen().stdout.readline.side_effect = [""] + mock_tethysapp = mocker.patch('tethysapp.app_store.begin_install.tethysapp') + mock_app = MagicMock() + mock_setting = MagicMock(default=True, description="description") + mock_setting.name = "name" + mock_app.custom_settings.return_value = [mock_setting] + mocker.patch('tethysapp.app_store.begin_install.get_app_instance_from_path', return_value=mock_app) + + mock_tethysapp.__path__ = [str(tethysapp_base_with_application_files / "tethysapp")] + + detect_app_dependencies(app_name, channel_layer, mock_ws) + + expected_data_json = { + "data": [{"name": "name", "description": "description", "default": "True"}], + "returnMethod": "set_custom_settings", + "jsHelperFunction": "processCustomSettings", + "app_py_path": str(tethysapp_base_with_application_files / "tethysapp") + } + mock_ws.assert_has_calls([ + call("Running 
PIP install....", channel_layer), + call("PIP install completed", channel_layer), + call("Processing App's Custom Settings....", channel_layer), + call(expected_data_json, channel_layer) + ]) + assert mock_subprocess.Popen().stdout.readline.call_count == 1 + + +def test_detect_app_dependencies_no_pip_no_settings(mocker, tethysapp_base_with_application_files): + test_install_pip = tethysapp_base_with_application_files / "tethysapp" / "test_app" / "scripts" / "install_pip.sh" + test_install_pip.unlink() + app_name = "test_app" + channel_layer = MagicMock() + mock_ws = MagicMock() + mocker.patch('tethysapp.app_store.begin_install.call') + mocker.patch('tethysapp.app_store.begin_install.cache') + mocker.patch('tethysapp.app_store.begin_install.importlib') + mock_subprocess = mocker.patch('tethysapp.app_store.begin_install.subprocess') + mock_tethysapp = mocker.patch('tethysapp.app_store.begin_install.tethysapp') + mock_app = MagicMock() + mock_app.custom_settings.return_value = [] + mocker.patch('tethysapp.app_store.begin_install.get_app_instance_from_path', return_value=mock_app) + + mock_tethysapp.__path__ = [str(tethysapp_base_with_application_files / "tethysapp")] + + detect_app_dependencies(app_name, channel_layer, mock_ws) + + expected_data_json = { + "data": [], + "returnMethod": "set_custom_settings", + "jsHelperFunction": "processCustomSettings", + "app_py_path": str(tethysapp_base_with_application_files / "tethysapp") + } + mock_ws.assert_called_once_with(expected_data_json, channel_layer) + assert mock_subprocess.Popen().stdout.readline.call_count == 0 + + +def test_detect_app_dependencies_no_app_path(mocker, caplog): + app_name = "test_app" + channel_layer = MagicMock() + mock_ws = MagicMock() + mocker.patch('tethysapp.app_store.begin_install.call') + mocker.patch('tethysapp.app_store.begin_install.cache') + mocker.patch('tethysapp.app_store.begin_install.importlib') + mock_tethysapp = mocker.patch('tethysapp.app_store.begin_install.tethysapp') + + mock_tethysapp.__path__ = [] + + detect_app_dependencies(app_name, channel_layer, mock_ws) + + mock_ws.assert_not_called() + assert "Can't find the installed app location." in caplog.messages + + +def test_mamba_install_success(resource, mocker): + app_channel = "test_channel" + app_label = "dev" + app_version = "" + app_resource = resource("test_app", app_channel, app_label) + mock_channel = MagicMock() + mock_ws = mocker.patch('tethysapp.app_store.begin_install.send_notification') + mock_sp = mocker.patch('tethysapp.app_store.begin_install.subprocess') + mock_time = mocker.patch('tethysapp.app_store.begin_install.time') + mock_time.time.side_effect = [10, 20] + mock_sp.Popen().stdout.readline.side_effect = [ + "Running Mamba Install", "Collecting package metadata: done", "Solving environment: done", + "Verifying transaction: done", "All requested packages already installed.", "Mamba Install Complete"] + + successful_install = mamba_install(app_resource, app_channel, app_label, app_version, mock_channel) + + mock_ws.assert_has_calls([ + call("Mamba install may take a couple minutes to complete depending on how complicated the " + "environment is. 
Please wait....", mock_channel), + call("Package Metadata Collection: Done", mock_channel), + call("Solving Environment: Done", mock_channel), + call("Verifying Transaction: Done", mock_channel), + call("Application package is already installed in this conda environment.", mock_channel), + call("Mamba install completed in 10.00 seconds.", mock_channel) + ]) + assert successful_install + + +def test_mamba_install_output_failure(resource, mocker): + app_channel = "test_channel" + app_label = "dev" + app_version = "" + app_resource = resource("test_app", app_channel, app_label) + mock_channel = MagicMock() + mock_ws = mocker.patch('tethysapp.app_store.begin_install.send_notification') + mock_sp = mocker.patch('tethysapp.app_store.begin_install.subprocess') + mock_time = mocker.patch('tethysapp.app_store.begin_install.time') + mock_time.time.side_effect = [10, 20] + mock_sp.Popen().stdout.readline.side_effect = [ + "Running Mamba Install", "critical libmamba Could not solve for environment specs", "Mamba Install Complete"] + + successful_install = mamba_install(app_resource, app_channel, app_label, app_version, mock_channel) + + mock_ws.assert_has_calls([ + call("Mamba install may take a couple minutes to complete depending on how complicated the " + "environment is. Please wait....", mock_channel), + call("Failed to resolve environment specs when installing.", mock_channel), + call("Mamba install completed in 10.00 seconds.", mock_channel) + ]) + assert not successful_install + + +def test_mamba_install_output_failure2(resource, mocker): + app_name = "test_app" + app_channel = "test_channel" + app_label = "dev" + app_resource = resource(app_name, app_channel, app_label) + app_version = app_resource['latestVersion'][app_channel][app_label] + mock_channel = MagicMock() + mock_ws = mocker.patch('tethysapp.app_store.begin_install.send_notification') + mock_sp = mocker.patch('tethysapp.app_store.begin_install.subprocess') + mock_time = mocker.patch('tethysapp.app_store.begin_install.time') + mock_time.time.side_effect = [10, 20] + mock_sp.Popen().stdout.readline.side_effect = [ + "Running Mamba Install", "Found conflicts!", "Mamba Install Complete"] + + successful_install = mamba_install(app_resource, app_channel, app_label, app_version, mock_channel) + + mock_ws.assert_has_calls([ + call("Mamba install may take a couple minutes to complete depending on how complicated the " + "environment is. Please wait....", mock_channel), + call("Mamba install found conflicts. 
Please try running the following command in your terminal's " + f"conda environment to attempt a manual installation : mamba install -c {app_channel}/label/{app_label} " + f"{app_name}={app_version}", + mock_channel), + call("Mamba install completed in 10.00 seconds.", mock_channel) + ]) + assert not successful_install + + +def test_mamba_install_output_failure3(resource, mocker): + app_name = "test_app" + app_channel = "test_channel" + app_label = "main" + app_resource = resource(app_name, app_channel, app_label) + app_version = app_resource['latestVersion'][app_channel][app_label] + mock_channel = MagicMock() + mock_ws = mocker.patch('tethysapp.app_store.begin_install.send_notification') + mock_sp = mocker.patch('tethysapp.app_store.begin_install.subprocess') + mock_time = mocker.patch('tethysapp.app_store.begin_install.time') + mock_time.time.side_effect = [10, 20] + mock_sp.Popen().stdout.readline.side_effect = [ + "Running Mamba Install", "Found conflicts!", ""] + + successful_install = mamba_install(app_resource, app_channel, app_label, app_version, mock_channel) + + mock_ws.assert_has_calls([ + call("Mamba install may take a couple minutes to complete depending on how complicated the " + "environment is. Please wait....", mock_channel), + call("Mamba install found conflicts. Please try running the following command in your terminal's " + f"conda environment to attempt a manual installation : mamba install -c {app_channel} " + f"{app_name}={app_version}", + mock_channel), + call("Mamba install completed in 10.00 seconds.", mock_channel) + ]) + assert not successful_install + + +def test_begin_install(resource, mocker): + mock_channel = MagicMock() + mock_workspace = MagicMock() + app_name = "test_app" + app_channel = "test_channel" + app_label = "main" + app_resource = resource(app_name, app_channel, app_label) + app_version = app_resource['latestVersion'][app_channel][app_label] + install_data = { + "name": app_name, + "label": app_label, + "channel": app_channel, + "version": app_version + } + + mock_ws = mocker.patch('tethysapp.app_store.begin_install.send_notification') + mocker.patch('tethysapp.app_store.begin_install.get_resource', return_value=app_resource) + mocker.patch('tethysapp.app_store.begin_install.mamba_install', return_value=True) + mocker.patch('tethysapp.app_store.begin_install.detect_app_dependencies') + + begin_install(install_data, mock_channel, mock_workspace) + + mock_ws.assert_has_calls([ + call(f"Starting installation of app: {app_name} from store {app_channel} with label {app_label}", mock_channel), + call(f"Installing Version: {app_version}", mock_channel), + ]) + + +def test_begin_install_no_resource(mocker): + mock_channel = MagicMock() + mock_workspace = MagicMock() + app_name = "test_app" + app_channel = "test_channel" + app_label = "main" + install_data = { + "name": app_name, + "label": app_label, + "channel": app_channel, + "version": "1.0" + } + + mock_ws = mocker.patch('tethysapp.app_store.begin_install.send_notification') + mocker.patch('tethysapp.app_store.begin_install.get_resource', return_value=None) + + begin_install(install_data, mock_channel, mock_workspace) + + mock_ws.assert_has_calls([ + call(f"Failed to get the {install_data['name']} resource", mock_channel) + ]) + + +def test_begin_install_failed_install(resource, mocker): + mock_channel = MagicMock() + mock_workspace = MagicMock() + app_name = "test_app" + app_channel = "test_channel" + app_label = "main" + app_resource = resource(app_name, app_channel, app_label) + app_version = 
app_resource['latestVersion'][app_channel][app_label] + install_data = { + "name": app_name, + "label": app_label, + "channel": app_channel, + "version": app_version + } + + mock_ws = mocker.patch('tethysapp.app_store.begin_install.send_notification') + mocker.patch('tethysapp.app_store.begin_install.get_resource', return_value=app_resource) + mocker.patch('tethysapp.app_store.begin_install.mamba_install', return_value=False) + + begin_install(install_data, mock_channel, mock_workspace) + + mock_ws.assert_has_calls([ + call(f"Starting installation of app: {app_name} from store {app_channel} with label {app_label}", mock_channel), + call(f"Installing Version: {app_version}", mock_channel), + call("Error while Installing Conda package. Please check logs for details", mock_channel) + ]) + + +def test_begin_install_failed_dependencies(resource, mocker, caplog): + mock_channel = MagicMock() + mock_workspace = MagicMock() + app_name = "test_app" + app_channel = "test_channel" + app_label = "main" + app_resource = resource(app_name, app_channel, app_label) + app_version = app_resource['latestVersion'][app_channel][app_label] + install_data = { + "name": app_name, + "label": app_label, + "channel": app_channel, + "version": app_version + } + + mock_ws = mocker.patch('tethysapp.app_store.begin_install.send_notification') + mocker.patch('tethysapp.app_store.begin_install.get_resource', return_value=app_resource) + mocker.patch('tethysapp.app_store.begin_install.mamba_install', return_value=True) + + with mocker.patch('tethysapp.app_store.begin_install.detect_app_dependencies', + side_effect=Exception('mocked error')): + begin_install(install_data, mock_channel, mock_workspace) + + mock_ws.assert_has_calls([ + call(f"Starting installation of app: {app_name} from store {app_channel} with label {app_label}", + mock_channel), + call(f"Installing Version: {app_version}", mock_channel), + call("Error while checking package for services", mock_channel) + ]) + assert 'mocked error' in caplog.messages diff --git a/tethysapp/app_store/tests/unit_tests/test_helpers.py b/tethysapp/app_store/tests/unit_tests/test_helpers.py new file mode 100644 index 0000000..8dce5c9 --- /dev/null +++ b/tethysapp/app_store/tests/unit_tests/test_helpers.py @@ -0,0 +1,171 @@ +import pytest +import shutil +from unittest.mock import MagicMock +from tethysapp.app_store.helpers import (parse_setup_py, get_conda_stores, check_all_present, run_process, + send_notification, apply_template, get_github_install_metadata) + + +@pytest.mark.parametrize( + "substrings, expected_outcome", [ + (["This", "testing"], True), + (["This", "not present"], False)]) +def test_check_all_present(substrings, expected_outcome): + string = "This is a testing string" + present = check_all_present(string, substrings) + + assert present is expected_outcome + + +def test_run_process(mocker, caplog): + + mock_run_results = MagicMock(stdout="standard output", returncode=10, stderr="standard error") + mock_run = mocker.patch('tethysapp.app_store.helpers.run', return_value=mock_run_results) + + args = ["executable", "arg1", "arg2"] + run_process(args) + + mock_run.assert_called_with(args, capture_output=True) + assert "standard output" in caplog.messages + assert "standard error" in caplog.messages + + +def test_send_notification(mocker): + channel_layer = MagicMock(group_send="some_function") + mock_async_to_sync = mocker.patch('tethysapp.app_store.helpers.async_to_sync') + msg = "testing functionality" + + send_notification(msg, channel_layer) + + expected_args = 
["notifications", {"type": "install_notifications", "message": msg}] + assert mock_async_to_sync.some_function.called_once_with(expected_args) + + +def test_apply_template(app_files_dir, tmp_path): + upload_template = app_files_dir / "upload_command.txt" + data = {"label_string": "main"} + output_location = tmp_path / "upload_command.txt" + + apply_template(upload_template, data, output_location) + + assert output_location.read_text() == "anaconda upload --force --label main noarch/*.tar.bz2" + + +def test_parse_setup_py(test_files_dir): + setup_py = test_files_dir / "setup.py" + + parsed_data = parse_setup_py(setup_py) + + expected_data = { + 'name': 'release_package', 'version': '0.0.1', 'description': 'example', + 'long_description': 'This is just an example for testing', 'keywords': 'example,test', + 'author': 'Tester', 'author_email': 'tester@email.com', 'url': '', 'license': 'BSD-3' + } + assert parsed_data == expected_data + + +def test_get_github_install_metadata(tmp_path, test_files_dir, mocker): + mock_cache = mocker.patch('tethysapp.app_store.helpers.cache') + mock_cache.get.return_value = None + mock_installed_app = tmp_path / "apps" / "installed" / "test_app" + mock_installed_app.mkdir(parents=True) + shutil.copyfile(test_files_dir / "setup.py", mock_installed_app / "setup.py") + mock_workspace = MagicMock(path=tmp_path) + + installed_apps = get_github_install_metadata(mock_workspace) + + expected_apps = { + 'name': 'release_package', 'installed': True, 'installedVersion': '0.0.1', + 'metadata': {'channel': 'tethysapp', 'license': 'BSD 3-Clause License', 'description': 'example'}, + 'path': str(mock_installed_app), 'author': 'Tester', 'dev_url': '' + } + assert installed_apps[0] == expected_apps + mock_cache.set.assert_called_with("warehouse_github_app_resources", installed_apps) + + +def test_get_github_install_metadata_cached(mocker): + mock_cache = mocker.patch('tethysapp.app_store.helpers.cache') + apps = [{ + 'name': 'release_package', 'installed': True, 'installedVersion': '0.0.1', + 'metadata': {'channel': 'tethysapp', 'license': 'BSD 3-Clause License', 'description': 'example'}, + 'path': 'app_path', 'author': 'Tester', 'dev_url': '' + }] + mock_cache.get.return_value = apps + + installed_apps = get_github_install_metadata("workspace_path") + + assert installed_apps == apps + + +def test_get_github_install_metadata_no_apps(tmp_path, mocker): + mock_cache = mocker.patch('tethysapp.app_store.helpers.cache') + mock_cache.get.return_value = None + mock_installed_app = tmp_path / "apps" + mock_installed_app.mkdir(parents=True) + mock_workspace = MagicMock(path=tmp_path) + + installed_apps = get_github_install_metadata(mock_workspace) + + assert installed_apps == [] + mock_cache.set.assert_called_with("warehouse_github_app_resources", []) + + +def test_get_conda_stores(mocker, store): + mock_app = mocker.patch('tethysapp.app_store.helpers.app') + encryption_key = 'fake_encryption_key' + active_store = store('active_default') + inactive_store = store("inactive_not_default", default=False, active=False) + mock_app.get_custom_setting.side_effect = [{'stores': [active_store, inactive_store]}, encryption_key] + mocker.patch('tethysapp.app_store.helpers.decrypt', return_value='decrypted_token') + + stores = get_conda_stores() + + active_store['github_token'] = 'decrypted_token' + inactive_store['github_token'] = 'decrypted_token' + expected_stores = [active_store, inactive_store] + assert stores == expected_stores + + +def test_get_conda_stores_active(mocker, store): + mock_app = 
mocker.patch('tethysapp.app_store.helpers.app') + encryption_key = 'fake_encryption_key' + active_store = store('active_default') + inactive_store = store("inactive_not_default", default=False, active=False) + mock_app.get_custom_setting.side_effect = [{'stores': [active_store, inactive_store]}, encryption_key] + mocker.patch('tethysapp.app_store.helpers.decrypt', return_value='decrypted_token') + + stores = get_conda_stores(active_only=True) + + active_store['github_token'] = 'decrypted_token' + expected_stores = [active_store] + assert stores == expected_stores + + +def test_get_conda_stores_specific_str(mocker, store): + mock_app = mocker.patch('tethysapp.app_store.helpers.app') + encryption_key = 'fake_encryption_key' + active_store = store('active_default') + inactive_store = store("inactive_not_default", default=False, active=False) + mock_app.get_custom_setting.side_effect = [{'stores': [active_store, inactive_store]}, encryption_key] + mocker.patch('tethysapp.app_store.helpers.decrypt', return_value='decrypted_token') + + stores = get_conda_stores(conda_channels="conda_channel_inactive_not_default") + + inactive_store['github_token'] = 'decrypted_token' + expected_stores = [inactive_store] + assert stores == expected_stores + + +def test_get_conda_stores_specific_list(mocker, store): + mock_app = mocker.patch('tethysapp.app_store.helpers.app') + encryption_key = 'fake_encryption_key' + active_store = store('active_default') + inactive_store = store("inactive_not_default", default=False, active=False) + mock_app.get_custom_setting.side_effect = [{'stores': [active_store, inactive_store]}, encryption_key] + mocker.patch('tethysapp.app_store.helpers.decrypt', return_value='decrypted_token') + + stores = get_conda_stores(conda_channels=["conda_channel_inactive_not_default", "conda_channel_active_default"]) + + active_store['github_token'] = 'decrypted_token' + inactive_store['github_token'] = 'decrypted_token' + expected_stores = [active_store, inactive_store] + assert stores == expected_stores diff --git a/tethysapp/app_store/tests/unit_tests/test_installation_handlers.py b/tethysapp/app_store/tests/unit_tests/test_installation_handlers.py new file mode 100644 index 0000000..f736b29 --- /dev/null +++ b/tethysapp/app_store/tests/unit_tests/test_installation_handlers.py @@ -0,0 +1,357 @@ +from unittest.mock import MagicMock, call +from argparse import Namespace +from django.core.exceptions import ObjectDoesNotExist +from tethys_sdk.app_settings import CustomSetting +from tethysapp.app_store.installation_handlers import (get_service_options, restart_server, continueAfterInstall, + set_custom_settings, process_settings, configure_services, + getServiceList) + + +def test_get_service_options(mocker): + mock_query_set = MagicMock(id=1) + mock_query_set.name = "service_setting" + mock_services_list_command = mocker.patch('tethysapp.app_store.installation_handlers.services_list_command', + return_value=[[mock_query_set]]) + service_type = "spatial" + + services = get_service_options(service_type) + + expected_services = [{"name": "service_setting", "id": 1}] + assert services == expected_services + expected_args = Namespace(spatial=True) + assert mock_services_list_command.called_with(expected_args) + + +def test_restart_server_dev_server(mocker, caplog, tmp_path): + app_files = tmp_path / "tethysapp" / "app_store" + app_files.mkdir(parents=True) + function_file = app_files / "fake_file.py" + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + 
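# stub out process execution and path lookups below so the dev-mode restart branch only touches files under tmp_path +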
mock_run_process = mocker.patch('tethysapp.app_store.installation_handlers.run_process') + mocker.patch('tethysapp.app_store.installation_handlers.get_manage_path', return_value="manage_path") + mocker.patch('tethysapp.app_store.installation_handlers.sys.argv', ["manage_path", "runserver"]) + mocker.patch('tethysapp.app_store.installation_handlers.os.path.realpath', return_value=function_file) + data = {"name": "test_app", "restart_type": "install"} + mock_channel = MagicMock() + mock_workspace = MagicMock(path=str(tmp_path)) + + restart_server(data, mock_channel, mock_workspace) + + assert f"Running Syncstores for app: {data['name']}" in caplog.messages + mock_ws.assert_called_with(f"Running Syncstores for app: {data['name']}", mock_channel) + mock_run_process.assert_called_with(['python', "manage_path", 'syncstores', data["name"], '-f']) + assert "Dev Mode. Attempting to restart by changing file" in caplog.messages + model_py = app_files / "model.py" + assert model_py.read_text() == f'print("{data["name"]} installed in dev mode")\n' + + +def test_restart_server_dev_server_install_scaffold_running(mocker, caplog, tmp_path): + app_files = tmp_path / "tethysapp" / "app_store" + app_files.mkdir(parents=True) + function_file = app_files / "fake_file.py" + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + mock_run_process = mocker.patch('tethysapp.app_store.installation_handlers.run_process') + mocker.patch('tethysapp.app_store.installation_handlers.get_manage_path', return_value="manage_path") + mocker.patch('tethysapp.app_store.installation_handlers.sys.argv', ["manage_path", "runserver"]) + mocker.patch('tethysapp.app_store.installation_handlers.os.path.realpath', return_value=function_file) + data = {"name": "test_app", "restart_type": "install"} + mock_channel = MagicMock() + mock_workspace = MagicMock(path=str(tmp_path)) + install_status_dir = tmp_path / "install_status" + install_status_dir.mkdir(parents=True) + installRunning = install_status_dir / "installRunning" + installRunning.touch() + assert installRunning.is_file() + + scaffoldRunning = install_status_dir / "scaffoldRunning" + scaffoldRunning.touch() + assert scaffoldRunning.is_file() + + restart_server(data, mock_channel, mock_workspace) + + assert not installRunning.is_file() + assert not scaffoldRunning.is_file() + assert f"Running Syncstores for app: {data['name']}" in caplog.messages + mock_ws.assert_called_with(f"Running Syncstores for app: {data['name']}", mock_channel) + mock_run_process.assert_called_with(['python', "manage_path", 'syncstores', data["name"], '-f']) + assert "Dev Mode. 
Attempting to restart by changing file" in caplog.messages + model_py = app_files / "model.py" + assert model_py.read_text() == f'print("{data["name"]} installed in dev mode")\n' + + +def test_restart_server_prod_server_run_collect_all(mocker, caplog, tmp_path): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + mock_run_process = mocker.patch('tethysapp.app_store.installation_handlers.run_process') + mock_subprocess = mocker.patch('tethysapp.app_store.installation_handlers.subprocess') + mocker.patch('tethysapp.app_store.installation_handlers.app.get_custom_setting', return_value="custom_setting") + mocker.patch('tethysapp.app_store.installation_handlers.get_manage_path', return_value="manage_path") + mock_os_system = mocker.patch('tethysapp.app_store.installation_handlers.os.system') + data = {"name": "test_app", "restart_type": "install"} + mock_channel = MagicMock() + mock_workspace = MagicMock(path=str(tmp_path)) + + restart_server(data, mock_channel, mock_workspace) + + assert f"Running Syncstores for app: {data['name']}" in caplog.messages + mock_ws.assert_has_calls([ + call(f"Running Syncstores for app: {data['name']}", mock_channel), + call(f"Running Tethys Collectall for app: {data['name']}", mock_channel), + call("Server Restarting . . .", mock_channel) + ]) + mock_run_process.assert_has_calls([ + call(['python', "manage_path", 'syncstores', data["name"], '-f']), + call(['python', "manage_path", 'pre_collectstatic']), + call(['python', "manage_path", 'collectstatic', '--noinput']), + call(['python', "manage_path", 'collectworkspaces', '--force']) + ]) + mock_subprocess.run.assert_called_with(['sudo', '-h'], check=True) + mock_os_system.assert_called_with('echo custom_setting|sudo -S supervisorctl restart all') + assert "Running Tethys Collectall" in caplog.messages + + +def test_restart_server_prod_server_docker(mocker, caplog, tmp_path): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + mock_run_process = mocker.patch('tethysapp.app_store.installation_handlers.run_process') + mocker.patch('tethysapp.app_store.installation_handlers.subprocess.run', side_effect=[Exception("No sudo")]) + mocker.patch('tethysapp.app_store.installation_handlers.get_manage_path', return_value="manage_path") + mocker.patch('tethysapp.app_store.installation_handlers.os.path.isdir', return_value=True) + restart = tmp_path / "restart" + mocker.patch('tethysapp.app_store.installation_handlers.Path', return_value=restart) + data = {"name": "test_app", "restart_type": "install"} + mock_channel = MagicMock() + mock_workspace = MagicMock(path=str(tmp_path)) + + restart_server(data, mock_channel, mock_workspace, run_collect_all=False) + + assert f"Running Syncstores for app: {data['name']}" in caplog.messages + mock_ws.assert_has_calls([ + call(f"Running Syncstores for app: {data['name']}", mock_channel), + call("Server Restarting . . .", mock_channel) + ]) + mock_run_process.assert_has_calls([ + call(['python', "manage_path", 'syncstores', data["name"], '-f']) + ]) + assert "No sudo" in caplog.messages + assert "No SUDO. Docker container implied. Restarting without SUDO" in caplog.messages + assert "Restart Directory found. Creating restart file." 
in caplog.messages + assert restart.is_file() + + +def test_restart_server_prod_server_docker_retry_no_sudo(mocker, caplog, tmp_path): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + mock_run_process = mocker.patch('tethysapp.app_store.installation_handlers.run_process') + mocker.patch('tethysapp.app_store.installation_handlers.subprocess.run', side_effect=[Exception("No sudo")]) + mocker.patch('tethysapp.app_store.installation_handlers.get_manage_path', return_value="manage_path") + mocker.patch('tethysapp.app_store.installation_handlers.os.path.isdir', return_value=False) + mock_os_system = mocker.patch('tethysapp.app_store.installation_handlers.os.system') + data = {"name": "test_app", "restart_type": "install"} + mock_channel = MagicMock() + mock_workspace = MagicMock(path=str(tmp_path)) + + restart_server(data, mock_channel, mock_workspace, run_collect_all=False) + + assert f"Running Syncstores for app: {data['name']}" in caplog.messages + mock_ws.assert_has_calls([ + call(f"Running Syncstores for app: {data['name']}", mock_channel), + call("Server Restarting . . .", mock_channel) + ]) + mock_run_process.assert_has_calls([ + call(['python', "manage_path", 'syncstores', data["name"], '-f']) + ]) + assert "No sudo" in caplog.messages + assert "No SUDO. Docker container implied. Restarting without SUDO" in caplog.messages + mock_os_system.assert_called_with('supervisorctl restart all') + + +def test_continueAfterInstall(mocker): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + mock_dad = mocker.patch('tethysapp.app_store.installation_handlers.detect_app_dependencies') + app_install_data = {'isInstalled': True, 'channel': 'channel', 'version': '1.0'} + mocker.patch('tethysapp.app_store.installation_handlers.check_if_app_installed', return_value=app_install_data) + install_data = {"name": "test_app", "version": "1.0"} + mock_channel = MagicMock() + + continueAfterInstall(install_data, mock_channel) + + mock_ws.assert_called_with("Resuming processing...", mock_channel) + mock_dad.assert_called_with(install_data['name'], mock_channel) + + +def test_continueAfterInstall_incorrect_version(mocker, caplog): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + mocker.patch('tethysapp.app_store.installation_handlers.detect_app_dependencies') + app_install_data = {'isInstalled': True, 'channel': 'channel', 'version': '1.0'} + mocker.patch('tethysapp.app_store.installation_handlers.check_if_app_installed', return_value=app_install_data) + install_data = {"name": "test_app", "version": "1.5"} + mock_channel = MagicMock() + + continueAfterInstall(install_data, mock_channel) + + mock_ws.assert_called_with("Server error while processing this installation. Please check your logs", mock_channel) + assert "ERROR: ContinueAfterInstall: Correct version is not installed of this package." 
in caplog.messages + + +def test_set_custom_settings(tethysapp, tmp_path, mocker): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + app = tethysapp() + tethysapp_object = MagicMock(id=1) + mocker.patch('tethysapp.app_store.installation_handlers.get_app_instance_from_path', return_value=app) + mock_process_settings = mocker.patch('tethysapp.app_store.installation_handlers.process_settings') + mocker.patch('tethysapp.app_store.installation_handlers.TethysApp.objects.get', return_value=tethysapp_object) + mock_actual_setting = MagicMock(value="test") + mocker.patch('tethysapp.app_store.installation_handlers.CustomSetting.objects.get', + return_value=mock_actual_setting) + custom_settings_data = {"app_py_path": tmp_path, "settings": {"mock_setting": "setting_value"}} + mock_channel = MagicMock() + + set_custom_settings(custom_settings_data, mock_channel) + + mock_ws.assert_has_calls([ + call("Custom Settings configured.", mock_channel), + call({"data": {}, "jsHelperFunction": "customSettingConfigComplete"}, mock_channel) + ]) + mock_process_settings.assert_called_once() + assert mock_actual_setting.value == custom_settings_data['settings']['mock_setting'] + assert mock_actual_setting.clean.call_count == 1 + assert mock_actual_setting.save.call_count == 1 + + +def test_set_custom_settings_skip(tethysapp, tmp_path, mocker, caplog): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + app = tethysapp() + mocker.patch('tethysapp.app_store.installation_handlers.get_app_instance_from_path', return_value=app) + mock_process_settings = mocker.patch('tethysapp.app_store.installation_handlers.process_settings') + custom_settings_data = {"app_py_path": tmp_path, "skip": True, "noneFound": True} + mock_channel = MagicMock() + + set_custom_settings(custom_settings_data, mock_channel) + + mock_ws.assert_has_calls([ + call("No Custom Settings Found to process.", mock_channel), + ]) + mock_process_settings.assert_called_once() + assert "Skip/NoneFound option called." in caplog.messages + + +def test_set_custom_settings_object_dne(tethysapp, tmp_path, mocker, caplog): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + app = tethysapp() + mocker.patch('tethysapp.app_store.installation_handlers.get_app_instance_from_path', return_value=app) + mocker.patch('tethysapp.app_store.installation_handlers.TethysApp.objects.get', side_effect=[ObjectDoesNotExist]) + custom_settings_data = {"app_py_path": tmp_path, "settings": {"mock_setting": "setting_value"}} + mock_channel = MagicMock() + + set_custom_settings(custom_settings_data, mock_channel) + + mock_ws.assert_has_calls([ + call("Error Setting up custom settings. 
Check logs for more details", mock_channel) + ]) + assert "Couldn't find app instance to get the ID to connect the settings to" in caplog.messages + + +def test_process_settings(tmp_path, tethysapp, mocker): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + custom_setting = CustomSetting( + name='default_name', + type=CustomSetting.TYPE_STRING, + description='Default model name.', + required=True, + default="Name_123" + ) + mock_setting = MagicMock(required=True, description="description") + mock_setting.name = "test_setting" + mock_settings = {"unlinked_settings": [mock_setting, custom_setting]} + mocker.patch('tethysapp.app_store.installation_handlers.get_app_settings', return_value=mock_settings) + mocker.patch('tethysapp.app_store.installation_handlers.get_service_type_from_setting', return_value="spatial") + mocker.patch('tethysapp.app_store.installation_handlers.get_setting_type_from_setting', return_value="ds_spatial") + service_options = {"name": "spatial_service", "id": 1} + mocker.patch('tethysapp.app_store.installation_handlers.get_service_options', return_value=service_options) + app = tethysapp() + mock_channel = MagicMock() + + process_settings(app, tmp_path, mock_channel) + + expected_data_json = { + "data": [{ + "name": "test_setting", + "required": True, + "description": "description", + "service_type": "spatial", + "setting_type": "ds_spatial", + "options": service_options + }], + "returnMethod": "configure_services", + "jsHelperFunction": "processServices", + "app_py_path": tmp_path, + "current_app_name": "test_app" + } + mock_ws.assert_called_with(expected_data_json, mock_channel) + + +def test_process_settings_no_settings(tmp_path, tethysapp, mocker): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + mocker.patch('tethysapp.app_store.installation_handlers.get_app_settings', return_value=[]) + app = tethysapp() + mock_channel = MagicMock() + + process_settings(app, tmp_path, mock_channel) + + mock_ws.assert_called_with("No Services found to configure.", mock_channel) + + +def test_configure_services(mocker): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + mock_link = mocker.patch('tethysapp.app_store.installation_handlers.link_service_to_app_setting') + mock_channel = MagicMock() + services_data = { + "app_name": "test_setting", + "service_name": "spatial_service", + "service_type": "spatial", + "setting_type": "ds_spatial", + "service_id": 1 + } + + configure_services(services_data, mock_channel) + + mock_link.assert_called_with(services_data['service_type'], services_data['service_id'], services_data['app_name'], + services_data['setting_type'], services_data['service_name']) + get_data_json = { + "data": {"serviceName": services_data['service_name']}, + "jsHelperFunction": "serviceConfigComplete" + } + mock_ws.assert_called_with(get_data_json, mock_channel) + + +def test_configure_services_error(mocker, caplog): + mocker.patch('tethysapp.app_store.installation_handlers.link_service_to_app_setting', + side_effect=[Exception("failed to link")]) + mock_channel = MagicMock() + services_data = { + "app_name": "test_setting", + "service_name": "spatial_service", + "service_type": "spatial", + "setting_type": "ds_spatial", + "service_id": 1 + } + + config = configure_services(services_data, mock_channel) + + assert config is None + assert "failed to link" in caplog.messages + assert "Error while linking service" in caplog.messages + + +def 
test_getServiceList(mocker): + mock_ws = mocker.patch('tethysapp.app_store.installation_handlers.send_notification') + service_options = {"name": "spatial_service", "id": 1} + mocker.patch('tethysapp.app_store.installation_handlers.get_service_options', return_value=service_options) + mock_channel = MagicMock() + data = {"settingType": "spatial"} + + getServiceList(data, mock_channel) + + get_data_json = { + "data": {"settingType": data['settingType'], "newOptions": service_options}, + "jsHelperFunction": "updateServiceListing" + } + mock_ws.assert_called_with(get_data_json, mock_channel) diff --git a/tethysapp/app_store/tests/unit_tests/test_notifications.py b/tethysapp/app_store/tests/unit_tests/test_notifications.py new file mode 100644 index 0000000..80b168f --- /dev/null +++ b/tethysapp/app_store/tests/unit_tests/test_notifications.py @@ -0,0 +1,116 @@ +import pytest +import json +from unittest.mock import AsyncMock, MagicMock +from channels.testing import WebsocketCommunicator +from channels.layers import get_channel_layer +from django.test import override_settings +from tethysapp.app_store.notifications import notificationsConsumer + + +@pytest.mark.asyncio +async def test_notificationsConsumer_connect_disconnect(caplog): + consumer = notificationsConsumer + consumer.channel_layer_alias = "testlayer" + channel_layers_setting = { + "testlayer": {"BACKEND": "channels.layers.InMemoryChannelLayer"} + } + with override_settings(CHANNEL_LAYERS=channel_layers_setting): + communicator = WebsocketCommunicator(notificationsConsumer.as_asgi(), "GET", "install/notifications") + connected, _ = await communicator.connect() + assert connected + channel_layer = get_channel_layer("testlayer") + channel_name = list(channel_layer.channels.keys())[0] + + await communicator.disconnect() + assert f"Added {channel_name} channel to notifications" in caplog.messages + assert f"Removed {channel_name} channel from notifications" in caplog.messages + + +@pytest.mark.asyncio +async def test_notificationsConsumer_install_notifications(caplog): + consumer = notificationsConsumer + consumer.channel_layer_alias = "testlayer" + channel_layers_setting = { + "testlayer": {"BACKEND": "channels.layers.InMemoryChannelLayer"} + } + with override_settings(CHANNEL_LAYERS=channel_layers_setting): + communicator = WebsocketCommunicator(notificationsConsumer.as_asgi(), "GET", "install/notifications") + connected, _ = await communicator.connect() + assert connected + + channel_layer = get_channel_layer("testlayer") + channel_name = list(channel_layer.channels.keys())[0] + + message = "Sending a message" + await channel_layer.group_send("notifications", {"type": "install_notifications", "message": message}) + await communicator.receive_from() + + await communicator.disconnect() + assert f"Added {channel_name} channel to notifications" in caplog.messages + assert f"Sent message {message} at {channel_name}" in caplog.messages + assert f"Removed {channel_name} channel from notifications" in caplog.messages + + +@pytest.mark.asyncio +async def test_notificationsConsumer_receive_begin_install(mocker, caplog): + mock_begin_install = mocker.patch('tethysapp.app_store.notifications.begin_install') + mock_workspace = MagicMock() + mock_get_workspace = AsyncMock(return_value=mock_workspace) + mocker.patch('tethysapp.app_store.notifications.sync_to_async', side_effect=[mock_get_workspace]) + mock_threading = mocker.patch('tethysapp.app_store.notifications.threading') + consumer = notificationsConsumer + consumer.channel_layer_alias = 
"testlayer" + channel_layers_setting = { + "testlayer": {"BACKEND": "channels.layers.InMemoryChannelLayer"} + } + with override_settings(CHANNEL_LAYERS=channel_layers_setting): + communicator = WebsocketCommunicator(notificationsConsumer.as_asgi(), "GET", "install/notifications") + connected, _ = await communicator.connect() + assert connected + + channel_layer = get_channel_layer("testlayer") + channel_name = list(channel_layer.channels.keys())[0] + + install_data = { + "data": { + "name": "appName", + "channel": "channel_app", + "label": "label_app", + "version": "current_version" + }, + "type": "begin_install" + } + await communicator.send_json_to(install_data) + + await communicator.disconnect() + assert f"Added {channel_name} channel to notifications" in caplog.messages + assert f"Received message {json.dumps(install_data)} at {channel_name}" in caplog.messages + assert f"Removed {channel_name} channel from notifications" in caplog.messages + mock_threading.Thread.assert_called_with(target=mock_begin_install, + args=[install_data['data'], channel_layer, mock_workspace]) + mock_threading.Thread().start.assert_called_once() + + +@pytest.mark.asyncio +async def test_notificationsConsumer_receive_invalid_type(caplog): + consumer = notificationsConsumer + consumer.channel_layer_alias = "testlayer" + channel_layers_setting = { + "testlayer": {"BACKEND": "channels.layers.InMemoryChannelLayer"} + } + with override_settings(CHANNEL_LAYERS=channel_layers_setting): + communicator = WebsocketCommunicator(notificationsConsumer.as_asgi(), "GET", "install/notifications") + connected, _ = await communicator.connect() + assert connected + + channel_layer = get_channel_layer("testlayer") + channel_name = list(channel_layer.channels.keys())[0] + + install_data = {"data": {}} + await communicator.send_json_to(install_data) + + await communicator.disconnect() + assert f"Added {channel_name} channel to notifications" in caplog.messages + assert f"Received message {json.dumps(install_data)} at {channel_name}" in caplog.messages + assert "Can't redirect incoming message." 
in caplog.messages + assert f"Removed {channel_name} channel from notifications" in caplog.messages diff --git a/tethysapp/app_store/tests/unit_tests/test_resource_helpers.py b/tethysapp/app_store/tests/unit_tests/test_resource_helpers.py new file mode 100644 index 0000000..913a983 --- /dev/null +++ b/tethysapp/app_store/tests/unit_tests/test_resource_helpers.py @@ -0,0 +1,1006 @@ +from unittest.mock import call, MagicMock +import json +import pytest +import shutil +import sys +from tethysapp.app_store.resource_helpers import (create_pre_multiple_stores_labels_obj, get_resources_single_store, + get_new_stores_reformated_by_labels, get_stores_reformated_by_channel, + get_app_channel_for_stores, merge_channels_of_apps, fetch_resources, + get_stores_reformatted, clear_conda_channel_cache, process_resources, + merge_labels_single_store, get_app_label_obj_for_store, + merge_labels_for_app_in_store, get_resource, check_if_app_installed, + add_keys_to_app_metadata, get_app_instance_from_path) + + +def test_clear_conda_channel_cache(mocker, store): + store_name = 'active_default' + conda_labels = ['main', 'dev'] + active_store = store(store_name, conda_labels=conda_labels) + mocker.patch('tethysapp.app_store.resource_helpers.get_conda_stores', return_value=[active_store]) + mock_cache = mocker.patch('tethysapp.app_store.resource_helpers.cache') + + clear_conda_channel_cache({}, None) + + mock_calls = [call(f'{active_store["conda_channel"]}_{conda_label}_app_resources') for conda_label in conda_labels] + mock_cache.delete.assert_has_calls(mock_calls) + + +def test_create_pre_multiple_stores_labels_obj(tmp_path, mocker, store, resource): + active_store = store('active_default', conda_labels=['main', 'dev']) + mocker.patch('tethysapp.app_store.resource_helpers.get_conda_stores', return_value=[active_store]) + app_resource_main = resource("test_app", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource2_main = resource("test_app2", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource_dev = resource("test_app", active_store['conda_channel'], active_store['conda_labels'][1]) + main_resources = { + 'availableApps': {"test_app": app_resource_main}, + 'installedApps': {"test_app": app_resource_main}, + 'incompatibleApps': {"test_app2": app_resource2_main}, + 'tethysVersion': "4.0.0", + } + dev_resources = { + 'availableApps': {}, + 'installedApps': {}, + 'incompatibleApps': {"test_app": app_resource_dev}, + 'tethysVersion': "4.0.0", + } + mocker.patch('tethysapp.app_store.resource_helpers.get_resources_single_store', + side_effect=[main_resources, dev_resources]) + + object_stores = create_pre_multiple_stores_labels_obj(tmp_path) + + expected_object_stores = { + active_store['conda_channel']: { + "main": main_resources, + "dev": dev_resources + } + } + + assert object_stores == expected_object_stores + + +def test_get_resources_single_store_compatible_and_installed(tmp_path, mocker, resource): + require_refresh = False + conda_channel = 'test_channel' + conda_label = 'main' + cache_key = 'test_cache_key' + + app_resource = resource("test_app", conda_channel, conda_label) + app_resource['versions'][conda_channel][conda_label] = ["1.0"] + app_resource['compatibility'][conda_channel][conda_label] = {'1.0': '>=4.0.0'} + app_resource["installed"][conda_channel][conda_label] = True + + app_resource2 = resource("test_app2", conda_channel, conda_label) + app_resource2['versions'][conda_channel][conda_label] = ["1.0"] + 
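# test_app2 pins compatibility to Tethys < 4.0.0 while tethys_version is mocked as 4.0.0, so it should be reported as incompatible +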
app_resource2['compatibility'][conda_channel][conda_label] = {'1.0': '<4.0.0'} + + mocker.patch('tethysapp.app_store.resource_helpers.fetch_resources', return_value=[app_resource, app_resource2]) + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "4.0.0") + + resources = get_resources_single_store(tmp_path, require_refresh, conda_channel, conda_label, cache_key) + + expected_resources = { + 'availableApps': {"test_app": app_resource}, + 'installedApps': {"test_app": app_resource}, + 'incompatibleApps': {"test_app2": app_resource2}, + 'tethysVersion': "4.0.0", + } + + assert expected_resources == resources + + +def test_get_new_stores_reformated_by_labels(store_with_resources): + store1, main_resources1 = store_with_resources("store1", ['main'], available_apps_label="main", + installed_apps_label="main") + store1, dev_resources1 = store_with_resources("store1", ['dev'], incompatible_apps_label="dev") + store2, main_resources2 = store_with_resources("store1", ['main'], available_apps_label="main", + installed_apps_label="main") + store2, dev_resources2 = store_with_resources("store1", ['dev'], incompatible_apps_label="dev") + + object_stores = { + store1['conda_channel']: { + "main": main_resources1, + "dev": dev_resources1 + }, + store2['conda_channel']: { + "main": main_resources2, + "dev": dev_resources2 + } + } + + reformatted_object_stores = get_new_stores_reformated_by_labels(object_stores) + + expected_object_stores = { + store1['conda_channel']: { + 'availableApps': main_resources1['availableApps'], + 'installedApps': main_resources1['installedApps'], + 'incompatibleApps': dev_resources1['incompatibleApps'] + }, + store2['conda_channel']: { + 'availableApps': main_resources2['availableApps'], + 'installedApps': main_resources2['installedApps'], + 'incompatibleApps': dev_resources2['incompatibleApps'] + } + } + + assert reformatted_object_stores == expected_object_stores + + +def test_get_stores_reformated_by_channel(store_with_resources): + store1, store1_resources = store_with_resources("store_name1", ['main', 'dev'], available_apps_label="main", + installed_apps_label="main", incompatible_apps_label="dev") + store2, store2_resources = store_with_resources("store_name2", ['main'], available_apps_label="main", + installed_apps_label="main") + + object_stores = {store1['conda_channel']: store1_resources, store2['conda_channel']: store2_resources} + + reformatted_object_stores = get_stores_reformated_by_channel(object_stores) + + expected_object_stores = { + 'availableApps': {**store1_resources['availableApps'], **store2_resources['availableApps']}, + 'installedApps': {**store1_resources['installedApps'], **store2_resources['installedApps']}, + 'incompatibleApps': {**store1_resources['incompatibleApps'], **store2_resources['incompatibleApps']} + } + + assert reformatted_object_stores == expected_object_stores + + +def test_get_app_channel_for_stores(store_with_resources): + available_app_name = "available_app_name" + installed_app_name = "installed_app_name" + incompatible_app_name = "incompatible_app_name" + store1, store1_resources = store_with_resources("store_name1", ['main', 'dev'], + available_apps_label="main", available_apps_name=available_app_name, + installed_apps_label="main", installed_apps_name=installed_app_name, + incompatible_apps_label="dev", + incompatible_apps_name=incompatible_app_name) + store2, store2_resources = store_with_resources("store_name2", ['main'], + available_apps_label="main", available_apps_name=available_app_name, + 
installed_apps_label="main", installed_apps_name=installed_app_name) + + object_stores = {store1['conda_channel']: store1_resources, store2['conda_channel']: store2_resources} + + app_channel_obj = get_app_channel_for_stores(object_stores) + + expected_app_channel_obj = { + 'availableApps': {available_app_name: [store1['conda_channel'], store2['conda_channel']]}, + 'installedApps': {installed_app_name: [store1['conda_channel'], store2['conda_channel']]}, + 'incompatibleApps': {incompatible_app_name: [store1['conda_channel']]} + } + + assert app_channel_obj == expected_app_channel_obj + + +def test_merge_channels_of_apps(store_with_resources): + available_app_name = "available_app_name" + installed_app_name = "installed_app_name" + incompatible_app_name = "incompatible_app_name" + store1, store1_resources = store_with_resources("store_name1", ['main', 'dev'], + available_apps_label="main", available_apps_name=available_app_name, + installed_apps_label="main", installed_apps_name=installed_app_name, + incompatible_apps_label="dev", + incompatible_apps_name=incompatible_app_name) + store2, store2_resources = store_with_resources("store_name2", ['main'], + available_apps_label="dev", available_apps_name=available_app_name, + installed_apps_label="main", installed_apps_name=installed_app_name) + + object_stores = {store1['conda_channel']: store1_resources, store2['conda_channel']: store2_resources} + + app_channel_obj = { + 'availableApps': {available_app_name: [store1['conda_channel'], store2['conda_channel']]}, + 'installedApps': {installed_app_name: [store1['conda_channel'], store2['conda_channel']]}, + 'incompatibleApps': {incompatible_app_name: [store1['conda_channel']]} + } + + merged_channels_app = merge_channels_of_apps(app_channel_obj, object_stores) + + expected_object_stores = { + 'availableApps': { + available_app_name: { + 'name': available_app_name, + 'installed': {store1['conda_channel']: {'main': False}, store2['conda_channel']: {'dev': False}}, + 'installedVersion': {store1['conda_channel']: {'main': "1.0"}, + store2['conda_channel']: {'dev': "1.0"}}, + 'latestVersion': {store1['conda_channel']: {'main': "1.0"}, store2['conda_channel']: {'dev': "1.0"}}, + 'versions': {store1['conda_channel']: {'main': ["1.0"]}, store2['conda_channel']: {'dev': ["1.0"]}}, + 'versionURLs': {store1['conda_channel']: {'main': ["versionURL"]}, + store2['conda_channel']: {'dev': ["versionURL"]}}, + 'channels_and_labels': {store1['conda_channel']: {'main': []}, store2['conda_channel']: {'dev': []}}, + 'timestamp': {store1['conda_channel']: {'main': "timestamp"}, + store2['conda_channel']: {'dev': "timestamp"}}, + 'compatibility': {store1['conda_channel']: {'main': {}}, store2['conda_channel']: {'dev': {}}}, + 'license': {store1['conda_channel']: {'main': None}, store2['conda_channel']: {'dev': None}}, + 'licenses': {store1['conda_channel']: {'main': []}, store2['conda_channel']: {'dev': []}}, + 'author': {store1['conda_channel']: {'main': 'author'}, store2['conda_channel']: {'dev': 'author'}}, + 'description': {store1['conda_channel']: {'main': 'description'}, + store2['conda_channel']: {'dev': 'description'}}, + 'author_email': {store1['conda_channel']: {'main': 'author_email'}, + store2['conda_channel']: {'dev': 'author_email'}}, + 'keywords': {store1['conda_channel']: {'main': 'keywords'}, + store2['conda_channel']: {'dev': 'keywords'}}, + 'dev_url': {store1['conda_channel']: {'main': 'url'}, store2['conda_channel']: {'dev': 'url'}} + } + }, + 'installedApps': { + installed_app_name: { + 'name': 
installed_app_name, + 'installed': {store1['conda_channel']: {'main': False}, store2['conda_channel']: {'main': False}}, + 'installedVersion': {store1['conda_channel']: {'main': "1.0"}, + store2['conda_channel']: {'main': "1.0"}}, + 'latestVersion': {store1['conda_channel']: {'main': "1.0"}, store2['conda_channel']: {'main': "1.0"}}, + 'versions': {store1['conda_channel']: {'main': ["1.0"]}, store2['conda_channel']: {'main': ["1.0"]}}, + 'versionURLs': {store1['conda_channel']: {'main': ["versionURL"]}, + store2['conda_channel']: {'main': ["versionURL"]}}, + 'channels_and_labels': {store1['conda_channel']: {'main': []}, store2['conda_channel']: {'main': []}}, + 'timestamp': {store1['conda_channel']: {'main': "timestamp"}, + store2['conda_channel']: {'main': "timestamp"}}, + 'compatibility': {store1['conda_channel']: {'main': {}}, store2['conda_channel']: {'main': {}}}, + 'license': {store1['conda_channel']: {'main': None}, store2['conda_channel']: {'main': None}}, + 'licenses': {store1['conda_channel']: {'main': []}, store2['conda_channel']: {'main': []}}, + 'author': {store1['conda_channel']: {'main': 'author'}, store2['conda_channel']: {'main': 'author'}}, + 'description': {store1['conda_channel']: {'main': 'description'}, + store2['conda_channel']: {'main': 'description'}}, + 'author_email': {store1['conda_channel']: {'main': 'author_email'}, + store2['conda_channel']: {'main': 'author_email'}}, + 'keywords': {store1['conda_channel']: {'main': 'keywords'}, + store2['conda_channel']: {'main': 'keywords'}}, + 'dev_url': {store1['conda_channel']: {'main': 'url'}, store2['conda_channel']: {'main': 'url'}} + } + }, + 'incompatibleApps': { + incompatible_app_name: { + 'name': incompatible_app_name, + 'installed': {store1['conda_channel']: {'dev': False}}, + 'installedVersion': {store1['conda_channel']: {'dev': "1.0"}}, + 'latestVersion': {store1['conda_channel']: {'dev': "1.0"}}, + 'versions': {store1['conda_channel']: {'dev': ["1.0"]}}, + 'versionURLs': {store1['conda_channel']: {'dev': ["versionURL"]}}, + 'channels_and_labels': {store1['conda_channel']: {'dev': []}}, + 'timestamp': {store1['conda_channel']: {'dev': "timestamp"}}, + 'compatibility': {store1['conda_channel']: {'dev': {}}}, + 'license': {store1['conda_channel']: {'dev': None}}, + 'licenses': {store1['conda_channel']: {'dev': []}}, + 'author': {store1['conda_channel']: {'dev': 'author'}}, + 'description': {store1['conda_channel']: {'dev': 'description'}}, + 'author_email': {store1['conda_channel']: {'dev': 'author_email'}}, + 'keywords': {store1['conda_channel']: {'dev': 'keywords'}}, + 'dev_url': {store1['conda_channel']: {'dev': 'url'}} + } + } + } + + assert merged_channels_app == expected_object_stores + + +def test_merge_channels_of_apps_missing_app(store_with_resources): + available_app_name = "available_app_name" + installed_app_name = "installed_app_name" + incompatible_app_name = "incompatible_app_name" + store1, store1_resources = store_with_resources("store_name1", ['main', 'dev'], + available_apps_label="main", available_apps_name=available_app_name, + installed_apps_label="main", installed_apps_name=installed_app_name, + incompatible_apps_label="dev", + incompatible_apps_name=incompatible_app_name) + store2, store2_resources = store_with_resources("store_name2", ['main'], + available_apps_label="dev", available_apps_name=available_app_name, + installed_apps_label="main", installed_apps_name=installed_app_name) + + object_stores = {store1['conda_channel']: store1_resources, store2['conda_channel']: store2_resources} 
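+ # 'availableApps' is left empty in app_channel_obj below, so the merge is expected to map the unlisted app to an empty dict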
+ + app_channel_obj = { + 'availableApps': {}, + 'installedApps': {installed_app_name: [store1['conda_channel'], store2['conda_channel']]}, + 'incompatibleApps': {incompatible_app_name: [store1['conda_channel']]} + } + + merged_channels_app = merge_channels_of_apps(app_channel_obj, object_stores) + + expected_object_stores = { + 'availableApps': { + available_app_name: {} + }, + 'installedApps': { + installed_app_name: { + 'name': installed_app_name, + 'installed': {store1['conda_channel']: {'main': False}, store2['conda_channel']: {'main': False}}, + 'installedVersion': {store1['conda_channel']: {'main': "1.0"}, + store2['conda_channel']: {'main': "1.0"}}, + 'latestVersion': {store1['conda_channel']: {'main': "1.0"}, store2['conda_channel']: {'main': "1.0"}}, + 'versions': {store1['conda_channel']: {'main': ["1.0"]}, store2['conda_channel']: {'main': ["1.0"]}}, + 'versionURLs': {store1['conda_channel']: {'main': ["versionURL"]}, + store2['conda_channel']: {'main': ["versionURL"]}}, + 'channels_and_labels': {store1['conda_channel']: {'main': []}, store2['conda_channel']: {'main': []}}, + 'timestamp': {store1['conda_channel']: {'main': "timestamp"}, + store2['conda_channel']: {'main': "timestamp"}}, + 'compatibility': {store1['conda_channel']: {'main': {}}, store2['conda_channel']: {'main': {}}}, + 'license': {store1['conda_channel']: {'main': None}, store2['conda_channel']: {'main': None}}, + 'licenses': {store1['conda_channel']: {'main': []}, store2['conda_channel']: {'main': []}}, + 'author': {store1['conda_channel']: {'main': 'author'}, store2['conda_channel']: {'main': 'author'}}, + 'description': {store1['conda_channel']: {'main': 'description'}, + store2['conda_channel']: {'main': 'description'}}, + 'author_email': {store1['conda_channel']: {'main': 'author_email'}, + store2['conda_channel']: {'main': 'author_email'}}, + 'keywords': {store1['conda_channel']: {'main': 'keywords'}, + store2['conda_channel']: {'main': 'keywords'}}, + 'dev_url': {store1['conda_channel']: {'main': 'url'}, store2['conda_channel']: {'main': 'url'}} + } + }, + 'incompatibleApps': { + incompatible_app_name: { + 'name': incompatible_app_name, + 'installed': {store1['conda_channel']: {'dev': False}}, + 'installedVersion': {store1['conda_channel']: {'dev': "1.0"}}, + 'latestVersion': {store1['conda_channel']: {'dev': "1.0"}}, + 'versions': {store1['conda_channel']: {'dev': ["1.0"]}}, + 'versionURLs': {store1['conda_channel']: {'dev': ["versionURL"]}}, + 'channels_and_labels': {store1['conda_channel']: {'dev': []}}, + 'timestamp': {store1['conda_channel']: {'dev': "timestamp"}}, + 'compatibility': {store1['conda_channel']: {'dev': {}}}, + 'license': {store1['conda_channel']: {'dev': None}}, + 'licenses': {store1['conda_channel']: {'dev': []}}, + 'author': {store1['conda_channel']: {'dev': 'author'}}, + 'description': {store1['conda_channel']: {'dev': 'description'}}, + 'author_email': {store1['conda_channel']: {'dev': 'author_email'}}, + 'keywords': {store1['conda_channel']: {'dev': 'keywords'}}, + 'dev_url': {store1['conda_channel']: {'dev': 'url'}} + } + } + } + + assert merged_channels_app == expected_object_stores + + +def test_reduce_level_obj(store, resource, mocker): + active_store = store('active_default', conda_labels=['main', 'dev']) + mocker.patch('tethysapp.app_store.resource_helpers.get_conda_stores', return_value=[active_store]) + app_resource_main = resource("test_app", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource2_main = resource("test_app2", 
active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource_dev = resource("test_app", active_store['conda_channel'], active_store['conda_labels'][1]) + main_resources = { + 'availableApps': {"test_app": app_resource_main}, + 'installedApps': {"test_app": app_resource_main}, + 'incompatibleApps': {"test_app2": app_resource2_main}, + 'tethysVersion': "4.0.0", + } + dev_resources = { + 'availableApps': {}, + 'installedApps': {}, + 'incompatibleApps': {"test_app": app_resource_dev}, + 'tethysVersion': "4.0.0", + } + object_stores = { + active_store['conda_channel']: { + "main": main_resources, + "dev": dev_resources + } + } + mocker.patch('tethysapp.app_store.resource_helpers.create_pre_multiple_stores_labels_obj', + return_value=object_stores) + + list_stores = get_stores_reformatted(object_stores) + + expected_list_stores = { + 'availableApps': [app_resource_main], + 'installedApps': [app_resource_main], + 'incompatibleApps': [app_resource2_main, app_resource_dev] + } + + assert list_stores == expected_list_stores + + +def test_merge_labels_single_store(store, resource): + active_store = store('active_default', conda_labels=['main', 'dev']) + app_resource_main = resource("test_app", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource2_main = resource("test_app2", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource_dev = resource("test_app2", active_store['conda_channel'], active_store['conda_labels'][1]) + main_resources = { + 'availableApps': {"test_app": app_resource_main}, + 'installedApps': {"test_app": app_resource_main}, + 'incompatibleApps': {"test_app2": app_resource2_main}, + 'tethysVersion': "4.0.0", + } + dev_resources = { + 'availableApps': {}, + 'installedApps': {}, + 'incompatibleApps': {"test_app2": app_resource_dev}, + 'tethysVersion': "4.0.0", + } + conda_channel = active_store['conda_channel'] + object_stores = { + conda_channel: { + "main": main_resources, + "dev": dev_resources + } + } + + ref_object_stores = merge_labels_single_store(object_stores[conda_channel], conda_channel, 'availableApps') + expected_object_stores = main_resources['availableApps'] + assert ref_object_stores == expected_object_stores + + ref_object_stores = merge_labels_single_store(object_stores[conda_channel], conda_channel, 'installedApps') + expected_object_stores = main_resources['installedApps'] + assert ref_object_stores == expected_object_stores + + ref_object_stores = merge_labels_single_store(object_stores[conda_channel], conda_channel, 'incompatibleApps') + expected_object_stores = expected_object_stores = {'test_app2': { + 'name': "test_app2", + 'installed': {active_store['conda_channel']: {'main': False, 'dev': False}}, + 'installedVersion': {active_store['conda_channel']: {'main': "1.0", 'dev': "1.0"}}, + 'latestVersion': {active_store['conda_channel']: {'main': "1.0", 'dev': "1.0"}}, + 'versions': {active_store['conda_channel']: {'main': ["1.0"], 'dev': ["1.0"]}}, + 'versionURLs': {active_store['conda_channel']: {'main': ["versionURL"], 'dev': ["versionURL"]}}, + 'channels_and_labels': {active_store['conda_channel']: {'main': [], 'dev': []}}, + 'timestamp': {active_store['conda_channel']: {'main': "timestamp", 'dev': "timestamp"}}, + 'compatibility': {active_store['conda_channel']: {'main': {}, 'dev': {}}}, + 'license': {active_store['conda_channel']: {'main': None, 'dev': None}}, + 'licenses': {active_store['conda_channel']: {'main': [], 'dev': []}}, + 'author': {active_store['conda_channel']: {'main': 'author', 
'dev': 'author'}}, + 'description': {active_store['conda_channel']: {'main': 'description', 'dev': 'description'}}, + 'author_email': {active_store['conda_channel']: {'main': 'author_email', 'dev': 'author_email'}}, + 'keywords': {active_store['conda_channel']: {'main': 'keywords', 'dev': 'keywords'}}, + 'dev_url': {active_store['conda_channel']: {'main': 'url', 'dev': 'url'}} + }} + assert ref_object_stores == expected_object_stores + + +def test_get_app_label_obj_for_store(store, resource): + active_store = store('active_default', conda_labels=['main', 'dev']) + app_resource_main = resource("test_app", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource2_main = resource("test_app2", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource_dev = resource("test_app2", active_store['conda_channel'], active_store['conda_labels'][1]) + main_resources = { + 'availableApps': {"test_app": app_resource_main}, + 'installedApps': {"test_app": app_resource_main}, + 'incompatibleApps': {"test_app2": app_resource2_main}, + 'tethysVersion': "4.0.0", + } + dev_resources = { + 'availableApps': {}, + 'installedApps': {}, + 'incompatibleApps': {"test_app2": app_resource_dev}, + 'tethysVersion': "4.0.0", + } + conda_channel = active_store['conda_channel'] + object_stores = { + conda_channel: { + "main": main_resources, + "dev": dev_resources + } + } + + apps_labels = get_app_label_obj_for_store(object_stores[conda_channel], 'availableApps') + assert apps_labels == {'test_app': ['main']} + + apps_labels = get_app_label_obj_for_store(object_stores[conda_channel], 'installedApps') + assert apps_labels == {'test_app': ['main']} + + apps_labels = get_app_label_obj_for_store(object_stores[conda_channel], 'incompatibleApps') + assert apps_labels == {'test_app2': ['main', 'dev']} + + +def test_merge_labels_for_app_in_store(store, resource): + active_store = store('active_default', conda_labels=['main', 'dev']) + app_resource_main = resource("test_app", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource2_main = resource("test_app2", active_store['conda_channel'], active_store['conda_labels'][0]) + app_resource_dev = resource("test_app2", active_store['conda_channel'], active_store['conda_labels'][1]) + main_resources = { + 'availableApps': {"test_app": app_resource_main}, + 'installedApps': {"test_app": app_resource_main}, + 'incompatibleApps': {"test_app2": app_resource2_main}, + 'tethysVersion': "4.0.0", + } + dev_resources = { + 'availableApps': {}, + 'installedApps': {}, + 'incompatibleApps': {"test_app2": app_resource_dev}, + 'tethysVersion': "4.0.0", + } + conda_channel = active_store['conda_channel'] + object_stores = { + conda_channel: { + "main": main_resources, + "dev": dev_resources + } + } + app_labels = {'test_app2': ['main', 'dev']} + + merged_label_store = merge_labels_for_app_in_store(app_labels, object_stores[conda_channel], conda_channel, + 'incompatibleApps') + + expected_object_stores = {'test_app2': { + 'name': "test_app2", + 'installed': {active_store['conda_channel']: {'main': False, 'dev': False}}, + 'installedVersion': {active_store['conda_channel']: {'main': "1.0", 'dev': "1.0"}}, + 'latestVersion': {active_store['conda_channel']: {'main': "1.0", 'dev': "1.0"}}, + 'versions': {active_store['conda_channel']: {'main': ["1.0"], 'dev': ["1.0"]}}, + 'versionURLs': {active_store['conda_channel']: {'main': ["versionURL"], 'dev': ["versionURL"]}}, + 'channels_and_labels': {active_store['conda_channel']: {'main': [], 'dev': 
[]}}, + 'timestamp': {active_store['conda_channel']: {'main': "timestamp", 'dev': "timestamp"}}, + 'compatibility': {active_store['conda_channel']: {'main': {}, 'dev': {}}}, + 'license': {active_store['conda_channel']: {'main': None, 'dev': None}}, + 'licenses': {active_store['conda_channel']: {'main': [], 'dev': []}}, + 'author': {active_store['conda_channel']: {'main': 'author', 'dev': 'author'}}, + 'description': {active_store['conda_channel']: {'main': 'description', 'dev': 'description'}}, + 'author_email': {active_store['conda_channel']: {'main': 'author_email', 'dev': 'author_email'}}, + 'keywords': {active_store['conda_channel']: {'main': 'keywords', 'dev': 'keywords'}}, + 'dev_url': {active_store['conda_channel']: {'main': 'url', 'dev': 'url'}} + }} + + assert merged_label_store == expected_object_stores + + +def test_fetch_resources(tmp_path, mocker, resource): + conda_search_rep = json.dumps({ + "test_app": [{ + "arch": None, + "build": "py_0", + "build_number": 0, + "channel": "https://conda.anaconda.org/test_channel/noarch", + "constrains": [], + "depends": [ + "pandas" + ], + "fn": "test_app-1.9-py_0.tar.bz2", + "license": "{'name': 'test_app', 'version': '1.9', 'description': 'description', " + "'long_description': 'long_description', 'author': 'author', 'author_email': 'author_email', " + "'url': 'url', 'license': 'BSD 3-Clause Clear', 'tethys_version': '>=4.0.0'}", + "md5": "ab2eb7cc691f4fd984a2216401fabfa1", + "name": "test_app", + "noarch": "python", + "package_type": "noarch_python", + "platform": None, + "sha256": "f38c3e39fe3442dc4a72b1acf7415a5e90443139c6684042a5ddf328d06a9354", + "size": 1907887, + "subdir": "noarch", + "timestamp": 1663012608139, + "url": "https://conda.anaconda.org/test_channel/noarch/test_app-1.9-py_0.tar.bz2", + "version": "1.9" + }] + }) + app_installation = {'isInstalled': False} + app_resource = resource("test_app", 'conda_channel', 'dev') + mock_conda = mocker.patch('tethysapp.app_store.resource_helpers.conda_run', + return_value=[conda_search_rep, None, 0]) + mocker.patch('tethysapp.app_store.resource_helpers.check_if_app_installed', return_value=app_installation) + mocker.patch('tethysapp.app_store.resource_helpers.process_resources', return_value=app_resource) + mock_cache = mocker.patch('tethysapp.app_store.resource_helpers.cache') + mock_cache.get.side_effect = [None] + + fetched_resource = fetch_resources(tmp_path, "test_channel", conda_label="dev") + + mock_conda.assert_called_with("search", ["-c", "test_channel/label/dev", "--override-channels", "-i", "--json"]) + mock_cache.set.assert_called_with("test_channel", app_resource) + assert fetched_resource == app_resource + + +def test_fetch_resources_already_installed_no_license(tmp_path, mocker, resource): + conda_search_rep = json.dumps({ + "test_app": [{ + "arch": None, + "build": "py_0", + "build_number": 0, + "channel": "https://conda.anaconda.org/test_channel/noarch", + "constrains": [], + "depends": [ + "pandas" + ], + "fn": "test_app-1.9-py_0.tar.bz2", + "license": "BSD", + "md5": "ab2eb7cc691f4fd984a2216401fabfa1", + "name": "test_app", + "noarch": "python", + "package_type": "noarch_python", + "platform": None, + "sha256": "f38c3e39fe3442dc4a72b1acf7415a5e90443139c6684042a5ddf328d06a9354", + "size": 1907887, + "subdir": "noarch", + "timestamp": 1663012608139, + "url": "https://conda.anaconda.org/test_channel/noarch/test_app-1.9-py_0.tar.bz2", + "version": "1.9" + }] + }) + app_installation = {'isInstalled': True, 'channel': 'test_channel', 'version': "1"} + app_resource = 
resource("test_app", 'conda_channel', 'main') + mock_conda = mocker.patch('tethysapp.app_store.resource_helpers.conda_run', + return_value=[conda_search_rep, None, 0]) + mocker.patch('tethysapp.app_store.resource_helpers.check_if_app_installed', return_value=app_installation) + mocker.patch('tethysapp.app_store.resource_helpers.process_resources', return_value=app_resource) + mock_cache = mocker.patch('tethysapp.app_store.resource_helpers.cache') + mock_cache.get.side_effect = [None] + + fetched_resource = fetch_resources(tmp_path, "test_channel", conda_label="main") + + mock_conda.assert_called_with("search", ["-c", "test_channel", "--override-channels", "-i", "--json"]) + mock_cache.set.assert_called_with("test_channel", app_resource) + assert fetched_resource == app_resource + + +def test_fetch_resources_no_resources(tmp_path, mocker, caplog): + conda_search_rep = json.dumps({"error": "The following packages are not available from current channels"}) + mock_conda = mocker.patch('tethysapp.app_store.resource_helpers.conda_run', + return_value=[conda_search_rep, None, 0]) + mock_cache = mocker.patch('tethysapp.app_store.resource_helpers.cache') + mock_cache.get.side_effect = [None] + + fetched_resource = fetch_resources(tmp_path, "test_channel", conda_label="dev") + + mock_conda.assert_called_with("search", ["-c", "test_channel/label/dev", "--override-channels", "-i", "--json"]) + assert 'no packages found with the label dev in channel test_channel' in caplog.messages + assert fetched_resource == [] + + +def test_fetch_resources_non_zero_code(tmp_path, mocker): + conda_search_rep = json.dumps({}) + mocker.patch('tethysapp.app_store.resource_helpers.conda_run', return_value=[conda_search_rep, None, 9]) + + with pytest.raises(Exception) as e_info: + fetch_resources(tmp_path, "test_channel") + assert e_info.message == "ERROR: Couldn't search packages in the conda_channel channel" + + +def test_fetch_resources_cached(tmp_path, mocker, resource, caplog): + app_resource = resource("test_app", 'conda_channel', 'main') + mock_cache = mocker.patch('tethysapp.app_store.resource_helpers.cache') + mock_cache.get.return_value = app_resource + + fetched_resource = fetch_resources(tmp_path, "test_channel") + + assert "Found in cache" in caplog.messages + assert fetched_resource == app_resource + + +def test_process_resources_with_license_installed_update_available(fresh_resource, resource, tmp_path, mocker): + mock_workspace = MagicMock(path=tmp_path) + conda_channel = "test_channel" + conda_label = "main" + app_resources = fresh_resource("test_app", conda_channel, conda_label) + app_resources['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'url': 'url', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + app_resources['installed'] = {conda_channel: {conda_label: True}} + app_resources['installedVersion'] = {conda_channel: {conda_label: "0.9"}} + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "4.0.0") + + processed_resources = process_resources([app_resources], mock_workspace, conda_channel, conda_label)[0] + processed_resources['keywords'][conda_channel][conda_label] = 'keywords' + + expected_resource = resource("test_app", conda_channel, conda_label) + expected_resource['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 
'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'url': 'url', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + expected_resource['updateAvailable'] = {conda_channel: {conda_label: True}} + expected_resource['installed'] = {conda_channel: {conda_label: True}} + expected_resource['installedVersion'] = {conda_channel: {conda_label: "0.9"}} + + assert processed_resources == expected_resource + + +def test_process_resources_with_license_installed(fresh_resource, resource, tmp_path, mocker): + mock_workspace = MagicMock(path=tmp_path) + conda_channel = "test_channel" + conda_label = "main" + app_resources = fresh_resource("test_app", conda_channel, conda_label) + app_resources['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'url': 'url', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + app_resources['installed'] = {conda_channel: {conda_label: True}} + app_resources['installedVersion'] = {conda_channel: {conda_label: "1.0"}} + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "3.9.0") + + processed_resources = process_resources([app_resources], mock_workspace, conda_channel, conda_label)[0] + processed_resources['keywords'][conda_channel][conda_label] = 'keywords' + + expected_resource = resource("test_app", conda_channel, conda_label) + expected_resource['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'url': 'url', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + expected_resource['updateAvailable'] = {conda_channel: {conda_label: False}} + expected_resource['installed'] = {conda_channel: {conda_label: True}} + expected_resource['latestVersion'] = {conda_channel: {conda_label: "1.0*"}} + + assert processed_resources == expected_resource + + +def test_process_resources_with_license_installed_without_version(fresh_resource, resource, tmp_path, mocker): + mock_workspace = MagicMock(path=tmp_path) + conda_channel = "test_channel" + conda_label = "main" + app_resources = fresh_resource("test_app", conda_channel, conda_label) + app_resources['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'url': 'url', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + app_resources['installed'] = {conda_channel: {conda_label: True}} + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "4.0.0") + + processed_resources = process_resources([app_resources], mock_workspace, conda_channel, conda_label)[0] + processed_resources['keywords'][conda_channel][conda_label] = 'keywords' + + expected_resource = resource("test_app", conda_channel, conda_label) + expected_resource['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'url': 'url', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + expected_resource['updateAvailable'] = {conda_channel: {conda_label: False}} + expected_resource['installed'] = 
{conda_channel: {conda_label: True}} + del expected_resource['installedVersion'] + + assert processed_resources == expected_resource + + +def test_process_resources_with_license_not_installed(fresh_resource, resource, tmp_path, mocker): + mock_workspace = MagicMock(path=tmp_path) + conda_channel = "test_channel" + conda_label = "main" + app_resources = fresh_resource("test_app", conda_channel, conda_label) + app_resources['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'url': 'url', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "4.0.0") + + processed_resources = process_resources([app_resources], mock_workspace, conda_channel, conda_label)[0] + processed_resources['keywords'][conda_channel][conda_label] = 'keywords' + + expected_resource = resource("test_app", conda_channel, conda_label) + expected_resource['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'url': 'url', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + del expected_resource['installedVersion'] + + assert processed_resources == expected_resource + + +def test_process_resources_with_license_not_installed_no_license_url(fresh_resource, resource, tmp_path, mocker): + mock_workspace = MagicMock(path=tmp_path) + conda_channel = "test_channel" + conda_label = "main" + app_resources = fresh_resource("test_app", conda_channel, conda_label) + app_resources['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "4.0.0") + + processed_resources = process_resources([app_resources], mock_workspace, conda_channel, conda_label)[0] + processed_resources['keywords'][conda_channel][conda_label] = 'keywords' + + expected_resource = resource("test_app", conda_channel, conda_label) + expected_resource['license'][conda_channel][conda_label] = json.dumps({ + 'name': 'test_app', 'version': '1.9', 'description': 'description', 'long_description': 'long_description', + 'author': 'author', 'author_email': 'author_email', 'license': 'BSD 3-Clause Clear', + 'tethys_version': '>=4.0.0'}) + expected_resource['dev_url'][conda_channel][conda_label] = '' + del expected_resource['installedVersion'] + + assert processed_resources == expected_resource + + +def test_process_resources_no_license_no_meta_yaml_not_installed(fresh_resource, tmp_path, mocker, caplog): + mock_workspace = MagicMock(path=tmp_path) + conda_channel = "test_channel" + conda_label = "main" + app_resources = fresh_resource("test_app", conda_channel, conda_label) + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "4.0.0") + mock_urllib = mocker.patch('tethysapp.app_store.resource_helpers.urllib') + mock_shutil = mocker.patch('tethysapp.app_store.resource_helpers.shutil') + + processed_resources = process_resources([app_resources], mock_workspace, conda_channel, conda_label)[0] + + expected_resource = fresh_resource("test_app", conda_channel, 
conda_label) + expected_resource['latestVersion'] = {conda_channel: {conda_label: "1.0*"}} + filepath = tmp_path / "apps" / conda_channel / conda_label / "test_app" + filepath.mkdir(parents=True) + expected_resource['filepath'] = {conda_channel: {conda_label: str(filepath)}} + + assert processed_resources == expected_resource + download_path = tmp_path / "apps" / conda_channel / conda_label / "versionURL" + mock_urllib.request.urlretrieve.assert_called_with("versionURL", str(download_path)) + mock_shutil.unpack_archive.assert_called_with(str(download_path), str(filepath)) + assert "License field metadata not found. Downloading: versionURL" in caplog.messages + assert "No yaml file available to retrieve metadata" in caplog.messages + + +def test_process_resources_no_license_no_meta_yaml_not_installed_output_exists(fresh_resource, tmp_path, + mocker, caplog): + mock_workspace = MagicMock(path=tmp_path) + conda_channel = "test_channel" + conda_label = "main" + app_resources = fresh_resource("test_app", conda_channel, conda_label) + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "4.0.0") + mock_urllib = mocker.patch('tethysapp.app_store.resource_helpers.urllib') + mock_shutil = mocker.patch('tethysapp.app_store.resource_helpers.shutil') + filepath = tmp_path / "apps" / conda_channel / conda_label / "test_app" + filepath.mkdir(parents=True) + + processed_resources = process_resources([app_resources], mock_workspace, conda_channel, conda_label)[0] + + expected_resource = fresh_resource("test_app", conda_channel, conda_label) + expected_resource['latestVersion'] = {conda_channel: {conda_label: "1.0*"}} + expected_resource['filepath'] = {conda_channel: {conda_label: str(filepath)}} + + assert processed_resources == expected_resource + download_path = tmp_path / "apps" / conda_channel / conda_label / "versionURL" + mock_urllib.request.urlretrieve.assert_called_with("versionURL", str(download_path)) + mock_shutil.unpack_archive.assert_called_with(str(download_path), str(filepath)) + assert "License field metadata not found. 
Downloading: versionURL" in caplog.messages + assert "No yaml file available to retrieve metadata" in caplog.messages + + +def test_process_resources_no_license_not_installed(fresh_resource, resource, tmp_path, mocker, caplog, test_files_dir): + mock_workspace = MagicMock(path=tmp_path) + conda_channel = "test_channel" + conda_label = "main" + app_resources = fresh_resource("test_app", conda_channel, conda_label) + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "4.0.0") + mock_urllib = mocker.patch('tethysapp.app_store.resource_helpers.urllib') + mock_shutil = mocker.patch('tethysapp.app_store.resource_helpers.shutil') + filepath = tmp_path / "apps" / conda_channel / conda_label / "test_app" + filepath.mkdir(parents=True) + recipes = filepath / "info" / "recipe" + recipes.mkdir(parents=True) + test_meta_yaml = test_files_dir / "recipe_meta.yaml" + recipes_meta_yaml = recipes / "meta.yaml" + shutil.copyfile(test_meta_yaml, recipes_meta_yaml) + + processed_resources = process_resources([app_resources], mock_workspace, conda_channel, conda_label)[0] + + expected_resource = resource("test_app", conda_channel, conda_label) + expected_resource['dev_url'] = {conda_channel: {conda_label: ""}} + expected_resource['latestVersion'] = {conda_channel: {conda_label: "1.0*"}} + expected_resource['filepath'] = {conda_channel: {conda_label: str(filepath)}} + del expected_resource['installedVersion'] + + assert processed_resources == expected_resource + download_path = tmp_path / "apps" / conda_channel / conda_label / "versionURL" + mock_urllib.request.urlretrieve.assert_called_with("versionURL", str(download_path)) + mock_shutil.unpack_archive.assert_called_with(str(download_path), str(filepath)) + assert "License field metadata not found. Downloading: versionURL" in caplog.messages + + +def test_process_resources_no_license_yaml_exception(fresh_resource, tmp_path, mocker, caplog, + test_files_dir): + mock_workspace = MagicMock(path=tmp_path) + conda_channel = "test_channel" + conda_label = "main" + app_resources = fresh_resource("test_app", conda_channel, conda_label) + mocker.patch('tethysapp.app_store.resource_helpers.tethys_version', "4.0.0") + mock_urllib = mocker.patch('tethysapp.app_store.resource_helpers.urllib') + mock_shutil = mocker.patch('tethysapp.app_store.resource_helpers.shutil') + filepath = tmp_path / "apps" / conda_channel / conda_label / "test_app" + filepath.mkdir(parents=True) + recipes = filepath / "info" / "recipe" + recipes.mkdir(parents=True) + test_meta_yaml = test_files_dir / "basic_meta.yaml" + recipes_meta_yaml = recipes / "meta.yaml" + shutil.copyfile(test_meta_yaml, recipes_meta_yaml) + + processed_resources = process_resources([app_resources], mock_workspace, conda_channel, conda_label)[0] + + expected_resource = fresh_resource("test_app", conda_channel, conda_label) + expected_resource['latestVersion'] = {conda_channel: {conda_label: "1.0*"}} + expected_resource['filepath'] = {conda_channel: {conda_label: str(filepath)}} + + assert processed_resources == expected_resource + download_path = tmp_path / "apps" / conda_channel / conda_label / "versionURL" + mock_urllib.request.urlretrieve.assert_called_with("versionURL", str(download_path)) + mock_shutil.unpack_archive.assert_called_with(str(download_path), str(filepath)) + assert "License field metadata not found. 
Downloading: versionURL" in caplog.messages + assert "Error happened while downloading package for metadata" in caplog.messages + + +def test_get_resource(resource, tmp_path, mocker): + conda_channel = "test_channel" + conda_label = "main" + app_resource = resource("test_app", conda_channel, conda_label) + mocker.patch('tethysapp.app_store.resource_helpers.fetch_resources', return_value=[app_resource]) + + resource_response = get_resource("test_app", conda_channel, conda_label, tmp_path) + + assert resource_response == app_resource + + +def test_get_resource_none(tmp_path, mocker): + conda_channel = "test_channel" + conda_label = "main" + mocker.patch('tethysapp.app_store.resource_helpers.fetch_resources', return_value=[]) + + resource_response = get_resource("test_app", conda_channel, conda_label, tmp_path) + + assert resource_response is None + + +def test_check_if_app_installed_installed(mocker): + conda_run_resp = json.dumps([{"channel": "conda_channel", 'version': '1.0'}]) + mocker.patch('tethysapp.app_store.resource_helpers.conda_run', return_value=[conda_run_resp, "", 0]) + + response = check_if_app_installed("test_app") + + expected_response = { + 'isInstalled': True, + 'channel': "conda_channel", + 'version': '1.0' + } + assert response == expected_response + + +def test_check_if_app_installed_not_installed(mocker): + conda_run_resp = json.dumps([{}]) + mocker.patch('tethysapp.app_store.resource_helpers.conda_run', return_value=[conda_run_resp, "", 10]) + + response = check_if_app_installed("test_app") + + expected_response = { + 'isInstalled': False + } + assert response == expected_response + + +def test_add_keys_to_app_metadata(): + conda_channel = "conda_channel" + conda_label = "conda_label" + additional_data = { + "author": "author", + "description": "description" + } + app = { + "name": "test_app", + "version": {conda_channel: {conda_label: "1.0"}} + } + additional_keys = ["author"] + new_dict = add_keys_to_app_metadata(additional_data, app, additional_keys, conda_channel, conda_label) + + expected_new_dict = { + "name": "test_app", + "version": {conda_channel: {conda_label: "1.0"}}, + "author": {conda_channel: {conda_label: "author"}} + } + assert new_dict == expected_new_dict + + +def test_add_keys_to_app_metadata_no_additional_data(): + conda_channel = "conda_channel" + conda_label = "conda_label" + additional_data = {} + app = { + "name": "test_app", + "version": {conda_channel: {conda_label: "1.0"}} + } + additional_keys = ["author"] + new_dict = add_keys_to_app_metadata(additional_data, app, additional_keys, conda_channel, conda_label) + + expected_new_dict = { + "name": "test_app", + "version": {conda_channel: {conda_label: "1.0"}} + } + assert new_dict == expected_new_dict + + +def test_get_app_instance_from_path(mocker, tmp_path, tethysapp): + app_name = "test_app" + mock_module = MagicMock(test_app=tethysapp) + sys.modules[f'tethysapp.{app_name}.app'] = mock_module + mocker.patch('tethysapp.app_store.resource_helpers.pkgutil.iter_modules', return_value=[["", app_name, True]]) + mocker.patch('tethysapp.app_store.resource_helpers.inspect.getmembers', return_value=[["test_app", tethysapp]]) + + get_app_instance_from_path(tmp_path) + app_instance = get_app_instance_from_path(tmp_path) + + assert app_instance.init_ran + + +def test_get_app_instance_from_path_typeerror(mocker, tmp_path, tethysapp): + app_name = "test_app" + mock_module = MagicMock() + mock_module.test_app = "Not a Class" + sys.modules[f'tethysapp.{app_name}.app'] = mock_module + 
mocker.patch('tethysapp.app_store.resource_helpers.pkgutil.iter_modules', return_value=[["", app_name, True]]) + mocker.patch('tethysapp.app_store.resource_helpers.inspect.getmembers', return_value=[["test_app", tethysapp]]) + + app_instance = get_app_instance_from_path(tmp_path) + + assert app_instance is None diff --git a/tethysapp/app_store/tests/unit_tests/test_submission_handlers.py b/tethysapp/app_store/tests/unit_tests/test_submission_handlers.py new file mode 100644 index 0000000..dfdbd60 --- /dev/null +++ b/tethysapp/app_store/tests/unit_tests/test_submission_handlers.py @@ -0,0 +1,538 @@ +import pytest +import shutil +import os +import filecmp +from unittest.mock import call, MagicMock +from github.GithubException import UnknownObjectException +from tethysapp.app_store.submission_handlers import (update_anaconda_dependencies, get_github_repo, + initialize_local_repo_for_active_stores, initialize_local_repo, + generate_label_strings, create_tethysapp_warehouse_release, + generate_current_version, reset_folder, copy_files_for_recipe, + create_upload_command, get_keywords_and_email, + create_template_data_for_install, fix_setup, remove_init_file, + apply_main_yml_template, get_head_and_tag_names, + create_current_tag_version, check_if_organization_in_remote, + push_to_warehouse_release_remote_branch, + create_head_current_version, create_tags_for_current_version, + get_workflow_job_url, process_branch) + + +def test_update_anaconda_dependencies_no_pip(basic_tethysapp, app_files_dir, basic_meta_yaml): + + recipe_path = basic_tethysapp / "conda.recipes" + test_install_pip = basic_tethysapp / "tethysapp" / "test_app" / "scripts" / "install_pip.sh" + test_install_pip.unlink() + + update_anaconda_dependencies(basic_tethysapp, recipe_path, app_files_dir) + + test_app_meta_yaml = recipe_path / "meta.yaml" + assert filecmp.cmp(test_app_meta_yaml, basic_meta_yaml, shallow=False) + assert not test_install_pip.is_file() + + +def test_update_anaconda_dependencies_with_pip(complex_tethysapp, app_files_dir, complex_meta_yaml, install_pip_bash): + + recipe_path = complex_tethysapp / "conda.recipes" + test_install_pip = complex_tethysapp / "tethysapp" / "test_app" / "scripts" / "install_pip.sh" + + update_anaconda_dependencies(complex_tethysapp, recipe_path, app_files_dir) + + test_app_meta_yaml = recipe_path / "meta.yaml" + assert filecmp.cmp(test_app_meta_yaml, complex_meta_yaml, shallow=False) + assert filecmp.cmp(test_install_pip, install_pip_bash, shallow=False) + + +def test_repo_exists(mocker, caplog): + organization_login = "test_org" + repo_name = "test_app" + mock_organization = mocker.patch('github.Organization.Organization') + mock_organization.login = organization_login + mock_repository = MagicMock(full_name="github-org/test_app") + mock_organization.get_repo.return_value = mock_repository + + tethysapp_repo = get_github_repo(repo_name, mock_organization) + assert tethysapp_repo == mock_repository + + mock_organization.get_repo.assert_called_once() + mock_organization.create_repo.assert_not_called() + + logger_message = f"{organization_login}/{repo_name} Exists. 
Will have to delete" + assert logger_message in caplog.messages + + +def test_repo_does_not_exist(mocker, caplog): + organization_login = "test_org" + repo_name = "test_app" + error_status = 404 + error_message = "Not Found" + + mock_organization = mocker.patch('github.Organization.Organization') + mock_organization.login = organization_login + mock_organization.get_repo.side_effect = UnknownObjectException(error_status, message=error_message) + mock_repository = MagicMock(full_name="github-org/test_app") + mock_organization.create_repo.return_value = mock_repository + + tethysapp_repo = get_github_repo(repo_name, mock_organization) + assert tethysapp_repo == mock_repository + + mock_organization.get_repo.assert_called_once() + mock_organization.create_repo.assert_called_once() + + logger_message = f"Received a {error_status} error when checking {organization_login}/{repo_name}. " \ + f"Error: {error_message}" + assert logger_message in caplog.messages + + logger_message = f"Creating a new repository at {organization_login}/{repo_name}" + assert logger_message in caplog.messages + + +@pytest.mark.parametrize( + "stores, expected_call_count", [ + (pytest.lazy_fixture("all_active_stores"), 2), + (pytest.lazy_fixture("mix_active_inactive_stores"), 1), + (pytest.lazy_fixture("all_inactive_stores"), 0)]) +def test_initialize_local_repo_for_active_stores(stores, expected_call_count, mocker): + install_data = { + "url": "https://github.com/notrealorg/fakeapp", + "stores": stores + } + + channel_layer = MagicMock() + app_workspace = "fake_path" + mock_initialize_local_repo = mocker.patch('tethysapp.app_store.submission_handlers.initialize_local_repo') + + initialize_local_repo_for_active_stores(install_data, channel_layer, app_workspace) + + assert mock_initialize_local_repo.call_count == expected_call_count + + +def test_initialize_local_repo_fresh(store, tmp_path, mocker): + github_url = "https://github.com/notrealorg/fakeapp" + active_store = store("active_default") + channel_layer = MagicMock() + app_workspace = MagicMock(path=tmp_path) + + mock_repo = MagicMock() + mock_branch1 = MagicMock() + mock_branch1.name = 'origin/commit1' + mock_branch2 = MagicMock() + mock_branch2.name = 'origin/commit2' + mock_git = mocker.patch('git.Repo.init', side_effect=[mock_repo]) + mock_ws = mocker.patch('tethysapp.app_store.submission_handlers.send_notification') + + mock_repo.remote().refs = [mock_branch1, mock_branch2] + initialize_local_repo(github_url, active_store, channel_layer, app_workspace) + + expected_github_dur = tmp_path / "gitsubmission" / active_store['conda_channel'] + expected_app_github_dur = expected_github_dur / "fakeapp" + assert expected_github_dur.is_dir() + + mock_git.create_remote.called_with(['origin', github_url]) + mock_git.create_remote().fetch.called_once() + + expected_data_json = { + "data": { + "branches": ["commit1", "commit2"], + "github_dir": expected_app_github_dur, + "conda_channel": active_store['conda_channel'], + "github_token": active_store['github_token'], + "conda_labels": active_store['conda_labels'], + "github_organization": active_store['github_organization'] + }, + "jsHelperFunction": "showBranches", + "helper": "addModalHelper" + } + + mock_ws.called_with([expected_data_json, channel_layer]) + + +def test_initialize_local_repo_already_exists(store, tmp_path, mocker): + github_url = "https://github.com/notrealorg/fakeapp" + active_store = store("active_default") + channel_layer = MagicMock() + app_workspace = MagicMock(path=tmp_path) + expected_github_dur = 
tmp_path / "gitsubmission" / active_store['conda_channel'] + expected_app_github_dur = expected_github_dur / "fakeapp" + expected_app_github_dur.mkdir(parents=True) + + mock_repo = MagicMock() + mock_branch1 = MagicMock() + mock_branch1.name = 'origin/commit1' + mock_branch2 = MagicMock() + mock_branch2.name = 'origin/commit2' + mock_git = mocker.patch('git.Repo.init', side_effect=[mock_repo]) + mock_ws = mocker.patch('tethysapp.app_store.submission_handlers.send_notification') + + mock_repo.remote().refs = [mock_branch1, mock_branch2] + initialize_local_repo(github_url, active_store, channel_layer, app_workspace) + + assert expected_github_dur.is_dir() + + mock_git.create_remote.called_with(['origin', github_url]) + mock_git.create_remote().fetch.called_once() + + expected_data_json = { + "data": { + "branches": ["commit1", "commit2"], + "github_dir": expected_app_github_dur, + "conda_channel": active_store['conda_channel'], + "github_token": active_store['github_token'], + "conda_labels": active_store['conda_labels'], + "github_organization": active_store['github_organization'] + }, + "jsHelperFunction": "showBranches", + "helper": "addModalHelper" + } + + mock_ws.called_with([expected_data_json, channel_layer]) + + +@pytest.mark.parametrize( + "conda_labels, expected_label_string", [ + (["dev", "main"], "dev --label main"), + (["main"], "main")]) +def test_generate_label_strings(conda_labels, expected_label_string): + label_string = generate_label_strings(conda_labels) + + assert label_string == expected_label_string + + +def test_create_tethysapp_warehouse_release_app_store_branch_not_exists(): + mock_repo = MagicMock(heads=['main']) + branch = "test_branch" + create_tethysapp_warehouse_release(mock_repo, branch) + + mock_repo.create_head.assert_called_with('tethysapp_warehouse_release') + mock_repo.git.checkout.assert_not_called() + mock_repo.git.merge.assert_not_called() + + +def test_create_tethysapp_warehouse_release_app_store_branch_exists(): + mock_repo = MagicMock(heads=['tethysapp_warehouse_release']) + branch = "test_branch" + create_tethysapp_warehouse_release(mock_repo, branch) + + mock_repo.create_head.assert_not_called() + mock_repo.git.checkout.assert_called_with('tethysapp_warehouse_release') + mock_repo.git.merge.assert_called_with(branch) + + +def test_generate_current_version(): + setup_py_data = { + "version": "1.0" + } + version = generate_current_version(setup_py_data) + + assert version == setup_py_data['version'] + + +def test_reset_folder(tmp_path): + test_path = tmp_path / "test_dir" + test_path.mkdir() + test2_path = test_path / "test2_dir" + test2_path.mkdir() + + reset_folder(test_path) + + assert not test2_path.is_dir() + + +def test_copy_files_for_recipe(tmp_path, app_files_dir): + file = "main_template.yaml" + files_changed = False + src = app_files_dir / file + dest = tmp_path / file + + files_changed = copy_files_for_recipe(src, dest, files_changed) + + assert files_changed + assert dest.is_file() + + # Rerun to test functionality for existing file + files_changed = False + files_changed = copy_files_for_recipe(src, dest, files_changed) + + assert not files_changed + assert dest.is_file() + + +def test_create_upload_command(tmp_path, app_files_dir): + labels_string = "main --label dev" + create_upload_command(labels_string, app_files_dir, tmp_path) + + upload_command_file = tmp_path / "upload_command.txt" + assert "anaconda upload --force --label main --label dev noarch/*.tar.bz2" == upload_command_file.read_text() + + # Rerun to test functionality for 
existing file + labels_string = "main" + create_upload_command(labels_string, app_files_dir, tmp_path) + + upload_command_file = tmp_path / "upload_command.txt" + assert "anaconda upload --force --label main noarch/*.tar.bz2" == upload_command_file.read_text() + + +@pytest.mark.parametrize( + "setup_py_data, expected_keywords, expected_email", [ + ({"keywords": "example, test", "author_email": "tester@email.com"}, ["example", "test"], "tester@email.com"), + ({"keywords": "example", "author_email": "tester@email.com"}, ["example"], "tester@email.com"), + ({"keywords": "", "author_email": ""}, [], ""), + ({}, [], "")]) +def test_get_keywords_and_email(setup_py_data, expected_keywords, expected_email): + + keywords, email = get_keywords_and_email(setup_py_data) + + assert keywords == expected_keywords + assert email == expected_email + + +def test_create_template_data_for_install(complex_tethysapp): + install_data = {'github_dir': complex_tethysapp, "dev_url": "https://github.com/notrealorg/fakeapp"} + setup_py_data = { + 'name': 'release_package', 'version': '0.0.1', 'description': 'example', + 'long_description': 'This is just an example for testing', 'keywords': 'example,test', + 'author': 'Tester', 'author_email': 'tester@email.com', 'url': '', 'license': 'BSD-3' + } + template_data = create_template_data_for_install(install_data, setup_py_data) + + expected_template_data = { + 'metadataObj': "{'name': 'release_package', 'version': '0.0.1', 'description': 'example', " + "'long_description': 'This is just an example for testing', 'keywords': 'example,test', " + "'author': 'Tester', 'author_email': 'tester@email.com', 'url': '', 'license': 'BSD-3', " + "'tethys_version': '>=4.0', 'dev_url': 'https://github.com/notrealorg/fakeapp'}" + } + assert template_data == expected_template_data + + +def test_fix_setup(test_files_dir, tmp_path): + bad_setup = test_files_dir / "bad_setup.py" + good_setup = test_files_dir / "setup.py" + tmp_setup = tmp_path / "setup2.py" + shutil.copyfile(bad_setup, tmp_setup) + + app_package = fix_setup(tmp_setup) + + assert app_package == "test_app" + assert filecmp.cmp(tmp_setup, good_setup, shallow=False) + + +def test_remove_init_file(tethysapp_base_with_application_files): + install_data = {"github_dir": tethysapp_base_with_application_files} + + remove_init_file(install_data) + + init_file = tethysapp_base_with_application_files / "__init__.py" + init_file.is_file() + + +def test_apply_main_yml_template(app_files_dir, tmp_path, mocker): + rel_package = "test_app" + install_data = {"email": "test@email.com"} + mock_apply_template = mocker.patch('tethysapp.app_store.submission_handlers.apply_template') + apply_main_yml_template(app_files_dir, tmp_path, rel_package, install_data) + + source = os.path.join(app_files_dir, 'main_template.yaml') + template_data = { + 'subject': "Tethys App Store: Build complete for " + rel_package, + 'email': install_data['email'], + 'buildMsg': """ + Your Tethys App has been successfully built and is now available on the Tethys App Store. + This is an auto-generated email and this email is not monitored for replies. 
+ Please send any queries to gromero@aquaveo.com + """ + } + destination = os.path.join(tmp_path, 'main.yaml') + mock_apply_template.assert_called_with(source, template_data, destination) + + +def test_get_head_and_tag_names(): + tag1 = MagicMock(ref="tag1") + tag2 = MagicMock(ref="tag2") + mock_repo = MagicMock() + mock_repo.get_git_refs.return_value = [tag1, tag2] + + heads = get_head_and_tag_names(mock_repo) + + assert heads == ["tag1", "tag2"] + + +def test_create_current_tag_version(mocker): + current_version = "1.0" + head_names_list = [f"v{current_version}_0_2024_1_1", f"v{current_version}_1_2024_1_1"] + mock_time = mocker.patch('tethysapp.app_store.submission_handlers.time') + mock_time.strftime.return_value = "2024_1_1" + + tag = create_current_tag_version(current_version, head_names_list) + + expected_tag = f"v{current_version}_2_2024_1_1" + assert tag == expected_tag + + +def test_check_if_organization_in_remote_exists(): + mock_remote = MagicMock() + github_organization = "test_org" + mock_repo = MagicMock(remotes={github_organization: mock_remote}) + remote_url = "https://github.com/notrealorg/fakeapp" + + tethysapp_remote = check_if_organization_in_remote(mock_repo, github_organization, remote_url) + + assert mock_remote == tethysapp_remote + mock_remote.set_url.assert_called_with(remote_url) + mock_repo.create_remote.assert_not_called() + + +def test_check_if_organization_in_remote_dne(): + mock_remote = MagicMock() + github_organization = "test_org" + mock_repo = MagicMock(remotes={}) + mock_repo.create_remote.side_effect = [mock_remote] + remote_url = "https://github.com/notrealorg/fakeapp" + + tethysapp_remote = check_if_organization_in_remote(mock_repo, github_organization, remote_url) + + assert mock_remote == tethysapp_remote + mock_remote.set_url.assert_not_called() + mock_repo.create_remote.assert_called_with(github_organization, remote_url) + + +def test_push_to_warehouse_release_remote_branch(): + mock_repo = MagicMock() + mock_remote = MagicMock() + file_changed = True + current_tag_name = "test_tag" + + push_to_warehouse_release_remote_branch(mock_repo, mock_remote, current_tag_name, file_changed) + + mock_repo.git.add.assert_called_with(A=True) + mock_repo.git.commit.assert_called_with(m=f"tag version {current_tag_name}") + mock_remote.push.assert_called_with('tethysapp_warehouse_release', force=True) + + +def test_create_head_current_version(): + mock_repo = MagicMock() + mock_branch = MagicMock() + current_tag_name = "v1.0_2_2024_1_1" + head_names_list = ["v1.0_0_2024_1_1", "v1.0_1_2024_1_1"] + mock_remote = MagicMock() + mock_repo.create_head.side_effect = [mock_branch] + + create_head_current_version(mock_repo, current_tag_name, head_names_list, mock_remote) + + mock_repo.git.checkout.assert_called_with(current_tag_name) + mock_remote.push.assert_called_with(mock_branch) + + +def test_create_head_current_version_new_tag(): + mock_repo = MagicMock() + current_tag_name = "v1.0_1_2024_1_1" + head_names_list = ["v1.0_0_2024_1_1", "v1.0_1_2024_1_1"] + mock_remote = MagicMock() + + create_head_current_version(mock_repo, current_tag_name, head_names_list, mock_remote) + + mock_repo.git.checkout.assert_called_with(current_tag_name) + mock_remote.push.assert_called_with(current_tag_name) + + +def test_create_tags_for_current_version_dne(): + current_tag_name = "v1.0_2_2024_1_1" + head_names_list = ["v1.0_0_2024_1_1", "v1.0_1_2024_1_1"] + mock_repo = MagicMock(heads={"tethysapp_warehouse_release": "ref"}) + mock_remote = MagicMock() + mock_tag = MagicMock() + 
mock_repo.create_tag.side_effect = [mock_tag] + + create_tags_for_current_version(mock_repo, current_tag_name, head_names_list, mock_remote) + + mock_repo.create_tag.assert_called_with( + f"{current_tag_name}_release", + ref="ref", + message=f'This is a tag-object pointing to tethysapp_warehouse_release branch with release version {current_tag_name}') # noqa: E501 + mock_remote.push.assert_called_with(mock_tag) + + +def test_create_tags_for_current_version_exists(): + current_tag_name = "v1.0_1_2024_1_1" + head_names_list = ["v1.0_0_2024_1_1", "v1.0_1_2024_1_1_release"] + mock_repo = MagicMock(heads={"tethysapp_warehouse_release": "ref"}) + mock_remote = MagicMock() + mock_tag = MagicMock() + mock_repo.create_tag.side_effect = [mock_tag] + + create_tags_for_current_version(mock_repo, current_tag_name, head_names_list, mock_remote) + + mock_repo.git.tag.assert_called_with('-d', f"{current_tag_name}_release") + mock_repo.create_tag.assert_called_with( + f"{current_tag_name}_release", + ref="ref", + message=f'This is a tag-object pointing to tethysapp_warehouse_release branch with release version {current_tag_name}') # noqa: E501 + mock_remote.push.assert_has_calls([call(refspec=f":{current_tag_name}_release"), call(mock_tag)]) + + +def test_get_workflow_job_url(mocker): + current_tag_name = "v1.0_1_2024_1_1" + hex = "abc123" + mocker.patch('tethysapp.app_store.submission_handlers.time') + + mock_repo = MagicMock() + mock_repo.head.object.hexsha = hex + mock_remote_repo = MagicMock() + mock_job = MagicMock(head_sha=hex, html_url="job_url") + mock_workflow = MagicMock(display_title="tag version v1.0_1_2024_1_1") + mock_workflow.jobs.return_value = [mock_job] + mock_remote_repo.get_workflow_runs.return_value = [mock_workflow] + + job_url = get_workflow_job_url(mock_repo, mock_remote_repo, current_tag_name) + + assert job_url == "job_url" + + +def test_get_workflow_job_url_not_found(mocker): + current_tag_name = "v1.0_1_2024_1_1" + hex = "abc123" + mocker.patch('tethysapp.app_store.submission_handlers.time') + + mock_repo = MagicMock() + mock_repo.head.object.hexsha = hex + mock_remote_repo = MagicMock() + mock_job = MagicMock(head_sha="123abc", html_url="job_url") + mock_workflow = MagicMock(display_title="tag version v1.0_1_2024_1_1") + mock_workflow.jobs.return_value = [mock_job] + mock_remote_repo.get_workflow_runs.return_value = [mock_workflow] + + job_url = get_workflow_job_url(mock_repo, mock_remote_repo, current_tag_name) + + assert job_url is None + + +def test_process_branch(mix_active_inactive_stores, mocker, basic_tethysapp): + dev_url = "https://github.com/notrealorg/fakeapp" + install_data = { + "github_organization": "fake_org", + "github_token": "fake_token", + "github_dir": str(basic_tethysapp), + "stores": mix_active_inactive_stores, + "dev_url": dev_url, + "email": "test@email.com", + "conda_labels": ["main", "dev"], + "conda_channel": "test_channel", + "branch": "test_branch" + } + mock_channel = MagicMock() + mock_github = mocker.patch('tethysapp.app_store.submission_handlers.github') + mock_github.Github().get_organization().get_repo().git_url.replace.return_value = dev_url + mocker.patch('tethysapp.app_store.submission_handlers.git') + mocker.patch('tethysapp.app_store.submission_handlers.get_workflow_job_url', return_value="job_url") + mock_send_notification = mocker.patch('tethysapp.app_store.submission_handlers.send_notification') + + process_branch(install_data, mock_channel) + + expected_data_json = { + "data": { + "githubURL": dev_url, + "job_url": "job_url", + 
"conda_channel": "test_channel" + }, + "jsHelperFunction": "addComplete", + "helper": "addModalHelper" + } + mock_send_notification.assert_called_with(expected_data_json, mock_channel) diff --git a/tethysapp/app_store/tests/unit_tests/test_uninstall_handlers.py b/tethysapp/app_store/tests/unit_tests/test_uninstall_handlers.py new file mode 100644 index 0000000..874d1e0 --- /dev/null +++ b/tethysapp/app_store/tests/unit_tests/test_uninstall_handlers.py @@ -0,0 +1,193 @@ +from conda.exceptions import PackagesNotFoundError +from unittest.mock import MagicMock, call +from tethys_apps.exceptions import TethysAppSettingNotAssigned +from tethysapp.app_store.uninstall_handlers import (send_uninstall_messages, uninstall_app) + + +def test_send_uninstall_messages(mocker): + mock_sn = mocker.patch('tethysapp.app_store.uninstall_handlers.send_notification') + mock_channel = MagicMock() + + message = "uninstall message" + send_uninstall_messages(message, mock_channel) + + expected_json = {"target": "uninstallNotices", "message": message} + mock_sn.assert_called_with(expected_json, mock_channel) + + +def test_uninstall_app(mocker, caplog, app_store_dir): + mock_sn = mocker.patch('tethysapp.app_store.uninstall_handlers.send_uninstall_messages') + mock_subprocess = mocker.patch('tethysapp.app_store.uninstall_handlers.subprocess') + mock_subprocess.Popen().stdout.readline.side_effect = ["uninstall still".encode('utf-8'), + "Mamba Remove Complete\n".encode('utf-8')] + mocker.patch('tethysapp.app_store.uninstall_handlers.get_manage_path', return_value="manage_path") + mock_setting = MagicMock() + mock_setting.__str__.side_effect = ["setting1"] + mock_setting.persistent_store_database_exists.side_effect = [True] + mock_app = MagicMock(persistent_store_database_settings=[mock_setting]) + mocker.patch('tethysapp.app_store.uninstall_handlers.TethysApp.objects.filter', side_effect=[[mock_app]]) + mock_channel = MagicMock() + mock_workspace = MagicMock() + + uninstall_data = {'name': 'test_app'} + uninstall_app(uninstall_data, mock_channel, mock_workspace) + + mock_subprocess.call.assert_called_with(['python', "manage_path", 'tethys_app_uninstall', "test_app", "-f"]) + uninstall_script = str(app_store_dir / "scripts" / "mamba_uninstall.sh") + mock_subprocess.Popen.assert_called_with([uninstall_script, "test_app"], + stdout=mock_subprocess.PIPE, stderr=mock_subprocess.STDOUT) + mock_sn.assert_has_calls([ + call('Starting Uninstall. Please wait...', mock_channel), + call('Tethys App Uninstalled. Running Conda/GitHub Cleanup...', mock_channel), + call("uninstall still", mock_channel), + call('Uninstall completed. 
Restarting server...', mock_channel) + ]) + assert "Dropping Database for persistent store setting: setting1" in caplog.messages + assert "uninstall still" in caplog.messages + + +def test_uninstall_app_no_persistent_stores(mocker, caplog, app_store_dir): + mock_sn = mocker.patch('tethysapp.app_store.uninstall_handlers.send_uninstall_messages') + mock_subprocess = mocker.patch('tethysapp.app_store.uninstall_handlers.subprocess') + mock_subprocess.Popen().stdout.readline.side_effect = ["uninstall still".encode('utf-8'), + "Mamba Remove Complete\n".encode('utf-8')] + mock_subprocess.call.side_effect = [KeyboardInterrupt] + mocker.patch('tethysapp.app_store.uninstall_handlers.get_manage_path', return_value="manage_path") + mock_app = MagicMock(persistent_store_database_settings=[]) + mocker.patch('tethysapp.app_store.uninstall_handlers.TethysApp.objects.filter', side_effect=[[mock_app]]) + mock_channel = MagicMock() + mock_workspace = MagicMock() + + uninstall_data = {'name': 'test_app'} + uninstall_app(uninstall_data, mock_channel, mock_workspace) + + mock_subprocess.call.assert_called_with(['python', "manage_path", 'tethys_app_uninstall', "test_app", "-f"]) + uninstall_script = str(app_store_dir / "scripts" / "mamba_uninstall.sh") + mock_subprocess.Popen.assert_called_with([uninstall_script, "test_app"], + stdout=mock_subprocess.PIPE, stderr=mock_subprocess.STDOUT) + mock_sn.assert_has_calls([ + call('Starting Uninstall. Please wait...', mock_channel), + call('Tethys App Uninstalled. Running Conda/GitHub Cleanup...', mock_channel), + call("uninstall still", mock_channel), + call('Uninstall completed. Restarting server...', mock_channel) + ]) + assert "No Persistent store services found for: test_app" in caplog.messages + assert "uninstall still" in caplog.messages + + +def test_uninstall_app_no_target_app(mocker, caplog, app_store_dir): + mock_sn = mocker.patch('tethysapp.app_store.uninstall_handlers.send_uninstall_messages') + mock_subprocess = mocker.patch('tethysapp.app_store.uninstall_handlers.subprocess') + mock_subprocess.Popen().stdout.readline.side_effect = ["uninstall still".encode('utf-8'), + "Mamba Remove Complete\n".encode('utf-8')] + mocker.patch('tethysapp.app_store.uninstall_handlers.get_manage_path', return_value="manage_path") + mocker.patch('tethysapp.app_store.uninstall_handlers.TethysApp.objects.filter', side_effect=[[]]) + mock_channel = MagicMock() + mock_workspace = MagicMock() + + uninstall_data = {'name': 'test_app'} + uninstall_app(uninstall_data, mock_channel, mock_workspace) + + mock_subprocess.call.assert_called_with(['python', "manage_path", 'tethys_app_uninstall', "test_app", "-f"]) + uninstall_script = str(app_store_dir / "scripts" / "mamba_uninstall.sh") + mock_subprocess.Popen.assert_called_with([uninstall_script, "test_app"], + stdout=mock_subprocess.PIPE, stderr=mock_subprocess.STDOUT) + mock_sn.assert_has_calls([ + call('Starting Uninstall. Please wait...', mock_channel), + call('Tethys App Uninstalled. Running Conda/GitHub Cleanup...', mock_channel), + call("uninstall still", mock_channel), + call('Uninstall completed. Restarting server...', mock_channel) + ]) + assert "Couldn't find the target application for removal of databases. 
Continuing clean up" in caplog.messages + assert "uninstall still" in caplog.messages + + +def test_uninstall_app_bad_setting(mocker, caplog, app_store_dir): + mock_sn = mocker.patch('tethysapp.app_store.uninstall_handlers.send_uninstall_messages') + mock_subprocess = mocker.patch('tethysapp.app_store.uninstall_handlers.subprocess') + mock_subprocess.Popen().stdout.readline.side_effect = ["uninstall still".encode('utf-8'), + "Mamba Remove Complete\n".encode('utf-8')] + mocker.patch('tethysapp.app_store.uninstall_handlers.get_manage_path', return_value="manage_path") + mock_setting = MagicMock() + mock_setting.__str__.side_effect = ["setting1"] + mock_setting.persistent_store_database_exists.side_effect = [Exception("bad_setting")] + mock_app = MagicMock(persistent_store_database_settings=[mock_setting]) + mocker.patch('tethysapp.app_store.uninstall_handlers.TethysApp.objects.filter', side_effect=[[mock_app]]) + mock_channel = MagicMock() + mock_workspace = MagicMock() + + uninstall_data = {'name': 'test_app'} + uninstall_app(uninstall_data, mock_channel, mock_workspace) + + mock_subprocess.call.assert_called_with(['python', "manage_path", 'tethys_app_uninstall', "test_app", "-f"]) + uninstall_script = str(app_store_dir / "scripts" / "mamba_uninstall.sh") + mock_subprocess.Popen.assert_called_with([uninstall_script, "test_app"], + stdout=mock_subprocess.PIPE, stderr=mock_subprocess.STDOUT) + mock_sn.assert_has_calls([ + call('Starting Uninstall. Please wait...', mock_channel), + call('Tethys App Uninstalled. Running Conda/GitHub Cleanup...', mock_channel), + call("uninstall still", mock_channel), + call('Uninstall completed. Restarting server...', mock_channel) + ]) + assert "bad_setting" in caplog.messages + assert "Couldn't connect to database for removal. Continuing clean up" in caplog.messages + assert "uninstall still" in caplog.messages + + +def test_uninstall_app_setting_not_assigned(mocker, caplog, app_store_dir): + mock_sn = mocker.patch('tethysapp.app_store.uninstall_handlers.send_uninstall_messages') + mock_subprocess = mocker.patch('tethysapp.app_store.uninstall_handlers.subprocess') + mock_subprocess.Popen().stdout.readline.side_effect = ["uninstall still".encode('utf-8'), + "Mamba Remove Complete\n".encode('utf-8')] + mocker.patch('tethysapp.app_store.uninstall_handlers.get_manage_path', return_value="manage_path") + mock_setting = MagicMock() + mock_setting.__str__.side_effect = ["setting1"] + mock_setting.persistent_store_database_exists.side_effect = [TethysAppSettingNotAssigned] + mock_app = MagicMock(persistent_store_database_settings=[mock_setting]) + mocker.patch('tethysapp.app_store.uninstall_handlers.TethysApp.objects.filter', side_effect=[[mock_app]]) + mock_channel = MagicMock() + mock_workspace = MagicMock() + + uninstall_data = {'name': 'test_app'} + uninstall_app(uninstall_data, mock_channel, mock_workspace) + + mock_subprocess.call.assert_called_with(['python', "manage_path", 'tethys_app_uninstall', "test_app", "-f"]) + uninstall_script = str(app_store_dir / "scripts" / "mamba_uninstall.sh") + mock_subprocess.Popen.assert_called_with([uninstall_script, "test_app"], + stdout=mock_subprocess.PIPE, stderr=mock_subprocess.STDOUT) + mock_sn.assert_has_calls([ + call('Starting Uninstall. Please wait...', mock_channel), + call('Tethys App Uninstalled. Running Conda/GitHub Cleanup...', mock_channel), + call("uninstall still", mock_channel), + call('Uninstall completed. 
Restarting server...', mock_channel) + ]) + assert "uninstall still" in caplog.messages + + +def test_uninstall_app_PackagesNotFoundError(mocker, caplog, app_store_dir): + mock_sn = mocker.patch('tethysapp.app_store.uninstall_handlers.send_uninstall_messages') + mock_subprocess = mocker.patch('tethysapp.app_store.uninstall_handlers.subprocess') + mock_subprocess.Popen.side_effect = [PackagesNotFoundError("test_app")] + mocker.patch('tethysapp.app_store.uninstall_handlers.get_manage_path', return_value="manage_path") + mock_app = MagicMock(persistent_store_database_settings=[]) + mocker.patch('tethysapp.app_store.uninstall_handlers.TethysApp.objects.filter', side_effect=[[mock_app]]) + git_app = {"name": "test_app", "path": "app_path"} + mocker.patch('tethysapp.app_store.uninstall_handlers.get_github_install_metadata', side_effect=[[git_app]]) + mocker.patch('tethysapp.app_store.uninstall_handlers.shutil') + mocker.patch('tethysapp.app_store.uninstall_handlers.clear_github_cache_list') + mock_channel = MagicMock() + mock_workspace = MagicMock() + + uninstall_data = {'name': 'test_app'} + uninstall_app(uninstall_data, mock_channel, mock_workspace) + + mock_subprocess.call.assert_called_with(['python', "manage_path", 'tethys_app_uninstall', "test_app", "-f"]) + uninstall_script = str(app_store_dir / "scripts" / "mamba_uninstall.sh") + mock_subprocess.Popen.assert_called_with([uninstall_script, "test_app"], + stdout=mock_subprocess.PIPE, stderr=mock_subprocess.STDOUT) + mock_sn.assert_has_calls([ + call('Starting Uninstall. Please wait...', mock_channel), + call('Tethys App Uninstalled. Running Conda/GitHub Cleanup...', mock_channel), + call('Uninstall completed. Restarting server...', mock_channel) + ]) + assert "No Persistent store services found for: test_app" in caplog.messages diff --git a/tethysapp/app_store/tests/unit_tests/test_update_handlers.py b/tethysapp/app_store/tests/unit_tests/test_update_handlers.py new file mode 100644 index 0000000..09c90f1 --- /dev/null +++ b/tethysapp/app_store/tests/unit_tests/test_update_handlers.py @@ -0,0 +1,114 @@ +from unittest.mock import MagicMock, call +from tethysapp.app_store.update_handlers import update_app, send_update_msg, conda_update + + +def test_send_update_msg(mocker): + mock_sn = mocker.patch('tethysapp.app_store.update_handlers.send_notification') + mock_channel = MagicMock() + + message = "update message" + send_update_msg(message, mock_channel) + + expected_json = {"target": "update-notices", "message": message} + mock_sn.assert_called_with(expected_json, mock_channel) + + +def test_conda_update(mocker, app_store_dir): + mocker.patch('tethysapp.app_store.update_handlers.time.time', side_effect=[10, 20]) + mock_send_update_msg = mocker.patch('tethysapp.app_store.update_handlers.send_update_msg') + mock_subprocess = mocker.patch('tethysapp.app_store.update_handlers.subprocess') + mock_subprocess.Popen().stdout.readline.side_effect = ["Collecting package metadata: done".encode('utf-8'), + "Solving environment: done".encode('utf-8'), + "Verifying transaction: done".encode('utf-8'), + "All requested packages already installed.".encode('utf-8'), + "Found conflicts!: conflicting requests".encode('utf-8'), + "Mamba Update Complete".encode('utf-8')] + mock_channel = MagicMock() + app_name = "test_app" + app_version = "1.0.0" + conda_channel = "conda_channel" + conda_label = "conda_label" + + conda_update(app_name, app_version, conda_channel, conda_label, mock_channel) + + update_script = str(app_store_dir / "scripts" / "mamba_update.sh") + 
mock_subprocess.Popen.assert_called_with( + [update_script, f'{app_name}={app_version}', f'{conda_channel}/label/{conda_label}'], + stdout=mock_subprocess.PIPE, stderr=mock_subprocess.STDOUT) + mock_send_update_msg.assert_has_calls([ + call("Updating the Conda environment may take a couple minutes to complete depending on how " + "complicated the environment is. Please wait....", mock_channel), + call("Package Metadata Collection: Done", mock_channel), + call("Solving Environment: Done", mock_channel), + call("Verifying Transaction: Done", mock_channel), + call("Application package is already installed in this conda environment.", mock_channel), + call("Mamba install found conflicts. Please try running the following command in your terminal's conda " + f"environment to attempt a manual installation : mamba install -c {conda_channel} {app_name}", + mock_channel), + call("Conda update completed in 10.00 seconds.", mock_channel) + ]) + + +def test_conda_update_2(mocker, app_store_dir): + mocker.patch('tethysapp.app_store.update_handlers.time.time', side_effect=[10, 20]) + mock_send_update_msg = mocker.patch('tethysapp.app_store.update_handlers.send_update_msg') + mock_subprocess = mocker.patch('tethysapp.app_store.update_handlers.subprocess') + mock_subprocess.Popen().stdout.readline.return_value = "" + mock_channel = MagicMock() + app_name = "test_app" + app_version = "1.0.0" + conda_channel = "conda_channel" + conda_label = "conda_label" + + conda_update(app_name, app_version, conda_channel, conda_label, mock_channel) + + update_script = str(app_store_dir / "scripts" / "mamba_update.sh") + mock_subprocess.Popen.assert_called_with( + [update_script, f'{app_name}={app_version}', f'{conda_channel}/label/{conda_label}'], + stdout=mock_subprocess.PIPE, stderr=mock_subprocess.STDOUT) + mock_send_update_msg.assert_has_calls([ + call("Updating the Conda environment may take a couple minutes to complete depending on how " + "complicated the environment is. 
Please wait....", mock_channel), + call("Conda update completed in 10.00 seconds.", mock_channel) + ]) + + +def test_update_app(mocker): + mock_restart = mocker.patch('tethysapp.app_store.update_handlers.restart_server') + mock_conda_update = mocker.patch('tethysapp.app_store.update_handlers.conda_update') + mock_channel = MagicMock() + mock_workspace = MagicMock() + data = { + "name": "test_app", + "version": "1.0.0", + "channel": "conda_channel", + "label": "conda_label" + } + + update_app(data, mock_channel, mock_workspace) + + expected_data = {"restart_type": "update", "name": data["name"]} + mock_conda_update.assert_called_with(data["name"], data["version"], data["channel"], data["label"], mock_channel) + mock_restart.assert_called_with(data=expected_data, channel_layer=mock_channel, app_workspace=mock_workspace) + + +def test_update_app_exception(mocker, caplog): + mock_restart = mocker.patch('tethysapp.app_store.update_handlers.restart_server') + mock_send_update_msg = mocker.patch('tethysapp.app_store.update_handlers.send_update_msg') + mocker.patch('tethysapp.app_store.update_handlers.conda_update', side_effect=[Exception("Conda failed")]) + mock_channel = MagicMock() + mock_workspace = MagicMock() + data = { + "name": "test_app", + "version": "1.0.0", + "channel": "conda_channel", + "label": "conda_label" + } + + update_app(data, mock_channel, mock_workspace) + + assert "Error while running conda install during the update process" in caplog.messages + assert "Conda failed" in caplog.messages + mock_send_update_msg.assert_called_with("Error while Installing Conda package. Please check logs for details", + mock_channel) + mock_restart.assert_not_called() diff --git a/tethysapp/app_store/tests/unit_tests/test_utilities.py b/tethysapp/app_store/tests/unit_tests/test_utilities.py new file mode 100644 index 0000000..fa8b5f5 --- /dev/null +++ b/tethysapp/app_store/tests/unit_tests/test_utilities.py @@ -0,0 +1,12 @@ +from tethysapp.app_store.utilities import encrypt, decrypt +from cryptography.fernet import Fernet + + +def test_encrypt(): + key = Fernet.generate_key().decode() + password = "my password" + + encrypted_pass = encrypt(password, key) + decrypted_pass = decrypt(encrypted_pass, key) + + assert decrypted_pass == password diff --git a/tethysapp/app_store/uninstall_handlers.py b/tethysapp/app_store/uninstall_handlers.py index 97875d8..e92b0de 100644 --- a/tethysapp/app_store/uninstall_handlers.py +++ b/tethysapp/app_store/uninstall_handlers.py @@ -1,6 +1,7 @@ from conda.exceptions import PackagesNotFoundError from tethys_cli.cli_helpers import get_manage_path from tethys_apps.exceptions import TethysAppSettingNotAssigned +from tethys_apps.models import TethysApp import subprocess import shutil @@ -10,6 +11,12 @@ def send_uninstall_messages(msg, channel_layer): + """Send a message to the django channel about the uninstall status + + Args: + msg (str): Message to send to the django channel + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + """ data_json = { "target": 'uninstallNotices', "message": msg @@ -18,6 +25,14 @@ def send_uninstall_messages(msg, channel_layer): def uninstall_app(data, channel_layer, app_workspace): + """Removed app database connections and uninstall the app. Try to uninstall with mamba first and if that fails, + assume it is a github app and try to uninstall that. 
+ + Args: + data (dict): Information about the app that will be uninstalled + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + app_workspace (str): Path pointing to the app workspace within the app store + """ manage_path = get_manage_path({}) app_name = data['name'] @@ -26,7 +41,6 @@ try: # Check if application had provisioned any Persistent stores and clear them out - from tethys_apps.models import TethysApp target_app = TethysApp.objects.filter(package=app_name)[0] ps_db_settings = target_app.persistent_store_database_settings @@ -36,7 +50,7 @@ try: if setting.persistent_store_database_exists(): logger.info( - "Droping Database for persistent store setting: " + str(setting)) + "Dropping Database for persistent store setting: " + str(setting)) setting.drop_persistent_store_database() except TethysAppSettingNotAssigned: pass @@ -54,8 +68,7 @@ logger.info( "Couldn't connect to database for removal. Continuing clean up") - process = ['python', manage_path, 'tethys_app_uninstall', app_name] - process.append('-f') + process = ['python', manage_path, 'tethys_app_uninstall', app_name, '-f'] try: subprocess.call(process) @@ -90,7 +103,7 @@ # This was installed using GitHub. Try to clean out github_installed = get_github_install_metadata(app_workspace) for app in github_installed: - if app['name'] == data['name']: + if app['name'] == app_name: # remove App Directory shutil.rmtree(app['path']) diff --git a/tethysapp/app_store/update_handlers.py b/tethysapp/app_store/update_handlers.py index 58b88af..bbd16ae 100644 --- a/tethysapp/app_store/update_handlers.py +++ b/tethysapp/app_store/update_handlers.py @@ -7,6 +7,12 @@ def send_update_msg(msg, channel_layer): + """Send a message to the django channel about the update status + + Args: + msg (str): Message to send to the django channel + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + """ data_json = { "target": 'update-notices', "message": msg } @@ -14,13 +20,19 @@ -def conda_update(app_name, app_version, app_channel, app_label, channel_layer): +def conda_update(app_name, app_version, conda_channel, conda_label, channel_layer): + """Update the app's conda package to the specified version + Args: + app_name (str): Name of the installed app + app_version (str): Version of the app that will be installed + conda_channel (str): Name of the conda channel to use for app discovery + conda_label (str): Name of the conda label to use for app discovery + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + """ start_time = time.time() - start_msg = ("Updating the Conda environment may take a " - "couple minutes to complete depending on how " - "complicated the environment is. Please wait...." - ) + start_msg = ("Updating the Conda environment may take a couple minutes to complete depending on how " + "complicated the environment is. 
Please wait....") send_update_msg(start_msg, channel_layer) @@ -28,10 +40,10 @@ def conda_update(app_name, app_version, app_channel, app_label, channel_layer): script_path = os.path.join(dir_path, "scripts", "mamba_update.sh") app_name_with_version = app_name + "=" + app_version - label_channel = f'{app_channel}' + label_channel = f'{conda_channel}' - if app_label != 'main': - label_channel = f'{app_channel}/label/{app_label}' + if conda_label != 'main': + label_channel = f'{conda_channel}/label/{conda_label}' install_command = [script_path, app_name_with_version, label_channel] # Running this sub process, in case the library isn't installed, triggers a restart. @@ -42,11 +54,7 @@ def conda_update(app_name, app_version, app_channel, app_label, channel_layer): if output == '': break if output: - - # Checkpoints for the output - str_output = str(output.strip()) - str_output = str(output.decode('utf-8')) - logger.info(str_output) + str_output = str(output.strip().decode('utf-8')) if (check_all_present(str_output, ['Collecting package metadata', 'done'])): send_update_msg("Package Metadata Collection: Done", channel_layer) if (check_all_present(str_output, ['Solving environment', 'done'])): @@ -59,17 +67,22 @@ def conda_update(app_name, app_version, app_channel, app_label, channel_layer): if (check_all_present(str_output, ['Mamba Update Complete'])): break if (check_all_present(str_output, ['Found conflicts!', 'conflicting requests'])): - send_update_msg("Mamba install found conflicts." - "Please try running the following command in your terminal's" - "conda environment to attempt a manual installation : " - "mamba install -c " + app_channel + " " + app_name, + send_update_msg("Mamba install found conflicts. Please try running the following command in your " + "terminal's conda environment to attempt a manual installation : mamba install -c " + f"{conda_channel} {app_name}", channel_layer) send_update_msg("Conda update completed in %.2f seconds." % (time.time() - start_time), channel_layer) def update_app(data, channel_layer, app_workspace): + """Attempts to update an application to the specified version. Restarts the server after updating + Args: + data (dict): Information about the app that will be updated + channel_layer (Django Channels Layer): Asynchronous Django channel layer from the websocket consumer + app_workspace (str): Path pointing to the app workspace within the app store + """ try: conda_update(data["name"], data["version"], data["channel"], data["label"], channel_layer) diff --git a/tethysapp/app_store/utilities.py b/tethysapp/app_store/utilities.py index 0f27cd1..ad29d5a 100644 --- a/tethysapp/app_store/utilities.py +++ b/tethysapp/app_store/utilities.py @@ -1,5 +1,4 @@ from cryptography.fernet import Fernet -from .app import AppStore as app def encrypt(message: str, key: str) -> str: @@ -8,14 +7,3 @@ def encrypt(message: str, key: str) -> str: def decrypt(token: str, key: str) -> str: return (Fernet(key.encode()).decrypt(token.encode())).decode() - - -def get_available_stores_values(active_store): - available_stores_data_dict = app.get_custom_setting("stores_settings")['stores'] - if active_store != 'all': - available_stores_data_dict = list(filter(lambda x: x['conda_channel'] == active_store, - available_stores_data_dict)) - encryption_key = app.get_custom_setting("encryption_key") - for store in available_stores_data_dict: - store['github_token'] = decrypt(store['github_token'], encryption_key) - return available_stores_data_dict
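
For reference, a minimal round-trip sketch of the two Fernet wrappers kept in utilities.py after this change. The inline key generation and the sample token string below are illustrative assumptions, not part of the patch; a real deployment would supply its own Fernet key string.

    from cryptography.fernet import Fernet

    from tethysapp.app_store.utilities import encrypt, decrypt

    # Throwaway key generated only for this sketch; Fernet keys are urlsafe base64 strings.
    key = Fernet.generate_key().decode()

    # encrypt/decrypt are str-in/str-out wrappers around Fernet, as exercised by the
    # new test_utilities.py test.
    token = encrypt("my github token", key)
    assert decrypt(token, key) == "my github token"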