From b1f8e4ede9b3ae4c7e34556d19a6d70dbec97344 Mon Sep 17 00:00:00 2001
From: iulusoy
Date: Tue, 8 Oct 2024 08:05:19 +0000
Subject: [PATCH] =?UTF-8?q?Deploying=20to=20gh-pages=20from=20@=20ssciwr/A?=
 =?UTF-8?q?MMICO@65531c62045d9e251f5e0429a6dd33961bd392ac=20=F0=9F=9A=80?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 build/doctrees/environment.pickle             | Bin 149024 -> 149029 bytes
 build/doctrees/faq_link.doctree               | Bin 31566 -> 31632 bytes
 .../notebooks/DemoNotebook_ammico.doctree     | Bin 185929 -> 185929 bytes
 build/doctrees/readme_link.doctree            | Bin 32473 -> 32962 bytes
 build/html/.buildinfo                         |  2 +-
 build/html/_static/documentation_options.js   |  2 +-
 build/html/ammico.html                        |  6 +++---
 build/html/create_API_key_link.html           |  6 +++---
 build/html/faq_link.html                      | 12 ++++++------
 build/html/genindex.html                      |  6 +++---
 build/html/index.html                         |  6 +++---
 build/html/license_link.html                  |  6 +++---
 build/html/modules.html                       |  6 +++---
 build/html/notebooks/DemoNotebook_ammico.html | 12 ++++++------
 build/html/notebooks/Example cropposts.html   |  6 +++---
 build/html/py-modindex.html                   |  4 ++--
 build/html/readme_link.html                   | 16 ++++++++--------
 build/html/search.html                        |  4 ++--
 build/html/set_up_credentials.html            |  6 +++---
 source/conf.py                                |  4 ++--
 20 files changed, 52 insertions(+), 52 deletions(-)

diff --git a/build/doctrees/environment.pickle b/build/doctrees/environment.pickle
index 54726c448fe80399c83579549439ab55bec36bcf..af4fa9ce7d35ce0eaf6a9d900df46fd820fbba6d 100644
Binary files a/build/doctrees/environment.pickle and b/build/doctrees/environment.pickle differ
diff --git a/build/doctrees/faq_link.doctree b/build/doctrees/faq_link.doctree
Binary files a/build/doctrees/faq_link.doctree and b/build/doctrees/faq_link.doctree differ
diff --git a/build/doctrees/notebooks/DemoNotebook_ammico.doctree b/build/doctrees/notebooks/DemoNotebook_ammico.doctree
Binary files a/build/doctrees/notebooks/DemoNotebook_ammico.doctree and b/build/doctrees/notebooks/DemoNotebook_ammico.doctree differ
diff --git a/build/doctrees/readme_link.doctree b/build/doctrees/readme_link.doctree
index 4f0116b6b1c651123d01fc20cfdbe3688b1229f4..2050e0259a6202c47c0ab5dd800ae2b756254bb7 100644
Binary files a/build/doctrees/readme_link.doctree and b/build/doctrees/readme_link.doctree differ
z&++_&%yVZQPp-pYT^m}=hu_ulbOGhvsDt5RJ>7d2inm;YV>T`PRW$$)*Po>iWQ-g2 zT|^D7uC1%LD7-Kb*xJV8Z*6TS>o8`5bq#u*R*n7W#0QERm_3BukocIm7Wx{hNsW|w zvcaIqX2Ne9^bF{xtBf?lku9}ge^g6q*=VJNE_F`A4y=-ukWCZ`y*SKDn4#LH{#OaR zOR|R|lZ5D%l}5Pvh#HPK3K^Xj9NN-<+r|Zljf}B29Lc=#$W+K5ONrH3SbP$j7Dv(G zCW_hiOOx5D z1@JXLN)qr=^FH!-Y{)EKUvmquzQ>q-WH!QZOFb!srA|3?xrNtiIC&fX>1NNvM=f{S z{)_L*zNvse78q^yc8?wiOy|B~aQiW!<(zarUu|_!BgZ~jkiWf-lta_@S2M9Q0~NGE zitYJ-l+7Mur=n>&6t$OO$<6Hsv_0)1v~zpvG?};tK5u`M+k;v};YzU%e$io}8V*{k zOW=!+mq-9^?W};aofRa6;%&qEnl2Yw2GOFF(4>!c?I$!LcUaoJoy@~v_v>ic+Oi%S zeu|blx5D8b-Y}mNU7O@8{o(zUD?W*Ag&wZoL;|&QK{V`87~9 zwiVPYksy(kJ4sy+IZlHgm@%6HzzD) zP$|-A@O&jD4;&A57`l6bNCk8rC=SnBRpw_TL(*FIKqg0Ke9rPPa+Qboh%XX zPVzLF&+cG4G}%PxD{{zQ%xMM`j!A^ti7@u5YI;vC>pWG1ju)riC5f~*dXb{6YrgS7 z#aVgzZkND@83Qpx>x`aQz&B&$nzNCYXEtEu;!Hz%E&7thPe-Qq?~BC4F-s^Ij>iJg zl<0ko|*{x>-8rQ&6H`Ec9r z=Ml8GN-C8s><&j`{)q{HEEox=o=b^$u}-f=s-ENqqW-4>%zKfoZp$)3hetnr_ve=X}04lr>ua z@bP!gIp6R6&N<(A?tSr+>h*sy^+okpUzxwAmW*r-oaNul7-HcXQp7F{MCx2 zNQPhVPPx6)6XyfM85v~bCHWz_OjL`hqGntcb&*uJ*W>mIXo8YM2bd4mO@&^q9NZ4enFN#gSccoBb@hp+%9)W{sKi6 zQ5{`7l+M^yWNBq9&{DdOJp=XTT4=Ah02SsFs%y!x(2)n%t4i5**sQ9}vLHDVa=oT- z4Nr<$kz&wTitovjbaVw?0!Tn3SfScB<) zB(iGUyd4#sWmWKIr8dSD{!zK2+T3FJM`YN>$*^3U3nTh`)$lp^ zRrM?IsIC&8t#$}>})k?Y!u!S<=c6|XxeZKkLeIH4-$!cLPBwKBk zsu4uGADbafprEsn;C`1Z1GF(uYQ6hLXlvi>X_uQvMh{ zSlbyPesXh}llMSu-a|e35&6Y>@DeT)J&5yNQ+n{pZ5d!MQivhk-c^}UFJo~IGp4xj zf!%qPx_GHp;7PPyhBsPV>@U0|Z|zM1k1d1U0>7sOF57BYGT7RTK>GjKGP07?u>#?h zGslP1unP)X|5`ZZa|JCyUpU|rEI}bO5%y2G0>YHw4Y{43AkDZTJ1{-?2XjE(*2uCE zcpRUvv|TCFE3onVEV=QZ#PMUpPx7J)x{n=X7Tjp4DU&`;CyIv8-3Fd1fx0#D7VbLm z{(k~nvrt;Zupe&(;^=8m&KPA0eZyIWJJZD(_n~8^hZ@HWmS)vqWl8Xc<7xFMZVke^ zJ{NM0>G0k5B2|(aY^9m-hxV^A3H1F2c)Y{F0=!u?U;f;oMg997jVM)vXf?s*-eT0f*!zgG z&(&vSX|OzI0?Sb?#87YcIkzF z6FKmm(?jfYe2aO+gu2m@i>mol{+jq;q=G#bXP`T#Ak4$mXc4)*&`_CBCc*mX3B0}; zVQTbm!7|&6K^~rsKbkA#%4m=+%Ml6aTIJ0zOP z&_v=(B$i05k$4tGIHL19X9W6?3(}{iM3EMGltmt4kDlA-q-D-Anm0ee(r@CrQ^kWU zM~*IhUY4#Pa%TyT7s)v84ThW^k2B=kyow zZ~!w$!*z__Y31x9yZeRnp#uB`oUSzf=9N504}HAY5xqU&^ZW7pzQA{;!0!u&C~`PFY5p(r4Cc20 diff --git a/build/html/.buildinfo b/build/html/.buildinfo index 52166454..ebf1f6bc 100644 --- a/build/html/.buildinfo +++ b/build/html/.buildinfo @@ -1,4 +1,4 @@ # Sphinx build info version 1 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. -config: cfb548d3f0892cdaa09a4601a129febf +config: 9487e2edbbf95a60cd8fdb622afe617f tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/build/html/_static/documentation_options.js b/build/html/_static/documentation_options.js index d1f22919..033e08ef 100644 --- a/build/html/_static/documentation_options.js +++ b/build/html/_static/documentation_options.js @@ -1,5 +1,5 @@ const DOCUMENTATION_OPTIONS = { - VERSION: '0.0.1', + VERSION: '0.2.2', LANGUAGE: 'en', COLLAPSE_INDEX: false, BUILDER: 'html', diff --git a/build/html/ammico.html b/build/html/ammico.html index 9be207b0..433ccf61 100644 --- a/build/html/ammico.html +++ b/build/html/ammico.html @@ -6,14 +6,14 @@ - text module — AMMICO 0.0.1 documentation + text module — AMMICO 0.2.2 documentation - + @@ -228,7 +228,7 @@
  • - Edit on GitHub + Edit on GitHub

  • diff --git a/build/html/create_API_key_link.html b/build/html/create_API_key_link.html index e782159f..6bef995b 100644 --- a/build/html/create_API_key_link.html +++ b/build/html/create_API_key_link.html @@ -6,14 +6,14 @@ - Instructions how to generate and enable a google Cloud Vision API key — AMMICO 0.0.1 documentation + Instructions how to generate and enable a google Cloud Vision API key — AMMICO 0.2.2 documentation - + @@ -73,7 +73,7 @@
  • - Edit on GitHub + Edit on GitHub

  • diff --git a/build/html/faq_link.html b/build/html/faq_link.html index ed37d838..267809c0 100644 --- a/build/html/faq_link.html +++ b/build/html/faq_link.html @@ -6,14 +6,14 @@ - FAQ — AMMICO 0.0.1 documentation + FAQ — AMMICO 0.2.2 documentation - + @@ -87,7 +87,7 @@
  • - Edit on GitHub + Edit on GitHub

  • @@ -200,9 +200,9 @@

    What happens if I don’t have internet access - can I still use ammico?

    Why don’t I get probabilistic assessments of age, gender and race when running the Emotion Detector?

Due to well-documented biases in the detection of minorities with computer vision tools, and to the ethical implications of such detection, these parts of the tool are not directly made available to users. To access these capabilities, users must first agree to an ethical disclosure statement that reads:

    -

    “DeepFace and RetinaFace provide wrappers to trained models in face recognition and emotion detection. Age, gender and race/ethnicity models were trained on the backbone of VGG-Face with transfer learning. -ETHICAL DISCLOSURE STATEMENT: -The Emotion Detector uses DeepFace and RetinaFace to probabilistically assess the gender, age and race of the detected faces. Such assessments may not reflect how the individuals identify. Additionally, the classification is carried out in simplistic categories and contains only the most basic classes (for example, “male” and “female” for gender, and seven non-overlapping categories for ethnicity). To access these probabilistic assessments, you must therefore agree with the following statement: “I understand the ethical and privacy implications such assessments have for the interpretation of the results and that this analysis may result in personal and possibly sensitive data, and I wish to proceed.”

    +

    “DeepFace and RetinaFace provide wrappers to trained models in face recognition and emotion detection. Age, gender and race/ethnicity models were trained on the backbone of VGG-Face with transfer learning.

    +

    ETHICAL DISCLOSURE STATEMENT:

    +

    The Emotion Detector uses DeepFace and RetinaFace to probabilistically assess the gender, age and race of the detected faces. Such assessments may not reflect how the individuals identify. Additionally, the classification is carried out in simplistic categories and contains only the most basic classes (for example, “male” and “female” for gender, and seven non-overlapping categories for ethnicity). To access these probabilistic assessments, you must therefore agree with the following statement: “I understand the ethical and privacy implications such assessments have for the interpretation of the results and that this analysis may result in personal and possibly sensitive data, and I wish to proceed.”

    This disclosure statement is included as a separate line of code early in the flow of the Emotion Detector. Once the user has agreed with the statement, further data analyses will also include these assessments.
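A hedged sketch of what this can look like in code; the helper name ethical_disclosure and the environment variable name DISCLOSURE_AMMICO are assumptions based on this description, not confirmed on this page:

import ammico

# Assumed helper: prompts the user with the disclosure statement and stores
# the answer in the named environment variable, which later EmotionDetector
# runs read to decide whether to include the probabilistic assessments
ammico.ethical_disclosure(accept_disclosure="DISCLOSURE_AMMICO")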

    diff --git a/build/html/genindex.html b/build/html/genindex.html index 6ce85806..8a0c17b8 100644 --- a/build/html/genindex.html +++ b/build/html/genindex.html @@ -5,14 +5,14 @@ - Index — AMMICO 0.0.1 documentation + Index — AMMICO 0.2.2 documentation - + @@ -70,7 +70,7 @@
  • - Edit on GitHub + Edit on GitHub

  • diff --git a/build/html/index.html b/build/html/index.html index f7c8eba4..047b0981 100644 --- a/build/html/index.html +++ b/build/html/index.html @@ -6,14 +6,14 @@ - Welcome to AMMICO’s documentation! — AMMICO 0.0.1 documentation + Welcome to AMMICO’s documentation! — AMMICO 0.2.2 documentation - + @@ -74,7 +74,7 @@
  • - Edit on GitHub + Edit on GitHub

  • diff --git a/build/html/license_link.html b/build/html/license_link.html index b0a73e2b..8842259c 100644 --- a/build/html/license_link.html +++ b/build/html/license_link.html @@ -6,14 +6,14 @@ - License — AMMICO 0.0.1 documentation + License — AMMICO 0.2.2 documentation - + @@ -72,7 +72,7 @@
  • - Edit on GitHub + Edit on GitHub

  • diff --git a/build/html/modules.html b/build/html/modules.html index fbb1c141..1bb3125e 100644 --- a/build/html/modules.html +++ b/build/html/modules.html @@ -6,14 +6,14 @@ - AMMICO package modules — AMMICO 0.0.1 documentation + AMMICO package modules — AMMICO 0.2.2 documentation - + @@ -83,7 +83,7 @@
  • - Edit on GitHub + Edit on GitHub

  • diff --git a/build/html/notebooks/DemoNotebook_ammico.html b/build/html/notebooks/DemoNotebook_ammico.html index d0782cd4..8fe40b6a 100644 --- a/build/html/notebooks/DemoNotebook_ammico.html +++ b/build/html/notebooks/DemoNotebook_ammico.html @@ -6,7 +6,7 @@ - AMMICO Demonstration Notebook — AMMICO 0.0.1 documentation + AMMICO Demonstration Notebook — AMMICO 0.2.2 documentation @@ -14,7 +14,7 @@ - + @@ -106,7 +106,7 @@
  • - Edit on GitHub + Edit on GitHub

  • @@ -523,7 +523,7 @@

    Read in a csv file containing text and translating/analysing the text

    The detector modules

The different detector modules with their options are explained in more detail in this section.

Text detector

Text on the images can be extracted using the TextDetector class (text module). The text is initially extracted using the Google Cloud Vision API and then translated into English with googletrans. The translated text is cleaned of whitespace, line breaks, and numbers using Python syntax and spaCy.

    -

    4627534faec74c6cb30013b3b78abb84

    +

    a481a91a1d8c4b57885989804c065f7e

The user can set whether the text should be further summarized and analyzed for sentiment and named entity recognition by setting the keyword analyse_text to True (the default is False). If set, the transformers pipeline is used for each of these tasks, with the default models as of 03/2023. Other models can be selected by setting the optional keyword model_names to a list of models, one for each task: model_names=["sshleifer/distilbart-cnn-12-6", "distilbert-base-uncased-finetuned-sst-2-english", "dbmdz/bert-large-cased-finetuned-conll03-english"] for summary, sentiment, and ner. To be even more specific, revision numbers can also be selected by specifying the optional keyword revision_numbers as a list of revision numbers, one for each model, for example revision_numbers=["a4f8f3e", "af0f99b", "f2482bf"]. A minimal usage sketch is shown below.
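As a rough sketch of how this fits together, assuming ammico's find_files helper and the per-image dictionary pattern from the demo notebook (the image path and the limit value are placeholders):

import ammico

# Collect images into ammico's per-image dictionary (path is a placeholder)
image_dict = ammico.find_files(path="/path/to/images", limit=10)

# Extract and translate the text, then summarize it and run sentiment
# analysis and named entity recognition on the translated text
for key in image_dict.keys():
    image_dict[key] = ammico.TextDetector(
        image_dict[key], analyse_text=True
    ).analyse_image()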

    Please note that for the Google Cloud Vision API (the TextDetector class) you need to set a key in order to process the images. This key is ideally set as an environment variable using for example
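export GOOGLE_APPLICATION_CREDENTIALS="location of your .json"

(the same command as shown in the Text extraction section of the README below, with the .json location being a placeholder for your own key file).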

    @@ -615,7 +615,7 @@

    The detector modules

    Image summary and query

The SummaryDetector can be used to generate image captions (summary) as well as to perform visual question answering (VQA).

    -

    89ab67ce3b0e471e8741cfc2b23d97f1

    +

    faf82562ce8a4cd4babda34fde71e01e

This module is based on the LAVIS library. Since the models can be quite large, an initial object is created that loads the necessary models into RAM/VRAM and then reuses them in the analysis. The user can specify the type of analysis to be performed using the analysis_type keyword: setting it to summary generates a caption (summary), questions prepares answers (VQA) to a list of user-defined questions, and summary_and_questions does both. Note that the desired analysis type needs to be set in the initialization of the detector object, not when running the analysis for each image; the same holds true for the selected model.
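A minimal initialization sketch under the same assumptions as above (the question list is illustrative; the keyword names follow the description in this paragraph):

import ammico

image_dict = ammico.find_files(path="/path/to/images")

# Create the detector once so the models are loaded into RAM/VRAM only once,
# with the analysis type and questions fixed at initialization
summary_detector = ammico.SummaryDetector(
    subdict=image_dict,
    analysis_type="summary_and_questions",
    list_of_questions=["How many people are shown in the picture?"],
)

for key in image_dict.keys():
    image_dict[key] = summary_detector.analyse_image(subdict=image_dict[key])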

    The implemented models are listed below.

    @@ -878,7 +878,7 @@

    BLIP2 models

    Detection of faces and facial expression analysis

Faces and facial expressions are detected and analyzed using the EmotionDetector class from the faces module. First, RetinaFace detects whether faces are present on the image; the detected faces are then checked for face masks (Face-Mask-Detection). The probabilistic detection of age, gender, race, and emotions is carried out with deepface, but only if the disclosure statement has been accepted (see above).

    -

    08b5f56998844e20961d743a8d32ea91

    +

    412f8c74483b4718849ff5047d5b90df

Depending on the features found on the image, the face detection module returns different analysis content: If no faces are found on the image, all further steps are skipped and the result "face": "No", "multiple_faces": "No", "no_faces": 0, "wears_mask": ["No"], "age": [None], "gender": [None], "race": [None], "emotion": [None], "emotion (category)": [None] is returned. If one or several faces are found, up to three faces are analyzed, each of which is first checked for partial concealment by a face mask. If a mask is worn, only age and gender are detected; if not, race, emotion, and dominant emotion are also detected. In the latter case, the output could look like this: "face": "Yes", "multiple_faces": "Yes", "no_faces": 2, "wears_mask": ["No", "No"], "age": [27, 28], "gender": ["Man", "Man"], "race": ["asian", None], "emotion": ["angry", "neutral"], "emotion (category)": ["Negative", "Neutral"], where for the two detected faces (given by no_faces) some of the values are returned as a list, with the first item for the first (largest) face and the second item for the second (smaller) face (for example, "emotion" returns a list ["angry", "neutral"], signifying that the first face expresses anger and the second face has a neutral expression).
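To make the returned structure concrete, a sketch of a face analysis run and of reading its output (assuming the find_files helper as above; keyword defaults are omitted):

import ammico

image_dict = ammico.find_files(path="/path/to/images")

for key in image_dict.keys():
    # Detects faces with RetinaFace, checks for face masks, and - if the
    # disclosure statement was accepted - adds the probabilistic
    # age/gender/race/emotion assessments to the per-image dictionary
    image_dict[key] = ammico.EmotionDetector(image_dict[key]).analyse_image()

# Read the output fields described above
for key, result in image_dict.items():
    if result["face"] == "Yes":
        print(key, result["no_faces"], result["emotion (category)"])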

    diff --git a/build/html/notebooks/Example cropposts.html b/build/html/notebooks/Example cropposts.html index 201e0dda..69935eb2 100644 --- a/build/html/notebooks/Example cropposts.html +++ b/build/html/notebooks/Example cropposts.html @@ -6,7 +6,7 @@ - Crop posts module — AMMICO 0.0.1 documentation + Crop posts module — AMMICO 0.2.2 documentation @@ -14,7 +14,7 @@ - + @@ -76,7 +76,7 @@
  • - Edit on GitHub + Edit on GitHub

  • diff --git a/build/html/py-modindex.html b/build/html/py-modindex.html index 6445c5d1..772f90c8 100644 --- a/build/html/py-modindex.html +++ b/build/html/py-modindex.html @@ -5,14 +5,14 @@ - Python Module Index — AMMICO 0.0.1 documentation + Python Module Index — AMMICO 0.2.2 documentation - + diff --git a/build/html/readme_link.html b/build/html/readme_link.html index e462c0ed..b449d1b2 100644 --- a/build/html/readme_link.html +++ b/build/html/readme_link.html @@ -6,14 +6,14 @@ - AMMICO - AI Media and Misinformation Content Analysis Tool — AMMICO 0.0.1 documentation + AMMICO - AI Media and Misinformation Content Analysis Tool — AMMICO 0.2.2 documentation - + @@ -85,7 +85,7 @@
  • - Edit on GitHub + Edit on GitHub

  • @@ -147,25 +147,25 @@

    Installation
    pip install ammico
     
    -

    This will install the package and its dependencies locally. If after installation you get some errors when running some modules, please follow the instructions in the FAQ.

    +

    This will install the package and its dependencies locally. If after installation you get some errors when running some modules, please follow the instructions in the FAQ.

    Usage

    -

    The main demonstration notebook can be found in the notebooks folder and also on google colab: [Open In Colab].

    +

    The main demonstration notebook can be found in the notebooks folder and also on google colab: Open In Colab.

    There are further sample notebooks in the notebooks folder for the more experimental features:

1. Topic analysis: Use the notebook get-text-from-image.ipynb to analyse the topics of the extracted text.
      -You can run this notebook on google colab: [Open In Colab
      +You can run this notebook on google colab: Open In Colab
      Place the data files and google cloud vision API key in your google drive to access the data.

2. To crop social media posts use the cropposts.ipynb notebook.
  -You can run this notebook on google colab: [Open In Colab
  +You can run this notebook on google colab: Open In Colab
3.

    Features

    Text extraction

    -

    The text is extracted from the images using google-cloud-vision. For this, you need an API key. Set up your google account following the instructions on the google Vision AI website or as described here. +

    The text is extracted from the images using google-cloud-vision. For this, you need an API key. Set up your google account following the instructions on the google Vision AI website or as described here. You then need to export the location of the API key as an environment variable:

    export GOOGLE_APPLICATION_CREDENTIALS="location of your .json"
     
    diff --git a/build/html/search.html b/build/html/search.html index 8ff80553..c8a516a4 100644 --- a/build/html/search.html +++ b/build/html/search.html @@ -5,7 +5,7 @@ - Search — AMMICO 0.0.1 documentation + Search — AMMICO 0.2.2 documentation @@ -13,7 +13,7 @@ - + diff --git a/build/html/set_up_credentials.html b/build/html/set_up_credentials.html index 2a9b6a39..7a546c9f 100644 --- a/build/html/set_up_credentials.html +++ b/build/html/set_up_credentials.html @@ -6,14 +6,14 @@ - Instructions how to generate and enable a google Cloud Vision API key — AMMICO 0.0.1 documentation + Instructions how to generate and enable a google Cloud Vision API key — AMMICO 0.2.2 documentation - + @@ -71,7 +71,7 @@
  • - Edit on GitHub + Edit on GitHub

  • diff --git a/source/conf.py b/source/conf.py index 047d47bb..c8d1d6bd 100644 --- a/source/conf.py +++ b/source/conf.py @@ -15,7 +15,7 @@ project = "AMMICO" copyright = "2022, Scientific Software Center, Heidelberg University" author = "Scientific Software Center, Heidelberg University" -release = "0.0.1" +release = "0.2.2" # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration @@ -31,7 +31,7 @@ "github_user": "ssciwr", # Username "github_repo": "AMMICO", # Repo name "github_version": "main", # Version - "conf_py_path": "/source/", # Path in the checkout to the docs root + "conf_py_path": "/docs/source/", # Path in the checkout to the docs root } templates_path = ["_templates"]