From 80ced5eba4f6eab477e8921de943cc5b88a681b6 Mon Sep 17 00:00:00 2001
From: Sean Hatfield <seanhatfield5@gmail.com>
Date: Thu, 22 Feb 2024 12:48:57 -0800
Subject: [PATCH] [FEAT] PerplexityAI Support (#778)

* add LLM support for perplexity

* update README & example env

* fix ENV keys in example env files

* slight changes for QA of perplexity support

* Update Perplexity AI name

---------

Co-authored-by: timothycarambat <rambat1010@gmail.com>
---
 README.md                                     |   5 +-
 docker/.env.example                           |   4 +
 .../LLMSelection/PerplexityOptions/index.jsx  |  88 ++++++++
 frontend/src/media/llmprovider/perplexity.png | Bin 0 -> 15863 bytes
 .../GeneralSettings/LLMPreference/index.jsx   |  11 +
 .../Steps/DataHandling/index.jsx              |   9 +
 .../Steps/LLMPreference/index.jsx             |  12 +-
 server/.env.example                           |   4 +
 server/models/systemSettings.js               |  12 ++
 server/utils/AiProviders/perplexity/index.js  | 204 ++++++++++++++++++
 server/utils/AiProviders/perplexity/models.js |  49 +++++
 .../AiProviders/perplexity/scripts/.gitignore |   1 +
 .../perplexity/scripts/chat_models.txt        |  11 +
 .../AiProviders/perplexity/scripts/parse.mjs  |  44 ++++
 server/utils/helpers/customModels.js          |  18 ++
 server/utils/helpers/index.js                 |   3 +
 server/utils/helpers/updateENV.js             |  11 +
 17 files changed, 483 insertions(+), 3 deletions(-)
 create mode 100644 frontend/src/components/LLMSelection/PerplexityOptions/index.jsx
 create mode 100644 frontend/src/media/llmprovider/perplexity.png
 create mode 100644 server/utils/AiProviders/perplexity/index.js
 create mode 100644 server/utils/AiProviders/perplexity/models.js
 create mode 100644 server/utils/AiProviders/perplexity/scripts/.gitignore
 create mode 100644 server/utils/AiProviders/perplexity/scripts/chat_models.txt
 create mode 100644 server/utils/AiProviders/perplexity/scripts/parse.mjs

diff --git a/README.md b/README.md
index ff50a8587..200355707 100644
--- a/README.md
+++ b/README.md
@@ -71,6 +71,7 @@ Some cool features of AnythingLLM
 - [LM Studio (all models)](https://lmstudio.ai)
 - [LocalAi (all models)](https://localai.io/)
 - [Together AI (chat models)](https://www.together.ai/)
+- [Perplexity (chat models)](https://www.perplexity.ai/)
 - [Mistral](https://mistral.ai/)
 
 **Supported Embedding models:**
@@ -108,8 +109,8 @@ Mintplex Labs & the community maintain a number of deployment methods, scripts,
 |----------------------------------------|----:|-----|---------------|------------|
 | [![Deploy on Docker][docker-btn]][docker-deploy] | [![Deploy on AWS][aws-btn]][aws-deploy] | [![Deploy on GCP][gcp-btn]][gcp-deploy] | [![Deploy on DigitalOcean][do-btn]][aws-deploy] | [![Deploy on Render.com][render-btn]][render-deploy] |
 
-| Railway |
-|----------------------------------------|
+| Railway                                             |
+| --------------------------------------------------- |
 | [![Deploy on Railway][railway-btn]][railway-deploy] |
 
 [or set up a production AnythingLLM instance without Docker →](./BARE_METAL.md)
diff --git a/docker/.env.example b/docker/.env.example
index b14d3c6ed..eed505782 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -48,6 +48,10 @@ GID='1000'
 # MISTRAL_API_KEY='example-mistral-ai-api-key'
 # MISTRAL_MODEL_PREF='mistral-tiny'
 
+# LLM_PROVIDER='perplexity'
+# PERPLEXITY_API_KEY='my-perplexity-key'
+# PERPLEXITY_MODEL_PREF='codellama-34b-instruct'
+
 # LLM_PROVIDER='huggingface'
 # HUGGING_FACE_LLM_ENDPOINT=https://uuid-here.us-east-1.aws.endpoints.huggingface.cloud
 # HUGGING_FACE_LLM_API_KEY=hf_xxxxxx
diff --git a/frontend/src/components/LLMSelection/PerplexityOptions/index.jsx b/frontend/src/components/LLMSelection/PerplexityOptions/index.jsx
new file mode 100644
index 000000000..0b392cf41
--- /dev/null
+++ b/frontend/src/components/LLMSelection/PerplexityOptions/index.jsx
@@ -0,0 +1,88 @@
+import System from "@/models/system";
+import { useState, useEffect } from "react";
+
+export default function PerplexityOptions({ settings }) {
+  return (
+    <div className="flex gap-x-4">
+      <div className="flex flex-col w-60">
+        <label className="text-white text-sm font-semibold block mb-4">
+          Perplexity API Key
+        </label>
+        <input
+          type="password"
+          name="PerplexityApiKey"
+          className="bg-zinc-900 text-white placeholder-white placeholder-opacity-60 text-sm rounded-lg focus:border-white block w-full p-2.5"
+          placeholder="Perplexity API Key"
+          defaultValue={settings?.PerplexityApiKey ? "*".repeat(20) : ""}
+          required={true}
+          autoComplete="off"
+          spellCheck={false}
+        />
+      </div>
+      <PerplexityModelSelection settings={settings} />
+    </div>
+  );
+}
+
+function PerplexityModelSelection({ settings }) {
+  const [customModels, setCustomModels] = useState([]);
+  const [loading, setLoading] = useState(true);
+
+  useEffect(() => {
+    async function findCustomModels() {
+      setLoading(true);
+      const { models } = await System.customModels("perplexity");
+      setCustomModels(models || []);
+      setLoading(false);
+    }
+    findCustomModels();
+  }, []);
+
+  if (loading || customModels.length == 0) {
+    return (
+      <div className="flex flex-col w-60">
+        <label className="text-white text-sm font-semibold block mb-4">
+          Chat Model Selection
+        </label>
+        <select
+          name="PerplexityModelPref"
+          disabled={true}
+          className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+        >
+          <option disabled={true} selected={true}>
+            -- loading available models --
+          </option>
+        </select>
+      </div>
+    );
+  }
+
+  return (
+    <div className="flex flex-col w-60">
+      <label className="text-white text-sm font-semibold block mb-4">
+        Chat Model Selection
+      </label>
+      <select
+        name="PerplexityModelPref"
+        required={true}
+        className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+      >
+        {customModels.length > 0 && (
+          <optgroup label="Available Perplexity Models">
+            {customModels.map((model) => {
+              return (
+                <option
+                  key={model.id}
+                  value={model.id}
+                  selected={settings?.PerplexityModelPref === model.id}
+                >
+                  {model.id}
+                </option>
+              );
+            })}
+          </optgroup>
+        )}
+      </select>
+    </div>
+  );
+}
diff --git a/frontend/src/media/llmprovider/perplexity.png b/frontend/src/media/llmprovider/perplexity.png
new file mode 100644
index 0000000000000000000000000000000000000000..f4767169a12cbe2189eab51d8c59deafe4ae5b33
GIT binary patch
literal 15863
zcmch;XIN9+x-J}0sx+kt(jiC-9qB>31e78*G%1OIbfklTf;1H{!6<?OQABAXp-GdD
z2p9!HdIv>H=pa(g$ouZS_F8Ly-}!OQxwh8@>dc&Dj`8&Sex4+onOtI|KSvLNKo}8*
zdRHM33M29lEj73jDb7I!{-X0YMBaoznA*rc6ptjCxWPq@Abr~)T|XZmv~Lh3EKm)l
zrl)NX;1hm9F2L7HO-==NL+yf`tg7+_1$osAiYl@fl;u?v<zYc;@`|$Z;E(L3Lwj)u
z<m9HOrEQSymCI^wem>GJC_h)UbeNAncma4tm>T$}4?4(2Ak4?x_oiAHT=cIy)WE;V
zmt{mX=>`7dk47Nsy4qDega5b{{0}bb5ftRFCL<FX8Y&&CAnh0EE+eO^swyKZFC#B6
z1@4f#8SWe85+>z)Qy8rDuS@BnZ@L9~`UiRX`3jJic5(F!4uXq{{^PE{w}f*0$7}q9
z1HJ!x3Cc|d?Tsc2=BA9CwA{b!66K~A<Qe3R{<n>5dk6j76>m@SNz}YueBI%qVNxjc
zb(di8AW=;#-@l$m382t7J>7i;qy+rYf&Sj;FwdZH4H@!Iy*>YFslTrL>*_N9AA9)g
zwtqVE|DiOrgMtD*UH>5w-<#4de_aVCi%}YASjGiKmWk}Y$mH**)U;7I|7qjolUSew
z{_P5o3i$(nm%y87%W!`*T=XjXreAQN8~UG3;XfAC4Me*Hp;4Ohvhs>jvhq^$@)|P#
z_R&AIsqr5l=!XR%P?~5ZB^4!QU?i?8vZ_*wuBvD$6<0-+l!Bt1vfK6RD)Md$D*ssP
z???XIN(QcAp$qaC<drU{%7N!pFQ~}=<C(uc{BMs~`USgrkdLMLUpxJ;YyWlo|Bssg
zRb2mh)PEK0|06~F|C4)>wfvXglJyHd0Qyz<7yW{3YTCg;9)5wEmYzNWx-Nk(KCUi7
z8swY*^Mijo-2c5Pg2?eNcKDAShPn9vHwy$etNmpyeu0*Le%_k8F1{fy8Z!TW`G2>`
z|BOTbb`OY1|75~4|A=UsCgvPD5QvH^LQmT=EPG}AM<mboq4NH2&vGHsvC5oeeXUEP
zG=0pIMW<4{RB%`3_xSf)If*}*RntDlZJvS(e$b-qP@)r|JK0Yuom<;isBB+eSaPFi
zb+sp(h9KOPJB~#54el=0ZqPbjhx~b#`MdGQ=4neSD{VbJ-{hl4h)|wZXD(DozUhDJ
zm-EIhm08%I2xr=BTa2#5(+3mR6pV$U0|odms-^!(loYn>%pIdjDV$kt5poq4W^q%}
zWjPsh?P}An_X>AER0X?4htEgGNR4>w+=$|d>ubV{-;R`&96EdURPu-KAyoVgp|>rp
zzi4VEd&RYKX4cVISsmU&u;s13EpAa$OC?6oO#6R2YRpb=2X84aboRf&M5+pzxi)@_
zvo4hXzR`&1siKgI`}b!Zy+j*-*3s4V<c>*2ZB6Lk3Vjve6~U2`H=}38wX>Rh`{O0y
zk@Hqo@0R`Z&?ug>Pkr{lnSzJ8jv9;8J95YLB`g^(6Q{<jOg<K-JZ8cP-*rKZ<|d;0
zSLEu4o!bYRUBn7!NZs_|U~zPL-ps-l>}YWCuJIt#Wf^JhX?5cW+NDs8Ti9XNd4~_-
z+}%uxA7mt9YpH!SJmq)n`w4qqUZS~_1%;LB#`pwmwIxK-MAXipbNyGPfnwTym#?4G
zx7exk(^S}~S_wD9h=Wwx5GM`W)LRV38-3;Dw-+AD)gN6^urui3!f1BnZV4t9Pk}8=
z5Q1BwDPe`hw2O2B;MSi<g}j3g1Hg{;55I_Utb_nf47r%?Kd~^E^$$Jw*j`<Lo)Rx6
zu6Ny)^2wu$%t*;14?eEe_JxorxJvw^D~-;1<DI!%Cy-WOBi+wEF6M>AUdvz?loDj>
zF&go38EE9d<ZLl$S%=lG$Y^Zuyuhnl)C~2dGOO1Hgt|N|m_DhcKJHYm{zYCsA@u2J
zz+J(`(E1l!gxiZ;2bGnqDJ^)B5}Vkkmpaz2>q?d~F|~H3Z>dw~=ag*2?A4zaN;s+6
z-fnou(CuHbelP3dn1{janph-;;*K^Kge*mcn|ogNqLz??Ldmkn1JBFy3Y$_Sq1PHF
zk`_kYG{%_(7(PLsZ^M49H<obc2~4#Y1}@)2+a}yYC!Xl7?p)ii?)7jnq~@VULy$|z
zs$lKFZJ5Fkfg#)r-qm5f13keOC*P#PPIbaUA?*7Ig*gN-rW#w^7M6fcs{K%&vH;}T
z-<Mi+qFU+MmdR0PE`wHiVczblqO`p_1#MX2GNf;27HPyRcdDq!qY<8ZZ!pARarMVy
z2*fR|k6BM!>xKhtMo3r#@4Gve*)l)Kbnt?3*5ckWGJzhL<(z+RIEu$lxq6y4F8#hz
z*1DksObFVQXoKLE6FU^p!K_`@m*eXuHct$}`S^?ekW6OFE8TACWwn$sTn7gjwKLr)
zx-A%&uJ~L{fnCPtv9O?8N~PRIp#0ln^oFKMt#6t5+bqyD>1SA1b~g2q=5jxOA)UNE
zJK269simZ#ykHSOh6RLy;hA6M3TxfEH0!p}OhT7|{?1b8^V32u=OvBJL9^eyn?j+v
zW?tltCFI}KC2o{g^7h{|cCh6@4St$?!5X$5apGLUrFT=)LH2C1ZsAB38~eNbbM9`Y
z@tFk3K~kb>Gzz0PCn7qHLW}E}H>vloJh|pH5p+7!!rVVHgfE5=tZTc3T%D#cO}7nE
z`Kl<Na51|7{=(=ipQIK>V2Xz7y^5p7?HBJelOEjPnts%Goiihv!v$kjrpzS@6ES0B
zCk?B~HPqBE&Q@xQA2w92GQxq!(bSgoVDBp;RGwzt?Z3dTg25)Y{S@PO5`W*C%K$lj
zp6+)fcD=7zUIoR)AH4nk+4+~-u!2gwNNmnyzA24vNYx<LyNc^G7HG!o@uS-c7dUvj
zdvz#v<(=1f(XmPC(7Al*bd;k=v~@NqpmN2ISYFA>AG+<bJON*Ogr54kfJmLaYD}ZK
z$R*3r702bNVsD?^xp9*dLy@4(rPYIp2ckXr@zw5NPm6Dd>+)%G^aQcxprL`r;?zBj
zDLR&=O=xAqXim^>c(y2)kJ59YmD2~m*0c}GWN9LWgrmYL=NL63XgCpU>6x5q$D2O|
zpNP=eL{MumZS!XLEg(Ooq&zN?1j=VLpSxM}!8=>@w?Q2Hck0Ax*=5A5S)0VdHK1<S
z9^giLz>S1yDS^I=U!Ekt1FXa!666-);@RKtj||?1jjB;lguN-u-h>_M9nW;A$=?Rf
zs?)mmq!W*N37s|zq5k~MdIqQ}NhxdZN=)p`i%>{ktGu%6>4F;){w-HiYCan7!j0Pp
z2;M7glOt+!gqogCfBt?Fv_u$KkR7lfc$yI_6-DPJA)vI1K5gDz_<T!GI)iil3kK)w
z2%YG_GfuB1;y~yt7*6;-&t47ho*q`MA0467w;CpF$sH2fzJKsE*i1SL?`m<#F0GpK
z@I5~>gk=Z=XFCh5B|UE!#$Nw@@b03>F1(vWB%p|a)l$VHF1);*+>F-Wm?OVqH7p!&
zx4!2B5;?>0NuX?k087-hUNg6mS(Wxa>aDgg|8ns{T=yUo?Ux7~av8b$^iOHSC`qw;
z_E!@N?in?_%ezE0JOkd9+Y*|gtwTcMi;CKaDW^1cVC+`<IYsWj-<C%2z(Tt^@N<vw
z6s_sSpAB2zpWA`PosN@FdgAEvW*7Frq&kvxc^dfGvHvzqHQ2+wILCJhj(8D{Id9v?
z*DRUHJVUdLpcVnD=R)lH63h8RImS#`&pmnCTvzhUUqZrZa5sVA{`-s44)oY^@{X6X
zANLN_OTK@dJ`$t-0Bmfb4&GhxDMf=w82dt~N6l+=uME|M{@MMWwbom5`fUW}Lvc|H
zGkqYEg5DK~q2ir;cx}g>vl87b{PX(6@{&`e%HVhIzgaA;+Ee=JMu$#Dhc=wPQvF~7
zL9c>)oPVp%F}*dNAuwu-$#q}7o7HMx!<c*Fegw|v8H*l;sDzP&msnKvu(oIWVtlaW
zpP5(TD27uZyP+81>xp+&OYtZU*WV`cKjp;Y9O8jf=2T~BSz7|lCBMn<C2>~b=NXO$
zN9YhNYp6wvm!fUIrwik{?~F5z{yaJHx_V~x`Ds{j#43WXySHWW#wn&BsEr_XJy|i`
z*a<?^*9*jr9LHO}rtf*nXVe%;u>z2^vuA}A<R+MHczHOiZCrRWo7AP04kDdD&Z;AJ
zpt@%#%@dMn5ju@Pdo^b_VRh$8Gv$>hY$EpMqDhTuZ<SPFrND-sHHmns9}NunabgBC
zL0~oqk21x{$7_5cz~onvAtqz(`8cKn0*L<(wp9jJR>dkj9=usich7Tc{`#f-8j@Lw
zpA-CyWv~Kblk<Xi&nB`68tt{^x(`R%L^9Y|vWrTg4JTN62Zcwr`d6&Kt9QMnNt{YY
zS6_U)4ehU>H;ZP<8^$sq7(THOfO0-f$H`_wqdb-dt;c*fHp}x|oo`S-wo}V3Z0h=S
zwIqWt0T>(9XBN9R{^0yK<_FGQt&2M&8eLC~4$9Iup}JiB4Z2@Wz`K9Z*PB@-?0Ep+
z1@*^Z4@|nUVz!}5x~Eq40yCOB67!+KE-fOi-=}K;t6>8o9KDLE{I~T~`S?W=%+wmE
z!*<ePO_tavD9QQsS?m0ui53fp26t|~i;RlyJ<7Z2dwgWT`qVd#D`AfL)Xjj}TFOf|
z31A5Zps?(abNKr|r1I^EeS`BmQyqemHH#z7tS0d8b}Mze&hu*qaiQ?87w?fOjwLJO
z3yUFP&SnV|CGMH=XPI%$Z$9q17yN0MW<oc((?Lu#;zNjmE(57Ja!z+`wK>E`V$83z
z-caZVQqzsP#C3iPKR-NTbWowfFCOVS)@Whj0Pha9R}aXq<DcDpK1^_^@WOV<AV=2`
z)d8mE5^(kcFNrk|Cna6^p0!l@`ygb7cOmw+AKn;xW)d|VbE=<@8Wm(R)+}Omm!6%7
zG^!NW7xZ@qHIMZ(lZpo>GdfttoUN@^!AQPG*0@^!A4`R6*gTEpWM@<PF<G(J`%wCi
z2&1-R)y78Uv2h6pttF&Wd59L%LwvsV`Ov&zZ^~oG$vZ`Hx_r9wpHMnh1`?mYTTfr2
z*#HIkpS2s}W?+q;h+r|g<IVIc<pa_R+)l&;oAW&q_t;(y!me_<4c-lGZs?>_-9%Et
zw0Ro53z)f&wVKIy%c)L%JHvz)-U~jw!2AWZS)4%vCtc~tym89J8YeW^I^j>QMu9cd
zM&J~*hX`Xs*n_uTj2@hP+hMg*^pwEsro2n2E=qjktLDN?-wVxf>471{hO+Kst<Wrv
zk*%b+#kC~l=N`|3X-Z%a=Pcpf?1`*{@8}MWB3{_QyJo|ORV>_^*AKkg^xuhct#W3#
zNVh<r`@gMHcCcov_gm`d>FaxC&&`_x&i1y%kvjr+`x>v^5@OIRvid;;Cb3+>|CMzS
zL}Vi<3m_-R!}F~4yRV8@7>{d;h_}RVmADVnX*IEmP<_07dF@a?($giFU8*!3bB&q7
zr7KqyiOo#y2N|E(eciC{9JL6A?FWK}jSXFa&}=_myV~Xb1mT;};_ZDZV%|6~DLRK6
zo}HrOClV}TCHgG_UcI6`ej)HUZFCDlEm9(9e1_fvv!3;$(*F~E<MN0302B0&HpkaD
z)Ik8<#P!9#VOpe7_IZH~D`-!Ubc&ygFqP+z6tbnrqmdI^q_88(Qyl*Z8@D3{-f;P<
zOAL&-g-4^sF^3MkeUe%}+!2@%W<lV~!~yGRQL^?I`04gF2jhNHa7W_a@@OU%d}0#l
z>Ywnxh0m`%5#zJeQo+BQ@HTM{3KSo|q@39B3xO{hXsnrjtr(c7jhK9~3p;fc;~!Dt
z&NbcW{zgNE*vAh{rE}#S;$X)P4obbUpTVZ`kKu@oBqG-w4%t_w-e_h;jbI6Q-u?$a
zzsa6I#Q&oXpWjH%DGg6V#X(xgx=Xn5NCltM9>*=;mJ3Z2x?(4O`1{itP<)fBLtU;1
zY!Z2SQ|>e`A`b>e=*(HxQJ#YGQC|oS{@c*W;_AHUp)ma1^JRwYk~eK%>tRrXOQij6
z^EEd!%<~6u<AS`Y7oRmROZh~fTeZxnJi$jXw2hw!S=NR+vU)olQ{Z@QtV|R!;cAAl
z5xRVaeph7H!AFFr{5~-Pq?7A3$|;3;qcrgD#6<-Dx+#W_G7eM!Du(+gr}M_NC5Q_%
zuX5BW?TA;0@BX#w-Rs<?Tbzlg@L$o{Z;RSY@6ofg;-NmL<1kfvXQ)u3B2zJMdkM>(
z8HwRIGAivFAZVB)Q!<`#Ce5@alJ~{`!Ke8`T*K%RyvyidPD>Mn?w5&8lNTyPxNW8z
z1Aht#LFxFbLcJd4UWs<Hwqh{pox)V|N>o)VN4Xg`MwFa>P<;s=UB@ekc-?E#`R2yG
zLF}9cBEoRlybo7GBwTKJ^NT*a7^a)8P25=68jB$IweHh}ZgtK1xUuf7R8lKW@NJih
zA}x~n38IM6Hg%&2kJDb8Fk`_K$_dY^X?zww-(q-(z_u2p--V07**_&K5uf!yp>l*x
zfN$(eh@6<Gz0eju-wQt<spJ=d<7J_uFvYP&Z137ujDA(C)imwvfA(?^+tZe5cPcdi
z4_#rQ7V*gfDfpdHmizCAKr#`;4M!VdAZiugc`Y(vAQcq+PQBRsA-!0Q)<@+Pt(+MZ
z!sM8&c50cdC~DdHg)9>3Ga*Pt`C_Ft8~ZcX(11u^KATz^4HJqKC9i_f0x#H}U?k?R
zN8KvIapHTuU(bR1{+O@k?qYlIcOwW$C9H{gQ~SLx5KxtGd^=RRR@93{nPNiD#bH7+
zK*+sf(ssQ4Q&8{e<pj8~DVx<cXL{@oa;EvdVs=SMTiU&iA?!=X<_MM*u?F`{Y<9d2
z6EDD6`g}i-zKNQq8{Q@Qb$_i#2iD6Lpq2IeiNwE#HS;k8>k_1=Or1l`cwfo)A}Q$t
zzs<4m`2G{#eYWtEVDcI1h@VK`yU-LM=evYT4jwmCOq2jBuAH>0cr+e!iglMG>S#Aa
zg1+|rs`K@L@cs{tzcSVXZ1`(M3s`)Z4&xH73RtV%e@t73c>H=_=z1f7E@y`!Z*gbu
z^TAUJU+~|X#*$e!r6{UHi~hYB&AJp?Ea}*glq~x~F_=F(N`U!c^xmI8cXIb5u7602
z8V+SWCfQ8pc2A*#m0uUqBdr7_Q%37Vv0^%B;>*GvEG^kh^oQTwQag=dW8vn%rEhMf
zf*M{SiJG%GbLf9Kh&CE#jT1Fyo;p7ID%WOUuy;TroqEo7{3Std2+bvFQt&|rZDFFX
z?d*i~msMkA1}^A2V0vr<lA{V6*RQH;g(s!?7FbSmIh&pN=U=qLtvtf#&Et`5f++{b
z^w-STs6zB_7mKNHEa_)nPT!mSEV{+fc_WtZ{=SYabv_c~4^w9C3?#f$R_d61%(?*>
zl8Us+=1#|3whonkAN-|h#f--j`8no<gadYW6B5-EVJ~8CQ2QxYx(uJjyKv#OJ<m|_
z3_ZRh+D-p3D|?RAO$P|5B9B^l>i%={w1&PCGYXh`cGA6+ws&K&gn4QE0F{8*1=)$p
z5LvP0kdZs%sjnUeIFwh-_VW=pq18G$?!T+6q#;d2V|p%JMG>&_S&#<`Eet^YDy=xY
z7EXdJZ+x!L8hd>!>I@SyAx|tUntPNt`M#`}=cPqS^k}E(x<LcG4NYRG2sHRGv>@P3
z<q3LA;O(P36aIO<;I>W=!9}gnA*fTsVA8A67q;bf*G_T>B3Mo~3DzA`Yn*RkK(M?k
ztNr~=(8q5N2FMJTGa?%H<~{1hBLT}cUsBDi*a25HBDy*=;Pt?x9-czQEINkq!lSNO
z*LDstUb<Pn=B+OWxW94WjS}>}y*)EJ1V;X<UFVKoSB1R9_S~1e?`DGXe7w)Rb@Z|w
zty@uo>Ywi1EDtCxJyVHX<olD3M69d0h_}BVF>1S{|DF*Pmv`seIk+`{{%rm>(^qwz
znG_VY^~%S-Lj8-9TB`E7;@AA=n(s7EthB0T0)8P`ST!?Y>4q?rjN)0ittgRJYffHg
zUp-cuY@SVLXSm!u6KG7MM;n&+60%6A!C0{3X4Y2)D)p}#YN_{nSM~}g!$=|IV#mg2
z%GMY6J+fN(%6Ybz(Am3%@;-iH?lP+pL);TL-RU2fj7W@i-aBQ>o96@8(6uLXU23<x
zrFd)l6sFc$t33#Gj~3skSot)A0`~OT_m$g>fLyp6i4CY4q)dd7>xD5#hoCy;dbb&-
zc<UAI!yh7d4ULmVM=tVwrDiZ1DJNH9P!M***)ixhf5<iP#qo{_AA%PR(JytdNnCO-
z&yU68-m1-XRhnt(vsV`_%<s=V@xUka>$;fr;eHm3R=rc-Bl%?o1p9w<Gc?GoQ11ea
z$6h#Mx@y<g_)hHdOTB{yTSh%Z)1y_2m5k9|;_l^%c6F&0GfS%?rg!cL=G)o`Cwm#}
zWOLE?b6;dNQbg+hgknx(;(zrhW*3$=&9+k|P26r69hvYKxF|vg@ZlXK+xl)QUypQ&
z_x@&iq3aYN#S}*&v2f!Ev0|EsHiHqE+0eC7uWe8g2G~>5RG@8u;#DlXnoUzSDjO!$
z*-XG<bTm7!rh*<&iAr{tiMAYqsxohCuwsyK5J?k>s|c=t0exGnf*$HPzdo{Upvc6X
z(WQTyT-i02(x)w;g`*$u_%6|C$XGtoW;<EmKenn=zg-s98SMT$C|qu*b9MO9Qr?%=
zT&S*b2n`u86zt<F@h|Jr0W0*n>vHGsS_z)+ZXMVAq3pSd`8J6rmN=ciYs=-n^3gJ$
zCr`<kjdmH?#~~}$^+LanS4w!6On$zlNziD--j~nxmsMbNzabT-c#%8VX$M*@`N>;n
zdV;Oq)`|(7WEPYpb*fWj<}pNzYVBksZjOpt<b5S7G#BvE0F12xBti8NgG;d(VBZUH
z;hPCr3>bZ8h>bt-$HKp=gA&x(n-kiXrKDX6HV;9G;&$+PM7>vZm;gP&V6>0l#?OI>
zF}mhS%mo#*LM6vR{mZrIk(g0-L4?>y_1O=qR~eBH*U^(w)uA}Z7tLsz6na``B1Qw)
z@d>hmuzR!BQZ)<urhOQEO8<a*KXyzSut|?G@yEdxOy|v89`ViM=i6~1uM1P$hX_z*
z5Uj43h^opZPdfQ%&8`h`L}Fg?7D3>!5_fT-7{4=4s+Lp)=uGPlUyoEB3Ar=*tYdWF
zGfZD@M;^AQKJ)?#;r@ZRAvi3n@#~$a_=>A(VBHoT4J1b9Yp7%*aem69YiT=tK5D0d
z{wYXebP$Ie5tEt9B}NY(zU>-o>m|Tk0n&}HmpBaECPe=Ulx_9qHcY?7Jv=5jxIbsy
z)+n(FF9J`}M%bAKT9ObE(%6`{qKhx+zqF<(W}4N6Hn{)BmvG<Yi@?S81J&-ziM5V_
zA|6y9HFLiZ(&{|Ex_#H+T`MVyUM|07;oZRZ)4TBKAvd!Gxdzej;lf_51Kkw)-A^x<
z;?eU7`1u8r5P(&b5Oz}>k26ue2P_G&a~FX2OT3B<U0Od8zK;h9mz(`kziC;sfz;Zc
z>#hoTD+p~E4&GhXk;|X}su#D=wN5`f|CNRG&h-28q-706jQtXlujnqwVfl5FZnis@
zO{QJ%F&28^(Ses4pw@{kCjQ5z1yaK*rW@7Ye|`PgtI-9@ltSsw+0t}|3~=JU2wcmF
zP1r?mQuFf0#w$P-z})?xzhR^V9JtN0c>*1vp=bQ_K%xOKp7aY5F;VB3^(Ylo9|tdA
zYb^;<ha3JnNdZnNZ(?OLg|ZTNg>S*0rQFF<xCuyXxe2fW;N5KAUdUaq`hkBbZWw_X
zt$gP$PA3HHa%>0oY;pyOV|R&hC{BC(<lwG07d38~$@WqwNMHkNCh~pI=Sm8^KYmFB
zz+}QCw3xjb+>w+8LfoGTJlXhmV0RFJ(C&T&B834uU~Hz4x0mdIuKf#$bIaqJPaY-!
zjHZFhncbdPaX#xQ|Ga5{oR2i+e<F7#GCS%9O&}n2-342vphnZ$=ZFrcnnqHW&18%p
zc~=adG~O2mOpVssHniFpaCHH_goENxu*AU;(RYkn8WNT_2k9FoFn9t^<t2da^8iWY
zY@hqP?gylJvcM`(tR`{e`=!F^F<0V7W{MEB)4$H`wX0delLw<`D#J0Ye7b>drW;+D
z6G`+Nz-SP8u?HEbgSRg#WsUQS+~5CJHzosnk9XR4Q`|p+msw3876`R@z!8Z>qHl=U
zin_z!+jtZepNzPcCG8g2Da%PI*lu*S`=Y1c9>c^9?>PFY%yQS|@m4(gwM&}3o_9SN
ziaYgS*{Hr<FPQcA$yYL!k0&c;0+`?%GJ+4syDwU@jXlk59lfG-@IrS9DFI}}2GmJ=
z3qM!%o%<_F*0c|r^1AzTlZ;o9zeddNjxS9eh{<=zTjs3=y`?rh`Xe}ItVkX2;8(hX
zHxnt;GP}OV@`ekD{n3taoQZz>pDO9M5je`&PH-$PKqMCQVtHECLE=>V8BVI20Bq}v
zUM$07AQexS%sXd`J^{kTsTDtu_7b&Z537F($xNox{BV5)37)M2L34gbMH;ETmN~6~
zP=I+(k7CD4X}?eSgV-~x@S<UwHB^8Wa?&4<rgb)>e*Rz+I_WzI3}t0p+H^cop>ld0
zM7LY``L}770O|bkbG^~*;a5XXLAjeiqe_iVA6B9Ix$XBf;BcYrN5g0-ehxq=4WwO?
z80#_CFL8o!VESF7+mvAe8DeTO{p@-PNHR#dt2<@xT=(}1&L<TUvDs}gjzyQS%8}K|
zJJ2i3z%~SE)1f~2`QK`w&Z1cvf8sb&@BZes#C|~qUIe1ii$|NerpX%uWpg=VL;uQg
zjA`L`QE}gF(JuU)$(b@>T=Q}4DoA9)kRb?<&z6zY!eFnq!JmF+*TmU;|1@W?2~}9{
z*0tJm(Uy+H)S=vn8{QA7-aS0+8x!J#M`IcJLp}SG#lqIUs?)+$dCDGKnK9kAj#5{=
z^G9;^LrpR{6$WlYZxU=NM$lxI#DZ^(+xHtGs$iI!f)X@0(!Pa(X37a_bnM@y&)?<K
zF$ZpWl?46p9edx^aQ{xD@&Of4#fxy9Bi5n@<&#Lyeo;zdDyd4KDVM>yo5@H1_dJIW
zX{WsNdEEkVBkh_{^&x%lC+>?G>`^1?NpJ49UunH+ZdLK%N~zUdJ{f7f8yXiw8Bl<`
z2c8G;-_E*v&6i@v@ogT=`HX^<*34h@JJ3TzP)zfehU2br_7EykO|+~l@bfZ&?ns7r
z$F}D($nLY{mUh*F4()XZUY-q3jQM$17^u}8eZEFuTmVF!1o$$B=S_kbEhy$<GxK|G
zrYW_4wDH!DzqCrk!=%0?9G9!H`G@`D)!LE|2c*qG%(d^c06oHLGp$c}gvydDzu+>S
zi!$PK8eNcFkch}oVKeyAp8haJ8Yu;Br9V0D37WG5Cap+bdP8!%auN|@GSMF6SaRv`
z%j?d59|W7(qlYQv^j%!T5-HTn&;@zE3%j>NYA1vE*D&q-!^>6fi)L(NASmW=+poVZ
z)$D;uDZoxJ<n&X<4`S=@*}Of+Jbocri0S&vnMZ%M@s!Twh5NH634x-PG<gsBmLQ19
zRTXZ{y=~zQYFj@H4&@FYI~}X*^Ya)Qu&Xlf&|II(S)#vJ%6{Nc@dzEOS`G}k#u7h2
zQt;i#p4C%+x~Y*ZyBMSt{)$jH-*EIMKEK!*u*#sZZbqTu<j{mi$2yzsXEt8laWz(N
zJjV&3GN_TvGLL8hr3iXMsywya(7YZy+r}_6zyZ8lS|70>cN0&T-Ok}4xO35hzFx&3
zwj$y?76vfc<3h<b!c0^&juTbUxpr$r!&txhM?;)%kuLKVjA{pVf+DVuc?<s%pp3Im
zz4t0Yzm$n0_djZq3s!*0hg$I{vlLl`STsuJXtqp=xG{NHEjvG*<p&<6iR<em_eb`8
z?t#*2qvF!qMl-#Jg;Tr-XvRE6d@I!bpn3HKR;=8e3%XbEIT0YV^8Vyfx)RE7QzN*a
zCh^ec^1|!|OgD5xUjY6%Vqc>>?@(Ojnhg~eI!)q*KiMr)b|4`s0m?h|uv|1D6sOt2
zm!DQVj`$_Ghz2yl1FDOX(oJ|n!1%K-!MoW?+&#thFUOyScT-Cq4U7}<`TSs#;F2}E
zMD+J)5vZciHQ$O0$5?T_37a=#k513&#-oz@n8L2EVZ8imT!?`Nr;J~ZJe$|tf!#{b
zUOdswL}PbJbj_f_>d#DvGbsd=DJWM4Nv-FwK9tThS&<TP7Ut{Pk!KgiG!j6+0a-sj
zM$Ik}ZS4@N2-dix{k|K=19~xpT~JzH;Mjq-CV>*B!<eRf$`TbdHjp+fvJ1K)j&Ni4
zCL*8+ML9F-Jcg%ZgY|$Bc@iPG`v&>>tDW7Tvh@t0h{U)U9FLikkkMZwS^Mwde;|L6
zigU!I+l1HO`A!D(?Z}CRHrtu@T`=7sIL)TRZlU;{Lbjo*rt%t~#LK9Oz@-4ufJ7sf
zLirHh?bvBSaug_%0LXK?H$t*;nR~0|sx=9LFMT(ax#0>nn~h*IueeC{LDMcShhtuN
zc*_3hb8=l+-FwEIZKjshj9WpErW3W_pUN)3<Ckdg6#%_cPFqmZx>e{f7OKr6kSvt4
z<M~&cjNu283B0DeF{`Yb;nR4_=wruRo%X_3x<pvyGQ!&Z)wH*dNVMgXf^F!nh(~O%
zm3Cm))NGlwA%Fn@(=+~&O@w9t{eAgX`oGL|PKMcP`rQG~4%E|PpMnVF30NWNs-Uog
zu}2}y{0)aq&}0=%7M*c~grwHoL(ROE7`6q$R8!vQRxqVu&Qf+!i6c@dOxLs{ulM@$
zIO_=lbe*><2XGBZpl9+w`73_j)zgX!$HN2wir|R%#QL?E_UX360V@@DbZ3lD9_gx;
z^T1-;v%6F<bsqJjrMGI&p4cOA=M}0SdC}Y}V=yYn%k>JR=O3zl$iiE>`Ju76Nl7#L
zkK_*Tab<F&u{GQM-pN)_1nNBvIZCUru$GmNVGcop*QndZ`<U5X>Tu!Z+TG1(@yRMK
z1kI&RQmOLqKUZ})Ce1OEDrhPu@T!`am-fM*>NZpm73>T~8>;Lhj)z~Cc?=d7-u(*V
zk3Z3`P?nnN2CVj62kGO-4=o#CvkorZ-7)bAI^vtSaibN4o3idz<_0i?<HDhDcqa)=
zHzdht3eesmNk5?lE6VAc%Lb4%osM5|0Upxd-R0{nQD&GIfL=#0762|s9=)P`C<<=m
zDkydi1G5gk-t7hBCu(lSG#7p9${VLx`lvF&1}G$+-rhERQquSbXQ9XGjGy*i8-hea
ziC8=IvemZ&!wvC^`65HlrtRMheH82_x6%MI1zwnlkkP$bJylCkOcnz3CXOL$VBREj
zJtZy?BA_HsIgCh7*PA?!Hcd*^G0>^Mc2xY^=p!{6i~`}MWI7H$6%vmpLJIDKCIBdV
zWM5?)YaaihIhP1?3%kxo8m=Wz)r6kq;4iGN_XwRDnUdAB;rxbrwPM7q#6nkk>|=07
z9|i50b#TnT=47n*6pDhjwZcH#*2FFf);HsJxUuULqT(MD%<kicePYKz$_@G`pKyI)
zEiKi_!K2D0#FLJ1L@NOY+GP{!v|4u|xk?VJFgP?HN^kh!UaP`<dr}&GT+G>MmU3;x
zQNagn)a2mZ>T^C%9Fff?ic#4qcZiXGI<XM;rs-^kM#4)+GVfHNqdwjUS55<6`h${^
zQ-tpBp)QV>7UpZDBYS@ddX#?ii{Hb=OkYa@b&}c_iJ9HpU4PtR9rbNEvF_A8^>dh7
z_5xOA-|>WEF_x1Qpmcrd*aDW#7o+bWjdzm#6sL`dO*qqP<ufNK@+7xV%Gf7^5x8<n
z1dG3-@+tG{M^&$$wNIK?xMi1iu3S{EnCS-?8r3|3qJ<dzAi4ZCPN#fN2n^f+#^XNm
zk8dqww|?D;!LI=aZyKpU)%*@j3wXK%e*KaPiVE<#{#_%fY3hGurv;(<+FHJWLV%`l
zjXKgin)piaG&9PB81F}ayN=efwsPR+=_uU%bCCvwOWhYN`ls(ecs_sD;xUc&Oa!dp
zDCl>PM<%`xKG+?p4d~Q0y$bXPniYCzalE)p%OkvY8i{T8MV>!Ylr>KPAXU&8u6=gC
z;-~-A{*)|KJC-*0yd>N^0#{K7hybbmv&zRFr1sG(Kp{>cqgY^q@~pG#4*GIV#S*%9
zQQ4zswhh+JACjY)`Il9P65(0fKvo=J78`%szhxRgyO-G;j&|g}OnkKqy;v4JThgJ_
z1FIt;p6Z?EqFWCu%{vJQ{V=F>h;c!GWY2YQmUsxoTPHug(Vy|7Wn`d7;*O<%G?~z!
z>I`_Wl3nNz|KX9;50$}g*Sl_+1|FJRJ=JyiE^^R>&>%uf=_=Ioy4!>%UadZ)fv)7m
zETBbC8MA|ipSHQx>%vcX5ua0_fiu^PM`gK~US&H;7R_*qE1)KvW5)1_d}9K%lskHQ
zxzds+e#uRJ03DoaU14A5FzalSv6^sh6QH&%AQzE+jjqW_kG2AM9G_`5g@CB)X`&5o
z=38^%a@xJ|jc0SAo-Uppz+wTZj)l&EqGW6r#>=D#I;oT@L<}Dj=shiD00cq9*gZ!D
zXvDm7HX`n<-|uHHMZH3C!*)J~XHEiK<@e40`D=;PdGz`-5g*WPsL!*B-3rG{>_Bs*
zC3Qlswv=kDc3je*@eiSf_)!0}0u2cL?`JhIP#u<&V5`QQ87RXnQPsp<Z^y97h4-SK
zz!*xlq4ypVj0N8qCjq0z2sFJB1C>W`<he#L1R_ogNV7{iimjOYa$;S=4U}8EpPlN*
zQJdwWNHThR$noK3+9)3Ixif|)=TB<Kf+^1#H1SMfirNGcbCxq>4tKP&lgJAXD|$Cj
zBMN4ge!xUjEG%vxs_&Wfam$@1Yd97X1^{@^0KpQ%)7#aCH*6$8cR4e*;1Q;QL92Wd
z!M)gQCx?(a8+#BACa&J;AK+MyOac(nXBk<j!y6sbKxfSiHxAb&zjt;S35r{zh*&-h
z>+ae0$FG#Y0LJ&VDT5^hZYt;s)lR2o6q$nRU(-c$HOUPO5O^-EB0t#?F_EoZywoCa
zEpxUpq-`6&ui^}LmE!VP9qAqA%N*Yi^}2u{vLhj{>rgpkpa*rN^C;zG24g0gbAD+p
z&p*aOc)HO<Ok<ih!V3hPp9k=R_wRzGpyN~wx!X`vJEQf>?VkZ%=rwy*qkQHUa)+t*
z&>ab=ZZ);r%8iveR+c>E$K$jSkU?f&>FS`VoM*TAbgzN}rx@05Q8xsT?>0XFS1_>;
znw$<j9gO5knx|lXFDpjJgzQ*R^`ZX~>Ep;ZiFnHQL=?$WCiuplgiI;L&(q?RxXv(u
z@SvP{8uSAMr<|{!yHs%+LX95kzi51L>5>dN>ut@Hcc%ATzTZh;ZwE79Ja06AA$)D3
zEZI`$uIsG1;^)g-lL3!1c=W!I(AVPTrxM<l%iw0B-@NiWJj!{g+5Sq&6ZzK=;Ez(8
zB=F>YFT9Ar)P*>FzOMNiBJ`rARhE0Wm^!aqlsnfTXzc&IjYr*^z&dQ$o+p0O?K1_k
z?Tf@EmiJS9o8DDE=fb=0V02u5O<l_7Ij}5_yvtY34s%YdfT_+WUv&dFjwp}%z9pmq
zLNn{QUX1pmkx|o4_I5Ab(8B?M_zx8ifCuDK#uWNBEAuTNrPZ@*lvGX@z;t6{TUEIL
z?;-xzIvb(NPyO5P;ZeAyXD=HY1&3ClAB;iMKRb(peMF;M1`uCoSC{~9rmrimsZzfl
z=oiL06SFQWRL56+JT?T-5<UVGk$DhqEPgd^8TjhtGk5WGJFu0q)cdohlT}*gR=~Wc
z)VpQD_#vwaviakIhID1n)C=iZ{7acj`M<lZBXFle)PE%!IG@p{WiiWolwgZbNMxi&
zbNYXz_a%OwjSB&{&Z*U?x`;R5#GRQtFOq;V{M9%~2)alNY+h~p9;fR?@+bmq3pZe_
zegxf}yQmi70Phk3qa9oEDr9&=9th;)KienxrauzL0;#h|2oB=u_BC1rMBN~uGB`Lh
z1bg0yac1_fLMLx{w|?uxjN)HHQw;BY*VzCq^Q)~;XmAAas<|ZyqNi78vA(T=Zp1Ty
zq2mDzUAhy+xvaWG<9!W8TzZdTQfvQwyNZns*O`J*`+QSgwV9`CTZ!CPJh-#`xp7Ny
zV=4(t;N*JHB3;3I+FY(%4Wpr}GBd;nK6|f=zRy<anX{xn;!FdhYJl)R^JmsIu1fU1
z;~fEVWdk~Tkk?@^8ripXh0D7CrZy9&GuV^v#Oe+MrAknvlgu3if7fw80K`1&TDI3u
z-y04*dUEy0=}VzF@Hho~x}M9eGy7ofC%={bRU$!wr9B)cn5fOA!&fEx+d-eMuZ66~
z{b%#j*_Xu%=@BgJ=;JHQE}oa}6}i7Dlwd-;3!T5?!es<x)*|jjqx`rS1mD^35t#Q~
z&mAcuZ0Wv^c-GJZ>SsHv)tA5Z>~bm?FBJeels6#Tol71(rnQ9lJblZ?6d3WH(ew<x
z)5ubavqY;5StuizaG3S*<<QfPFO~q@cW`7AAozZ|no+9)8inU*`0Kq>SClS?f5k=>
z*3%X+Sqs2#r;Sb+-cM~=uP?3)$CUsf@KU2$u#QXInjWMMkf!;5AdK>oD&lQ59G&7f
zE3LE@f|SiXXf%*4kb_kNR&TySGP1UsGGmIwHK%HG#RK~6^JFX9iw4ZSF9wiyz~I8~
zS|g1!XQMWuhbv%;@VlSl^8qj`kOwgfP*KJXJQo05wmkC|lzR(5m*---vA7|WT~yK{
z9MG%|*cPQFq!Pme=K+BF7i6=^lRGTrNn}81eFfCvYJ_m<R@gAMB4a8jnCj}RYilD=
zH!#endKA~Vc3_VA!4xVP&zc*iwUe~D^agi)l`Bf`zBD)80CUB7LYe4$Y~v__jq3EZ
zt;wK2zudmAYIT29Im+6E#c{;R8-hs_VZvsO$4b*Br{(d_MwiKha6l6Gvsw5bEy(OA
zDK*u&zpr`jF0M7@8J6y1Moj^_cRR}(6V<`9hf-V69JFcRcae^taAS({!d#ayHc4Nm
zFpd=_KrP*Y(nBmQ-CqF`1In2}@fmm!eqhJ<nupRV;yToWM!BHH1OM`ah~A}XTPGy}
zj|OYNl;vzQ)E?EXox(de1CFdbk8%94tbyC_!idIaaxK;8fZhN~3X6v$a{8FVPSS$-
z4aLEi89(%mzqR6Q3dc3aYIDT`L4O>BJKR`o`lB=(ju}C1xPqCKmgpVmEnnc7kfXIL
ztag{++b}C~OW#EQc447P3m&cIW=8!4G>|~-97>4^*#U#j<cQyoUP1=~VomoV<He_0
zRP0UU$=JyaFu=Fi7HmVj%0|WW>62DQrVCdRIHy6t-IrCNb^NTmWBg+fz@>*9)0x=V
zr=R1G#&(_P1#v<$KkMY_Ym8$XLxf`fb6*#beC|YfGdZynNfX#8!gH%r2<C!ONIEt<
z3Ht~g2J*RLBbI>^&=Jd*#Uun$C>wunmb!O^<8CKwa|sMaVphP2)~YVKI6E&g1zNQl
zj@E4Dk=1dIIzeoyC@^cm6!Dz^8f_H7jUjwL5hJy70mMUf1lEhRvx&P3n%q1a<l9{@
zv!yPeZorN2!n>W@bGHb9V`rZ5t>Ri82YIAj#mcd{b438)(z|Z&k!Ds6?yA#OYA<^U
zJPwiHi??B1xFhh53}K`>n9r{A1<Xmpn|6_!ND(lGl_M<^_{>)U8rOxVRY?7T);_sb
z=clm?dj@FA{bQFwwENzgk*5NK(M5AhDmMT=!b!PRp^*W@Zib~qY;ntxEnuK}!O)j7
zD@!bxXx<5Pf3pqq)3&yJ4FT0>)8RY56(um8FIJec*p99r_?ED5FU)r-A7tBm{zvH?
z7%&@LKofzJSi2q<eCy=sd26LD7&F?vX?pD7hX?3(9X73Itlus<1#-=533*T!ND?G1
za)alg3V1gdG?SM(l~TyK)c|;UJo@kp?f&Xzw`g?x?6Ua?9BkaolKsWz?v&k|8EXJX
zoL<9P3CoBCw!_aGhQ1t-CRp-b9udr&(cqS|F6LqVWW^5k*3;AX_o=~5ti5Yc1_$mA
zWH{Ho>yP0vh-aLjsn#-po%=HW1;<&k3#*hNXsj@b9s>tVe*o`#4Cpm(uqEzTYsT`_
znv38QK=0FHy)4f~Vx|{whvQOy;6;A(#mPgwvoDw0#D>-KI`REPQtq`X_X2cZcR2Pi
z>YnzZJNU9gpbs7j9s(_tHUkgx)R@){7SQefwG*2dl?ZN$Fx^m$#Kdo_cVF5h09vL8
z^E1Ft<u`R)Us@pM%$h5j3s1-pctoQ-CqzpLfOY@?O65hDb^x?2ru}^y@H+s5N9gc?
z8OiB48$u{BTa{&)lxi30PDfi4aFeqriedK@_{tCB?<hZoG?oj*VmUE$)`6E5fvq${
z>CGG>TfsOxoI0O*ar8`%gk;O&xl?_tGkYc&hQ-KME7~%3v`^g&tjnUYhST<G1lvOn
z{(Iyxo5zWToES3r=)T5VY7jmWt(O^=HfwxPy>epz*_@0K^OQ{@lhC7r(F*;2&po%_
zBWE>0Nu<3r1iVGFC3g&+Cm<j!$n-(1Unele0j9SDT@?Ijk;o_ckvexq=7A4+v~w4I
zmx_8``V0gB{aK_j-RD8u^mD{cpAo)>j3ZlZCJ05ISnflY_e+2a6jPAn8$f!3ArJj?
zVALsTf}nwWOvNq%SSuA<?hz}7PmTg~Bd;sK?33BUMt;?<WCp}YK=M~}^7l1p5iMVO
zczVuUTU_P<FHF~?nD_kikcB;Y=%Zl*bmb|Se&FfqYP)?)9DENW=9&rUM6FT+Sv-0&
zMe5F#J$;rvxkhaDXyQt}jp%>-=A6K53DG;4;~tr}&Va@4Z?3ol0m)WaPVE!3jX_1K
pHbM^-n>L}RN6ne>Kl$q@W_RV)FT13jMDkav5&9;2r8>@c{uc;#T-*Qv

literal 0
HcmV?d00001

diff --git a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
index 45ad5fd70..8c51e559e 100644
--- a/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
+++ b/frontend/src/pages/GeneralSettings/LLMPreference/index.jsx
@@ -14,6 +14,7 @@ import LocalAiLogo from "@/media/llmprovider/localai.png";
 import TogetherAILogo from "@/media/llmprovider/togetherai.png";
 import MistralLogo from "@/media/llmprovider/mistral.jpeg";
 import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
+import PerplexityLogo from "@/media/llmprovider/perplexity.png";
 import PreLoader from "@/components/Preloader";
 import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
 import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
@@ -26,8 +27,10 @@ import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
 import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
 import MistralOptions from "@/components/LLMSelection/MistralOptions";
 import HuggingFaceOptions from "@/components/LLMSelection/HuggingFaceOptions";
+
 import LLMItem from "@/components/LLMSelection/LLMItem";
 import { MagnifyingGlass } from "@phosphor-icons/react";
+import PerplexityOptions from "@/components/LLMSelection/PerplexityOptions";
 
 export default function GeneralLLMPreference() {
   const [saving, setSaving] = useState(false);
@@ -153,6 +156,14 @@ export default function GeneralLLMPreference() {
       options: <MistralOptions settings={settings} />,
       description: "Run open source models from Mistral AI.",
     },
+    {
+      name: "Perplexity AI",
+      value: "perplexity",
+      logo: PerplexityLogo,
+      options: <PerplexityOptions settings={settings} />,
+      description:
+        "Run powerful and internet-connected models hosted by Perplexity AI.",
+    },
     {
       name: "Native",
       value: "native",
diff --git a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
index c86a62a43..f9c4c4169 100644
--- a/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/Steps/DataHandling/index.jsx
@@ -11,6 +11,7 @@ import LMStudioLogo from "@/media/llmprovider/lmstudio.png";
 import LocalAiLogo from "@/media/llmprovider/localai.png";
 import MistralLogo from "@/media/llmprovider/mistral.jpeg";
 import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
+import PerplexityLogo from "@/media/llmprovider/perplexity.png";
 import ZillizLogo from "@/media/vectordbs/zilliz.png";
 import AstraDBLogo from "@/media/vectordbs/astraDB.png";
 import ChromaLogo from "@/media/vectordbs/chroma.png";
@@ -109,6 +110,14 @@ const LLM_SELECTION_PRIVACY = {
     ],
     logo: HuggingFaceLogo,
   },
+  perplexity: {
+    name: "Perplexity AI",
+    description: [
+      "Your chats will not be used for training",
+      "Your prompts and document text used in response creation are visible to Perplexity AI",
+    ],
+    logo: PerplexityLogo,
+  },
 };
 
 const VECTOR_DB_PRIVACY = {
diff --git a/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx b/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
index 6970dfa1f..296a28d9e 100644
--- a/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
+++ b/frontend/src/pages/OnboardingFlow/Steps/LLMPreference/index.jsx
@@ -11,6 +11,7 @@ import TogetherAILogo from "@/media/llmprovider/togetherai.png";
 import AnythingLLMIcon from "@/media/logo/anything-llm-icon.png";
 import MistralLogo from "@/media/llmprovider/mistral.jpeg";
 import HuggingFaceLogo from "@/media/llmprovider/huggingface.png";
+import PerplexityLogo from "@/media/llmprovider/perplexity.png";
 import OpenAiOptions from "@/components/LLMSelection/OpenAiOptions";
 import AzureAiOptions from "@/components/LLMSelection/AzureAiOptions";
 import AnthropicAiOptions from "@/components/LLMSelection/AnthropicAiOptions";
@@ -21,12 +22,13 @@ import GeminiLLMOptions from "@/components/LLMSelection/GeminiLLMOptions";
 import OllamaLLMOptions from "@/components/LLMSelection/OllamaLLMOptions";
 import MistralOptions from "@/components/LLMSelection/MistralOptions";
 import HuggingFaceOptions from "@/components/LLMSelection/HuggingFaceOptions";
+import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
+import PerplexityOptions from "@/components/LLMSelection/PerplexityOptions";
 import LLMItem from "@/components/LLMSelection/LLMItem";
 import System from "@/models/system";
 import paths from "@/utils/paths";
 import showToast from "@/utils/toast";
 import { useNavigate } from "react-router-dom";
-import TogetherAiOptions from "@/components/LLMSelection/TogetherAiOptions";
 
 const TITLE = "LLM Preference";
 const DESCRIPTION =
@@ -128,6 +130,14 @@ export default function LLMPreference({
       options: <MistralOptions settings={settings} />,
       description: "Run open source models from Mistral AI.",
     },
+    {
+      name: "Perplexity AI",
+      value: "perplexity",
+      logo: PerplexityLogo,
+      options: <PerplexityOptions settings={settings} />,
+      description:
+        "Run powerful and internet-connected models hosted by Perplexity AI.",
+    },
     {
       name: "Native",
       value: "native",
diff --git a/server/.env.example b/server/.env.example
index ec6abcac9..863486ad4 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -41,6 +41,10 @@ JWT_SECRET="my-random-string-for-seeding" # Please generate random string at lea
 # TOGETHER_AI_API_KEY='my-together-ai-key'
 # TOGETHER_AI_MODEL_PREF='mistralai/Mixtral-8x7B-Instruct-v0.1'
 
+# LLM_PROVIDER='perplexity'
+# PERPLEXITY_API_KEY='my-perplexity-key'
+# PERPLEXITY_MODEL_PREF='codellama-34b-instruct'
+
 # LLM_PROVIDER='mistral'
 # MISTRAL_API_KEY='example-mistral-ai-api-key'
 # MISTRAL_MODEL_PREF='mistral-tiny'
diff --git a/server/models/systemSettings.js b/server/models/systemSettings.js
index 29949d3d7..415448282 100644
--- a/server/models/systemSettings.js
+++ b/server/models/systemSettings.js
@@ -176,6 +176,18 @@ const SystemSettings = {
             TogetherAiApiKey: !!process.env.TOGETHER_AI_API_KEY,
             TogetherAiModelPref: process.env.TOGETHER_AI_MODEL_PREF,
 
+            // For embedding credentials when TogetherAI is selected.
+            OpenAiKey: !!process.env.OPEN_AI_KEY,
+            AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
+            AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
+            AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
+          }
+        : {}),
+      ...(llmProvider === "perplexity"
+        ? {
+            PerplexityApiKey: !!process.env.PERPLEXITY_API_KEY,
+            PerplexityModelPref: process.env.PERPLEXITY_MODEL_PREF,
+
            // For embedding credentials when Perplexity is selected.
             OpenAiKey: !!process.env.OPEN_AI_KEY,
             AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
diff --git a/server/utils/AiProviders/perplexity/index.js b/server/utils/AiProviders/perplexity/index.js
new file mode 100644
index 000000000..df20df203
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/index.js
@@ -0,0 +1,204 @@
+const { NativeEmbedder } = require("../../EmbeddingEngines/native");
+const { chatPrompt } = require("../../chats");
+const { handleDefaultStreamResponse } = require("../../helpers/chat/responses");
+
// Load the static Perplexity model catalog (generated by scripts/parse.mjs),
// defaulting to an empty map when the manifest exports nothing.
function perplexityModels() {
  const manifest = require("./models.js");
  return manifest.MODELS || {};
}
+
/**
 * LLM provider implementation for Perplexity AI. Perplexity exposes an
 * OpenAI-compatible chat completion API, so this class drives it through the
 * OpenAI SDK (v3 `Configuration`/`OpenAIApi`) pointed at api.perplexity.ai.
 */
class PerplexityLLM {
  /**
   * @param {object|null} embedder - Embedding engine; falls back to NativeEmbedder when null.
   * @param {string|null} modelPreference - Model id override; takes precedence over PERPLEXITY_MODEL_PREF.
   * @throws {Error} when PERPLEXITY_API_KEY is not set in the environment.
   */
  constructor(embedder = null, modelPreference = null) {
    const { Configuration, OpenAIApi } = require("openai");
    if (!process.env.PERPLEXITY_API_KEY)
      throw new Error("No Perplexity API key was set.");

    // Reuse the OpenAI SDK against Perplexity's OpenAI-compatible endpoint.
    const config = new Configuration({
      basePath: "https://api.perplexity.ai",
      apiKey: process.env.PERPLEXITY_API_KEY,
    });
    this.openai = new OpenAIApi(config);
    this.model =
      modelPreference || process.env.PERPLEXITY_MODEL_PREF || "pplx-7b-online"; // Give at least a unique model to the provider as last fallback.
    // Token-budget split of the model's context window used by the
    // message compressor: 15% history, 15% system prompt, 70% user prompt.
    this.limits = {
      history: this.promptWindowLimit() * 0.15,
      system: this.promptWindowLimit() * 0.15,
      user: this.promptWindowLimit() * 0.7,
    };

    this.embedder = !embedder ? new NativeEmbedder() : embedder;
    this.defaultTemp = 0.7;
  }

  // Render retrieved context chunks as numbered, delimited sections appended
  // to the system prompt. Returns "" when there is no context.
  #appendContext(contextTexts = []) {
    if (!contextTexts || !contextTexts.length) return "";
    return (
      "\nContext:\n" +
      contextTexts
        .map((text, i) => {
          return `[CONTEXT ${i}]:\n${text}\n[END CONTEXT ${i}]\n\n`;
        })
        .join("")
    );
  }

  // Full static model catalog (id -> { id, name, maxLength }).
  allModelInformation() {
    return perplexityModels();
  }

  // Streaming support is advertised by the presence of the stream methods;
  // `in` also walks the prototype chain, so class methods are detected.
  streamingEnabled() {
    return "streamChat" in this && "streamGetChatCompletion" in this;
  }

  // Context window (in tokens) for the selected model; 4096 when unknown.
  promptWindowLimit() {
    const availableModels = this.allModelInformation();
    return availableModels[this.model]?.maxLength || 4096;
  }

  // True only for model ids present in the static catalog.
  async isValidChatCompletionModel(model = "") {
    const availableModels = this.allModelInformation();
    return availableModels.hasOwnProperty(model);
  }

  /**
   * Build the OpenAI-style message array: system prompt (with appended
   * context), then prior chat history, then the new user prompt.
   */
  constructPrompt({
    systemPrompt = "",
    contextTexts = [],
    chatHistory = [],
    userPrompt = "",
  }) {
    const prompt = {
      role: "system",
      content: `${systemPrompt}${this.#appendContext(contextTexts)}`,
    };
    return [prompt, ...chatHistory, { role: "user", content: userPrompt }];
  }

  async isSafe(_input = "") {
    // Not implemented so must be stubbed — Perplexity has no moderation endpoint here.
    return { safe: true, reasons: [] };
  }

  /**
   * Non-streaming chat turn for a workspace conversation.
   * @returns {Promise<string>} assistant reply text.
   * @throws {Error} on invalid model or API failure.
   */
  async sendChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
    if (!(await this.isValidChatCompletionModel(this.model)))
      throw new Error(
        `Perplexity chat: ${this.model} is not valid for chat completion!`
      );

    const textResponse = await this.openai
      .createChatCompletion({
        model: this.model,
        temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
        n: 1,
        messages: await this.compressMessages(
          {
            systemPrompt: chatPrompt(workspace),
            userPrompt: prompt,
            chatHistory,
          },
          rawHistory
        ),
      })
      .then((json) => {
        const res = json.data;
        if (!res.hasOwnProperty("choices"))
          throw new Error("Perplexity chat: No results!");
        if (res.choices.length === 0)
          throw new Error("Perplexity chat: No results length!");
        return res.choices[0].message.content;
      })
      .catch((error) => {
        throw new Error(
          `Perplexity::createChatCompletion failed with: ${error.message}`
        );
      });

    return textResponse;
  }

  /**
   * Streaming variant of sendChat. Returns the axios stream response, which
   * is later consumed by handleStream().
   */
  async streamChat(chatHistory = [], prompt, workspace = {}, rawHistory = []) {
    if (!(await this.isValidChatCompletionModel(this.model)))
      throw new Error(
        `Perplexity chat: ${this.model} is not valid for chat completion!`
      );

    const streamRequest = await this.openai.createChatCompletion(
      {
        model: this.model,
        stream: true,
        temperature: Number(workspace?.openAiTemp ?? this.defaultTemp),
        n: 1,
        messages: await this.compressMessages(
          {
            systemPrompt: chatPrompt(workspace),
            userPrompt: prompt,
            chatHistory,
          },
          rawHistory
        ),
      },
      { responseType: "stream" }
    );
    return streamRequest;
  }

  /**
   * Low-level, non-streaming completion over a pre-built message array.
   * @returns {Promise<string|null>} reply text, or null when no choices.
   */
  async getChatCompletion(messages = null, { temperature = 0.7 }) {
    if (!(await this.isValidChatCompletionModel(this.model)))
      throw new Error(
        `Perplexity chat: ${this.model} is not valid for chat completion!`
      );

    const { data } = await this.openai
      .createChatCompletion({
        model: this.model,
        messages,
        temperature,
      })
      // NOTE(review): assumes an API error shape (e.response.data.error) —
      // a network failure without a response would throw a TypeError here; verify.
      .catch((e) => {
        throw new Error(e.response.data.error.message);
      });

    if (!data.hasOwnProperty("choices")) return null;
    return data.choices[0].message.content;
  }

  // Low-level streaming completion over a pre-built message array.
  async streamGetChatCompletion(messages = null, { temperature = 0.7 }) {
    if (!(await this.isValidChatCompletionModel(this.model)))
      throw new Error(
        `Perplexity chat: ${this.model} is not valid for chat completion!`
      );

    const streamRequest = await this.openai.createChatCompletion(
      {
        model: this.model,
        stream: true,
        messages,
        temperature,
      },
      { responseType: "stream" }
    );
    return streamRequest;
  }

  // Delegate SSE chunk handling to the shared OpenAI-compatible stream handler.
  handleStream(response, stream, responseProps) {
    return handleDefaultStreamResponse(response, stream, responseProps);
  }

  // Simple wrapper for dynamic embedder & normalize interface for all LLM implementations
  async embedTextInput(textInput) {
    return await this.embedder.embedTextInput(textInput);
  }
  async embedChunks(textChunks = []) {
    return await this.embedder.embedChunks(textChunks);
  }

  // Trim prompt + history to fit this.limits before sending to the API.
  async compressMessages(promptArgs = {}, rawHistory = []) {
    const { messageArrayCompressor } = require("../../helpers/chat");
    const messageArray = this.constructPrompt(promptArgs);
    return await messageArrayCompressor(this, messageArray, rawHistory);
  }
}
+
+module.exports = {
+  PerplexityLLM,
+  perplexityModels,
+};
diff --git a/server/utils/AiProviders/perplexity/models.js b/server/utils/AiProviders/perplexity/models.js
new file mode 100644
index 000000000..258cfeace
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/models.js
@@ -0,0 +1,49 @@
// Static catalog of Perplexity chat-completion models and their context
// windows (in tokens). Regenerate with scripts/parse.mjs when Perplexity
// updates their model cards.
const MODEL_CONTEXT_WINDOWS = [
  ["codellama-34b-instruct", 16384],
  ["codellama-70b-instruct", 16384],
  ["llama-2-70b-chat", 4096],
  ["mistral-7b-instruct", 8192],
  ["mixtral-8x7b-instruct", 8192],
  ["pplx-7b-chat", 8192],
  ["pplx-70b-chat", 8192],
  ["pplx-7b-online", 8192],
  ["pplx-70b-online", 8192],
];

// Expand the compact table into the id -> { id, name, maxLength } shape the
// provider expects; id doubles as the display name.
const MODELS = Object.fromEntries(
  MODEL_CONTEXT_WINDOWS.map(([id, maxLength]) => [
    id,
    { id, name: id, maxLength },
  ])
);
+
+module.exports.MODELS = MODELS;
diff --git a/server/utils/AiProviders/perplexity/scripts/.gitignore b/server/utils/AiProviders/perplexity/scripts/.gitignore
new file mode 100644
index 000000000..94a2dd146
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/scripts/.gitignore
@@ -0,0 +1 @@
+*.json
\ No newline at end of file
diff --git a/server/utils/AiProviders/perplexity/scripts/chat_models.txt b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
new file mode 100644
index 000000000..83f6d2a80
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/scripts/chat_models.txt
@@ -0,0 +1,11 @@
+| Model                     | Context Length | Model Type      |
+| :------------------------ | :------------- | :-------------- |
+| `codellama-34b-instruct`  | 16384          | Chat Completion |
+| `codellama-70b-instruct`  | 16384          | Chat Completion |
+| `llama-2-70b-chat`        | 4096           | Chat Completion |
+| `mistral-7b-instruct` [2] | 8192 [1]       | Chat Completion |
+| `mixtral-8x7b-instruct`   | 8192 [1]       | Chat Completion |
+| `pplx-7b-chat`            | 8192           | Chat Completion |
+| `pplx-70b-chat`           | 8192           | Chat Completion |
+| `pplx-7b-online`          | 8192           | Chat Completion |
+| `pplx-70b-online`         | 8192           | Chat Completion |
\ No newline at end of file
diff --git a/server/utils/AiProviders/perplexity/scripts/parse.mjs b/server/utils/AiProviders/perplexity/scripts/parse.mjs
new file mode 100644
index 000000000..749a63dce
--- /dev/null
+++ b/server/utils/AiProviders/perplexity/scripts/parse.mjs
@@ -0,0 +1,44 @@
+// Perplexity does not provide a simple REST API to get models,
+// so we have a table which we copy from their documentation
+// https://docs.perplexity.ai/edit/model-cards that we can
+// then parse and get all models from in a format that makes sense
+// Perplexity does not currently expose a models endpoint, so this manual sync is required.
+
+// To run, cd into this directory and run `node parse.mjs`
+// copy outputs into the export in ../models.js
+
+// Update the date below if you run this again because Perplexity added new models.
+// Last Collected: Feb 22, 2024
+
+import fs from "fs";
+
/**
 * Parse the markdown model table into a models map and write it out as JSON.
 *
 * Fix: the original crashed with a TypeError (`undefined.replace`) on blank
 * or malformed rows (e.g. a trailing newline in chat_models.txt); such rows
 * are now skipped. Also generalized (backward-compatibly) to accept the raw
 * table text and an output destination for easier reuse/testing.
 *
 * @param {string|null} tableString - Raw markdown table; when null the table
 *   is read from ./chat_models.txt (original behavior).
 * @param {string|null} outputFile - JSON destination; pass null to skip the write.
 * @returns {Object} map of model id -> { id, name, maxLength }
 */
function parseChatModels(tableString = null, outputFile = "chat_models.json") {
  const models = {};
  const source =
    tableString ?? fs.readFileSync("chat_models.txt", { encoding: "utf-8" });
  // The first two rows are the markdown header and separator lines.
  const rows = source.split("\n").slice(2);

  rows.forEach((row) => {
    const cells = row
      .split("|")
      .slice(1, -1)
      .map((text) => text.trim());
    if (cells.length < 2) return; // skip blank or malformed rows
    let [model, contextLength] = cells;
    // Strip backticks and footnote markers like "[1]" from both columns.
    model = model.replace(/`|\s*\[\d+\]\s*/g, "");
    const maxLength = Number(contextLength.replace(/\s*\[\d+\]\s*/g, ""));
    if (model && maxLength) {
      models[model] = {
        id: model,
        name: model,
        maxLength: maxLength,
      };
    }
  });

  if (outputFile) {
    fs.writeFileSync(outputFile, JSON.stringify(models, null, 2), "utf-8");
  }
  return models;
}
+
+parseChatModels();
diff --git a/server/utils/helpers/customModels.js b/server/utils/helpers/customModels.js
index 53c641e75..8f8ca0657 100644
--- a/server/utils/helpers/customModels.js
+++ b/server/utils/helpers/customModels.js
@@ -1,3 +1,4 @@
+const { perplexityModels } = require("../AiProviders/perplexity");
 const { togetherAiModels } = require("../AiProviders/togetherAi");
 const SUPPORT_CUSTOM_MODELS = [
   "openai",
@@ -6,6 +7,7 @@ const SUPPORT_CUSTOM_MODELS = [
   "native-llm",
   "togetherai",
   "mistral",
+  "perplexity",
 ];
 
 async function getCustomModels(provider = "", apiKey = null, basePath = null) {
@@ -25,6 +27,8 @@ async function getCustomModels(provider = "", apiKey = null, basePath = null) {
       return await getMistralModels(apiKey);
     case "native-llm":
       return nativeLLMModels();
+    case "perplexity":
+      return await getPerplexityModels();
     default:
       return { models: [], error: "Invalid provider for custom models" };
   }
@@ -120,6 +124,20 @@ async function getTogetherAiModels() {
   return { models, error: null };
 }
 
/**
 * List the known Perplexity models in the generic custom-models shape.
 *
 * Fix: the original guard `if (!Object.keys(knownModels).length === 0)`
 * compared a boolean to 0 and was therefore always false — the intended
 * early return for an empty catalog never fired.
 *
 * @returns {Promise<{models: Array<{id: string, name: string}>, error: null}>}
 */
async function getPerplexityModels() {
  const knownModels = perplexityModels();
  if (Object.keys(knownModels).length === 0)
    return { models: [], error: null };

  const models = Object.values(knownModels).map((model) => {
    return {
      id: model.id,
      name: model.name,
    };
  });
  return { models, error: null };
}
+
 async function getMistralModels(apiKey = null) {
   const { Configuration, OpenAIApi } = require("openai");
   const config = new Configuration({
diff --git a/server/utils/helpers/index.js b/server/utils/helpers/index.js
index 42ed262f9..818d92dbc 100644
--- a/server/utils/helpers/index.js
+++ b/server/utils/helpers/index.js
@@ -58,6 +58,9 @@ function getLLMProvider(modelPreference = null) {
     case "togetherai":
       const { TogetherAiLLM } = require("../AiProviders/togetherAi");
       return new TogetherAiLLM(embedder, modelPreference);
+    case "perplexity":
+      const { PerplexityLLM } = require("../AiProviders/perplexity");
+      return new PerplexityLLM(embedder, modelPreference);
     case "mistral":
       const { MistralLLM } = require("../AiProviders/mistral");
       return new MistralLLM(embedder, modelPreference);
diff --git a/server/utils/helpers/updateENV.js b/server/utils/helpers/updateENV.js
index f89a193f6..5a384740b 100644
--- a/server/utils/helpers/updateENV.js
+++ b/server/utils/helpers/updateENV.js
@@ -239,6 +239,16 @@ const KEY_MAPPING = {
     checks: [isNotEmpty],
   },
 
+  // Perplexity Options
+  PerplexityApiKey: {
+    envKey: "PERPLEXITY_API_KEY",
+    checks: [isNotEmpty],
+  },
+  PerplexityModelPref: {
+    envKey: "PERPLEXITY_MODEL_PREF",
+    checks: [isNotEmpty],
+  },
+
   // System Settings
   AuthToken: {
     envKey: "AUTH_TOKEN",
@@ -314,6 +324,7 @@ function supportedLLM(input = "") {
     "togetherai",
     "mistral",
     "huggingface",
+    "perplexity",
   ].includes(input);
   return validSelection ? null : `${input} is not a valid LLM provider.`;
 }