Commit 395f4de by Hisab Cloud
Upload app.py
1 Parent(s): 7bb6a88

app.py CHANGED
@@ -63,20 +63,20 @@ def display_chat_history(chain):
 def create_conversational_chain(vector_store):
     load_dotenv()
     # Create llm
-
-
-
-
+    llm = CTransformers(model="llama-2-7b-chat.ggmlv3.q4_0.bin",
+                        streaming=True,
+                        callbacks=[StreamingStdOutCallbackHandler()],
+                        model_type="llama", config={'max_new_tokens': 500, 'temperature': 0.01})
     # llm = Replicate(
     #     streaming = True,
     #     model = "meta/llama-2-70b:a52e56fee2269a78c9279800ec88898cecb6c8f1df22a6483132bea266648f00",
     #     callbacks=[StreamingStdOutCallbackHandler()],
     #     input = {"temperature": 0.75, "max_length" :500,"top_p":1})
-    llm = GooglePalm(
-
-
-
-
+    # llm = GooglePalm(
+    #     streaming = True,
+    #     model = "google/flan-t5-xxl", # "models/text-bison-001"
+    #     callbacks=[StreamingStdOutCallbackHandler()],
+    #     input = {"temperature": 0.7, "max_length" :800,"top_p":1})
     # llm = AzureOpenAI(
     #     streaming = True,
     #     deployment_name="HCloudChat",
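The commit swaps the active model in create_conversational_chain from GooglePalm to a local Llama 2 chat model loaded through CTransformers, keeping Replicate, GooglePalm, and AzureOpenAI as commented-out alternatives. Only the llm construction is visible in this hunk; below is a rough sketch of how such an llm is commonly combined with the vector_store argument in a LangChain ConversationalRetrievalChain. The memory and retriever wiring are assumptions for illustration and do not appear in the diff.

from langchain.llms import CTransformers
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory

def create_conversational_chain(vector_store):
    # Local Llama 2 7B chat model in GGML format, run via ctransformers (as in this commit)
    llm = CTransformers(model="llama-2-7b-chat.ggmlv3.q4_0.bin",
                        streaming=True,
                        callbacks=[StreamingStdOutCallbackHandler()],
                        model_type="llama",
                        config={'max_new_tokens': 500, 'temperature': 0.01})
    # Assumed wiring, not shown in the diff: chat memory plus retrieval over the app's vector store
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        chain_type="stuff",
        retriever=vector_store.as_retriever(search_kwargs={"k": 2}),
        memory=memory)
    return chain

With this setup the returned chain can be called as chain({"question": "..."}), and the low temperature (0.01) keeps answers close to the retrieved context rather than free-form generation.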