new fix, new release
- CHANGELOG.md +5 -0
- streamlit_app.py +2 -2
CHANGELOG.md
CHANGED
@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
+## [0.4.2] - 2024-08-23
+
+### Fixed
++ Correct invalid dependency of promptlayer slipped in the build
+
 ## [0.4.1] - 2024-08-23
 
 ### Added
streamlit_app.py
CHANGED
@@ -6,7 +6,7 @@ from tempfile import NamedTemporaryFile
 import dotenv
 from grobid_quantities.quantities import QuantitiesAPI
 from langchain.memory import ConversationBufferWindowMemory
-from langchain_community.callbacks import PromptLayerCallbackHandler
+# from langchain_community.callbacks import PromptLayerCallbackHandler
 from langchain_community.chat_models import ChatOpenAI
 from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint
 from langchain_huggingface import HuggingFaceEmbeddings
@@ -174,7 +174,7 @@ def init_qa(model, embeddings_name=None, api_key=None):
             temperature=0.01,
             max_new_tokens=4092,
             model_kwargs={"max_length": 8192},
-            callbacks=[PromptLayerCallbackHandler(pl_tags=[model, "document-qa"])]
+            # callbacks=[PromptLayerCallbackHandler(pl_tags=[model, "document-qa"])]
         )
         embeddings = HuggingFaceEmbeddings(
             model_name=OPEN_EMBEDDINGS[embeddings_name])
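The fix disables the PromptLayer integration by commenting it out, so the build no longer pulls in the broken promptlayer dependency. If the tracing is wanted back later, one alternative is to treat promptlayer as an optional dependency and attach the callback only when it is installed. The sketch below is hypothetical and not what this commit does; the helper name build_callbacks and the _HAS_PROMPTLAYER flag are illustrative, not part of the repository.

# Hypothetical alternative to commenting the code out: keep PromptLayer optional.
# `promptlayer` is the optional pip package; if it is missing, the app simply
# runs without the callback instead of failing at build or run time.
try:
    import promptlayer  # noqa: F401
    from langchain_community.callbacks import PromptLayerCallbackHandler
    _HAS_PROMPTLAYER = True
except ImportError:
    _HAS_PROMPTLAYER = False


def build_callbacks(model: str) -> list:
    """Return the callback list to pass when constructing the LLM client."""
    if _HAS_PROMPTLAYER:
        # Same tags as the call that this commit comments out in init_qa().
        return [PromptLayerCallbackHandler(pl_tags=[model, "document-qa"])]
    return []

With such a guard, init_qa() could pass callbacks=build_callbacks(model) to HuggingFaceEndpoint instead of carrying commented-out lines, at the cost of a little extra indirection.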