0
0
mirror of https://github.com/MIDORIBIN/langchain-gpt4free.git synced 2024-12-24 03:32:57 +03:00

Merge pull request #8 from AntonioSabbatellaUni/main

Implemented a retry mechanism (up to 5 attempts) in the G4FLLM.py script.
This commit is contained in:
MIDORIBIN 2023-09-15 21:02:14 +09:00 committed by GitHub
commit 651e8edb7c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 20 additions and 10 deletions

1
.gitignore vendored
View File

@@ -159,3 +159,4 @@ cython_debug/
# option (not recommended) you can uncomment the following to ignore the entire idea folder. # option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/ .idea/
.vscode/ .vscode/
test.py

View File

@@ -8,7 +8,7 @@ from langchain.callbacks.manager import CallbackManagerForLLMRun, AsyncCallbackM
from langchain.llms.base import LLM from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens from langchain.llms.utils import enforce_stop_tokens
MAX_TRIES = 5
class G4FLLM(LLM): class G4FLLM(LLM):
model: Union[Model, str] model: Union[Model, str]
provider: Optional[type[BaseProvider]] = None provider: Optional[type[BaseProvider]] = None
@@ -33,6 +33,8 @@ class G4FLLM(LLM):
if self.auth is not None: if self.auth is not None:
create_kwargs["auth"] = self.auth create_kwargs["auth"] = self.auth
for i in range(MAX_TRIES):
try:
text = ChatCompletion.create( text = ChatCompletion.create(
messages=[{"role": "user", "content": prompt}], messages=[{"role": "user", "content": prompt}],
**create_kwargs, **create_kwargs,
@@ -42,7 +44,14 @@ class G4FLLM(LLM):
text = text if type(text) is str else "".join(text) text = text if type(text) is str else "".join(text)
if stop is not None: if stop is not None:
text = enforce_stop_tokens(text, stop) text = enforce_stop_tokens(text, stop)
if text:
return text return text
print(f"Empty response, trying {i+1} of {MAX_TRIES}")
except Exception as e:
print(f"Error in G4FLLM._call: {e}, trying {i+1} of {MAX_TRIES}")
return ""
async def _acall(self, prompt: str, stop: Optional[List[str]] = None, run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, **kwargs: Any) -> str: async def _acall(self, prompt: str, stop: Optional[List[str]] = None, run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, **kwargs: Any) -> str:
create_kwargs = {} if self.create_kwargs is None else self.create_kwargs.copy() create_kwargs = {} if self.create_kwargs is None else self.create_kwargs.copy()
create_kwargs["model"] = self.model create_kwargs["model"] = self.model