# Mirror of https://github.com/MIDORIBIN/langchain-gpt4free.git
# (synced 2024-12-24 11:34:39 +03:00)
# File: langchain_g4f/G4FLLM.py
from types import ModuleType
from typing import Any, List, Mapping, Optional, Union
# 2023-07-08 11:37:54 +03:00
from g4f import ChatCompletion
from g4f.models import Model
# 2023-07-08 11:37:54 +03:00
from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens
class G4FLLM(LLM):
    """LangChain LLM wrapper around the g4f ``ChatCompletion`` API.

    Forwards a single-turn user prompt to ``g4f.ChatCompletion.create`` and
    returns the completion text, optionally truncated at stop tokens.
    """

    # Model to use: a g4f ``Model`` instance or a model-name string.
    model: Union[Model, str]
    # Optional g4f provider module (e.g. a ``g4f.Provider`` submodule).
    provider: Optional[ModuleType] = None
    # Optional auth token or flag, passed through to ``ChatCompletion.create``.
    auth: Optional[Union[str, bool]] = None
    # Extra keyword arguments forwarded verbatim to ``ChatCompletion.create``.
    create_kwargs: Optional[dict] = None

    @property
    def _llm_type(self) -> str:
        """Identifier LangChain reports for this LLM implementation."""
        return "custom"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Send ``prompt`` to g4f and return the completion text.

        Args:
            prompt: The user message sent as a single-turn chat.
            stop: Optional stop tokens; the completion is truncated at the
                first occurrence via ``enforce_stop_tokens``.
            run_manager: LangChain callback manager (unused here).
            **kwargs: Accepted for LangChain compatibility (unused here).

        Returns:
            The completion text produced by ``ChatCompletion.create``.
        """
        # Copy so repeated calls never mutate the instance's create_kwargs.
        create_kwargs = {} if self.create_kwargs is None else self.create_kwargs.copy()
        if self.model is not None:
            create_kwargs["model"] = self.model
        if self.provider is not None:
            create_kwargs["provider"] = self.provider
        if self.auth is not None:
            create_kwargs["auth"] = self.auth

        text = ChatCompletion.create(
            messages=[{"role": "user", "content": prompt}],
            **create_kwargs,
        )
        if stop is not None:
            # Honor the LLM contract: cut the completion at the first stop token.
            text = enforce_stop_tokens(text, stop)
        return text

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            "model": self.model,
            "provider": self.provider,
            "auth": self.auth,
            "create_kwargs": self.create_kwargs,
        }