0
0
mirror of https://github.com/MIDORIBIN/langchain-gpt4free.git synced 2024-12-23 19:22:58 +03:00

fix: fix import statement (Model -> models)

fix #1
This commit is contained in:
MIDORIBIN 2023-08-13 12:28:24 +09:00
parent 81c176cfbd
commit 138abc4a30
5 changed files with 16 additions and 15 deletions

View File

@@ -1,7 +1,8 @@
from types import ModuleType
from typing import Optional, List, Any, Mapping, Union
from typing import Any, List, Mapping, Optional, Union
import g4f
from g4f import ChatCompletion
from g4f.models import Model
from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens
@@ -9,7 +10,7 @@ from langchain.llms.utils import enforce_stop_tokens
class G4FLLM(LLM):
# Model.model or str
model: Union[type, str]
model: Union[Model, str]
# Provider.Provider
provider: Optional[ModuleType] = None
auth: Optional[Union[str, bool]] = None
@@ -34,7 +35,7 @@ class G4FLLM(LLM):
if self.auth is not None:
create_kwargs["auth"] = self.auth
text = g4f.ChatCompletion.create(
text = ChatCompletion.create(
messages=[{"role": "user", "content": prompt}],
**create_kwargs,
)

View File

@@ -1,4 +1,4 @@
from g4f import Provider, Model
from g4f import Provider, models
from langchain.llms.base import LLM
from langchain_g4f import G4FLLM
@@ -6,7 +6,7 @@ from langchain_g4f import G4FLLM
def main():
llm: LLM = G4FLLM(
model=Model.gpt_35_turbo,
model=models.gpt_35_turbo,
provider=Provider.Aichat,
)

View File

@@ -1,6 +1,6 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from g4f import Provider, models
from langchain import PromptTemplate
from langchain.llms.base import LLM
from langchain_g4f import G4FLLM
@@ -10,7 +10,7 @@ def main():
prompt_template = PromptTemplate(template=template, input_variables=["fruit"])
llm: LLM = G4FLLM(
model=Model.gpt_35_turbo,
model=models.gpt_35_turbo,
provider=Provider.Aichat,
)

View File

@@ -1,14 +1,14 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from g4f import Provider, models
from langchain import PromptTemplate
from langchain.chains import LLMChain, SimpleSequentialChain
from langchain.llms.base import LLM
from langchain_g4f import G4FLLM
def main():
llm: LLM = G4FLLM(
model=Model.gpt_35_turbo,
model=models.gpt_35_turbo,
provider=Provider.DeepAi,
)

View File

@@ -1,14 +1,14 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from g4f import Provider, models
from langchain import PromptTemplate
from langchain.chains import LLMChain
from langchain.llms.base import LLM
from langchain_g4f import G4FLLM
def main():
llm: LLM = G4FLLM(
model=Model.gpt_35_turbo,
model=models.gpt_35_turbo,
provider=Provider.Aichat,
)
prompt_template = PromptTemplate(