0
0
mirror of https://github.com/MIDORIBIN/langchain-gpt4free.git synced 2025-10-10 21:49:38 +03:00

add: sample

This commit is contained in:
MIDORIBIN
2023-07-23 10:20:03 +09:00
parent 5883e4d381
commit 2797d90380
5 changed files with 89 additions and 2 deletions

18
sample/llm_sample.py Normal file
View File

@@ -0,0 +1,18 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain_g4f import G4FLLM
def main() -> None:
    """Send a single greeting to a g4f-backed LLM and print its reply."""
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.Aichat,
    )
    reply = llm("hello")
    # Typical reply: "Hello! How can I assist you today?"
    print(reply)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,27 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain import PromptTemplate
from langchain_g4f import G4FLLM
def main() -> None:
    """Ask the LLM for the color of two fruits using one prompt template."""
    prompt = PromptTemplate(
        template="What color is the {fruit}?",
        input_variables=["fruit"],
    )
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.Aichat,
    )
    # Typical answers:
    #   apple -> "The color of an apple can vary, but it is typically red,
    #            green, or yellow."
    #   lemon -> "The color of a lemon is typically yellow."
    for fruit in ("apple", "lemon"):
        print(llm(prompt.format(fruit=fruit)))


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,35 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain import PromptTemplate
from langchain.chains import LLMChain, SimpleSequentialChain
from langchain_g4f import G4FLLM
def main() -> None:
    """Run two chained prompts: pick an attraction, then ask for a route.

    The first chain names one tourist attraction in the given location; its
    output is fed into the second chain, which asks for the train route from
    Tokyo Station to that attraction.
    """
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.DeepAi,
    )
    attraction_chain = LLMChain(
        llm=llm,
        prompt=PromptTemplate(
            input_variables=["location"],
            template="Please tell us one tourist attraction in {location}.",
        ),
    )
    route_chain = LLMChain(
        llm=llm,
        prompt=PromptTemplate(
            input_variables=["location"],
            template="What is the train route from Tokyo Station to {location}?",
        ),
    )
    # verbose=True makes the chain print each intermediate step.
    pipeline = SimpleSequentialChain(
        chains=[attraction_chain, route_chain], verbose=True
    )
    print(pipeline("tokyo"))


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,23 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain import PromptTemplate
from langchain.chains import LLMChain
from langchain_g4f import G4FLLM
def main() -> None:
    """Ask a single LLMChain for the best tourist attraction in a location."""
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.Aichat,
    )
    prompt = PromptTemplate(
        input_variables=["location"],
        template="Where is the best tourist attraction in {location}?",
    )
    chain = LLMChain(llm=llm, prompt=prompt)
    print(chain("tokyo"))


if __name__ == "__main__":
    main()