mirror of https://github.com/MIDORIBIN/langchain-gpt4free.git
synced 2025-10-10 21:49:38 +03:00
add: sample
sample/llm_sample.py (new file)
@@ -0,0 +1,18 @@
from g4f import Provider, Model
from langchain.llms.base import LLM

from langchain_g4f import G4FLLM


def main():
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.Aichat,
    )

    res = llm("hello")
    print(res)  # Hello! How can I assist you today?


if __name__ == "__main__":
    main()
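
Since G4FLLM subclasses the standard LangChain LLM base class, the batch API should work as well. A minimal sketch, assuming the same model/provider pair as in llm_sample.py (the function name batch_sample is hypothetical, not part of this commit):

from g4f import Provider, Model
from langchain.llms.base import LLM

from langchain_g4f import G4FLLM


def batch_sample():
    # Hypothetical helper; same configuration as llm_sample.py above.
    llm: LLM = G4FLLM(model=Model.gpt_35_turbo, provider=Provider.Aichat)

    # LLM.generate() takes a list of prompts and returns an LLMResult;
    # result.generations holds one list of candidate outputs per prompt.
    result = llm.generate(["hello", "name one primary color"])
    for candidates in result.generations:
        print(candidates[0].text)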
sample/prompt_template_sample.py (new file)
@@ -0,0 +1,27 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain import PromptTemplate

from langchain_g4f import G4FLLM


def main():
    template = "What color is the {fruit}?"
    prompt_template = PromptTemplate(template=template, input_variables=["fruit"])

    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.Aichat,
    )

    res = llm(prompt_template.format(fruit="apple"))
    print(res)
    # The color of an apple can vary, but it is typically red, green, or yellow.

    res = llm(prompt_template.format(fruit="lemon"))
    print(res)
    # The color of a lemon is typically yellow.


if __name__ == "__main__":
    main()
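
The prompt half of this sample can be checked without any provider at all, since PromptTemplate is plain string interpolation. A minimal offline sketch:

from langchain import PromptTemplate

# No LLM involved: format() only substitutes the template variables.
prompt_template = PromptTemplate(
    template="What color is the {fruit}?", input_variables=["fruit"]
)
print(prompt_template.format(fruit="apple"))  # What color is the apple?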
sample/sequential_chain_sample.py (new file)
@@ -0,0 +1,35 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain import PromptTemplate
from langchain.chains import LLMChain, SimpleSequentialChain

from langchain_g4f import G4FLLM


def main():
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.DeepAi,
    )

    prompt_template_1 = PromptTemplate(
        input_variables=["location"],
        template="Please tell us one tourist attraction in {location}.",
    )
    chain_1 = LLMChain(llm=llm, prompt=prompt_template_1)

    prompt_template_2 = PromptTemplate(
        input_variables=["location"],
        template="What is the train route from Tokyo Station to {location}?",
    )
    chain_2 = LLMChain(llm=llm, prompt=prompt_template_2)

    simple_sequential_chain = SimpleSequentialChain(
        chains=[chain_1, chain_2], verbose=True
    )

    print(simple_sequential_chain("tokyo"))


if __name__ == "__main__":
    main()
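
Because free providers can be flaky, the chaining behaviour itself can be verified offline by swapping LangChain's FakeListLLM in for G4FLLM. This substitution is not part of the committed sample; a sketch, assuming the classic langchain package these samples use:

from langchain import PromptTemplate
from langchain.chains import LLMChain, SimpleSequentialChain
from langchain.llms.fake import FakeListLLM


def main():
    # FakeListLLM returns canned responses in order, so no network is needed.
    llm = FakeListLLM(responses=["Tokyo Tower", "Take the Yamanote Line."])

    chain_1 = LLMChain(
        llm=llm,
        prompt=PromptTemplate(
            input_variables=["location"],
            template="Please tell us one tourist attraction in {location}.",
        ),
    )
    chain_2 = LLMChain(
        llm=llm,
        prompt=PromptTemplate(
            input_variables=["location"],
            template="What is the train route from Tokyo Station to {location}?",
        ),
    )

    # SimpleSequentialChain feeds chain_1's output ("Tokyo Tower")
    # into chain_2's single input variable.
    chain = SimpleSequentialChain(chains=[chain_1, chain_2], verbose=True)
    print(chain("tokyo"))


if __name__ == "__main__":
    main()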
sample/simple_chain_sample.py (new file)
@@ -0,0 +1,23 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain import PromptTemplate
from langchain.chains import LLMChain

from langchain_g4f import G4FLLM


def main():
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.Aichat,
    )
    prompt_template = PromptTemplate(
        input_variables=["location"],
        template="Where is the best tourist attraction in {location}?",
    )
    chain = LLMChain(llm=llm, prompt=prompt_template)
    print(chain("tokyo"))


if __name__ == "__main__":
    main()
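
One detail worth noting about this sample's output: calling a chain like chain("tokyo") returns a dict of the inputs plus the output key, while chain.run("tokyo") returns only the generated text. A quick sketch, again using the offline FakeListLLM substitution rather than a live provider:

from langchain import PromptTemplate
from langchain.chains import LLMChain
from langchain.llms.fake import FakeListLLM

# Two canned responses, because the chain is invoked twice below.
llm = FakeListLLM(responses=["Senso-ji Temple in Asakusa."] * 2)
chain = LLMChain(
    llm=llm,
    prompt=PromptTemplate(
        input_variables=["location"],
        template="Where is the best tourist attraction in {location}?",
    ),
)

print(chain("tokyo"))      # {'location': 'tokyo', 'text': 'Senso-ji Temple in Asakusa.'}
print(chain.run("tokyo"))  # Senso-ji Temple in Asakusa.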