0
0
mirror of https://github.com/MIDORIBIN/langchain-gpt4free.git synced 2024-12-24 03:32:57 +03:00

add: sample

This commit is contained in:
MIDORIBIN 2023-07-23 10:20:03 +09:00
parent 5883e4d381
commit 2797d90380
5 changed files with 89 additions and 2 deletions

View File

@ -39,6 +39,8 @@ if __name__ == '__main__':
The above sample code demonstrates the basic usage of langchain_g4f. Choose the appropriate model and provider, initialize the LLM, and then pass input text to the LLM object to obtain the result.
For other samples, please refer to the following [sample directory](./sample/).
## Support and Bug Reports
For support and bug reports, please use the GitHub Issues page.

View File

@ -10,9 +10,9 @@ def main():
        provider=Provider.Aichat,
    )
    res = llm("hello")
    print(res)  # Hello! How can I assist you today?

if __name__ == "__main__":
    main()

View File

@ -0,0 +1,27 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain import PromptTemplate
from langchain_g4f import G4FLLM
def main():
    """Ask the model for the color of several fruits via a prompt template.

    Demonstrates combining a LangChain ``PromptTemplate`` with the
    gpt4free-backed ``G4FLLM``.
    """
    prompt = PromptTemplate(
        template="What color is the {fruit}?", input_variables=["fruit"]
    )
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.Aichat,
    )
    # Expected answers, roughly:
    #   apple -> "The color of an apple can vary, but it is typically red,
    #             green, or yellow."
    #   lemon -> "The color of a lemon is typically yellow."
    for fruit in ("apple", "lemon"):
        answer = llm(prompt.format(fruit=fruit))
        print(answer)


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,35 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain import PromptTemplate
from langchain.chains import LLMChain, SimpleSequentialChain
from langchain_g4f import G4FLLM
def main():
    """Chain two prompts with ``SimpleSequentialChain``.

    The first chain names one tourist attraction in the given location; its
    answer is then fed into the second chain, which asks for the train route
    from Tokyo Station to that attraction.
    """
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.DeepAi,
    )
    # Step 1: pick a single attraction for the input location.
    attraction_chain = LLMChain(
        llm=llm,
        prompt=PromptTemplate(
            input_variables=["location"],
            template="Please tell us one tourist attraction in {location}.",
        ),
    )
    # Step 2: ask how to get there (receives step 1's output as {location}).
    route_chain = LLMChain(
        llm=llm,
        prompt=PromptTemplate(
            input_variables=["location"],
            template="What is the train route from Tokyo Station to {location}?",
        ),
    )
    pipeline = SimpleSequentialChain(
        chains=[attraction_chain, route_chain], verbose=True
    )
    print(pipeline("tokyo"))


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,23 @@
from g4f import Provider, Model
from langchain.llms.base import LLM
from langchain import PromptTemplate
from langchain.chains import LLMChain
from langchain_g4f import G4FLLM
def main():
    """Run a single ``LLMChain`` asking for the best tourist attraction."""
    llm: LLM = G4FLLM(
        model=Model.gpt_35_turbo,
        provider=Provider.Aichat,
    )
    question = PromptTemplate(
        input_variables=["location"],
        template="Where is the best tourist attraction in {location}?",
    )
    # Calling the chain returns a dict with the input and the model's answer.
    print(LLMChain(llm=llm, prompt=question)("tokyo"))


if __name__ == "__main__":
    main()