feat: Migrate from OpenAI to Google Gemini Pro
Replaces OpenAI's language models with Google's Gemini Pro for enhanced performance and capabilities. The code now utilizes `ChatGoogleGenerativeAI` and leverages environment variables for API key management.
djpapzin committed Aug 14, 2024
1 parent 4402666 commit 58c0a91
Showing 2 changed files with 26 additions and 6 deletions.
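Not part of the diff, but implied by it: both files now read the Gemini key from the environment via python-dotenv. A minimal sketch of the assumed local setup check (the GEMINI_API_KEY name and the .env convention come from the diff below; treating a missing key as a hard error is this sketch's own choice, not something the commit does):

    # Sketch only, not committed code: confirm the key both files expect is present.
    # Assumes a .env file in the working directory containing GEMINI_API_KEY=<your key>.
    import os
    from dotenv import load_dotenv

    load_dotenv()  # python-dotenv reads .env into the process environment
    if not os.getenv("GEMINI_API_KEY"):
        raise RuntimeError("GEMINI_API_KEY is not set; the ChatGoogleGenerativeAI calls below will fail")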
12 changes: 10 additions & 2 deletions Projects/2. Large Language Models and LangChain/few_short.py
@@ -1,7 +1,15 @@
 from langchain import PromptTemplate
 from langchain import FewShotPromptTemplate
-from langchain.chat_models import ChatOpenAI
+from langchain_google_genai import ChatGoogleGenerativeAI  # Import ChatGoogleGenerativeAI
 from langchain import LLMChain
+from dotenv import load_dotenv
+import os
+
+# Load environment variables from .env file
+load_dotenv()
+
+# Create ChatGoogleGenerativeAI instance
+llm = ChatGoogleGenerativeAI(model="gemini-pro", google_api_key=os.getenv("GEMINI_API_KEY"))
 
 # create our examples dictionery
 examples = [
@@ -49,7 +57,7 @@
 )
 
 # load the model
-chat = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.9)
+chat = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.9, google_api_key=os.getenv("GEMINI_API_KEY"))  # Use 'gemini-pro' instead of 'gemini'
 
 chain = LLMChain(llm=chat, prompt=few_shot_prompt_template, verbose=True)
 
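The rest of few_short.py is collapsed in this view. As a hedged usage sketch only (the query string is illustrative and not from the file; LLMChain.run accepts a plain string when the prompt template has a single input variable):

    # Hypothetical call, not shown in the commit: exercise the migrated few-shot chain once.
    response = chain.run("What is the secret to happiness?")
    print(response)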
20 changes: 16 additions & 4 deletions Projects/3. Learning How to Prompt/chain_prompting.py
@@ -1,8 +1,20 @@
 from langchain import PromptTemplate, LLMChain
-from langchain.llms import OpenAI
+from langchain_google_genai import ChatGoogleGenerativeAI
+import os
+from dotenv import load_dotenv
+
 
+# Load environment variables from .env file
+load_dotenv()
 
 # Initialize LLM
-llm = OpenAI(model_name="text-davinci-003", temperature=0)
+api_key = os.getenv("GEMINI_API_KEY")
+llm = ChatGoogleGenerativeAI(model="gemini-pro", google_api_key=api_key)
+print(
+    llm.invoke(
+        "What are some of the pros and cons of Python as a programming language?"
+    )
+)
+
 # Prompt 1
 template_question = """What is the name of the famous scientist who developed the theory of general relativity?
@@ -36,5 +48,5 @@
 # Run the LLMChain for the second prompt
 response_fact = chain_fact.run(input_data)
 
-print("Scientist:", scientist)
-print("Fact:", response_fact)
+print("Scientist:", scientist.text)
+print("Fact:", response_fact.text)
