AI for every developer
Build with state-of-the-art generative AI models and tools to make AI helpful for everyone
Build with the Gemini API
Easily integrate Google’s largest and most capable AI model into your apps
# Configure the Gemini client with the API key taken from the environment.
import os

import google.generativeai as genai

genai.configure(api_key=os.environ["GEMINI_API_KEY"])

# Create a handle to the flash model and request a single text completion.
flash_model = genai.GenerativeModel(model_name="gemini-1.5-flash")
reply = flash_model.generate_content("Explain how AI works")
print(reply.text)
// Initialize the Gemini client with the API key from the environment.
const { GoogleGenerativeAI } = require("@google/generative-ai");

const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY);

/**
 * Requests a single text completion from the flash model and prints it.
 * @returns {Promise<void>}
 */
async function run() {
  const model = genAI.getGenerativeModel({ model: "gemini-1.5-flash" });
  const result = await model.generateContent(["Explain how AI works"]);
  console.log(result.response.text());
}

// Fix: the original called run() as a floating promise — any API failure
// became an unhandled rejection. Attach a rejection handler.
run().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
# Call the Gemini REST API directly; GEMINI_API_KEY must be set in the environment.
# Fixes: quote the URL (it contains `?`, which the shell may glob-expand) and
# drop the trailing comma after the "text" part — strict JSON (RFC 8259)
# forbids trailing commas and strict parsers reject the payload.
curl "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent?key=${GEMINI_API_KEY}" \
  -H 'Content-Type: application/json' \
  -d '{
    "contents":[{
      "parts":[
        {"text": "Explain how AI works"}
      ]
    }]
  }'
import "github.com/google/generative-ai-go/genai"
import "google.golang.org/api/option"
ctx := context.Background()
client, err := genai.NewClient(ctx, option.WithAPIKey(os.Getenv("GEMINI_API_KEY")))
model := client.GenerativeModel("gemini-1.5-flash")
resp, err := model.GenerateContent(ctx,genai.Text("Explain how AI works"))
// Kotlin SDK: create a handle to the flash model and generate one completion.
// NOTE(review): the Android SDK constructor normally also takes an apiKey
// argument — confirm against the full sample this snippet was taken from.
val model = GenerativeModel("gemini-1.5-flash")
// generateContent is a suspend call; presumably this runs inside a coroutine
// scope not shown in this fragment — verify against the caller.
val response = model.generateContent(content {
text("Explain how AI works")
})
// Swift SDK: build a model handle and await a single text generation.
// NOTE(review): `try await` requires an enclosing async context and error
// handling not shown in this fragment — confirm against the full sample.
let model = GenerativeModel(name: "gemini-1.5-flash")
let response = try await model.generateContent("Explain how AI works")
// Dart SDK: create the model with an API key and request one completion.
// NOTE(review): `apiKey` is defined outside this snippet, and `await`
// requires an enclosing async function — both depend on unseen context.
final model = GenerativeModel(model: "gemini-1.5-flash", apiKey: apiKey);
final response = await model.generateContent([
Content.text("Explain how AI works"),
]);
Develop with Gemini assistance
Innovate with Gemma open models
Accelerate responsible AI development with unprecedented flexibility and access to lightweight, customizable models
Join the community
Tap into the power of our community forum. Get answers, build together, and be part of the conversation.