@kudoh
Created March 12, 2025 07:08
Lambda function using LangMem with Aurora serverless
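# Assumptions (not stated explicitly in this gist): the Lambda environment is
# expected to define SECRET_NAME, pointing to a Secrets Manager secret whose
# JSON contains the username/password/host/port/dbname keys used below, and the
# OpenAI client is expected to pick up OPENAI_API_KEY from the environment. The
# deployment package presumably bundles langgraph, langmem, openai and pydantic.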
import json
import os
import boto3
from langgraph.func import entrypoint
from langgraph.store.postgres import PostgresStore
from langmem import create_memory_store_manager
from pydantic import BaseModel, Field, conint
from openai import OpenAI


class UserTechInterest(BaseModel):
    """Information about a user's technical interests."""
    topic: str = Field(..., description="Technology topic (e.g. a programming language or cloud service)")
    category: str | None = Field(
        None, description="Technology area (e.g. frontend, backend, networking)"
    )
    interest_level: conint(ge=0, le=100) | None = Field(
        None, description="Interest level, expressed as a score from 0 to 100"
    )
    description: str | None = Field(
        None, description="Additional notes (e.g. concrete examples or detailed knowledge)"
    )


def create_conn_url():
    """Build a PostgreSQL connection URL from the Aurora credentials stored in Secrets Manager."""
    secret_name = os.environ['SECRET_NAME']
    session = boto3.session.Session()
    sm_client = session.client(service_name='secretsmanager')
    get_secret_value_response = sm_client.get_secret_value(SecretId=secret_name)
    secret = json.loads(get_secret_value_response['SecretString'])
    return f"postgresql://{secret['username']}:{secret['password']}@{secret['host']}:{secret['port']}/{secret['dbname']}"


postgres_conn_url = create_conn_url()

# Extracts and stores UserTechInterest records per user under the ("chat", user_id) namespace
manager = create_memory_store_manager(
    "openai:gpt-4o-2024-11-20",
    namespace=("chat", "{user_id}"),
    schemas=[UserTechInterest],
    instructions="Extract the user's interests and technical background in detail",
    enable_inserts=True,
    enable_deletes=False,
)

client = OpenAI()

def lambda_handler(event, context):
    with PostgresStore.from_conn_string(postgres_conn_url, index={
        "dims": 1536,
        "embed": "openai:text-embedding-3-small",
    }) as store:
        store.setup()

        @entrypoint(store=store)
        def app(params: dict):
            message = params["message"]
            user_id = params["user_id"]
            # Search the store for long-term memories relevant to this user
            memories = store.search(("chat", user_id))
            developer_msg = ("You are a helpful assistant.\n"
                             f"## Memories\n <memories>\n{memories}\n</memories> ")
            response = client.responses.create(
                model="gpt-4o-2024-11-20",
                input=[{"role": "developer", "content": developer_msg}, message],
                tools=[{"type": "web_search_preview"}],
            )
            # Update this user's long-term memories
            manager.invoke(
                {"messages": [message]},
                config={"configurable": {"user_id": user_id}}
            )
            return response.output_text

        body = json.loads(event["body"])
        user_id = body["user_id"]
        output = app.invoke({
            "message": {
                "role": "user",
                "content": body["prompt"]
            },
            "user_id": user_id
        })
        return {
            "statusCode": 200,
            "headers": {"Content-Type": "text/plain"},
            "body": output,
        }
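For reference, a minimal local smoke test might look like the sketch below. It assumes an API Gateway proxy-style event whose JSON body carries user_id and prompt; the sample values are purely hypothetical, and running it would hit the real AWS and OpenAI endpoints.

if __name__ == "__main__":
    # Hypothetical sample event mirroring the shape lambda_handler parses above
    sample_event = {
        "body": json.dumps({
            "user_id": "user-123",
            "prompt": "Lately I have been studying LangGraph and Aurora Serverless."
        })
    }
    print(lambda_handler(sample_event, None))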