OAUTH 2.0
CRM
Granola MCP gives agents cited access to your meeting notes. Ask in plain language: every answer traces back to its source.
# --- LangChain: expose Scalekit-scoped tools to a tool-calling agent ---
import os
from scalekit import ScalekitClient
from langchain.agents import create_tool_calling_agent

# Credentials are read from the environment; the client authenticates
# against the Scalekit environment URL with an OAuth client id/secret.
client = ScalekitClient(
    env_url=os.environ["SCALEKIT_ENV_URL"],
    client_id=os.environ["SCALEKIT_CLIENT_ID"],
    client_secret=os.environ["SCALEKIT_CLIENT_SECRET"],
)

# LangChain-compatible tools scoped to this user, limited to the
# Granola MCP connection.
tools = client.actions.langchain.get_tools(
    identifier="user@example.com",
    connection_names=["granolamcp"],
)

# NOTE(review): `llm` and `prompt` are not defined in this snippet —
# the caller must supply a LangChain chat model and prompt template.
agent = create_tool_calling_agent(llm, tools, prompt)

import os, openai
from scalekit import ScalekitClient

# Authenticate with Scalekit using environment-provided credentials.
client = ScalekitClient(
    env_url=os.environ["SCALEKIT_ENV_URL"],
    client_id=os.environ["SCALEKIT_CLIENT_ID"],
    client_secret=os.environ["SCALEKIT_CLIENT_SECRET"],
)

# Tool schemas in OpenAI function-calling format, scoped to this user.
schemas = client.actions.tools.list_scoped_tools(
    identifier="user@example.com",
    page_size=100,
).tools

# Pass the schemas straight through as the `tools` parameter; the model
# decides when to emit a tool_call.
response = openai.chat.completions.create(
    model="gpt-4o",
    tools=schemas,
    messages=[{"role": "user", "content": "What did we commit to Acme last quarter?"}],
)

# On tool_call, route back through Scalekit (tc = the tool_call object):
# client.actions.execute_tool(
#     tool_name=tc.function.name,
#     tool_input=tc.function.arguments,
#     identifier="user@example.com",
# )

import os, anthropic
from scalekit import ScalekitClient

# Authenticate with Scalekit using environment-provided credentials.
client = ScalekitClient(
    env_url=os.environ["SCALEKIT_ENV_URL"],
    client_id=os.environ["SCALEKIT_CLIENT_ID"],
    client_secret=os.environ["SCALEKIT_CLIENT_SECRET"],
)

# Tool schemas scoped to this user, forwarded to Claude as-is.
schemas = client.actions.tools.list_scoped_tools(
    identifier="user@example.com",
    page_size=100,
).tools

ac = anthropic.Anthropic()
# The Anthropic Messages API requires `max_tokens`; the original snippet
# omitted it and would raise a validation error.
response = ac.messages.create(
    model="claude-opus-4-7",  # NOTE(review): confirm this model id is valid
    max_tokens=1024,
    tools=schemas,
    messages=[{"role": "user", "content": "Summarise my meetings with Acme last month"}],
)

# On tool_use block, route back through Scalekit:
# client.actions.execute_tool(
#     tool_name=block.name,
#     tool_input=block.input,
#     identifier="user@example.com",
# )

import os
from scalekit import ScalekitClient
from google.adk.agents import LlmAgent

# Authenticate with Scalekit using environment-provided credentials.
client = ScalekitClient(
    env_url=os.environ["SCALEKIT_ENV_URL"],
    client_id=os.environ["SCALEKIT_CLIENT_ID"],
    client_secret=os.environ["SCALEKIT_CLIENT_SECRET"],
)

# NOTE(review): this uses the LangChain tool adapter for a Google ADK
# agent — verify ADK accepts LangChain-shaped tools, or whether a
# dedicated adapter should be used instead.
tools = client.actions.langchain.get_tools(
    identifier="user@example.com",
    connection_names=["granolamcp"],
)

agent = LlmAgent(
    model="gemini-2.0-flash",
    tools=tools,
    instruction="You are a meeting intelligence assistant.",
)