Search

3,032 results found for anthropic (2551ms)

Code
3,021

} from "https://deno.land/x/grammy@v1.35.0/mod.ts";
import { DateTime } from "https://esm.sh/luxon@3.4.4";
import Anthropic from "npm:@anthropic-ai/sdk@0.24.3";
import { backstory } from "../backstory.ts";
import {
/**
* Format chat history for Anthropic API
*/
function formatChatHistoryForAI(history) {
*/
async function analyzeMessageContent(
anthropic,
username,
messageText,
console.log({ systemPrompt, formattedHistory });
const response = await anthropic.messages.create({
model: "claude-3-5-sonnet-latest",
max_tokens: 4196,
bot.on("message", async (ctx) => {
try {
// Get Anthropic API key from environment
const apiKey = Deno.env.get("ANTHROPIC_API_KEY");
if (!apiKey) {
console.error("Anthropic API key is not configured.");
ctx.reply(
"I apologize, but I'm not properly configured at the moment. Please inform the household
}
// Initialize Anthropic client
const anthropic = new Anthropic({ apiKey });
// Get message text and user info
// Analyze message content with chat history context
const analysis = await analyzeMessageContent(
anthropic,
username,
messageText,
} from "https://esm.town/v/geoffreylitt/getWeather";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
import Anthropic from "npm:@anthropic-ai/sdk@0.24.3";
const TABLE_NAME = `memories`;
try {
// Get API key from environment
const apiKey = Deno.env.get("ANTHROPIC_API_KEY");
if (!apiKey) {
console.error("Anthropic API key is not configured.");
return null;
}
// Initialize Anthropic client
const anthropic = new Anthropic({ apiKey });
const response = await anthropic.messages.create({
model: "claude-3-5-sonnet-latest",
max_tokens: 150,
import { nanoid } from "https://esm.sh/nanoid@5.0.5";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
import Anthropic from "npm:@anthropic-ai/sdk@0.24.3";
const TABLE_NAME = `memories`;
try {
// Get API key from environment
const apiKey = Deno.env.get("ANTHROPIC_API_KEY");
if (!apiKey) {
console.error("Anthropic API key is not configured.");
return null;
}
// Initialize Anthropic client
const anthropic = new Anthropic({ apiKey });
// Format previous facts for the prompt
console.log({ message });
const response = await anthropic.messages.create({
model: "claude-3-5-sonnet-latest",
max_tokens: 1000,
We charge a 50% markup on top of raw LLM costs. If you use $10 in Townie
credits, Anthropic will get $6.66 and we'll get $3.33. We think this is fair,
sustainable, and transparent. We don't want to be in the business of having
murky limits, obfuscated credits, or unsustainable margins.
project,
branchId,
// anthropicApiKey,
// bearerToken,
selectedFiles,
- [x] Add a "view source" / "send me a PR" link
- [x] Show the HTTP preview in second column if there is one (and let the user pick which one to
- [x] Figure out a convention to teach in the anthropic prompt mod where the LLM always checks t
- [x] Ability to create new projects from the interface
- [x] Figure out why OpenTownie can't create HTTP vals. Maybe give it a separate tool for it?
- [x] Start a timer for messages
- [x] Add more indicators that it's "still working"
- [x] Require users supply their own Anthropic token?
- [x] Add cost indications on messages
- [x] Add a bell noise when the message is done to let us know
import { readFile } from "https://esm.town/v/std/utils/index.ts";
import { createAnthropic } from "npm:@ai-sdk/anthropic@1.2.12";
import {
convertToCoreMessages,
} = await c.req.json();
const apiKey = Deno.env.get("ANTHROPIC_API_KEY");
if (await hasInsufficientCredits({ bearerToken })) {
});
const anthropic = createAnthropic({ apiKey });
let tracedModel = anthropic(model);
if (Deno.env.get("POSTHOG_PROJECT_API_KEY")) {
const traceId = `townie_${rowid}_${Date.now()}`;
// Wrap the Anthropic model with PostHog tracing
tracedModel = withTracing(anthropic(model), phClient, {
posthogDistinctId: user.id,
posthogTraceId: traceId,
// @ts-ignore
lastMessage.content.at(-1).providerOptions = {
anthropic: { cacheControl: { type: "ephemeral" } },
};
}
output_tokens: result.usage.completionTokens,
cache_read_tokens:
result.providerMetadata.anthropic.cacheReadInputTokens,
cache_write_tokens:
result.providerMetadata.anthropic.cacheCreationInputTokens,
});
output_tokens: result.usage.completionTokens,
cache_read_tokens:
result.providerMetadata.anthropic.cacheReadInputTokens,
cache_write_tokens:
result.providerMetadata.anthropic.cacheCreationInputTokens,
});
},
Townie is fully open-source and itself runs on Val Town. Pull requests welcome!
n account, click the **Remix** button and then add your ANTHROPIC_API_KEY. You can leave all the
Authentication in Townie is handled via Val Town Oauth. However, we have not yet opened up our O
</ul>
<p>
The application proxies requests to the Anthropic API and Val Town API, allowing Claud
project files directly.
</p>
import Anthropic from "npm:@anthropic-ai/sdk";
import type { GenericMessageEvent } from "npm:@slack/web-api";
import { TOOLS_LIST, TOOLS_OBJECT } from "../tools/index.ts";
const MAX_STEPS = 10;
const anthropic = new Anthropic({ apiKey: Deno.env.get("CLAUDE_API_KEY") });
const TOOL_RESULTS = {};
export async function reply(
event: GenericMessageEvent,
messages: Anthropic.MessageParam[],
) {
// reply in thread or start one
const thread_ts = event.thread_ts || event.ts;
try {
const input: Anthropic.MessageParam[] = [...messages];
let conversationLogLinkSent = messages.some((message) => {
for (let step = 0; step < MAX_STEPS; step++) {
const response = await anthropic.messages.create({
model: "claude-sonnet-4-20250514",
system: await getFullSystemPrompt(event),
tools: TOOLS_LIST,
// betas: ["context-1m-2025-08-07"],
// Requires usage tier 4: https://docs.anthropic.com/en/docs/build-with-claude/context-w
});

Vals

10
View more
diegoivo
anthropicWorkflow
 
Public
diegoivo
sdkAnthropic
 
Public
maddy
anthropicProxy
 
Public
stevekrouse
anthropicStreamDemo
 
Public
toowired
anthropicCaching
 
Public

Users

No users found
No docs found