Skip to content

Commit 5f2533a

Browse files
committed
Refine LongCat support: update API key format, links, default model, and reorder provider lists
1 parent 4c4b6e5 commit 5f2533a

File tree

6 files changed

+396
-91
lines changed

6 files changed

+396
-91
lines changed

README.md

Lines changed: 21 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ The agents aren't following predefined scripts. They're operating off natural la
2727
**You need:**
2828
- Minecraft 1.20.1 with Forge
2929
- Java 17
30-
- An LLM API key (OpenAI GPT-5, Gemini 3, Groq, or DeepSeek)
30+
- An LLM API key (LongCat, DeepSeek, OpenAI GPT-5, Gemini 3, or Groq)
3131

3232
**Installation:**
3333
1. Download the JAR from releases
@@ -39,13 +39,13 @@ The agents aren't following predefined scripts. They're operating off natural la
3939
**Config example (`config/steve-common.toml`):**
4040
```toml
4141
[ai]
42-
provider = "groq"
42+
provider = "longcat"
4343
maxTokens = 8000
4444
temperature = 0.7
4545

46-
[groq]
47-
apiKey = "your-groq-api-key-here"
48-
model = "llama-3.1-8b-instant"
46+
[longcat]
47+
apiKey = "ak_xxxxxxxxxxxxxxxxxxxxxxxxxxxx"
48+
model = "LongCat-Flash-Thinking-2601"
4949
```
5050

5151
Then spawn a Steve with `/steve spawn Bob` and press K to start giving commands.
@@ -83,7 +83,7 @@ Each Steve runs an autonomous agent loop that processes natural language command
8383
### Core Components
8484

8585
**LLM Integration** (`com.steve.ai.llm`)
86-
- **Multi-Provider Support**: Pluggable clients for OpenAI, Groq, Gemini, and DeepSeek.
86+
- **Multi-Provider Support**: Pluggable clients for LongCat, DeepSeek, OpenAI, Gemini, and Groq.
8787
- **Resilient Clients**: Async implementations with caching, retries, and circuit breaker patterns.
8888
- **TaskPlanner**: Orchestrates LLM calls with context (conversation history, world state, Steve capabilities)
8989
- **PromptBuilder**: Constructs prompts with available actions, examples, and formatting instructions
@@ -236,31 +236,36 @@ Edit `config/steve-common.toml`. Each provider now has its own section for bette
236236

237237
```toml
238238
[ai]
239-
provider = "groq" # Options: openai, groq, gemini, deepseek
239+
provider = "longcat" # Options: longcat, deepseek, openai, gemini, groq
240240
maxTokens = 8000
241241
temperature = 0.7
242242

243+
[longcat]
244+
apiKey = "ak_..."
245+
model = "LongCat-Flash-Thinking-2601"
246+
247+
[deepseek]
248+
apiKey = "sk-..."
249+
model = "deepseek-chat"
250+
243251
[openai]
244252
apiKey = "sk-..."
245253
model = "gpt-5-mini-2025-08-07"
246254

247-
[groq]
248-
apiKey = "gsk_..."
249-
model = "llama-3.1-8b-instant"
250-
251255
[gemini]
252256
apiKey = "AIza..."
253257
model = "gemini-3-flash-preview"
254258

255-
[deepseek]
256-
apiKey = "sk-..."
257-
model = "deepseek-chat"
259+
[groq]
260+
apiKey = "gsk_..."
261+
model = "llama-3.1-8b-instant"
258262
```
259263

260264
**Performance Tips:**
261-
- **Groq**: Fastest inference, best for smooth real-time reactions.
262-
- **Gemini 3 / GPT-5**: Excellent for complex multi-step planning.
265+
- **LongCat**: OpenAI-compatible API; fast responses with the Flash-Chat and Flash-Thinking models.
263266
- **DeepSeek**: Great balance of quality and cost-efficiency.
267+
- **Gemini 3 / GPT-5**: Excellent for complex multi-step planning.
268+
- **Groq**: Fastest inference, best for smooth real-time reactions.
264269
- **Temperature**: Recommended 0.5-0.7 for consistent task execution.
265270

266271
## Known Issues

config/steve-common.toml.example

Lines changed: 25 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,22 @@
11
# AI Provider Configuration
22
[ai]
3-
# AI provider to use: 'groq' (FASTEST, FREE), 'openai', 'gemini', or 'deepseek'
4-
provider = "groq"
3+
# AI provider to use: 'longcat', 'deepseek', 'openai', 'gemini', or 'groq'
4+
provider = "longcat"
55

66
# Maximum tokens per API request (applies to all LLM providers)
77
maxTokens = 8000
88

99
# Temperature for AI responses (0.0-2.0, lower is more deterministic)
1010
temperature = 0.7
1111

12-
# OpenAI API Configuration
13-
[openai]
14-
# Your OpenAI API key
15-
# Get your API key from: https://platform.openai.com/api-keys
16-
apiKey = "sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
12+
# LongCat API Configuration
13+
[longcat]
14+
# Your LongCat API key
15+
# Get your API key from: https://longcat.chat/platform/api_keys
16+
apiKey = "ak_xxxxxxxxxxxxxxxxxxxxxxxxxxxx"
1717

18-
# OpenAI model to use (gpt-5.2, gpt-5.2-codex, gpt-5-mini-2025-08-07, gpt-5-nano-2025-08-07)
19-
model = "gpt-5-mini-2025-08-07"
18+
# LongCat model to use (LongCat-Flash-Chat, LongCat-Flash-Thinking, LongCat-Flash-Thinking-2601)
19+
model = "LongCat-Flash-Thinking-2601"
2020

2121
# DeepSeek API Configuration
2222
[deepseek]
@@ -27,14 +27,14 @@
2727
# DeepSeek model to use (deepseek-chat, deepseek-reasoner)
2828
model = "deepseek-chat"
2929

30-
# Groq API Configuration (FASTEST, FREE tier available)
31-
[groq]
32-
# Your Groq API key
33-
# Get your API key from: https://console.groq.com/keys
34-
apiKey = "gsk_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
30+
# OpenAI API Configuration
31+
[openai]
32+
# Your OpenAI API key
33+
# Get your API key from: https://platform.openai.com/api-keys
34+
apiKey = "sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
3535

36-
# Groq model to use (llama-3.1-8b-instant, llama-3.1-70b-versatile, mixtral-8x7b-32768)
37-
model = "llama-3.1-8b-instant"
36+
# OpenAI model to use (gpt-5.2, gpt-5.2-codex, gpt-5-mini-2025-08-07, gpt-5-nano-2025-08-07)
37+
model = "gpt-5-mini-2025-08-07"
3838

3939
# Google Gemini API Configuration
4040
[gemini]
@@ -45,6 +45,15 @@
4545
# Gemini model to use (gemini-3-pro-preview, gemini-3-flash-preview, gemini-flash-lite-latest)
4646
model = "gemini-3-flash-preview"
4747

48+
# Groq API Configuration (FASTEST, FREE tier available)
49+
[groq]
50+
# Your Groq API key
51+
# Get your API key from: https://console.groq.com/keys
52+
apiKey = "gsk_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
53+
54+
# Groq model to use (llama-3.1-8b-instant, llama-3.1-70b-versatile, mixtral-8x7b-32768)
55+
model = "llama-3.1-8b-instant"
56+
4857
# Steve Behavior Configuration
4958
[behavior]
5059
# Ticks between action checks (20 ticks = 1 second)

src/main/java/com/steve/ai/config/SteveConfig.java

Lines changed: 39 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -10,21 +10,25 @@ public class SteveConfig {
1010
public static final ForgeConfigSpec.IntValue MAX_TOKENS;
1111
public static final ForgeConfigSpec.DoubleValue TEMPERATURE;
1212

13-
// OpenAI
14-
public static final ForgeConfigSpec.ConfigValue<String> OPENAI_API_KEY;
15-
public static final ForgeConfigSpec.ConfigValue<String> OPENAI_MODEL;
16-
13+
// LongCat
14+
public static final ForgeConfigSpec.ConfigValue<String> LONGCAT_API_KEY;
15+
public static final ForgeConfigSpec.ConfigValue<String> LONGCAT_MODEL;
16+
1717
// DeepSeek
1818
public static final ForgeConfigSpec.ConfigValue<String> DEEPSEEK_API_KEY;
1919
public static final ForgeConfigSpec.ConfigValue<String> DEEPSEEK_MODEL;
20-
21-
// Groq
22-
public static final ForgeConfigSpec.ConfigValue<String> GROQ_API_KEY;
23-
public static final ForgeConfigSpec.ConfigValue<String> GROQ_MODEL;
24-
20+
21+
// OpenAI
22+
public static final ForgeConfigSpec.ConfigValue<String> OPENAI_API_KEY;
23+
public static final ForgeConfigSpec.ConfigValue<String> OPENAI_MODEL;
24+
2525
// Gemini
2626
public static final ForgeConfigSpec.ConfigValue<String> GEMINI_API_KEY;
2727
public static final ForgeConfigSpec.ConfigValue<String> GEMINI_MODEL;
28+
29+
// Groq
30+
public static final ForgeConfigSpec.ConfigValue<String> GROQ_API_KEY;
31+
public static final ForgeConfigSpec.ConfigValue<String> GROQ_MODEL;
2832

2933
// Behavior
3034
public static final ForgeConfigSpec.IntValue ACTION_TICK_DELAY;
@@ -37,8 +41,8 @@ public class SteveConfig {
3741
builder.comment("AI API Configuration").push("ai");
3842

3943
AI_PROVIDER = builder
40-
.comment("AI provider to use: 'groq' (FASTEST, FREE), 'openai', 'gemini', or 'deepseek'")
41-
.define("provider", "groq");
44+
.comment("AI provider to use: 'longcat', 'deepseek', 'openai', 'gemini', or 'groq' (FASTEST, FREE)")
45+
.define("provider", "longcat");
4246

4347
MAX_TOKENS = builder
4448
.comment("Maximum tokens per API request (applies to all LLM providers)")
@@ -50,15 +54,15 @@ public class SteveConfig {
5054

5155
builder.pop();
5256

53-
builder.comment("OpenAI API Configuration").push("openai");
57+
builder.comment("LongCat API Configuration").push("longcat");
5458

55-
OPENAI_API_KEY = builder
56-
.comment("Your OpenAI API key (get from: https://platform.openai.com/api-keys)")
59+
LONGCAT_API_KEY = builder
60+
.comment("Your LongCat API key (get from: https://longcat.chat/platform/api_keys)")
5761
.define("apiKey", "");
5862

59-
OPENAI_MODEL = builder
60-
.comment("OpenAI model to use (gpt-5.2, gpt-5.2-codex, gpt-5-mini-2025-08-07, gpt-5-nano-2025-08-07)")
61-
.define("model", "gpt-5-mini-2025-08-07");
63+
LONGCAT_MODEL = builder
64+
.comment("LongCat model to use (LongCat-Flash-Chat, LongCat-Flash-Thinking, LongCat-Flash-Thinking-2601)")
65+
.define("model", "LongCat-Flash-Thinking-2601");
6266

6367
builder.pop();
6468

@@ -74,15 +78,15 @@ public class SteveConfig {
7478

7579
builder.pop();
7680

77-
builder.comment("Groq API Configuration (FASTEST, FREE tier available)").push("groq");
81+
builder.comment("OpenAI API Configuration").push("openai");
7882

79-
GROQ_API_KEY = builder
80-
.comment("Your Groq API key (get from: https://console.groq.com/keys)")
83+
OPENAI_API_KEY = builder
84+
.comment("Your OpenAI API key (get from: https://platform.openai.com/api-keys)")
8185
.define("apiKey", "");
8286

83-
GROQ_MODEL = builder
84-
.comment("Groq model to use (llama-3.1-8b-instant, llama-3.1-70b-versatile, mixtral-8x7b-32768)")
85-
.define("model", "llama-3.1-8b-instant");
87+
OPENAI_MODEL = builder
88+
.comment("OpenAI model to use (gpt-5.2, gpt-5.2-codex, gpt-5-mini-2025-08-07, gpt-5-nano-2025-08-07)")
89+
.define("model", "gpt-5-mini-2025-08-07");
8690

8791
builder.pop();
8892

@@ -98,6 +102,18 @@ public class SteveConfig {
98102

99103
builder.pop();
100104

105+
builder.comment("Groq API Configuration (FASTEST, FREE tier available)").push("groq");
106+
107+
GROQ_API_KEY = builder
108+
.comment("Your Groq API key (get from: https://console.groq.com/keys)")
109+
.define("apiKey", "");
110+
111+
GROQ_MODEL = builder
112+
.comment("Groq model to use (llama-3.1-8b-instant, llama-3.1-70b-versatile, mixtral-8x7b-32768)")
113+
.define("model", "llama-3.1-8b-instant");
114+
115+
builder.pop();
116+
101117
builder.comment("Steve Behavior Configuration").push("behavior");
102118

103119
ACTION_TICK_DELAY = builder
Lines changed: 113 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,113 @@
1+
package com.steve.ai.llm;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.steve.ai.SteveMod;
import com.steve.ai.config.SteveConfig;

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;

/**
 * Client for the LongCat API (OpenAI-compatible chat-completions wire format).
 *
 * <p>Endpoint: https://api.longcat.chat/openai/v1/chat/completions
 * <br>API keys: https://longcat.chat/platform/api_keys
 *
 * <p>Requests are retried up to {@link #MAX_RETRIES} times with exponential
 * backoff on rate limiting (HTTP 429), server errors (HTTP 5xx), and transport
 * exceptions. All failures are logged and surfaced to the caller as {@code null}.
 */
public class LongCatClient {
    private static final String LONGCAT_API_URL = "https://api.longcat.chat/openai/v1/chat/completions";
    private static final int MAX_RETRIES = 3;
    private static final int INITIAL_RETRY_DELAY_MS = 1000;

    private final HttpClient client;
    private final String apiKey;

    /**
     * Creates a client using the API key from {@link SteveConfig#LONGCAT_API_KEY}.
     * The key is read once at construction time; config changes require a new instance.
     */
    public LongCatClient() {
        this.apiKey = SteveConfig.LONGCAT_API_KEY.get();
        this.client = HttpClient.newBuilder()
                .connectTimeout(Duration.ofSeconds(30))
                .build();
    }

    /**
     * Sends a system + user prompt pair to LongCat and returns the assistant's reply text.
     *
     * @param systemPrompt system-role message content
     * @param userPrompt   user-role message content
     * @return the assistant message content, or {@code null} on failure
     *         (missing API key, exhausted retries, thread interruption, or unparseable response)
     */
    public String sendRequest(String systemPrompt, String userPrompt) {
        if (apiKey == null || apiKey.isEmpty()) {
            SteveMod.LOGGER.error("LongCat API key not configured!");
            return null;
        }

        JsonObject requestBody = buildRequestBody(systemPrompt, userPrompt);

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(LONGCAT_API_URL))
                .header("Authorization", "Bearer " + apiKey)
                .header("Content-Type", "application/json")
                .timeout(Duration.ofSeconds(60))
                .POST(HttpRequest.BodyPublishers.ofString(requestBody.toString()))
                .build();

        for (int attempt = 0; attempt < MAX_RETRIES; attempt++) {
            try {
                HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());

                if (response.statusCode() == 200) {
                    return parseResponse(response.body());
                }

                // Retry only on rate limiting (429) and server errors (5xx);
                // client errors such as 401/400 will not succeed on retry.
                boolean retryable = response.statusCode() == 429 || response.statusCode() >= 500;
                if (retryable && attempt < MAX_RETRIES - 1) {
                    int delayMs = INITIAL_RETRY_DELAY_MS << attempt; // 1s, 2s, 4s, ...
                    SteveMod.LOGGER.warn("LongCat API failed ({}), retrying in {}ms", response.statusCode(), delayMs);
                    Thread.sleep(delayMs);
                    continue;
                }

                SteveMod.LOGGER.error("LongCat API request failed: {}", response.statusCode());
                return null;
            } catch (InterruptedException e) {
                // Restore the interrupt flag and abort instead of retrying —
                // the original broad catch swallowed interruption silently.
                Thread.currentThread().interrupt();
                SteveMod.LOGGER.error("Interrupted while communicating with LongCat API", e);
                return null;
            } catch (Exception e) {
                if (attempt < MAX_RETRIES - 1) {
                    int delayMs = INITIAL_RETRY_DELAY_MS << attempt;
                    SteveMod.LOGGER.warn("LongCat API error, retrying in {}ms", delayMs, e);
                    try {
                        Thread.sleep(delayMs);
                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                        SteveMod.LOGGER.error("Interrupted during LongCat retry backoff", ie);
                        return null;
                    }
                } else {
                    SteveMod.LOGGER.error("Error communicating with LongCat API", e);
                    return null;
                }
            }
        }
        return null;
    }

    /**
     * Builds the OpenAI-style chat-completions request body: model, temperature,
     * max_tokens, and a two-message array (system then user).
     */
    private JsonObject buildRequestBody(String systemPrompt, String userPrompt) {
        JsonObject body = new JsonObject();
        body.addProperty("model", SteveConfig.LONGCAT_MODEL.get());
        body.addProperty("temperature", SteveConfig.TEMPERATURE.get());
        body.addProperty("max_tokens", SteveConfig.MAX_TOKENS.get());

        JsonArray messages = new JsonArray();
        JsonObject systemMessage = new JsonObject();
        systemMessage.addProperty("role", "system");
        systemMessage.addProperty("content", systemPrompt);
        messages.add(systemMessage);

        JsonObject userMessage = new JsonObject();
        userMessage.addProperty("role", "user");
        userMessage.addProperty("content", userPrompt);
        messages.add(userMessage);

        body.add("messages", messages);
        return body;
    }

    /**
     * Extracts {@code choices[0].message.content} from an OpenAI-style response body.
     *
     * @return the message content, or {@code null} if the body does not match the expected shape
     */
    private String parseResponse(String responseBody) {
        try {
            JsonObject json = JsonParser.parseString(responseBody).getAsJsonObject();
            return json.getAsJsonArray("choices").get(0).getAsJsonObject()
                    .getAsJsonObject("message").get("content").getAsString();
        } catch (Exception e) {
            SteveMod.LOGGER.error("Error parsing LongCat response", e);
            return null;
        }
    }
}

0 commit comments

Comments
 (0)