
lucasliet / llm-telegram-bot / build 14782393255

01 May 2025 07:53PM UTC coverage: 38.805% (+0.3%) from 38.518%

Build type: push · CI: github · Committed by: lucasliet

feat: add support for new models and commands in Telegram bot, including OpenWebUI integration

fix: adjust chunk size limits for message responses to prevent truncation
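
The chunk-size change itself is not part of the file shown below. As a rough illustration of the idea only, here is a minimal sketch of splitting long model responses before sending: Telegram rejects messages longer than 4096 characters, so each chunk has to stay at or below that limit to avoid truncation. The function name, constant, and break heuristic are assumptions, not the project's actual code.

const TELEGRAM_MAX_MESSAGE_LENGTH = 4096;

function splitIntoChunks(text: string, limit = TELEGRAM_MAX_MESSAGE_LENGTH): string[] {
        // Split a long response into Telegram-sized chunks, preferring to break
        // at the last newline inside the limit so paragraphs and code blocks are
        // not cut mid-line; fall back to a hard cut when there is no newline.
        const chunks: string[] = [];
        let rest = text;
        while (rest.length > limit) {
                const slice = rest.slice(0, limit);
                const lastNewline = slice.lastIndexOf('\n');
                const breakAt = lastNewline > 0 ? lastNewline : limit;
                chunks.push(rest.slice(0, breakAt));
                rest = rest.slice(breakAt);
        }
        if (rest.length > 0) chunks.push(rest);
        return chunks;
}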

Adds OpenWebUI integration and new model commands

Introduces support for Perplexity GPT-4.5 and Grok-3 models via OpenWebUI, including new command handlers and help documentation updates. Refines command routing and model mapping to support expanded model selection in the Telegram bot.

Relates to feature expansion for multi-model integration.
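
The new OpenWebUI handler is not included in the file shown further down, but the description suggests it follows the same command-to-model mapping pattern as GithubCopilotHandler.ts. The sketch below is a minimal illustration under that assumption; openWebUIModelMap, OpenWebUIService, the import path, the command keys, and the model ids are hypothetical, and only the prefix-routing shape mirrors the handler shown later in this report.

import { Context } from 'grammy-context';
import OpenWebUIService from '@/service/openwebui/OpenWebUIService.ts'; // hypothetical path

// Hypothetical command keys and model ids for the models named in the commit.
const openWebUIModelMap: Record<string, string | undefined> = {
        'perplexity': 'perplexity-gpt-4.5', // assumed id for Perplexity GPT-4.5
        'grok': 'grok-3',                   // assumed id for Grok-3
        none: undefined,
};

export async function handleOpenWebUI(ctx: Context, commandMessage?: string): Promise<void> {
        const { userKey, contextMessage, quote } = await ctx.extractContextKeys();
        const message = commandMessage || contextMessage;

        // Same 'command:' prefix convention used by handleGithubCopilot below.
        const command = message?.split(':')[0]?.toLowerCase() || 'none';
        const model = openWebUIModelMap[command];

        const service = new OpenWebUIService(model); // hypothetical service wrapper
        const { reader, onComplete, responseMap } = await service.generateText(
                userKey,
                quote,
                message!.replace(`${command}:`, ''),
        );
        ctx.streamReply(reader, onComplete, responseMap);
}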

26 of 35 branches covered (74.29%)

Branch coverage included in aggregate %.

46 of 89 new or added lines in 9 files covered. (51.69%)

1 existing line in 1 file now uncovered.

896 of 2341 relevant lines covered (38.27%)

1.94 hits per line

Source File: /src/handlers/GithubCopilotHandler.ts (28.95% covered)

Per-line coverage for this file: the module-level imports and the modelMap constant are covered (3 hits each), while the entire body of handleGithubCopilot is uncovered, including the command-parsing and model-lookup lines that are new in this commit.

import { Context } from 'grammy-context';
import { FileUtils } from '@/util/FileUtils.ts';
import GithubCopilotService from '@/service/openai/GithubCopilotService.ts';

import { copilotModels } from '@/config/models.ts';

const modelMap = {
        'geminiPro': copilotModels.gemini,
        'gpt': copilotModels.gpt41,
        'o4mini': copilotModels.o4mini,
        'claude': copilotModels.sonnetThinking,
        none: undefined,
};

/**
 * Handles requests for GitHub Copilot models
 * @param ctx - Telegram context
 * @param commandMessage - Optional command message override
 */
export async function handleGithubCopilot(
        ctx: Context,
        commandMessage?: string,
): Promise<void> {
        const { userKey, contextMessage, photos, caption, quote } = await ctx
                .extractContextKeys();

        const message = commandMessage || contextMessage;

        const command = message?.split(':')[0]?.toLowerCase() || 'none';

        const model = modelMap[command as keyof typeof modelMap];

        const openAIService = new GithubCopilotService(model);

        if (photos && caption) {
                const photosUrl = FileUtils.getTelegramFilesUrl(ctx, photos);
                const { reader, onComplete, responseMap } = await openAIService
                        .generateTextFromImage(
                                userKey,
                                quote,
                                photosUrl,
                                caption,
                        );

                ctx.streamReply(reader, onComplete, responseMap);
                return;
        }

        const { reader, onComplete, responseMap } = await openAIService.generateText(
                userKey,
                quote,
                message!.replace(`${command}:`, ''),
        );

        ctx.streamReply(reader, onComplete, responseMap);
}
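
To make the uncovered routing logic concrete, the snippet below traces how one message resolves to a model and a prompt under the handler above. The values inside copilotModels are placeholders, since '@/config/models.ts' is not part of this report; only the map keys and the parsing expressions come from the file.

// Placeholder stand-in for the real '@/config/models.ts' export.
const copilotModels = {
        gemini: 'gemini-pro-placeholder',
        gpt41: 'gpt-4.1-placeholder',
        o4mini: 'o4-mini-placeholder',
        sonnetThinking: 'claude-sonnet-thinking-placeholder',
};

const modelMap = {
        'geminiPro': copilotModels.gemini,
        'gpt': copilotModels.gpt41,
        'o4mini': copilotModels.o4mini,
        'claude': copilotModels.sonnetThinking,
        none: undefined,
};

const message = 'gpt: explain streams in Deno';
const command = message?.split(':')[0]?.toLowerCase() || 'none'; // 'gpt'
const model = modelMap[command as keyof typeof modelMap];        // copilotModels.gpt41
const prompt = message.replace(`${command}:`, '');               // ' explain streams in Deno'

// A message without a recognized prefix produces a command that is not a key of
// modelMap, so model is undefined and GithubCopilotService presumably falls back
// to its default model. Note that the 'geminiPro' key keeps an uppercase letter
// while command is lowercased, so that entry appears unreachable as written.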