Skip to content

Commit

Permalink
Merge branch 'main' into jon/new-user-bug
Browse files Browse the repository at this point in the history
# Conflicts:
#	src/core/database/schema/users/user.dto.ts
#	src/core/database/schema/users/user.schema.ts
#	src/core/database/schema/users/users.service.ts
  • Loading branch information
jonmatthis committed Jan 13, 2024
2 parents 8209d36 + c412733 commit 09126de
Show file tree
Hide file tree
Showing 9 changed files with 164 additions and 27 deletions.
1 change: 1 addition & 0 deletions .env.openai.example
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
OPENAI_API_KEY = <Your OpenAI API Key>
38 changes: 38 additions & 0 deletions .github/ISSUE_TEMPLATE/bug_report.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser: [e.g. Chrome, Safari]
- Version: [e.g. 22]

**Smartphone (please complete the following information):**
- Device: [e.g. iPhone 6]
- OS: [e.g. iOS 8.1]
- Browser: [e.g. stock browser, Safari]
- Version: [e.g. 22]

**Additional context**
Add any other context about the problem here.
20 changes: 20 additions & 0 deletions .github/ISSUE_TEMPLATE/feature_request.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
59 changes: 47 additions & 12 deletions src/core/ai/langchain/langchain.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,9 @@ import { getConnectionToken } from '@nestjs/mongoose';
import { Connection } from 'mongoose';
import { OpenaiSecretsService } from '../openai/openai-secrets.service';
import { ChatPromptTemplate, MessagesPlaceholder } from 'langchain/prompts';
import { BufferMemory } from 'langchain/memory';
import { BufferMemory, ConversationTokenBufferMemory } from 'langchain/memory';
import { RunnableSequence } from 'langchain/runnables';
import { BaseLLM } from '@langchain/core/dist/language_models/llms';

@Injectable()
export class LangchainService {
Expand All @@ -28,21 +29,27 @@ export class LangchainService {
return this._model;
}

public async createBufferMemoryChain(modelName?: string) {
public async createBufferMemoryChain(
modelName?: string,
contextInstructions?: string,
) {
const model = await this._createModel(modelName);
const contextInstructionsOrAtLeastBeChill =
contextInstructions ||
'I keep my answers short (1-2 sentences) unless there is a reason to say more.';
const prompt = ChatPromptTemplate.fromMessages([
// TODO: Feed in a context prompt key
['system', 'You are a helpful chatbot'],
[
'system',
// TODO - set the `contextInstructions` to be a PromptVariable thing that is set each time the chain is called (like human text input)
// https://js.langchain.com/docs/expression_language/cookbook/adding_memory
// https://js.langchain.com/docs/expression_language/how_to/message_history

`${contextInstructionsOrAtLeastBeChill} `,
],
new MessagesPlaceholder('history'),
['human', '{input}'],
]);

const memory = new BufferMemory({
returnMessages: true,
inputKey: 'input',
outputKey: 'output',
memoryKey: 'history',
});
const memory = this._createTokenBufferMemory(model);

const chain = RunnableSequence.from([
{
Expand All @@ -62,6 +69,31 @@ export class LangchainService {
return { chain, memory };
}

/**
 * Builds an unbounded BufferMemory that retains the entire conversation.
 *
 * The memory reads the human turn from the 'input' key, the AI turn from
 * the 'output' key, and exposes the accumulated transcript under the
 * 'history' key as message objects (returnMessages: true) rather than a
 * single concatenated string.
 *
 * NOTE(review): createBufferMemoryChain now calls _createTokenBufferMemory
 * instead of this helper — confirm whether this unbounded variant is kept
 * on purpose as an alternative, or is dead code.
 */
private _createBufferMemory() {
const memory = new BufferMemory({
// Return structured chat messages so they can fill a MessagesPlaceholder.
returnMessages: true,
inputKey: 'input',
outputKey: 'output',
memoryKey: 'history',
});
return memory;
}

/**
 * Builds a token-bounded conversation memory for a chat chain.
 *
 * Unlike a plain BufferMemory, ConversationTokenBufferMemory prunes the
 * oldest messages once the stored history exceeds maxTokenLimit tokens,
 * using the supplied model (llm) to count tokens.
 *
 * @param model - LLM used by the memory for token counting.
 * @param maxTokenLimit - Maximum tokens of history to retain (default 1000).
 * @returns The configured memory, keyed the same way as _createBufferMemory
 *          ('input' / 'output' / 'history', returnMessages: true).
 */
private _createTokenBufferMemory(
model: BaseLLM,
maxTokenLimit: number = 1000,
) {
const memory = new ConversationTokenBufferMemory({
// Return structured chat messages so they can fill a MessagesPlaceholder.
returnMessages: true,
inputKey: 'input',
outputKey: 'output',
memoryKey: 'history',
llm: model,
maxTokenLimit: maxTokenLimit,
});
return memory;
}

public async demo(chain: RunnableSequence, memory?: BufferMemory) {
if (memory) {
console.log(
Expand All @@ -77,7 +109,10 @@ export class LangchainService {

console.log(`HumanInput:\n\n ${inputs.input}`);

const response = await chain.invoke(inputs);
// TODO - Figure out how to add additional arguments to the chain (i.e. like the topic/ {contextInstructions} thing)
const response = await chain.invoke({
input: 'Hello botto - say an emoji',
});

console.log(`AI Response:\n\n ${response}`);

Expand Down
25 changes: 20 additions & 5 deletions src/core/ai/openai/openai-secrets.service.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,28 @@
import { GcpSecretsService } from '../../gcp/gcp-secrets.service';
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';

@Injectable()
export class OpenaiSecretsService {
constructor(private readonly _sms: GcpSecretsService) {}
private _tokenMap = {
OPENAI_API_KEY:
'projects/588063171007/secrets/OPENAI_API_KEY/versions/latest',
};

constructor(
private readonly _sms: GcpSecretsService,
private readonly _cfgService: ConfigService,
) {}

async getOpenAIKey() {
const [secret] = await this._sms.getSecretsManager().accessSecretVersion({
name: 'projects/588063171007/secrets/OPENAI_API_KEY/versions/latest',
});
return secret.payload.data.toString();
if (process.env.NODE_ENV === 'production') {
const secretName = this._tokenMap.OPENAI_API_KEY;
const [secret] = await this._sms.getSecretsManager().accessSecretVersion({
name: secretName,
});
return secret.payload.data.toString();
}

return this._cfgService.getOrThrow('OPENAI_API_KEY');
}
}
14 changes: 11 additions & 3 deletions src/core/bot/bot.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,19 @@ export class BotService {
private readonly _langchainService: LangchainService,
) {}

public async createBot(chatbotId: string, modelName?: string) {
public async createBot(
chatbotId: string,
modelName?: string,
contextInstructions?: string,
) {
this._logger.log(
`Creating chatbot with id: ${chatbotId} and language model (llm): ${modelName}`,
);
const { chain, memory } =
await this._langchainService.createBufferMemoryChain(modelName);
await this._langchainService.createBufferMemoryChain(
modelName,
contextInstructions,
);

const chatbot = { chain, memory } as Chatbot;
this._chatbots.set(chatbotId, chatbot);
Expand Down Expand Up @@ -80,6 +87,7 @@ export class BotService {
let subStreamResult = '';
let didResetOccur = false;
let tokens = 0;
const chunkSize = 10;
for await (const chunk of chatStream) {
// the full message
streamedResult += chunk;
Expand All @@ -93,7 +101,7 @@ export class BotService {
didResetOccur = true;
}

if (tokens === 30) {
if (tokens === chunkSize) {
this._logger.log(`Streaming chunk of data: ${subStreamResult}`);
yield {
data: streamedResult,
Expand Down
24 changes: 19 additions & 5 deletions src/interfaces/discord/services/discord-thread.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,12 +46,26 @@ export class DiscordThreadService implements OnModuleDestroy {
reason: 'wow this is a thread',
});

await this._usersService.getOrCreate({
discordId: interaction.user.id,
await this._usersService.getOrCreateUser({
identifiers: {
discord: {
id: interaction.user.id,
username: interaction.user.username,
},
},
});

const contextInstructions = channel.topic || '';
this._logger.log(
`Creating bot with contextInstructions: \n ''' \n ${contextInstructions}\n '''`,
);

// await this._botService.createChatbot(thread.id);
await this._botService.createBot(thread.id);
await this._botService.createBot(
thread.id,
'gpt-4-1106-preview',
channel.topic || '',
);

this._beginWatchingIncomingMessages(interaction, channel, thread);
await this._sendInitialReply(interaction, channel, thread, text);
Expand All @@ -69,7 +83,6 @@ export class DiscordThreadService implements OnModuleDestroy {
channel: TextChannel,
thread: ThreadChannel,
) {
const t = { ...thread };
const handleMessageCreation = async (message: Message) => {
if (message.author.bot) {
return;
Expand Down Expand Up @@ -102,10 +115,11 @@ export class DiscordThreadService implements OnModuleDestroy {
inputText: string,
message: Message<boolean>,
) {
thread.sendTyping();

const tokenStream = this._botService.streamResponse(thread.id, inputText, {
topic: channel.topic,
});
thread.sendTyping();

let initialReply: Message<boolean> = undefined;
let final = '';
Expand Down
2 changes: 1 addition & 1 deletion src/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,6 @@ async function bootstrap() {
SwaggerModule.setup('api', app, document);

await app.listen(3000);
console.log(`Application is running on: ${await app.getUrl()}`);
console.log(`SkellyBot Application is running on: ${await app.getUrl()}`);
}
bootstrap();
8 changes: 7 additions & 1 deletion src/main/main.module.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,13 @@ import { DiscordModule } from '../interfaces/discord/discord.module';
imports: [
ConfigModule.forRoot({
isGlobal: true,
envFilePath: ['.env', '.env.slack', '.env.discord', '.env.mongo'],
envFilePath: [
'.env',
'.env.slack',
'.env.discord',
'.env.mongo',
'.env.openai',
],
}),
SlackModule,
DiscordModule,
Expand Down

0 comments on commit 09126de

Please sign in to comment.