Skip to content

Commit

Permalink
slackBolt update
Browse files Browse the repository at this point in the history
  • Loading branch information
endurance committed Jan 2, 2024
1 parent 0d39ac1 commit aaeada3
Show file tree
Hide file tree
Showing 4 changed files with 40 additions and 27 deletions.
7 changes: 3 additions & 4 deletions src/interfaces/chat/slack/bolt/slackAppFactory.ts
Original file line number Diff line number Diff line change
@@ -1,17 +1,16 @@
import { App } from '@slack/bolt';
import { SlackConfigService } from '../config/slackConfig.service';
import { SlackLoggerProxy } from '../logging/slack-logger-proxy.service';
import { SlackLoggerAdapter } from '../logging/slack-logger-proxy.service';
import { Provider } from '@nestjs/common';

export const slackServiceFactory: Provider = {
provide: App,
inject: [SlackConfigService, SlackLoggerProxy],
useFactory: async (scs: SlackConfigService, logger: SlackLoggerProxy) => {
inject: [SlackConfigService, SlackLoggerAdapter],
useFactory: async (scs: SlackConfigService, logger: SlackLoggerAdapter) => {
const opts = await scs.createSlackOptions();
return new App({
...opts,
logger,
// logLevel: LogLevel.DEBUG,
});
},
};
12 changes: 8 additions & 4 deletions src/interfaces/chat/slack/bolt/slackBolt.module.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,23 @@
import { slackServiceFactory } from './slackAppFactory';
import { Module, OnModuleInit } from '@nestjs/common';
import { Module, OnApplicationShutdown, OnModuleInit } from '@nestjs/common';
import { App } from '@slack/bolt';
import { SlackConfigService } from '../config/slackConfig.service';
import { GcpModule } from '../../../../shared/gcp/gcp.module';
import { SlackLoggerProxy } from '../logging/slack-logger-proxy.service';
import { SlackLoggerAdapter } from '../logging/slack-logger-proxy.service';

@Module({
imports: [GcpModule],
providers: [slackServiceFactory, SlackConfigService, SlackLoggerProxy],
providers: [slackServiceFactory, SlackConfigService, SlackLoggerAdapter],
exports: [slackServiceFactory],
})
export class SlackBoltModule implements OnModuleInit {
export class SlackBoltModule implements OnModuleInit, OnApplicationShutdown {
constructor(private readonly app: App) {}

onModuleInit() {
this.app.start();
}

async onApplicationShutdown() {
await this.app.stop();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { Injectable, Logger, LogLevel as NestLogLevel } from '@nestjs/common';
import { LogLevel as SlackLogLevel } from '@slack/bolt';

@Injectable()
export class SlackLoggerProxy extends Logger {
export class SlackLoggerAdapter extends Logger {
level: NestLogLevel = 'log';

constructor(name: string) {
Expand Down
46 changes: 28 additions & 18 deletions src/shared/ai/langchain/chain-builder/chain-builder.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,27 +6,33 @@ import { ChatPromptTemplate } from 'langchain/prompts';
@Injectable()
export class ChainBuilderService {
private _model: OpenAI<any>;
private promptTemplate: ChatPromptTemplate<any, any>;

constructor(
private readonly _openAiSecrets: OpenAiSecretsService,
private readonly _logger: Logger,
) {}

/**
 * Creates a chain that may be invoked later.
 *
 * Builds (or reuses) the model and the prompt template, then pipes the
 * template into the model to form a runnable chain.
 * @param modelName optional model name forwarded to the model factory;
 *   when omitted, the factory's default model is used
 * @returns the prompt-template → model runnable chain
 */
async createChain(modelName?: string) {
  const model = await this._createModel(modelName);
  const promptTemplate = await this._createPrompt();
  return promptTemplate.pipe(model);
}

async createModel(modelName?: string) {
/**
* Creates a model instance.
*
* The model instance is generated as a singleton. Subsequent calls reuse previously created model.

This comment has been minimized.

Copy link
@jonmatthis

jonmatthis Jan 5, 2024

Member

This is fine for now, but once we build up the configuration interface we will need a way to recreate this when the user changes the settings for the model configuration.

WE could simplify things by just saying that you can't change model settings after a Chat is created, but I think it'll be a pretty standard thing to tweak the bot settings mid-conversation

*
* @param {string} [modelName] - The name of the model. If not provided, the default model name 'gpt-4-1106-preview' will be used.
* @private
* @returns {Promise<OpenAI>} - A Promise that resolves to the created model instance.
*/
private async _createModel(modelName?: string) {
if (!this._model) {
this._logger.log('Creating model...');
this._model = new OpenAI({
Expand All @@ -38,10 +44,14 @@ export class ChainBuilderService {
return this._model;
}

/**
 * Builds the chat prompt template used by the chain: a system message
 * parameterized on {topic} followed by a human message carrying {text}.
 * @returns the `ChatPromptTemplate` assembled from the message pairs
 */
private async _createPrompt() {
  const promptStructure = [
    ['system', 'You were having a conversation with a human about {topic}'],
    ['human', '{text}'],
  ];
  // @ts-ignore
  const template = ChatPromptTemplate.fromMessages(promptStructure);
  this._logger.log('Creating prompt...', promptStructure);
  return template;
}
}

0 comments on commit aaeada3

Please sign in to comment.