Merge pull request #1399 from FlowiseAI/feature/MistralAI
Feature/Add Mistral
HenryHengZJ authored Dec 17, 2023
2 parents 0d73d53 + b5dd970 commit 2767cf0
Showing 9 changed files with 275 additions and 3 deletions.
@@ -11,7 +11,8 @@ class GoogleGenerativeAICredential implements INodeCredential {
this.label = 'Google Generative AI'
this.name = 'googleGenerativeAI'
this.version = 1.0
- this.description = 'Get your <a target="_blank" href="https://ai.google.dev/tutorials/setup">API Key</a> here.'
+ this.description =
+ 'You can get your API key from the official <a target="_blank" href="https://ai.google.dev/tutorials/setup">page</a>.'
this.inputs = [
{
label: 'Google AI API Key',
25 changes: 25 additions & 0 deletions packages/components/credentials/MistralApi.credential.ts
@@ -0,0 +1,25 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class MistralAICredential implements INodeCredential {
label: string
name: string
version: number
description: string
inputs: INodeParams[]

constructor() {
this.label = 'MistralAI API'
this.name = 'mistralAIApi'
this.version = 1.0
this.description = 'You can get your API key from the official <a target="_blank" href="https://console.mistral.ai/">console</a>.'
this.inputs = [
{
label: 'MistralAI API Key',
name: 'mistralAIAPIKey',
type: 'password'
}
]
}
}

module.exports = { credClass: MistralAICredential }
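For orientation, a hedged sketch — not part of this commit, and resolveMistralAIKey is a hypothetical helper — of how a node that lists 'mistralAIApi' in its credentialNames resolves the stored key at runtime; it is the same pattern the ChatMistral and embeddings nodes below use:

import { ICommonObject, INodeData } from '../../../src/Interface'
import { getCredentialData, getCredentialParam } from '../../../src/utils'

// Hypothetical helper: load the credential record attached to the node,
// then read the field declared as 'mistralAIAPIKey' in the class above.
const resolveMistralAIKey = async (nodeData: INodeData, options: ICommonObject): Promise<string> => {
    const credentialData = await getCredentialData(nodeData.credential ?? '', options)
    return getCredentialParam('mistralAIAPIKey', credentialData, nodeData)
}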
@@ -49,8 +49,7 @@ class GoogleGenerativeAI_ChatModels implements INode {
name: 'gemini-pro'
}
],
- default: 'gemini-pro',
- optional: true
+ default: 'gemini-pro'
},
{
label: 'Temperature',
150 changes: 150 additions & 0 deletions packages/components/nodes/chatmodels/ChatMistral/ChatMistral.ts
@@ -0,0 +1,150 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { BaseCache } from 'langchain/schema'
import { ChatMistralAI, ChatMistralAIInput } from '@langchain/mistralai'

class ChatMistral_ChatModels implements INode {
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]

constructor() {
this.label = 'ChatMistralAI'
this.name = 'chatMistralAI'
this.version = 1.0
this.type = 'ChatMistralAI'
this.icon = 'mistralai.png'
this.category = 'Chat Models'
this.description = 'Wrapper around Mistral large language models that use the Chat endpoint'
this.baseClasses = [this.type, ...getBaseClasses(ChatMistralAI)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['mistralAIApi']
}
this.inputs = [
{
label: 'Cache',
name: 'cache',
type: 'BaseCache',
optional: true
},
{
label: 'Model Name',
name: 'modelName',
type: 'options',
options: [
{
label: 'mistral-tiny',
name: 'mistral-tiny'
},
{
label: 'mistral-small',
name: 'mistral-small'
},
{
label: 'mistral-medium',
name: 'mistral-medium'
}
],
default: 'mistral-tiny'
},
{
label: 'Temperature',
name: 'temperature',
type: 'number',
description:
'What sampling temperature to use, between 0.0 and 1.0. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.',
step: 0.1,
default: 0.9,
optional: true
},
{
label: 'Max Output Tokens',
name: 'maxOutputTokens',
type: 'number',
description: 'The maximum number of tokens to generate in the completion.',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Top Probability',
name: 'topP',
type: 'number',
description:
'Nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Random Seed',
name: 'randomSeed',
type: 'number',
description: 'The seed to use for random sampling. If set, different calls will generate deterministic results.',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Safe Mode',
name: 'safeMode',
type: 'boolean',
description: 'Whether to inject a safety prompt before all conversations.',
optional: true,
additionalParams: true
},
{
label: 'Override Endpoint',
name: 'overrideEndpoint',
type: 'string',
optional: true,
additionalParams: true
}
]
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('mistralAIAPIKey', credentialData, nodeData)

const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const maxOutputTokens = nodeData.inputs?.maxOutputTokens as string
const topP = nodeData.inputs?.topP as string
const safeMode = nodeData.inputs?.safeMode as boolean
const randomSeed = nodeData.inputs?.randomSeed as string
const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string
// Waiting on a fix from langchain + mistral to enable streaming - https://github.com/mistralai/client-js/issues/18

const cache = nodeData.inputs?.cache as BaseCache

const obj: ChatMistralAIInput = {
apiKey: apiKey,
modelName: modelName
}

if (maxOutputTokens) obj.maxTokens = parseInt(maxOutputTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (cache) obj.cache = cache
if (temperature) obj.temperature = parseFloat(temperature)
if (randomSeed) obj.randomSeed = parseInt(randomSeed, 10)
if (safeMode) obj.safeMode = safeMode
if (overrideEndpoint) obj.endpoint = overrideEndpoint

const model = new ChatMistralAI(obj)

return model
}
}

module.exports = { nodeClass: ChatMistral_ChatModels }
[Binary file not shown — presumably the mistralai.png icon referenced by the node above.]
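For context, a minimal standalone sketch of the @langchain/mistralai client this node wraps, mirroring the node's defaults. MISTRAL_API_KEY is an assumed environment variable, an ESM context is assumed for top-level await, and none of this is part of the commit:

import { ChatMistralAI } from '@langchain/mistralai'
import { HumanMessage } from 'langchain/schema'

const model = new ChatMistralAI({
    apiKey: process.env.MISTRAL_API_KEY, // assumed env var
    modelName: 'mistral-tiny',
    temperature: 0.9
})

// One-shot chat call; streaming is held back by the client-js issue noted in init() above.
const res = await model.invoke([new HumanMessage('Say hello in one sentence.')])
console.log(res.content)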
@@ -0,0 +1,95 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { MistralAIEmbeddings, MistralAIEmbeddingsParams } from '@langchain/mistralai'

class MistralEmbedding_Embeddings implements INode {
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
inputs: INodeParams[]
credential: INodeParams

constructor() {
this.label = 'MistralAI Embeddings'
this.name = 'mistralAIEmbeddings'
this.version = 1.0
this.type = 'MistralAIEmbeddings'
this.icon = 'mistralai.png'
this.category = 'Embeddings'
this.description = 'MistralAI API to generate embeddings for a given text'
this.baseClasses = [this.type, ...getBaseClasses(MistralAIEmbeddings)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['mistralAIApi']
}
this.inputs = [
{
label: 'Model Name',
name: 'modelName',
type: 'options',
options: [
{
label: 'mistral-embed',
name: 'mistral-embed'
}
],
default: 'mistral-embed'
},
{
label: 'Batch Size',
name: 'batchSize',
type: 'number',
step: 1,
default: 512,
optional: true,
additionalParams: true
},
{
label: 'Strip New Lines',
name: 'stripNewLines',
type: 'boolean',
default: true,
optional: true,
additionalParams: true
},
{
label: 'Override Endpoint',
name: 'overrideEndpoint',
type: 'string',
optional: true,
additionalParams: true
}
]
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const modelName = nodeData.inputs?.modelName as string
const batchSize = nodeData.inputs?.batchSize as string
const stripNewLines = nodeData.inputs?.stripNewLines as boolean
const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string

const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('mistralAIAPIKey', credentialData, nodeData)

const obj: MistralAIEmbeddingsParams = {
apiKey: apiKey,
modelName: modelName
}

if (batchSize) obj.batchSize = parseInt(batchSize, 10)
if (stripNewLines) obj.stripNewLines = stripNewLines
if (overrideEndpoint) obj.endpoint = overrideEndpoint

const model = new MistralAIEmbeddings(obj)
return model
}
}

module.exports = { nodeClass: MistralEmbedding_Embeddings }
[Binary file not shown — presumably the mistralai.png icon referenced by the node above.]
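Similarly, a hedged sketch of the embeddings client this node wraps (same assumptions as above; not part of the commit):

import { MistralAIEmbeddings } from '@langchain/mistralai'

const embeddings = new MistralAIEmbeddings({
    apiKey: process.env.MISTRAL_API_KEY, // assumed env var
    modelName: 'mistral-embed'
})

// embedQuery returns a single vector; embedDocuments batches inputs per batchSize (512 by default above).
const vector = await embeddings.embedQuery('hello world')
console.log(vector.length)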
1 change: 1 addition & 0 deletions packages/components/package.json
@@ -27,6 +27,7 @@
"@google-ai/generativelanguage": "^0.2.1",
"@huggingface/inference": "^2.6.1",
"@langchain/google-genai": "^0.0.3",
+ "@langchain/mistralai": "^0.0.3",
"@notionhq/client": "^2.2.8",
"@opensearch-project/opensearch": "^1.2.0",
"@pinecone-database/pinecone": "^1.1.1",
1 change: 1 addition & 0 deletions packages/server/src/utils/index.ts
@@ -728,6 +728,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component

/**
 * Check to see if flow valid for stream
+ * TODO: perform check from component level. i.e: set streaming on component, and check here
 * @param {IReactFlowNode[]} reactFlowNodes
 * @param {INodeData} endingNodeData
 * @returns {boolean}
