Commit c0d311c

Merge branch 'main' into CHORE/Upgrade-Analytic-Dependencies
HenryHengZJ authored Jan 3, 2024
2 parents da76a15 + f1f2f71 commit c0d311c
Showing 11 changed files with 24 additions and 21 deletions.

package.json (1 addition, 1 deletion)

@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "1.4.7",
+    "version": "1.4.9",
     "private": true,
     "homepage": "https://flowiseai.com",
     "workspaces": [

@@ -1,7 +1,6 @@
-import { OpenAIBaseInput } from 'langchain/dist/types/openai-types'
 import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
-import { AzureOpenAIInput, ChatOpenAI } from 'langchain/chat_models/openai'
+import { AzureOpenAIInput, ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
 import { BaseCache } from 'langchain/schema'
 import { BaseLLMParams } from 'langchain/llms/base'

@@ -123,7 +122,7 @@ class AzureChatOpenAI_ChatModels implements INode {
         const azureOpenAIApiDeploymentName = getCredentialParam('azureOpenAIApiDeploymentName', credentialData, nodeData)
         const azureOpenAIApiVersion = getCredentialParam('azureOpenAIApiVersion', credentialData, nodeData)

-        const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIBaseInput> = {
+        const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIChatInput> = {
             temperature: parseFloat(temperature),
             modelName,
             azureOpenAIApiKey,
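
In the AzureChatOpenAI_ChatModels node, the deep 'langchain/dist/types/openai-types' import is replaced with the public OpenAIChatInput export, which survives the upgrade to langchain 0.0.213. A minimal sketch of how that intersection type is used to build the model; the credential values are placeholders, not Flowise's actual wiring:

    import { AzureOpenAIInput, ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai'
    import { BaseLLMParams } from 'langchain/llms/base'

    // Placeholder credentials, for illustration only
    const obj: Partial<AzureOpenAIInput> & BaseLLMParams & Partial<OpenAIChatInput> = {
        temperature: 0.7,
        modelName: 'gpt-35-turbo',
        azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
        azureOpenAIApiInstanceName: 'my-instance',
        azureOpenAIApiDeploymentName: 'my-deployment',
        azureOpenAIApiVersion: '2023-07-01-preview'
    }
    // ChatOpenAI targets Azure when the azure* fields are present
    const model = new ChatOpenAI(obj)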

@@ -124,13 +124,13 @@ class ChatMistral_ChatModels implements INode {
         const safeMode = nodeData.inputs?.safeMode as boolean
         const randomSeed = nodeData.inputs?.safeMode as string
         const overrideEndpoint = nodeData.inputs?.overrideEndpoint as string
-        // Waiting fix from langchain + mistral to enable streaming - https://github.com/mistralai/client-js/issues/18
-
+        const streaming = nodeData.inputs?.streaming as boolean
         const cache = nodeData.inputs?.cache as BaseCache

         const obj: ChatMistralAIInput = {
             apiKey: apiKey,
-            modelName: modelName
+            modelName: modelName,
+            streaming: streaming ?? true
         }

         if (maxOutputTokens) obj.maxTokens = parseInt(maxOutputTokens, 10)
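
With @langchain/mistralai bumped to ^0.0.6 (see packages/components/package.json below), the streaming workaround tracked in https://github.com/mistralai/client-js/issues/18 is no longer needed, so the ChatMistral_ChatModels node now passes the flag through and defaults it to true. A minimal usage sketch, assuming the package's ChatMistralAI export and a placeholder API key:

    import { ChatMistralAI, ChatMistralAIInput } from '@langchain/mistralai'

    // Placeholder values, for illustration only
    const obj: ChatMistralAIInput = {
        apiKey: process.env.MISTRAL_API_KEY,
        modelName: 'mistral-tiny',
        streaming: true // mirrors the node's `streaming ?? true` default
    }
    const model = new ChatMistralAI(obj)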

packages/components/nodes/chatmodels/ChatOllama/ChatOllama.ts (2 additions, 3 deletions)

@@ -1,8 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
-import { ChatOllama } from 'langchain/chat_models/ollama'
+import { ChatOllama, ChatOllamaInput } from 'langchain/chat_models/ollama'
 import { BaseCache } from 'langchain/schema'
-import { OllamaInput } from 'langchain/dist/util/ollama'
 import { BaseLLMParams } from 'langchain/llms/base'

 class ChatOllama_ChatModels implements INode {
@@ -209,7 +208,7 @@

         const cache = nodeData.inputs?.cache as BaseCache

-        const obj: OllamaInput & BaseLLMParams = {
+        const obj: ChatOllamaInput & BaseLLMParams = {
             baseUrl,
             temperature: parseFloat(temperature),
             model: modelName
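
ChatOllamaInput is now re-exported from the public 'langchain/chat_models/ollama' entry point, replacing the brittle 'langchain/dist/util/ollama' path. A short sketch of the typed config with illustrative values, assuming an Ollama server on its default port:

    import { ChatOllama, ChatOllamaInput } from 'langchain/chat_models/ollama'
    import { BaseLLMParams } from 'langchain/llms/base'

    // Illustrative values only
    const obj: ChatOllamaInput & BaseLLMParams = {
        baseUrl: 'http://localhost:11434',
        temperature: 0.9,
        model: 'llama2'
    }
    const chat = new ChatOllama(obj)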

packages/components/nodes/documentloaders/Notion/NotionDB.ts (4 additions, 0 deletions)

@@ -66,6 +66,10 @@ class NotionDB_DocumentLoaders implements INode {
                 auth: notionIntegrationToken
             },
             id: databaseId,
+            callerOptions: {
+                maxConcurrency: 64 // Default value
+            },
+            propertiesAsHeader: true, // Prepends a front matter header of the page properties to the page contents
             type: 'database'
         }
         const loader = new NotionAPILoader(obj)
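
callerOptions.maxConcurrency caps concurrent Notion API requests, and propertiesAsHeader keeps page properties in the loaded documents. A usage sketch with a placeholder token and database id; the clientOptions wrapper around auth is assumed from the field shown above:

    import { NotionAPILoader } from 'langchain/document_loaders/web/notionapi'

    const loader = new NotionAPILoader({
        clientOptions: { auth: process.env.NOTION_TOKEN }, // placeholder integration token
        id: '<database-id>', // placeholder
        callerOptions: { maxConcurrency: 64 }, // cap concurrent Notion API requests
        propertiesAsHeader: true, // prepend page properties as a front matter header
        type: 'database'
    })
    const docs = await loader.load()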

@@ -1,7 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
+import { OllamaInput } from 'langchain/llms/ollama'
 import { OllamaEmbeddings } from 'langchain/embeddings/ollama'
-import { OllamaInput } from 'langchain/dist/util/ollama'

 class OllamaEmbedding_Embeddings implements INode {
     label: string
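
The OllamaEmbedding_Embeddings node likewise switches OllamaInput to the stable 'langchain/llms/ollama' export. For context, a minimal sketch of the underlying embeddings class with illustrative values:

    import { OllamaEmbeddings } from 'langchain/embeddings/ollama'

    // Illustrative values; assumes a local Ollama server
    const embeddings = new OllamaEmbeddings({
        baseUrl: 'http://localhost:11434',
        model: 'llama2'
    })
    const vector = await embeddings.embedQuery('hello world') // number[]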

packages/components/nodes/llms/Ollama/Ollama.ts (1 addition, 2 deletions)

@@ -1,8 +1,7 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
-import { Ollama } from 'langchain/llms/ollama'
+import { Ollama, OllamaInput } from 'langchain/llms/ollama'
 import { BaseCache } from 'langchain/schema'
-import { OllamaInput } from 'langchain/dist/util/ollama'
 import { BaseLLMParams } from 'langchain/llms/base'

 class Ollama_LLMs implements INode {

packages/components/package.json (4 additions, 4 deletions)

@@ -1,6 +1,6 @@
 {
     "name": "flowise-components",
-    "version": "1.4.9",
+    "version": "1.5.0",
     "description": "Flowiseai Components",
     "main": "dist/src/index",
     "types": "dist/src/index.d.ts",
@@ -26,8 +26,8 @@
     "@gomomento/sdk-core": "^1.51.1",
     "@google-ai/generativelanguage": "^0.2.1",
     "@huggingface/inference": "^2.6.1",
-    "@langchain/google-genai": "^0.0.3",
-    "@langchain/mistralai": "^0.0.3",
+    "@langchain/google-genai": "^0.0.6",
+    "@langchain/mistralai": "^0.0.6",
     "@notionhq/client": "^2.2.8",
     "@opensearch-project/opensearch": "^1.2.0",
     "@pinecone-database/pinecone": "^1.1.1",
@@ -52,7 +52,7 @@
     "html-to-text": "^9.0.5",
     "husky": "^8.0.3",
     "ioredis": "^5.3.2",
-    "langchain": "^0.0.196",
+    "langchain": "^0.0.213",
     "langfuse": "2.0.2",
     "langfuse-langchain": "2.0.2",
     "langsmith": "0.0.53",

packages/server/package.json (1 addition, 1 deletion)

@@ -1,6 +1,6 @@
 {
     "name": "flowise",
-    "version": "1.4.7",
+    "version": "1.4.9",
     "description": "Flowiseai Server",
     "main": "dist/index",
     "types": "dist/index.d.ts",

packages/server/src/utils/index.ts (4 additions, 2 deletions)

@@ -818,7 +818,7 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component
 */
 export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNodeData: INodeData) => {
     const streamAvailableLLMs = {
-        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock'],
+        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock', 'chatMistralAI'],
         LLMs: ['azureOpenAI', 'openAI', 'ollama']
     }
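
Registering 'chatMistralAI' here lets flows that end in the Mistral chat node stream tokens to the client. A minimal sketch of how a category-to-node-name map like this one gates streaming; the helper below is illustrative, not the function's full logic:

    const streamAvailableLLMs: Record<string, string[]> = {
        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic', 'chatOllama', 'awsChatBedrock', 'chatMistralAI'],
        LLMs: ['azureOpenAI', 'openAI', 'ollama']
    }

    // Illustrative check: a node may stream if its name is registered under its category
    const canStream = (category: string, name: string): boolean =>
        streamAvailableLLMs[category]?.includes(name) ?? false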

@@ -875,7 +875,9 @@ export const getEncryptionKey = async (): Promise<string> => {
         return await fs.promises.readFile(getEncryptionKeyPath(), 'utf8')
     } catch (error) {
         const encryptKey = generateEncryptKey()
-        const defaultLocation = path.join(getUserHome(), '.flowise', 'encryption.key')
+        const defaultLocation = process.env.SECRETKEY_PATH
+            ? path.join(process.env.SECRETKEY_PATH, 'encryption.key')
+            : path.join(getUserHome(), '.flowise', 'encryption.key')
         await fs.promises.writeFile(defaultLocation, encryptKey)
         return encryptKey
     }
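
The write path now honors SECRETKEY_PATH, so a freshly generated key lands in the same directory that getEncryptionKeyPath reads from. A self-contained sketch of the resulting behavior, with Flowise's generateEncryptKey and getUserHome helpers approximated by crypto.randomBytes and os.homedir (both stand-ins, not the actual implementations):

    import fs from 'fs'
    import os from 'os'
    import path from 'path'
    import { randomBytes } from 'crypto'

    // Stand-in for getEncryptionKeyPath: honor SECRETKEY_PATH, else ~/.flowise
    const keyPath = (): string =>
        process.env.SECRETKEY_PATH
            ? path.join(process.env.SECRETKEY_PATH, 'encryption.key')
            : path.join(os.homedir(), '.flowise', 'encryption.key')

    export const getEncryptionKey = async (): Promise<string> => {
        try {
            return await fs.promises.readFile(keyPath(), 'utf8')
        } catch {
            const encryptKey = randomBytes(24).toString('base64') // stand-in key generator
            await fs.promises.writeFile(keyPath(), encryptKey)
            return encryptKey
        }
    }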

packages/ui/package.json (1 addition, 1 deletion)

@@ -1,6 +1,6 @@
 {
     "name": "flowise-ui",
-    "version": "1.4.5",
+    "version": "1.4.6",
     "license": "SEE LICENSE IN LICENSE.md",
     "homepage": "https://flowiseai.com",
     "author": {
