diff --git a/README.md b/README.md
index a0ed086..7433c97 100644
--- a/README.md
+++ b/README.md
@@ -7,18 +7,27 @@ questions using up-to-date data. This is possible as a result of agents
 utilizing function calling to interact with the OpenBB platform.

-## Set-up
-At present, we currently support Python 3.11. If you're using a earlier version
-of Python, your mileage may vary. We'll be adding wider support very soon!
+## Installation
+Currently, we only support Python 3.11. We will be adding support for more versions of Python relatively soon.

-- Create a new virtual environment, with `poetry `
-- `poetry install`
+`openbb-agents` is available as a PyPI package:
+
+``` sh
+pip install openbb-agents --upgrade
+```

 ## Usage

-Use the `run.py` script and pass in your query.
-Queries can be simple:
+``` python
+>>> from openbb_agents.agent import openbb_agent
+>>> result = openbb_agent("What is the current market cap of TSLA?") # Will print some logs to show you progress
+>>> print(result)
+- The current market cap of TSLA (Tesla, Inc.) is approximately $695,833,798,800.00.
+- This figure is based on the most recent data available, which is from January 15, 2024.
+- The market cap is calculated by multiplying the current stock price ($218.89) by the number of outstanding shares (3,178,920,000).
+```
+If you've cloned the repository, you can use the `run.py` script and pass in your query:

 ``` sh
 python run.py "What is the current market cap of TSLA?"
 ```

@@ -37,7 +46,10 @@ python run.py "Who are TSLA's peers? What is their respective market cap? Return

 There is more functionality coming very soon!

+
 ## Development
+- Create a new virtual environment with `poetry`
+- `poetry install`

 ### Linting and Formatting
 We're currently experimenting with `ruff` as a drop-in replacement for `black`, `isort` and `pylint`.
diff --git a/openbb_agents/chains.py b/openbb_agents/chains.py
index 0d8ddc8..ce493fd 100644
--- a/openbb_agents/chains.py
+++ b/openbb_agents/chains.py
@@ -10,7 +10,6 @@
     JSONAgentOutputParser,
     OpenAIFunctionsAgentOutputParser,
 )
-from langchain.chat_models import ChatOpenAI
 from langchain.output_parsers import PydanticOutputParser
 from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
 from langchain.tools import StructuredTool
@@ -19,6 +18,7 @@
     render_text_description_and_args,
 )
 from langchain.vectorstores import VectorStore
+from langchain_openai import ChatOpenAI

 from openbb_agents.models import (
     AnsweredSubQuestion,
diff --git a/openbb_agents/prompts.py b/openbb_agents/prompts.py
index 458bf8d..ece2b22 100644
--- a/openbb_agents/prompts.py
+++ b/openbb_agents/prompts.py
@@ -15,7 +15,6 @@
 You are a world-class state-of-the-art search agent.
 You are excellent at your job.

-YOU MUST DO MULTIPLE FUNCTION CALLS! DO NOT RELY ON A SINGLE CALL ONLY.
 Your purpose is to search for tools that allow you to answer a user's
 subquestion. The subquestion could be a part of a chain of other subquestions.

@@ -27,10 +26,13 @@
 ... repeat as many times as necessary until you reach a maximum of 4 tools
 4. Return the list of tools using the output schema.

+YOU ARE ALLOWED TO DO MULTIPLE FUNCTION CALLS! DO NOT RELY ON A SINGLE CALL ONLY.
+
 You can search for tools using the available tool, which uses your inputs to
 search a vector databse that relies on similarity search.

 These are the guidelines to consider when completing your task:
+* Immediately return no tools if you do not require any to answer the query.
 * Don't use the stock ticker or symbol in the query
 * Use keyword searches
 * Make multiple searches with different terms
diff --git a/openbb_agents/tools.py b/openbb_agents/tools.py
index 00e1c00..2275fa2 100644
--- a/openbb_agents/tools.py
+++ b/openbb_agents/tools.py
@@ -5,11 +5,11 @@
 from typing import Callable, List, Union

 import tiktoken
-from langchain.embeddings import OpenAIEmbeddings
 from langchain.schema import Document
 from langchain.tools import StructuredTool
 from langchain.tools.base import ToolException
-from langchain.vectorstores import FAISS, VectorStore
+from langchain_community.vectorstores import FAISS, VectorStore
+from langchain_openai import OpenAIEmbeddings
 from openbb import obb
 from pydantic.v1 import ValidationError, create_model
 from pydantic.v1.fields import FieldInfo
diff --git a/poetry.lock b/poetry.lock
index f6a53cd..7c84aae 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1870,21 +1870,22 @@ files = [

 [[package]]
 name = "langchain"
-version = "0.0.348"
+version = "0.1.0"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = ">=3.8.1,<4.0"
 files = [
-    {file = "langchain-0.0.348-py3-none-any.whl", hash = "sha256:35ab72d9e2b9c48d6782f10ec400a9783eaa3c8a6e54fa1cbe0f195c425b3008"},
-    {file = "langchain-0.0.348.tar.gz", hash = "sha256:06ac3ab1cdaa7b55497ccae55119559a6b6e870b92dedddc0172be8796dca09d"},
+    {file = "langchain-0.1.0-py3-none-any.whl", hash = "sha256:8652e74b039333a55c79faff4400b077ba1bd0ddce5255574e42d301c05c1733"},
+    {file = "langchain-0.1.0.tar.gz", hash = "sha256:d43119f8d3fda2c8ddf8c3a19bd5b94b347e27d1867ff14a921b90bdbed0668a"},
 ]

 [package.dependencies]
 aiohttp = ">=3.8.3,<4.0.0"
 dataclasses-json = ">=0.5.7,<0.7"
 jsonpatch = ">=1.33,<2.0"
-langchain-core = ">=0.0.12,<0.1"
-langsmith = ">=0.0.63,<0.1.0"
+langchain-community = ">=0.0.9,<0.1"
+langchain-core = ">=0.1.7,<0.2"
+langsmith = ">=0.0.77,<0.1.0"
 numpy = ">=1,<2"
 pydantic = ">=1,<3"
 PyYAML = ">=5.3"
@@ -1893,29 +1894,54 @@ SQLAlchemy = ">=1.4,<3"
 tenacity = ">=8.1.0,<9.0.0"

 [package.extras]
-all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.8.3,<4.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=1.2.4,<2.0.0)", "momento (>=1.13.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<4)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.6.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
 azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"]
 clarifai = ["clarifai (>=9.1.0)"]
 cli = ["typer (>=0.9.0,<0.10.0)"]
 cohere = ["cohere (>=4,<5)"]
 docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
 embeddings = ["sentence-transformers (>=2,<3)"]
-extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.6.0,<0.7.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
+extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
 javascript = ["esprima (>=4.0.1,<5.0.0)"]
 llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
 openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"]
 qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
 text-helpers = ["chardet (>=5.1.0,<6.0.0)"]

+[[package]]
+name = "langchain-community"
+version = "0.0.12"
+description = "Community contributed LangChain integrations."
+optional = false
+python-versions = ">=3.8.1,<4.0"
+files = [
+    {file = "langchain_community-0.0.12-py3-none-any.whl", hash = "sha256:13b988afaa24e570d2b9992aecccb2fe36d9c33feafd9804f3066dc2ff042d4d"},
+    {file = "langchain_community-0.0.12.tar.gz", hash = "sha256:7cfe36c52b1fb86c1095d4dec0cf466a1c752a7446104e8b39cf0f70512a4851"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.8.3,<4.0.0"
+dataclasses-json = ">=0.5.7,<0.7"
+langchain-core = ">=0.1.9,<0.2"
+langsmith = ">=0.0.63,<0.1.0"
+numpy = ">=1,<2"
+PyYAML = ">=5.3"
+requests = ">=2,<3"
+SQLAlchemy = ">=1.4,<3"
+tenacity = ">=8.1.0,<9.0.0"
+
+[package.extras]
+cli = ["typer (>=0.9.0,<0.10.0)"]
+extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"]
+
 [[package]]
 name = "langchain-core"
-version = "0.0.13"
+version = "0.1.10"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = ">=3.8.1,<4.0"
 files = [
-    {file = "langchain_core-0.0.13-py3-none-any.whl", hash = "sha256:36d33a3d280877fb29a1f0f292b9b02b9ba29bf43fb54090b7364f00d5925459"},
-    {file = "langchain_core-0.0.13.tar.gz", hash = "sha256:fcfc13d2c314c0441c8f1f8b79395316df5873c1c7a687c8c5c553b3824840b6"},
+    {file = "langchain_core-0.1.10-py3-none-any.whl", hash = "sha256:d89952f6d0766cfc88d9f1e25b84d56f8d7bd63a45ad8ec1a9a038c9b49df16d"},
+    {file = "langchain_core-0.1.10.tar.gz", hash = "sha256:3c9e1383264c102fcc6f865700dbb9416c4931a25d0ac2195f6311c6b867aa17"},
 ]

 [package.dependencies]
@@ -1931,6 +1957,23 @@ tenacity = ">=8.1.0,<9.0.0"
 [package.extras]
 extended-testing = ["jinja2 (>=3,<4)"]

+[[package]]
+name = "langchain-openai"
+version = "0.0.2.post1"
+description = "An integration package connecting OpenAI and LangChain"
+optional = false
+python-versions = ">=3.8.1,<4.0"
+files = [
+    {file = "langchain_openai-0.0.2.post1-py3-none-any.whl", hash = "sha256:ba468b94c23da9d8ccefe5d5a3c1c65b4b9702292523e53acc689a9110022e26"},
+    {file = "langchain_openai-0.0.2.post1.tar.gz", hash = "sha256:f8e78db4a663feeac71d9f036b9422406c199ea3ef4c97d99ff392c93530e073"},
+]
+
+[package.dependencies]
+langchain-core = ">=0.1.7,<0.2"
+numpy = ">=1,<2"
+openai = ">=1.6.1,<2.0.0"
+tiktoken = ">=0.5.2,<0.6.0"
+
 [[package]]
 name = "langchainhub"
 version = "0.1.14"
@@ -2714,54 +2757,55 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]

 [[package]]
 name = "openbb"
-version = "4.0.1"
+version = "4.1.0"
 description = "OpenBB"
 optional = false
 python-versions = ">=3.8,<3.12"
 files = [
-    {file = "openbb-4.0.1-py3-none-any.whl", hash = "sha256:e848a0170a7cc6784904100c06f2061a05a00b679bffe98cea18d730c96c8a05"},
-    {file = "openbb-4.0.1.tar.gz", hash = "sha256:41767ebdfb022058d1e7e480b41ce7caeccb5743de5545979b0ac42463f75260"},
-]
-
-[package.dependencies]
-openbb-benzinga = ">=1.0.0,<2.0.0"
-openbb-core = ">=1.0.1,<2.0.0"
-openbb-crypto = ">=1.0.0,<2.0.0"
-openbb-currency = ">=1.0.0,<2.0.0"
-openbb-derivatives = ">=1.0.0,<2.0.0"
-openbb-economy = ">=1.0.0,<2.0.0"
-openbb-equity = ">=1.0.0,<2.0.0"
-openbb-etf = ">=1.0.0,<2.0.0"
-openbb-fixedincome = ">=1.0.0,<2.0.0"
-openbb-fmp = ">=1.0.0,<2.0.0"
-openbb-fred = ">=1.0.0,<2.0.0"
-openbb-index = ">=1.0.0,<2.0.0"
-openbb-intrinio = ">=1.0.0,<2.0.0"
-openbb-news = ">=1.0.0,<2.0.0"
-openbb-oecd = ">=1.0.0,<2.0.0"
-openbb-polygon = ">=1.0.0,<2.0.0"
-openbb-regulators = ">=1.0.0,<2.0.0"
-openbb-sec = ">=1.0.0,<2.0.0"
-openbb-tiingo = ">=1.0.0,<2.0.0"
-openbb-tradingeconomics = ">=1.0.0,<2.0.0"
+    {file = "openbb-4.1.0-py3-none-any.whl", hash = "sha256:e4e7d7f64d1d3e2a1aa9b7ac9b457fe142a9ce7f858d044c2c32dc3ccaa9ee87"},
+    {file = "openbb-4.1.0.tar.gz", hash = "sha256:3f385708a34f536abe619148fe9be0e49fe3cbbc7885bb71fcd9f82938cb0892"},
+]
+
+[package.dependencies]
+openbb-benzinga = ">=1.1.0,<2.0.0"
+openbb-core = ">=1.1.0,<2.0.0"
+openbb-crypto = ">=1.1.0,<2.0.0"
+openbb-currency = ">=1.1.0,<2.0.0"
+openbb-derivatives = ">=1.1.0,<2.0.0"
+openbb-economy = ">=1.1.0,<2.0.0"
+openbb-equity = ">=1.1.0,<2.0.0"
+openbb-etf = ">=1.1.0,<2.0.0"
+openbb-federal-reserve = ">=1.1.0,<2.0.0"
+openbb-fixedincome = ">=1.1.0,<2.0.0"
+openbb-fmp = ">=1.1.0,<2.0.0"
+openbb-fred = ">=1.1.0,<2.0.0"
+openbb-index = ">=1.1.0,<2.0.0"
+openbb-intrinio = ">=1.1.0,<2.0.0"
+openbb-news = ">=1.1.0,<2.0.0"
+openbb-oecd = ">=1.1.0,<2.0.0"
+openbb-polygon = ">=1.1.0,<2.0.0"
+openbb-regulators = ">=1.1.0,<2.0.0"
+openbb-sec = ">=1.1.0,<2.0.0"
+openbb-tiingo = ">=1.1.0,<2.0.0"
+openbb-tradingeconomics = ">=1.1.0,<2.0.0"

 [package.extras]
-all = ["openbb-alpha-vantage (>=1.0.0,<2.0.0)", "openbb-biztoc (>=1.0.0,<2.0.0)", "openbb-cboe (>=1.0.0,<2.0.0)", "openbb-charting (>=1.0.0,<2.0.0)", "openbb-ecb (>=1.0.0,<2.0.0)", "openbb-econometrics (>=1.0.0,<2.0.0)", "openbb-government-us (>=1.0.0,<2.0.0)", "openbb-nasdaq (>=1.0.0,<2.0.0)", "openbb-quantitative (>=1.0.0,<2.0.0)", "openbb-seeking-alpha (>=1.0.0,<2.0.0)", "openbb-stockgrid (>=1.0.0,<2.0.0)", "openbb-technical (>=1.0.0,<2.0.0)", "openbb-yfinance (>=1.0.0,<2.0.0)"]
-alpha-vantage = ["openbb-alpha-vantage (>=1.0.0,<2.0.0)"]
-biztoc = ["openbb-biztoc (>=1.0.0,<2.0.0)"]
-cboe = ["openbb-cboe (>=1.0.0,<2.0.0)"]
-charting = ["openbb-charting (>=1.0.0,<2.0.0)"]
-ecb = ["openbb-ecb (>=1.0.0,<2.0.0)"]
-econometrics = ["openbb-econometrics (>=1.0.0,<2.0.0)"]
-finra = ["openbb-finra (>=1.0.0,<2.0.0)"]
-government = ["openbb-government-us (>=1.0.0,<2.0.0)"]
-nasdaq = ["openbb-nasdaq (>=1.0.0,<2.0.0)"]
-quantitative = ["openbb-quantitative (>=1.0.0,<2.0.0)"]
-seeking-alpha = ["openbb-seeking-alpha (>=1.0.0,<2.0.0)"]
-stockgrid = ["openbb-stockgrid (>=1.0.0,<2.0.0)"]
-technical = ["openbb-technical (>=1.0.0,<2.0.0)"]
-wsj = ["openbb-wsj (>=1.0.0,<2.0.0)"]
-yfinance = ["openbb-yfinance (>=1.0.0,<2.0.0)"]
+all = ["openbb-alpha-vantage (>=1.1.0,<2.0.0)", "openbb-biztoc (>=1.1.0,<2.0.0)", "openbb-cboe (>=1.1.0,<2.0.0)", "openbb-charting (>=1.1.0,<2.0.0)", "openbb-ecb (>=1.1.0,<2.0.0)", "openbb-econometrics (>=1.1.0,<2.0.0)", "openbb-government-us (>=1.1.0,<2.0.0)", "openbb-nasdaq (>=1.1.1,<2.0.0)", "openbb-quantitative (>=1.1.0,<2.0.0)", "openbb-seeking-alpha (>=1.1.0,<2.0.0)", "openbb-stockgrid (>=1.1.0,<2.0.0)", "openbb-technical (>=1.1.0,<2.0.0)", "openbb-yfinance (>=1.1.0,<2.0.0)"]
+alpha-vantage = ["openbb-alpha-vantage (>=1.1.0,<2.0.0)"]
+biztoc = ["openbb-biztoc (>=1.1.0,<2.0.0)"]
+cboe = ["openbb-cboe (>=1.1.0,<2.0.0)"]
+charting = ["openbb-charting (>=1.1.0,<2.0.0)"]
+ecb = ["openbb-ecb (>=1.1.0,<2.0.0)"]
+econometrics = ["openbb-econometrics (>=1.1.0,<2.0.0)"]
+finra = ["openbb-finra (>=1.1.0,<2.0.0)"]
+government = ["openbb-government-us (>=1.1.0,<2.0.0)"]
+nasdaq = ["openbb-nasdaq (>=1.1.1,<2.0.0)"]
+quantitative = ["openbb-quantitative (>=1.1.0,<2.0.0)"]
+seeking-alpha = ["openbb-seeking-alpha (>=1.1.0,<2.0.0)"]
+stockgrid = ["openbb-stockgrid (>=1.1.0,<2.0.0)"]
+technical = ["openbb-technical (>=1.1.0,<2.0.0)"]
+wsj = ["openbb-wsj (>=1.1.0,<2.0.0)"]
+yfinance = ["openbb-yfinance (>=1.1.0,<2.0.0)"]

 [[package]]
 name = "openbb-benzinga"
@@ -2911,6 +2955,20 @@ files = [
 [package.dependencies]
 openbb-core = ">=1.1.0,<2.0.0"

+[[package]]
+name = "openbb-federal-reserve"
+version = "1.1.0"
+description = "US Federal Reserve Data Extension for OpenBB"
+optional = false
+python-versions = ">=3.8,<4.0"
+files = [
+    {file = "openbb_federal_reserve-1.1.0-py3-none-any.whl", hash = "sha256:629d51819f7be1f52cbff026155002000946e57c5b410e1c3d94e257576a883a"},
+    {file = "openbb_federal_reserve-1.1.0.tar.gz", hash = "sha256:aeac532e6e44eaa8efc2ae01608e08129d5fad348d5a8e0db4acd64a57999ce7"},
+]
+
+[package.dependencies]
+openbb-core = ">=1.1.0,<2.0.0"
+
 [[package]]
 name = "openbb-fixedincome"
 version = "1.1.0"
@@ -5929,4 +5987,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11,<3.12"
-content-hash = "d6e2a89b03aadcb9cc5d48ccb2865e25de8cee1d059587fba831f21f5f260405"
+content-hash = "2698844cfb41be3b6fa9cc6ccd58a0f3903459defc86c45b23d43bee368f9348"
diff --git a/pyproject.toml b/pyproject.toml
index 613845f..ba81a84 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "openbb-agents"
-version = "0.0.1a"
+version = "0.0.1"
 description = "LLMs X OpenBB"
 authors = ["Michael Struwig "]
 readme = "README.md"
@@ -8,8 +8,8 @@ readme = "README.md"
 [tool.poetry.dependencies]
 python = "^3.11,<3.12"
 jupyterlab = "^4.0.9"
-langchain = "^0.0.348"
-openbb = "4.0.1"
+langchain = "^0.1"
+openbb = "4.1.0"
 openai = "^1.3.5"
 sentence-transformers = "^2.2.2"
 tiktoken = "^0.5.1"
@@ -20,6 +20,7 @@ fastapi = "^0.104.1"
 uvicorn = "^0.24.0"
 matplotlib = "^3.8.2"
 openbb-charting = "^1.0.0"
+langchain-openai = "^0.0.2.post1"

 [tool.poetry.group.dev.dependencies]
 pre-commit = "^3.5.0"
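Note on the dependency changes above: `langchain` 0.1 splits the old monolithic package into `langchain-core`, `langchain-community`, and partner packages such as `langchain-openai`, which is why `chains.py` and `tools.py` move their `ChatOpenAI`, `OpenAIEmbeddings`, and `FAISS` imports. The sketch below shows the post-split import layout only; the model name and example text are illustrative assumptions, not taken from this diff, and it assumes `faiss-cpu` is installed and `OPENAI_API_KEY` is set.

``` python
# Imports after the langchain 0.1 split, mirroring the changes in chains.py and tools.py:
# ChatOpenAI and OpenAIEmbeddings come from langchain-openai, FAISS from langchain-community.
from langchain_community.vectorstores import FAISS
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

llm = ChatOpenAI(model="gpt-4-1106-preview")  # illustrative model name
embeddings = OpenAIEmbeddings()

# Build a tiny similarity-search index and query it, loosely following how
# tools.py indexes tool descriptions with FAISS.
vector_store = FAISS.from_texts(["example tool description"], embedding=embeddings)
results = vector_store.similarity_search("market cap", k=1)
print(results[0].page_content)
```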