diff --git a/.gitignore b/.gitignore index 8cb98906b..75326d9a3 100644 --- a/.gitignore +++ b/.gitignore @@ -217,4 +217,7 @@ __marimo__/ # Downloads *csv *xlsx -*xls \ No newline at end of file +*xls + +# macOS +.DS_Store diff --git a/python/.env.example b/python/.env.example index d7fba4fc3..3894383c1 100644 --- a/python/.env.example +++ b/python/.env.example @@ -13,7 +13,8 @@ OPENROUTER_API_KEY= # i18n settings, different locales can be set here. # Database only supports UTF time and English Data. LANG=en_US.UTF-8 -TIMEZONE= +# The product's default timezone. +TIMEZONE=America/New_York # API settings API_ENABLED=true diff --git a/python/pyproject.toml b/python/pyproject.toml index b2152bdd6..06b5d5ad8 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -16,7 +16,6 @@ dependencies = [ "uvicorn>=0.24.0", "a2a-sdk[http-server]>=0.3.4", "yfinance>=0.2.65", - "tushare>=1.4.24", "requests>=2.32.5", "akshare>=1.17.44", "agno[openai]>=1.8.2,<2.0", @@ -110,4 +109,4 @@ skip_glob = [ "**/tests/**", "docs/**", "**/docs/**", -] +] diff --git a/python/scripts/launch.py b/python/scripts/launch.py index 6948780ff..ad39c5bb9 100644 --- a/python/scripts/launch.py +++ b/python/scripts/launch.py @@ -4,9 +4,9 @@ """ import os -from pathlib import Path import subprocess from datetime import datetime +from pathlib import Path from typing import Dict import questionary diff --git a/python/third_party/TradingAgents/adapter/__main__.py b/python/third_party/TradingAgents/adapter/__main__.py index dc564d787..256993237 100644 --- a/python/third_party/TradingAgents/adapter/__main__.py +++ b/python/third_party/TradingAgents/adapter/__main__.py @@ -1,7 +1,7 @@ import asyncio import logging from datetime import datetime, date -from typing import List, Dict, Any, Optional +from typing import List, Dict, Any, Optional, AsyncGenerator import re from langchain_core.messages import HumanMessage @@ -10,6 +10,7 @@ from pydantic import 
BaseModel, Field, field_validator from valuecell.core.agent.decorator import create_wrapped_agent from valuecell.core.types import BaseAgent +from valuecell.core import StreamResponse, streaming from tradingagents.graph.trading_graph import TradingAgentsGraph from tradingagents.default_config import DEFAULT_CONFIG @@ -29,6 +30,48 @@ "AMZN": "Amazon.com Inc.", "META": "Meta Platforms Inc.", "NFLX": "Netflix Inc.", + "BABA": "Alibaba Group Holding Limited", + "BIDU": "Baidu Inc.", + "JD": "JD.com Inc.", + "PDD": "Pinduoduo Inc.", + "WB": "Weibo Corporation", + "TME": "Tencent Music Entertainment Group", + "NTES": "NetEase Inc.", + "BILI": "Bilibili Inc.", + "YFIN": "Yahoo Finance", + "TCEHY": "Tencent Holdings Limited", + "TCOM": "Trip.com Group Limited", + "ALIB": "Alibaba Group Holding Limited", + "WUBA": "58.com Inc.", + "XOM": "Exxon Mobil Corporation", + "CVX": "Chevron Corporation", + "GE": "General Electric Company", + "BA": "Boeing Company", + "CAT": "Caterpillar Inc.", + "CSCO": "Cisco Systems Inc.", + "DD": "DuPont de Nemours Inc.", + "HON": "Honeywell International Inc.", + "IBM": "International Business Machines Corporation", + "JNJ": "Johnson & Johnson", + "JPM": "JPMorgan Chase & Co.", + "KO": "Coca-Cola Company", + "MCD": "McDonald's Corporation", + "MMM": "3M Company", + "NKE": "Nike Inc.", + "PFE": "Pfizer Inc.", + "PG": "Procter & Gamble Company", + "RTX": "Raytheon Technologies Corporation", + "SBUX": "Starbucks Corporation", + "UNH": "UnitedHealth Group Inc.", + "VZ": "Verizon Communications Inc.", + "WMT": "Walmart Inc.", + "WBA": "Walgreens Boots Alliance Inc.", + # NOTE(review): duplicate "XOM" entry dropped (defined earlier in this dict) + # NOTE(review): duplicate "AMZN" entry dropped (already present in the base dict above) + "BAC": "Bank of America Corporation", + "HOOD": "Robinhood Markets Inc.", + # NOTE(review): duplicate "META" entry dropped (already present in the base dict above) + "DJI": "Dow Jones Industrial Average", "SPY": "SPDR S&P 500 ETF" } @@ -144,7 +187,7 @@ class TradingAgentsAdapter(BaseAgent): def __init__(self): super().__init__() # Initialize LLM for query parsing - self.parsing_llm = 
ChatOpenAI(model="gpt-4o-mini", temperature=0) + self.parsing_llm = ChatOpenAI(model="gpt-4o", temperature=0) # Keep track of current trading graph instance self._current_graph: Optional[TradingAgentsGraph] = None @@ -234,7 +277,9 @@ def determine_next_step(state: DialogueState): return graph.compile() - async def stream(self, query: str, session_id: str, task_id: str): + async def stream( + self, query: str, session_id: str, task_id: str + ) -> AsyncGenerator[StreamResponse, None]: """Process trading analysis request and stream results""" logger.info(f"Processing trading query: {query}. Task ID: {task_id}, Session ID: {session_id}") @@ -254,21 +299,18 @@ async def stream(self, query: str, session_id: str, task_id: str): # Handle different outcomes if final_state.get("is_help_request"): help_content = self._generate_help_content() - yield { - "content": help_content, - "is_task_complete": True, - } + yield streaming.message_chunk(help_content) + yield streaming.done() return if final_state.get("current_step") == "error" or not final_state.get("parsed_request"): - yield { - "content": f"❌ Unable to parse query: {query}\n\nPlease try similar format:\n" - f"- 'Analyze AAPL stock'\n" - f"- 'Use all analysts to analyze NVDA'\n" - f"- 'Use GPT-4 to analyze TSLA, date 2024-01-15'\n" - f"- 'What are the available stock codes?'\n", - "is_task_complete": True, - } + error_content = (f"❌ Unable to parse query: {query}\n\nPlease try similar format:\n" + f"- 'Analyze AAPL stock'\n" + f"- 'Use all analysts to analyze NVDA'\n" + f"- 'Use GPT-4 to analyze TSLA, date 2024-01-15'\n" + f"- 'What are the available stock codes?'\n") + yield streaming.message_chunk(error_content) + yield streaming.done() return trading_request = final_state["parsed_request"] @@ -281,23 +323,18 @@ async def stream(self, query: str, session_id: str, task_id: str): config = self._create_config(trading_request) # Yield configuration info - yield { - "content": f"πŸ”§ **Configuration information**\n" - f"- 
Stock code: {trading_request.ticker}\n" - f"- Analysis date: {trading_request.trade_date}\n" - f"- Selected analysts: {', '.join(trading_request.selected_analysts)}\n" - f"- LLM provider: {config['llm_provider']}\n" - f"- Deep thinking model: {config['deep_think_llm']}\n" - f"- Quick thinking model: {config['quick_think_llm']}\n" - f"- Debug mode: {'Yes' if trading_request.debug else 'No'}\n\n", - "is_task_complete": False, - } + config_content = (f"πŸ”§ **Configuration information**\n" + f"- Stock code: {trading_request.ticker}\n" + f"- Analysis date: {trading_request.trade_date}\n" + f"- Selected analysts: {', '.join(trading_request.selected_analysts)}\n" + f"- LLM provider: {config['llm_provider']}\n" + f"- Deep thinking model: {config['deep_think_llm']}\n" + f"- Quick thinking model: {config['quick_think_llm']}\n" + f"- Debug mode: {'Yes' if trading_request.debug else 'No'}\n\n") + yield streaming.message_chunk(config_content) # Create TradingAgentsGraph instance - yield { - "content": "πŸš€ **Starting to initialize trading analysis system...**\n", - "is_task_complete": False, - } + yield streaming.message_chunk("πŸš€ **Starting to initialize trading analysis system...**\n") self._current_graph = TradingAgentsGraph( selected_analysts=trading_request.selected_analysts, @@ -305,10 +342,7 @@ async def stream(self, query: str, session_id: str, task_id: str): config=config ) - yield { - "content": "βœ… **System initialized, starting analysis...**\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk("βœ… **System initialized, starting analysis...**\n\n") # Run the analysis final_state, processed_decision = self._current_graph.propagate( @@ -324,11 +358,10 @@ async def stream(self, query: str, session_id: str, task_id: str): except Exception as e: logger.error(f"Error in trading analysis: {e}", exc_info=True) - yield { - "content": f"❌ **Error in analysis process**: {str(e)}\n\n" - f"Please check parameters and try again. 
If you need help, please enter 'help' or 'help'.", - "is_task_complete": True, - } + error_content = (f"❌ **Error in analysis process**: {str(e)}\n\n" + f"Please check parameters and try again. If you need help, please enter 'help'.") + yield streaming.message_chunk(error_content) + yield streaming.done() def _rule_based_parse(self, query: str) -> dict: """Rule-based query parsing to extract trading parameters""" @@ -357,7 +390,7 @@ def _rule_based_parse(self, query: str) -> dict: # Extract analysts selected_analysts = [] - if "ζ‰€ζœ‰εˆ†ζžεΈˆ" in query or "ε…¨ιƒ¨εˆ†ζžεΈˆ" in query: + if "All Analysts" in query or "ζ‰€ζœ‰εˆ†ζžεΈˆ" in query or "ε…¨ιƒ¨εˆ†ζžεΈˆ" in query: selected_analysts = AVAILABLE_ANALYSTS else: for analyst in AVAILABLE_ANALYSTS: @@ -459,82 +492,51 @@ def _stream_analysis_results(self, request: TradingRequest, final_state: Dict, p # Market Analysis if final_state.get("market_report"): - yield { - "content": f"πŸ“ˆ **Market analysis report**\n{final_state['market_report']}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"πŸ“ˆ **Market analysis report**\n{final_state['market_report']}\n\n") # Sentiment Analysis if final_state.get("sentiment_report"): - yield { - "content": f"😊 **Sentiment analysis report**\n{final_state['sentiment_report']}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"😊 **Sentiment analysis report**\n{final_state['sentiment_report']}\n\n") # News Analysis if final_state.get("news_report"): - yield { - "content": f"πŸ“° **News analysis report**\n{final_state['news_report']}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"πŸ“° **News analysis report**\n{final_state['news_report']}\n\n") # Fundamentals Analysis if final_state.get("fundamentals_report"): - yield { - "content": f"πŸ“Š **Fundamentals analysis report**\n{final_state['fundamentals_report']}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"πŸ“Š **Fundamentals analysis 
report**\n{final_state['fundamentals_report']}\n\n") # Investment Debate Results if final_state.get("investment_debate_state", {}).get("judge_decision"): - yield { - "content": f"βš–οΈ **Investment debate results**\n{final_state['investment_debate_state']['judge_decision']}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"βš–οΈ **Investment debate results**\n{final_state['investment_debate_state']['judge_decision']}\n\n") # Trader Decision if final_state.get("trader_investment_plan"): - yield { - "content": f"πŸ’Ό **Trader investment plan**\n{final_state['trader_investment_plan']}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"πŸ’Ό **Trader investment plan**\n{final_state['trader_investment_plan']}\n\n") # Risk Management if final_state.get("risk_debate_state", {}).get("judge_decision"): - yield { - "content": f"⚠️ **Risk management assessment**\n{final_state['risk_debate_state']['judge_decision']}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"⚠️ **Risk management assessment**\n{final_state['risk_debate_state']['judge_decision']}\n\n") # Final Investment Plan if final_state.get("investment_plan"): - yield { - "content": f"πŸ“‹ **Final investment plan**\n{final_state['investment_plan']}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"πŸ“‹ **Final investment plan**\n{final_state['investment_plan']}\n\n") # Final Decision if final_state.get("final_trade_decision"): - yield { - "content": f"🎯 **Final trade decision**\n{final_state['final_trade_decision']}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"🎯 **Final trade decision**\n{final_state['final_trade_decision']}\n\n") # Processed Signal if processed_decision: - yield { - "content": f"🚦 **Processed trade signal**\n{processed_decision}\n\n", - "is_task_complete": False, - } + yield streaming.message_chunk(f"🚦 **Processed trade signal**\n{processed_decision}\n\n") # Summary - yield { - 
"content": f"βœ… **Analysis completed**\n\n" - f"Stock {request.ticker} on {request.trade_date} analysis completed.\n" - f"Used analysts: {', '.join(request.selected_analysts)}\n\n" - f"If you need to re-analyze or analyze other stocks, please send a new query.", - "is_task_complete": True, - } + summary_content = (f"βœ… **Analysis completed**\n\n" + f"Stock {request.ticker} on {request.trade_date} analysis completed.\n" + f"Used analysts: {', '.join(request.selected_analysts)}\n\n" + f"If you need to re-analyze or analyze other stocks, please send a new query.") + yield streaming.message_chunk(summary_content) + yield streaming.done() if __name__ == "__main__": diff --git a/python/third_party/TradingAgents/requirements.txt b/python/third_party/TradingAgents/requirements.txt index d8de9fe86..94b4b8349 100644 --- a/python/third_party/TradingAgents/requirements.txt +++ b/python/third_party/TradingAgents/requirements.txt @@ -12,8 +12,6 @@ chromadb setuptools backtrader akshare -tushare -finnhub-python parsel requests tqdm diff --git a/python/uv.lock b/python/uv.lock index c99006daa..95434f458 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.12" resolution-markers = [ "python_full_version >= '3.13'", @@ -234,18 +234,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/eb/f4151e0c7377a6e08a38108609ba5cede57986802757848688aeedd1b9e8/beautifulsoup4-4.13.5-py3-none-any.whl", hash = "sha256:642085eaa22233aceadff9c69651bc51e8bf3f874fb6d7104ece2beb24b47c4a", size = 105113, upload-time = "2025-08-24T14:06:14.884Z" }, ] -[[package]] -name = "bs4" -version = "0.0.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "beautifulsoup4" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/aa/4acaf814ff901145da37332e05bb510452ebed97bc9602695059dd46ef39/bs4-0.0.2.tar.gz", hash = 
"sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925", size = 698, upload-time = "2024-01-17T18:15:47.371Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/bb/bf7aab772a159614954d84aa832c129624ba6c32faa559dfb200a534e50b/bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc", size = 1189, upload-time = "2024-01-17T18:15:48.613Z" }, -] - [[package]] name = "cachetools" version = "5.5.2" @@ -1833,41 +1821,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] -[[package]] -name = "simplejson" -version = "3.20.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/92/51b417685abd96b31308b61b9acce7ec50d8e1de8fbc39a7fd4962c60689/simplejson-3.20.1.tar.gz", hash = "sha256:e64139b4ec4f1f24c142ff7dcafe55a22b811a74d86d66560c8815687143037d", size = 85591, upload-time = "2025-02-15T05:18:53.15Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/eb/34c16a1ac9ba265d024dc977ad84e1659d931c0a700967c3e59a98ed7514/simplejson-3.20.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f31c4a3a7ab18467ee73a27f3e59158255d1520f3aad74315edde7a940f1be23", size = 93100, upload-time = "2025-02-15T05:16:38.801Z" }, - { url = "https://files.pythonhosted.org/packages/41/fc/2c2c007d135894971e6814e7c0806936e5bade28f8db4dd7e2a58b50debd/simplejson-3.20.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:884e6183d16b725e113b83a6fc0230152ab6627d4d36cb05c89c2c5bccfa7bc6", size = 75464, upload-time = "2025-02-15T05:16:40.905Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/05/2b5ecb33b776c34bb5cace5de5d7669f9b60e3ca13c113037b2ca86edfbd/simplejson-3.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03d7a426e416fe0d3337115f04164cd9427eb4256e843a6b8751cacf70abc832", size = 75112, upload-time = "2025-02-15T05:16:42.246Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/1f3609a2792f06cd4b71030485f78e91eb09cfd57bebf3116bf2980a8bac/simplejson-3.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:000602141d0bddfcff60ea6a6e97d5e10c9db6b17fd2d6c66199fa481b6214bb", size = 150182, upload-time = "2025-02-15T05:16:43.557Z" }, - { url = "https://files.pythonhosted.org/packages/2f/b0/053fbda38b8b602a77a4f7829def1b4f316cd8deb5440a6d3ee90790d2a4/simplejson-3.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:af8377a8af78226e82e3a4349efdde59ffa421ae88be67e18cef915e4023a595", size = 158363, upload-time = "2025-02-15T05:16:45.748Z" }, - { url = "https://files.pythonhosted.org/packages/d1/4b/2eb84ae867539a80822e92f9be4a7200dffba609275faf99b24141839110/simplejson-3.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15c7de4c88ab2fbcb8781a3b982ef883696736134e20b1210bca43fb42ff1acf", size = 148415, upload-time = "2025-02-15T05:16:47.861Z" }, - { url = "https://files.pythonhosted.org/packages/e0/bd/400b0bd372a5666addf2540c7358bfc3841b9ce5cdbc5cc4ad2f61627ad8/simplejson-3.20.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:455a882ff3f97d810709f7b620007d4e0aca8da71d06fc5c18ba11daf1c4df49", size = 152213, upload-time = "2025-02-15T05:16:49.25Z" }, - { url = "https://files.pythonhosted.org/packages/50/12/143f447bf6a827ee9472693768dc1a5eb96154f8feb140a88ce6973a3cfa/simplejson-3.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fc0f523ce923e7f38eb67804bc80e0a028c76d7868500aa3f59225574b5d0453", size = 150048, 
upload-time = "2025-02-15T05:16:51.5Z" }, - { url = "https://files.pythonhosted.org/packages/5e/ea/dd9b3e8e8ed710a66f24a22c16a907c9b539b6f5f45fd8586bd5c231444e/simplejson-3.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76461ec929282dde4a08061071a47281ad939d0202dc4e63cdd135844e162fbc", size = 151668, upload-time = "2025-02-15T05:16:53Z" }, - { url = "https://files.pythonhosted.org/packages/99/af/ee52a8045426a0c5b89d755a5a70cc821815ef3c333b56fbcad33c4435c0/simplejson-3.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19c2da8c043607bde4d4ef3a6b633e668a7d2e3d56f40a476a74c5ea71949f", size = 158840, upload-time = "2025-02-15T05:16:54.851Z" }, - { url = "https://files.pythonhosted.org/packages/68/db/ab32869acea6b5de7d75fa0dac07a112ded795d41eaa7e66c7813b17be95/simplejson-3.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2578bedaedf6294415197b267d4ef678fea336dd78ee2a6d2f4b028e9d07be3", size = 154212, upload-time = "2025-02-15T05:16:56.318Z" }, - { url = "https://files.pythonhosted.org/packages/fa/7a/e3132d454977d75a3bf9a6d541d730f76462ebf42a96fea2621498166f41/simplejson-3.20.1-cp312-cp312-win32.whl", hash = "sha256:339f407373325a36b7fd744b688ba5bae0666b5d340ec6d98aebc3014bf3d8ea", size = 74101, upload-time = "2025-02-15T05:16:57.746Z" }, - { url = "https://files.pythonhosted.org/packages/bc/5d/4e243e937fa3560107c69f6f7c2eed8589163f5ed14324e864871daa2dd9/simplejson-3.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:627d4486a1ea7edf1f66bb044ace1ce6b4c1698acd1b05353c97ba4864ea2e17", size = 75736, upload-time = "2025-02-15T05:16:59.017Z" }, - { url = "https://files.pythonhosted.org/packages/c4/03/0f453a27877cb5a5fff16a975925f4119102cc8552f52536b9a98ef0431e/simplejson-3.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:71e849e7ceb2178344998cbe5ade101f1b329460243c79c27fbfc51c0447a7c3", size = 93109, upload-time = "2025-02-15T05:17:00.377Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/1f/a729f4026850cabeaff23e134646c3f455e86925d2533463420635ae54de/simplejson-3.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b63fdbab29dc3868d6f009a59797cefaba315fd43cd32ddd998ee1da28e50e29", size = 75475, upload-time = "2025-02-15T05:17:02.544Z" }, - { url = "https://files.pythonhosted.org/packages/e2/14/50a2713fee8ff1f8d655b1a14f4a0f1c0c7246768a1b3b3d12964a4ed5aa/simplejson-3.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1190f9a3ce644fd50ec277ac4a98c0517f532cfebdcc4bd975c0979a9f05e1fb", size = 75112, upload-time = "2025-02-15T05:17:03.875Z" }, - { url = "https://files.pythonhosted.org/packages/45/86/ea9835abb646755140e2d482edc9bc1e91997ed19a59fd77ae4c6a0facea/simplejson-3.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1336ba7bcb722ad487cd265701ff0583c0bb6de638364ca947bb84ecc0015d1", size = 150245, upload-time = "2025-02-15T05:17:06.899Z" }, - { url = "https://files.pythonhosted.org/packages/12/b4/53084809faede45da829fe571c65fbda8479d2a5b9c633f46b74124d56f5/simplejson-3.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e975aac6a5acd8b510eba58d5591e10a03e3d16c1cf8a8624ca177491f7230f0", size = 158465, upload-time = "2025-02-15T05:17:08.707Z" }, - { url = "https://files.pythonhosted.org/packages/a9/7d/d56579468d1660b3841e1f21c14490d103e33cf911886b22652d6e9683ec/simplejson-3.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a6dd11ee282937ad749da6f3b8d87952ad585b26e5edfa10da3ae2536c73078", size = 148514, upload-time = "2025-02-15T05:17:11.323Z" }, - { url = "https://files.pythonhosted.org/packages/19/e3/874b1cca3d3897b486d3afdccc475eb3a09815bf1015b01cf7fcb52a55f0/simplejson-3.20.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab980fcc446ab87ea0879edad41a5c28f2d86020014eb035cf5161e8de4474c6", size = 152262, 
upload-time = "2025-02-15T05:17:13.543Z" }, - { url = "https://files.pythonhosted.org/packages/32/84/f0fdb3625292d945c2bd13a814584603aebdb38cfbe5fe9be6b46fe598c4/simplejson-3.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f5aee2a4cb6b146bd17333ac623610f069f34e8f31d2f4f0c1a2186e50c594f0", size = 150164, upload-time = "2025-02-15T05:17:15.021Z" }, - { url = "https://files.pythonhosted.org/packages/95/51/6d625247224f01eaaeabace9aec75ac5603a42f8ebcce02c486fbda8b428/simplejson-3.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:652d8eecbb9a3b6461b21ec7cf11fd0acbab144e45e600c817ecf18e4580b99e", size = 151795, upload-time = "2025-02-15T05:17:16.542Z" }, - { url = "https://files.pythonhosted.org/packages/7f/d9/bb921df6b35be8412f519e58e86d1060fddf3ad401b783e4862e0a74c4c1/simplejson-3.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8c09948f1a486a89251ee3a67c9f8c969b379f6ffff1a6064b41fea3bce0a112", size = 159027, upload-time = "2025-02-15T05:17:18.083Z" }, - { url = "https://files.pythonhosted.org/packages/03/c5/5950605e4ad023a6621cf4c931b29fd3d2a9c1f36be937230bfc83d7271d/simplejson-3.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cbbd7b215ad4fc6f058b5dd4c26ee5c59f72e031dfda3ac183d7968a99e4ca3a", size = 154380, upload-time = "2025-02-15T05:17:20.334Z" }, - { url = "https://files.pythonhosted.org/packages/66/ad/b74149557c5ec1e4e4d55758bda426f5d2ec0123cd01a53ae63b8de51fa3/simplejson-3.20.1-cp313-cp313-win32.whl", hash = "sha256:ae81e482476eaa088ef9d0120ae5345de924f23962c0c1e20abbdff597631f87", size = 74102, upload-time = "2025-02-15T05:17:22.475Z" }, - { url = "https://files.pythonhosted.org/packages/db/a9/25282fdd24493e1022f30b7f5cdf804255c007218b2bfaa655bd7ad34b2d/simplejson-3.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:1b9fd15853b90aec3b1739f4471efbf1ac05066a2c7041bf8db821bb73cd2ddc", size = 75736, upload-time = "2025-02-15T05:17:24.122Z" }, - { url = 
"https://files.pythonhosted.org/packages/4b/30/00f02a0a921556dd5a6db1ef2926a1bc7a8bbbfb1c49cfed68a275b8ab2b/simplejson-3.20.1-py3-none-any.whl", hash = "sha256:8a6c1bbac39fa4a79f83cbf1df6ccd8ff7069582a9fd8db1e52cea073bc2c697", size = 57121, upload-time = "2025-02-15T05:18:51.243Z" }, -] - [[package]] name = "six" version = "1.17.0" @@ -2038,24 +1991,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, ] -[[package]] -name = "tushare" -version = "1.4.24" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "bs4" }, - { name = "lxml" }, - { name = "pandas" }, - { name = "requests" }, - { name = "simplejson" }, - { name = "tqdm" }, - { name = "websocket-client" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/89/09/2141aaccb90a8249edb42d6b31330606d8cf9345237773775a3aa4c71986/tushare-1.4.24.tar.gz", hash = "sha256:786acbf6ee7dfb0b152bdd570b673f74e58b86a0d9908a221c6bdc4254a4e0ea", size = 128539, upload-time = "2025-08-25T02:02:05.451Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/80/75/63810958023595b460f2a5ef6baf5a60ffd8166e5fc06a3c2f22e9ca7b34/tushare-1.4.24-py3-none-any.whl", hash = "sha256:778e3128262747cb0cdadac2e5a5e6cd1a520c239b4ffbde2776652424451b08", size = 143587, upload-time = "2025-08-25T02:02:03.554Z" }, -] - [[package]] name = "typer" version = "0.17.4" @@ -2148,7 +2083,6 @@ dependencies = [ { name = "pytz" }, { name = "requests" }, { name = "sqlalchemy" }, - { name = "tushare" }, { name = "uvicorn" }, { name = "yfinance" }, ] @@ -2203,7 +2137,6 @@ requires-dist = [ { name = "requests", specifier = ">=2.32.5" }, { name = "ruff", marker = "extra == 'dev'" }, { name = "sqlalchemy", specifier = ">=2.0.43" }, - { name = "tushare", specifier = ">=1.4.24" 
}, { name = "uvicorn", specifier = ">=0.24.0" }, { name = "yfinance", specifier = ">=0.2.65" }, ] @@ -2239,15 +2172,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, ] -[[package]] -name = "websocket-client" -version = "1.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, -] - [[package]] name = "websockets" version = "15.0.1" diff --git a/python/valuecell/adapters/assets/README.md b/python/valuecell/adapters/assets/README.md deleted file mode 100644 index 4c92e7bdd..000000000 --- a/python/valuecell/adapters/assets/README.md +++ /dev/null @@ -1,426 +0,0 @@ -# ValueCell Asset Data Adapter System - -A comprehensive financial asset data management system that supports multiple data sources, internationalization, and user watchlist management. 
- -## Features - -### 🌐 Multi-Source Data Adapters -- **Yahoo Finance**: Free stock market data for global markets -- **TuShare**: Professional Chinese stock market data (requires API key) -- **AKShare**: Free Chinese financial data library (no API key required) -- **Finnhub**: Professional global stock market data (requires API key) -- **CoinMarketCap**: Cryptocurrency market data (requires API key) -- **Extensible**: Easy to add new data sources - -### πŸ“Š Asset Types Support -- Stocks (US, Chinese, Hong Kong, etc.) -- Cryptocurrencies -- ETFs, Mutual Funds -- Bonds, Commodities -- Forex, Indices -- Options, Futures - -### πŸ”„ Standardized Ticker Format -All assets use the format `[EXCHANGE]:[SYMBOL]`: -- `NASDAQ:AAPL` - Apple Inc. -- `SSE:600519` - Kweichow Moutai -- `CRYPTO:BTC` - Bitcoin -- `HKEX:00700` - Tencent Holdings - -### 🌍 Internationalization (i18n) -- Multi-language asset names -- Localized UI text and messages -- Currency and number formatting -- Support for Chinese, English, and more - -### πŸ“ User Watchlist Management -- Create multiple watchlists per user -- Add/remove assets with personal notes -- Real-time price updates -- Persistent storage ready - -## Quick Start - -### 1. Installation - -```bash -# Install required dependencies - -pip install yfinance tushare requests pydantic -``` - -### 2. 
Basic Usage - -```python -from valuecell.adapters.assets import get_adapter_manager -from valuecell.services.assets import ( - search_assets, add_to_watchlist, get_watchlist -) - -# Configure data adapters -manager = get_adapter_manager() -manager.configure_yfinance() # Free, no API key needed - -# Search for assets (now via service layer) -results = search_assets("AAPL", language="zh-Hans") -print(f"Found {results['count']} assets") - -# Add to watchlist (now via service layer) -add_to_watchlist( - user_id="user123", - ticker="NASDAQ:AAPL", - notes="θ‹Ήζžœε…¬εΈθ‚‘η₯¨" -) - -# Get watchlist with prices (now via service layer) -watchlist = get_watchlist(user_id="user123", include_prices=True) -``` - -### 3. Configure Data Sources - -```python -# Yahoo Finance (Free) -manager.configure_yfinance() - -# AKShare (Free Chinese markets) -manager.configure_akshare() - -# TuShare (Chinese markets, requires API key) -manager.configure_tushare(api_key="your_tushare_token") - -# Finnhub (Global markets, requires API key) -manager.configure_finnhub(api_key="your_finnhub_token") - -# CoinMarketCap (Crypto, requires API key) -manager.configure_coinmarketcap(api_key="your_cmc_api_key") -``` - -## API Reference - -### Asset Search - -```python -from valuecell.services.assets import search_assets - -# Basic search -results = search_assets("Apple") - -# Advanced search with filters -results = search_assets( - query="tech", - asset_types=["stock", "etf"], - exchanges=["NASDAQ", "NYSE"], - countries=["US"], - limit=20, - language="zh-Hans" -) -``` - -### Asset Information - -```python -from valuecell.services.assets import get_asset_info, get_asset_price - -# Get detailed asset information -info = get_asset_info("NASDAQ:AAPL", language="zh-Hans") -print(info["display_name"]) # "θ‹Ήζžœε…¬εΈ" - -# Get current price -price = get_asset_price("NASDAQ:AAPL", language="zh-Hans") -print(price["price_formatted"]) # "Β₯150.25" -print(price["change_percent_formatted"]) # "+2.5%" -``` - -### 
Watchlist Management - -```python -from valuecell.services.assets import get_asset_service - -service = get_asset_service() - -# Create watchlist -service.create_watchlist( - user_id="user123", - name="My Tech Stocks", - description="Technology companies" -) - -# Add assets -service.add_to_watchlist("user123", "NASDAQ:AAPL", notes="iPhone maker") -service.add_to_watchlist("user123", "NASDAQ:GOOGL", notes="Search engine") - -# Get watchlist with prices -watchlist = service.get_watchlist("user123", include_prices=True) -``` - -## Data Source Configuration - -### Yahoo Finance -- **Cost**: Free -- **Coverage**: Global stocks, ETFs, indices, crypto -- **Rate Limits**: Reasonable for personal use -- **Setup**: No API key required - -```python -manager.configure_yfinance() -``` - -### TuShare -- **Cost**: Free tier available, paid plans for more data -- **Coverage**: Chinese stocks (A-shares), indices, financials -- **Rate Limits**: Based on subscription plan -- **Setup**: Register at [tushare.pro](https://tushare.pro) - -```python -manager.configure_tushare(api_key="your_token_here") -``` - -### AKShare -- **Cost**: Free -- **Coverage**: Chinese stocks, funds, bonds, economic data -- **Rate Limits**: Reasonable for personal use -- **Setup**: No registration required - -```python -manager.configure_akshare() -``` - -### Finnhub -- **Cost**: Free tier (60 calls/minute), paid plans available -- **Coverage**: Global stocks, forex, crypto, company data -- **Rate Limits**: Based on plan (free: 60 calls/minute) -- **Setup**: Register at [finnhub.io](https://finnhub.io) - -```python -manager.configure_finnhub(api_key="your_api_key_here") -``` - -### CoinMarketCap -- **Cost**: Free tier (10,000 calls/month), paid plans available -- **Coverage**: 9,000+ cryptocurrencies -- **Rate Limits**: Based on plan (free: 333 calls/day) -- **Setup**: Register at [coinmarketcap.com](https://coinmarketcap.com/api/) - -```python -manager.configure_coinmarketcap(api_key="your_api_key_here") -``` 
- -## Internationalization - -### Supported Languages -- English US (`en-US`) -- English UK (`en-GB`) -- Simplified Chinese (`zh-Hans`) -- Traditional Chinese (`zh-Hant`) -- Easy to add more languages - -### Asset Name Translation -The system includes built-in translations for popular assets: - -```python -# Apple Inc. in different languages -"NASDAQ:AAPL": { - "en-US": "Apple Inc.", - "zh-Hans": "θ‹Ήζžœε…¬εΈ", - "zh-Hant": "θ˜‹ζžœε…¬εΈ" -} -``` - -### Custom Translations -Add your own asset translations: - -```python -from valuecell.adapters.assets import get_asset_i18n_service - -i18n_service = get_asset_i18n_service() -i18n_service.add_asset_translation( - ticker="NASDAQ:TSLA", - language="zh-Hans", - name="特斯拉" -) -``` - -## Architecture - -### Core Components - -1. **Types** (`types.py`): Data structures and models -2. **Base Adapter** (`base.py`): Abstract interface for data sources -3. **Specific Adapters**: Implementation for each data source -4. **Manager** (`manager.py`): Coordinates multiple adapters -5. **I18n Integration** (`i18n_integration.py`): Localization support -6. 
**Service Layer** (`valuecell.services.assets`): High-level business logic interface - -### Data Flow - -``` -User Request β†’ Service Layer β†’ Manager β†’ Adapter β†’ Data Source - ↓ - I18n Service β†’ Localized Response -``` - -### Ticker Conversion - -Internal format: `EXCHANGE:SYMBOL` -- `NASDAQ:AAPL` β†’ `AAPL` (Yahoo Finance) -- `SSE:600519` β†’ `600519.SH` (TuShare) -- `CRYPTO:BTC` β†’ `BTC` (CoinMarketCap) - -## Error Handling - -The system provides comprehensive error handling: - -```python -# All API functions return structured responses -result = search_assets("invalid_query") - -if result["success"]: - # Process results - assets = result["results"] -else: - # Handle error - error_message = result["error"] - print(f"Search failed: {error_message}") -``` - -### Common Error Types -- `AdapterError`: General adapter issues -- `RateLimitError`: API rate limit exceeded -- `AuthenticationError`: Invalid API credentials -- `DataNotAvailableError`: Requested data not found -- `InvalidTickerError`: Malformed ticker format - -## Performance Considerations - -### Batch Operations -Use batch operations for better performance: - -```python -# Get multiple prices at once (more efficient) -prices = api.get_multiple_prices(["NASDAQ:AAPL", "NASDAQ:GOOGL", "NASDAQ:MSFT"]) - -# Instead of individual calls -# price1 = get_asset_price("NASDAQ:AAPL") # Slower -# price2 = get_asset_price("NASDAQ:GOOGL") # Slower -``` - -### Caching -- Asset information is cached automatically -- Price data is real-time (not cached) -- Translation cache improves i18n performance - -### Rate Limiting -- Built-in rate limiting for each data source -- Automatic retry with exponential backoff -- Respects API provider limits - -## Testing - -Run the example to test your setup: - -```python -python -m valuecell.examples.asset_adapter_example -``` - -### Health Check -Monitor adapter status: - -```python -from valuecell.services.assets import get_asset_service - -service = get_asset_service() -health 
= service.get_system_health() -print(f"System status: {health['overall_status']}") -``` - -## Extending the System - -### Adding New Data Sources - -1. Create a new adapter class inheriting from `BaseDataAdapter` -2. Implement required methods (`search_assets`, `get_asset_info`, etc.) -3. Add ticker conversion logic -4. Register with the manager - -```python -class MyDataAdapter(BaseDataAdapter): - def search_assets(self, query): - # Implementation - pass - - def get_asset_info(self, ticker): - # Implementation - pass - - # ... other methods - -# Register the adapter -manager.register_adapter(MyDataAdapter()) -``` - -### Adding New Asset Types - -1. Add to `AssetType` enum in `types.py` -2. Update adapter priority mapping -3. Add i18n translations - -## Best Practices - -### API Keys Security -- Store API keys in environment variables -- Never commit API keys to version control -- Use different keys for development/production - -### Error Handling -- Always check the `success` field in responses -- Implement proper retry logic for transient failures -- Log errors for debugging - -### Performance -- Use batch operations when possible -- Implement client-side caching for static data -- Monitor API usage to avoid rate limits - -### Internationalization -- Always specify language parameter for consistent results -- Provide fallback translations -- Test with different locales - -## Troubleshooting - -### Common Issues - -**"No suitable adapter found for ticker"** -- Check ticker format: `EXCHANGE:SYMBOL` -- Verify the exchange is supported by configured adapters -- Ensure at least one adapter is configured - -**"Rate limit exceeded"** -- Wait for the specified retry period -- Consider upgrading to paid API plan -- Implement request batching - -**"Authentication failed"** -- Verify API key is correct and active -- Check API key permissions/subscription status -- Ensure API key is properly configured - -### Debug Mode -Enable detailed logging: - -```python -import logging 
-logging.basicConfig(level=logging.DEBUG) -``` - -## Contributing - -1. Fork the repository -2. Create a feature branch -3. Add tests for new functionality -4. Update documentation -5. Submit a pull request - -## License - -This project is part of the ValueCell platform and follows the project's licensing terms. diff --git a/python/valuecell/adapters/assets/__init__.py b/python/valuecell/adapters/assets/__init__.py index 01c641e4c..5403ffdf3 100644 --- a/python/valuecell/adapters/assets/__init__.py +++ b/python/valuecell/adapters/assets/__init__.py @@ -21,11 +21,6 @@ get_asset_service, search_assets, add_to_watchlist ) - # Configure data adapters - manager = get_adapter_manager() - manager.configure_yfinance() - manager.configure_tushare(api_key="your_tushare_key") - # Search for assets (now via service layer) results = search_assets("AAPL", language="zh-Hans") @@ -46,8 +41,6 @@ RateLimitError, TickerConverter, ) -from .coinmarketcap_adapter import CoinMarketCapAdapter -from .finnhub_adapter import FinnhubAdapter # Internationalization support from .i18n_integration import ( @@ -64,7 +57,6 @@ get_watchlist_manager, reset_managers, ) -from .tushare_adapter import TuShareAdapter # Core types and data structures from .types import ( @@ -112,10 +104,7 @@ "InvalidTickerError", # Adapters "YFinanceAdapter", - "TuShareAdapter", - "CoinMarketCapAdapter", "AKShareAdapter", - "FinnhubAdapter", # Managers "AdapterManager", "WatchlistManager", diff --git a/python/valuecell/adapters/assets/akshare_adapter.py b/python/valuecell/adapters/assets/akshare_adapter.py index 09c7a11e3..10f4d545a 100644 --- a/python/valuecell/adapters/assets/akshare_adapter.py +++ b/python/valuecell/adapters/assets/akshare_adapter.py @@ -27,6 +27,7 @@ AssetSearchResult, AssetType, DataSource, + Interval, LocalizedName, MarketInfo, MarketStatus, @@ -1297,20 +1298,87 @@ def _get_a_share_historical( end_date: datetime, interval: str, ) -> List[AssetPrice]: - """Get A-share historical price data using 
direct query.""" + """Get A-share historical price data using direct query. + + Args: + ticker: Asset ticker in internal format + start_date: Start date for historical data, format: YYYY-MM-DD, timezone: UTC + end_date: End date for historical data, format: YYYY-MM-DD, timezone: UTC + interval: Data interval (e.g., "1d", "1h", "5m") + + Returns: + List of historical price data + """ try: - # Format dates for AKShare - start_date_str = start_date.strftime("%Y%m%d") - end_date_str = end_date.strftime("%Y%m%d") + # Map interval to AKShare format and determine if intraday data is needed + akshare_params = self._map_interval_to_akshare_params(interval) + if not akshare_params: + logger.warning(f"Unsupported interval: {interval}") + return [] - # Map interval to AKShare format - if interval in ["1d", "daily"]: - period = "daily" + is_intraday = akshare_params["is_intraday"] + period_or_minutes = akshare_params["period"] + + if is_intraday: + return self._get_a_share_intraday_historical( + ticker, exchange, symbol, start_date, end_date, period_or_minutes + ) else: - logger.warning( - f"AKShare primarily supports daily data. Requested interval: {interval}" + return self._get_a_share_daily_historical( + ticker, exchange, symbol, start_date, end_date, period_or_minutes ) - period = "daily" + + except Exception as e: + logger.error(f"Error fetching A-share historical data for {symbol}: {e}") + return [] + + def _map_interval_to_akshare_params(self, interval: str) -> Optional[dict]: + """Map interval to AKShare parameters, similar to yfinance mapping. + + Returns dict with 'is_intraday' and 'period' keys, or None if unsupported. 
+ """ + # Create interval mapping similar to yfinance adapter + interval_mapping = { + # Minute intervals (intraday data) + f"1{Interval.MINUTE}": {"is_intraday": True, "period": "1"}, + f"5{Interval.MINUTE}": {"is_intraday": True, "period": "5"}, + f"15{Interval.MINUTE}": {"is_intraday": True, "period": "15"}, + f"30{Interval.MINUTE}": {"is_intraday": True, "period": "30"}, + f"60{Interval.MINUTE}": {"is_intraday": True, "period": "60"}, + # Daily and higher intervals + f"1{Interval.DAY}": {"is_intraday": False, "period": "daily"}, + f"1{Interval.WEEK}": {"is_intraday": False, "period": "weekly"}, + f"1{Interval.MONTH}": {"is_intraday": False, "period": "monthly"}, + # Common aliases + "1d": {"is_intraday": False, "period": "daily"}, + "daily": {"is_intraday": False, "period": "daily"}, + "1w": {"is_intraday": False, "period": "weekly"}, + "weekly": {"is_intraday": False, "period": "weekly"}, + "1mo": {"is_intraday": False, "period": "monthly"}, + "monthly": {"is_intraday": False, "period": "monthly"}, + "1m": {"is_intraday": True, "period": "1"}, + "5m": {"is_intraday": True, "period": "5"}, + "15m": {"is_intraday": True, "period": "15"}, + "30m": {"is_intraday": True, "period": "30"}, + "60m": {"is_intraday": True, "period": "60"}, + } + + return interval_mapping.get(interval) + + def _get_a_share_daily_historical( + self, + ticker: str, + exchange: str, + symbol: str, + start_date: datetime, + end_date: datetime, + period: str, + ) -> List[AssetPrice]: + """Get A-share daily historical price data.""" + try: + # Format dates for AKShare + start_date_str = start_date.strftime("%Y%m%d") + end_date_str = end_date.strftime("%Y%m%d") # Use cached data for historical prices cache_key = ( @@ -1324,67 +1392,188 @@ def _get_a_share_historical( period=period, start_date=start_date_str, end_date=end_date_str, - adjust="", # No adjustment + adjust="qfq", # Use forward adjustment ) if df_hist is None or df_hist.empty: - logger.warning(f"No historical data available for 
{symbol}") + logger.warning(f"No daily historical data available for {symbol}") return [] - prices = [] - for _, row in df_hist.iterrows(): - try: - # Parse date safely - trade_date = pd.to_datetime(row["ζ—₯期"]).to_pydatetime() + return self._process_a_share_daily_data(ticker, df_hist) - # Extract price data safely - open_price = self._safe_decimal_convert(row.get("εΌ€η›˜")) - high_price = self._safe_decimal_convert(row.get("ζœ€ι«˜")) - low_price = self._safe_decimal_convert(row.get("ζœ€δ½Ž")) - close_price = self._safe_decimal_convert(row.get("ζ”Άη›˜")) - volume = self._safe_decimal_convert(row.get("ζˆδΊ€ι‡")) + except Exception as e: + logger.error( + f"Error fetching A-share daily historical data for {symbol}: {e}" + ) + return [] - if not close_price: # Skip if no closing price - continue + def _get_a_share_intraday_historical( + self, + ticker: str, + exchange: str, + symbol: str, + start_date: datetime, + end_date: datetime, + period: str, + ) -> List[AssetPrice]: + """Get A-share intraday historical price data using minute data.""" + try: + # Format dates for AKShare intraday query + start_date_str = start_date.strftime("%Y-%m-%d %H:%M:%S") + end_date_str = end_date.strftime("%Y-%m-%d %H:%M:%S") - # Calculate change from previous day - change = None - change_percent = None - if len(prices) > 0: - prev_close = prices[-1].close_price - if prev_close and prev_close != 0: - change = close_price - prev_close - change_percent = (change / prev_close) * 100 + # Use cached data for intraday historical prices + cache_key = f"a_share_hist_min_{symbol}_{start_date.strftime('%Y%m%d')}_{end_date.strftime('%Y%m%d')}_{period}" - price = AssetPrice( - ticker=ticker, - price=close_price, - currency="CNY", - timestamp=trade_date, - volume=volume, - open_price=open_price, - high_price=high_price, - low_price=low_price, - close_price=close_price, - change=change, - change_percent=change_percent, - source=self.source, - ) - prices.append(price) + # Note: AKShare minute data has 
limitations - only recent 5 trading days for 1-minute + # and 1-minute data doesn't support forward adjustment + adjust_param = ( + "" if period == "1" else "qfq" + ) # 1-minute data doesn't support adjustment - except Exception as row_error: - logger.warning( - f"Error processing historical data row for {symbol}: {row_error}" - ) - continue + df_hist = self._get_cached_data( + cache_key, + self._safe_akshare_call, + ak.stock_zh_a_hist_min_em, + symbol=symbol, + start_date=start_date_str, + end_date=end_date_str, + period=period, + adjust=adjust_param, + ) - logger.info(f"Retrieved {len(prices)} historical price points for {symbol}") - return prices + if df_hist is None or df_hist.empty: + logger.warning(f"No intraday historical data available for {symbol}") + return [] + + return self._process_a_share_intraday_data(ticker, df_hist, period) except Exception as e: - logger.error(f"Error fetching A-share historical data for {symbol}: {e}") + logger.error( + f"Error fetching A-share intraday historical data for {symbol}: {e}" + ) return [] + def _process_a_share_daily_data( + self, ticker: str, df_hist: pd.DataFrame + ) -> List[AssetPrice]: + """Process A-share daily historical data.""" + prices = [] + for _, row in df_hist.iterrows(): + try: + # Parse date safely + trade_date = pd.to_datetime(row["ζ—₯期"]).to_pydatetime() + + # Extract price data safely + open_price = self._safe_decimal_convert(row.get("εΌ€η›˜")) + high_price = self._safe_decimal_convert(row.get("ζœ€ι«˜")) + low_price = self._safe_decimal_convert(row.get("ζœ€δ½Ž")) + close_price = self._safe_decimal_convert(row.get("ζ”Άη›˜")) + volume = self._safe_decimal_convert(row.get("ζˆδΊ€ι‡")) + + if not close_price: # Skip if no closing price + continue + + # Extract change data if available (AKShare provides this directly) + change = self._safe_decimal_convert(row.get("梨跌钝")) + change_percent = self._safe_decimal_convert(row.get("ζΆ¨θ·ŒεΉ…")) + + # If change data not available, calculate from previous day + 
if change is None and len(prices) > 0: + prev_close = prices[-1].close_price + if prev_close and prev_close != 0: + change = close_price - prev_close + change_percent = (change / prev_close) * 100 + + price = AssetPrice( + ticker=ticker, + price=close_price, + currency="CNY", + timestamp=trade_date, + volume=volume, + open_price=open_price, + high_price=high_price, + low_price=low_price, + close_price=close_price, + change=change, + change_percent=change_percent, + source=self.source, + ) + prices.append(price) + + except Exception as row_error: + logger.warning(f"Error processing daily data row: {row_error}") + continue + + logger.info(f"Retrieved {len(prices)} daily price points") + return prices + + def _process_a_share_intraday_data( + self, ticker: str, df_hist: pd.DataFrame, period: str + ) -> List[AssetPrice]: + """Process A-share intraday historical data.""" + prices = [] + for _, row in df_hist.iterrows(): + try: + # Parse timestamp safely + trade_time = pd.to_datetime(row["ζ—Άι—΄"]).to_pydatetime() + + # Extract price data safely + open_price = self._safe_decimal_convert(row.get("εΌ€η›˜")) + high_price = self._safe_decimal_convert(row.get("ζœ€ι«˜")) + low_price = self._safe_decimal_convert(row.get("ζœ€δ½Ž")) + close_price = self._safe_decimal_convert(row.get("ζ”Άη›˜")) + + # Volume is in 手 (lots), convert to shares (1 lot = 100 shares) + volume_lots = self._safe_decimal_convert(row.get("ζˆδΊ€ι‡")) + volume = volume_lots * 100 if volume_lots else None + + if not close_price: # Skip if no closing price + continue + + # For intraday data, calculate change from previous period + change = None + change_percent = None + if len(prices) > 0: + prev_close = prices[-1].close_price + if prev_close and prev_close != 0: + change = close_price - prev_close + change_percent = (change / prev_close) * 100 + + # For periods > 1 minute, AKShare provides change data directly + if period != "1": + akshare_change = self._safe_decimal_convert(row.get("梨跌钝")) + 
akshare_change_percent = self._safe_decimal_convert( + row.get("ζΆ¨θ·ŒεΉ…") + ) + if akshare_change is not None: + change = akshare_change + if akshare_change_percent is not None: + change_percent = akshare_change_percent + + price = AssetPrice( + ticker=ticker, + price=close_price, + currency="CNY", + timestamp=trade_time, + volume=volume, + open_price=open_price, + high_price=high_price, + low_price=low_price, + close_price=close_price, + change=change, + change_percent=change_percent, + source=self.source, + ) + prices.append(price) + + except Exception as row_error: + logger.warning(f"Error processing intraday data row: {row_error}") + continue + + logger.info(f"Retrieved {len(prices)} intraday ({period}m) price points") + return prices + def _get_hk_stock_historical( self, ticker: str, @@ -1396,29 +1585,148 @@ def _get_hk_stock_historical( ) -> List[AssetPrice]: """Get Hong Kong stock historical price data.""" try: - # Use AKShare HK stock historical data - df_hist = ak.stock_hk_daily(symbol=symbol, adjust="qfq") + # Map interval to AKShare format + akshare_params = self._map_interval_to_akshare_params(interval) + if not akshare_params: + logger.warning(f"Unsupported interval for HK stocks: {interval}") + return [] + + is_intraday = akshare_params["is_intraday"] + period_or_minutes = akshare_params["period"] + + if is_intraday: + return self._get_hk_stock_intraday_historical( + ticker, exchange, symbol, start_date, end_date, period_or_minutes + ) + else: + return self._get_hk_stock_daily_historical( + ticker, exchange, symbol, start_date, end_date, period_or_minutes + ) + + except Exception as e: + logger.error(f"Error fetching HK stock historical data for {symbol}: {e}") + return [] + + def _get_hk_stock_daily_historical( + self, + ticker: str, + exchange: str, + symbol: str, + start_date: datetime, + end_date: datetime, + period: str, + ) -> List[AssetPrice]: + """Get Hong Kong stock daily historical price data.""" + try: + # Format dates for AKShare + 
start_date_str = start_date.strftime("%Y%m%d") + end_date_str = end_date.strftime("%Y%m%d") + + # Use cached data for historical prices + cache_key = ( + f"hk_stock_hist_{symbol}_{start_date_str}_{end_date_str}_{period}" + ) + df_hist = self._get_cached_data( + cache_key, + self._safe_akshare_call, + ak.stock_hk_hist, + symbol=symbol, + period=period, + start_date=start_date_str, + end_date=end_date_str, + adjust="qfq", # Use forward adjustment (前倍权) + ) if df_hist is None or df_hist.empty: + logger.warning( + f"No HK stock daily historical data available for {symbol}" + ) return [] - # Filter by date range - df_hist["date"] = pd.to_datetime(df_hist["date"]) - mask = (df_hist["date"] >= start_date) & (df_hist["date"] <= end_date) - df_hist = df_hist[mask] - - prices = [] - for _, row in df_hist.iterrows(): - trade_date = row["date"].to_pydatetime() - - # Extract price data (adjust field names based on actual data structure) - open_price = Decimal(str(row.get("open", 0))) - high_price = Decimal(str(row.get("high", 0))) - low_price = Decimal(str(row.get("low", 0))) - close_price = Decimal(str(row.get("close", 0))) - volume = ( - Decimal(str(row.get("volume", 0))) if row.get("volume") else None + return self._process_hk_stock_daily_data(ticker, df_hist) + + except Exception as e: + logger.error( + f"Error fetching HK stock daily historical data for {symbol}: {e}" + ) + return [] + + def _get_hk_stock_intraday_historical( + self, + ticker: str, + exchange: str, + symbol: str, + start_date: datetime, + end_date: datetime, + period: str, + ) -> List[AssetPrice]: + """Get Hong Kong stock intraday historical price data.""" + try: + # Format dates for AKShare intraday query + start_date_str = start_date.strftime("%Y-%m-%d %H:%M:%S") + end_date_str = end_date.strftime("%Y-%m-%d %H:%M:%S") + + # Use cached data for intraday historical prices + cache_key = f"hk_stock_hist_min_{symbol}_{start_date.strftime('%Y%m%d')}_{end_date.strftime('%Y%m%d')}_{period}" + + # Note: HK stock 
minute data has limitations - only recent 5 trading days for 1-minute + adjust_param = "" if period == "1" else "qfq" + + df_hist = self._get_cached_data( + cache_key, + self._safe_akshare_call, + ak.stock_hk_hist_min_em, + symbol=symbol, + start_date=start_date_str, + end_date=end_date_str, + period=period, + adjust=adjust_param, + ) + + if df_hist is None or df_hist.empty: + logger.warning( + f"No HK stock intraday historical data available for {symbol}" ) + return [] + + return self._process_hk_stock_intraday_data(ticker, df_hist, period) + + except Exception as e: + logger.error( + f"Error fetching HK stock intraday historical data for {symbol}: {e}" + ) + return [] + + def _process_hk_stock_daily_data( + self, ticker: str, df_hist: pd.DataFrame + ) -> List[AssetPrice]: + """Process Hong Kong stock daily historical data.""" + prices = [] + for _, row in df_hist.iterrows(): + try: + # Parse date safely + trade_date = pd.to_datetime(row["ζ—₯期"]).to_pydatetime() + + # Extract price data safely + open_price = self._safe_decimal_convert(row.get("εΌ€η›˜")) + high_price = self._safe_decimal_convert(row.get("ζœ€ι«˜")) + low_price = self._safe_decimal_convert(row.get("ζœ€δ½Ž")) + close_price = self._safe_decimal_convert(row.get("ζ”Άη›˜")) + volume = self._safe_decimal_convert(row.get("ζˆδΊ€ι‡")) + + if not close_price: # Skip if no closing price + continue + + # Extract change data if available (AKShare provides this directly) + change = self._safe_decimal_convert(row.get("梨跌钝")) + change_percent = self._safe_decimal_convert(row.get("ζΆ¨θ·ŒεΉ…")) + + # If change data not available, calculate from previous day + if change is None and len(prices) > 0: + prev_close = prices[-1].close_price + if prev_close and prev_close != 0: + change = close_price - prev_close + change_percent = (change / prev_close) * 100 price = AssetPrice( ticker=ticker, @@ -1430,17 +1738,74 @@ def _get_hk_stock_historical( high_price=high_price, low_price=low_price, close_price=close_price, - 
change=None, - change_percent=None, + change=change, + change_percent=change_percent, source=self.source, ) prices.append(price) - return prices + except Exception as row_error: + logger.warning(f"Error processing HK stock daily data row: {row_error}") + continue - except Exception as e: - logger.error(f"Error fetching HK stock historical data for {symbol}: {e}") - return [] + logger.info(f"Retrieved {len(prices)} HK stock daily price points") + return prices + + def _process_hk_stock_intraday_data( + self, ticker: str, df_hist: pd.DataFrame, period: str + ) -> List[AssetPrice]: + """Process Hong Kong stock intraday historical data.""" + prices = [] + for _, row in df_hist.iterrows(): + try: + # Parse timestamp safely + trade_time = pd.to_datetime(row["ζ—Άι—΄"]).to_pydatetime() + + # Extract price data safely + open_price = self._safe_decimal_convert(row.get("εΌ€η›˜")) + high_price = self._safe_decimal_convert(row.get("ζœ€ι«˜")) + low_price = self._safe_decimal_convert(row.get("ζœ€δ½Ž")) + close_price = self._safe_decimal_convert(row.get("ζ”Άη›˜")) + volume = self._safe_decimal_convert(row.get("ζˆδΊ€ι‡")) + + if not close_price: # Skip if no closing price + continue + + # For intraday data, calculate change from previous period + change = None + change_percent = None + if len(prices) > 0: + prev_close = prices[-1].close_price + if prev_close and prev_close != 0: + change = close_price - prev_close + change_percent = (change / prev_close) * 100 + + price = AssetPrice( + ticker=ticker, + price=close_price, + currency="HKD", + timestamp=trade_time, + volume=volume, + open_price=open_price, + high_price=high_price, + low_price=low_price, + close_price=close_price, + change=change, + change_percent=change_percent, + source=self.source, + ) + prices.append(price) + + except Exception as row_error: + logger.warning( + f"Error processing HK stock intraday data row: {row_error}" + ) + continue + + logger.info( + f"Retrieved {len(prices)} HK stock intraday ({period}m) 
price points" + ) + return prices def _get_us_stock_historical( self, @@ -1453,29 +1818,144 @@ def _get_us_stock_historical( ) -> List[AssetPrice]: """Get US stock historical price data.""" try: - # Use AKShare US stock historical data - df_hist = ak.stock_us_daily(symbol=symbol, adjust="qfq") + # Map interval to AKShare format + akshare_params = self._map_interval_to_akshare_params(interval) + if not akshare_params: + logger.warning(f"Unsupported interval for US stocks: {interval}") + return [] + + is_intraday = akshare_params["is_intraday"] + period_or_minutes = akshare_params["period"] + + if is_intraday: + return self._get_us_stock_intraday_historical( + ticker, exchange, symbol, start_date, end_date, period_or_minutes + ) + else: + return self._get_us_stock_daily_historical( + ticker, exchange, symbol, start_date, end_date, period_or_minutes + ) + + except Exception as e: + logger.error(f"Error fetching US stock historical data for {symbol}: {e}") + return [] + + def _get_us_stock_daily_historical( + self, + ticker: str, + exchange: str, + symbol: str, + start_date: datetime, + end_date: datetime, + period: str, + ) -> List[AssetPrice]: + """Get US stock daily historical price data.""" + try: + # Format dates for AKShare + start_date_str = start_date.strftime("%Y%m%d") + end_date_str = end_date.strftime("%Y%m%d") + + # Use cached data for historical prices + cache_key = ( + f"us_stock_hist_{symbol}_{start_date_str}_{end_date_str}_{period}" + ) + df_hist = self._get_cached_data( + cache_key, + self._safe_akshare_call, + ak.stock_us_hist, + symbol=symbol, + period=period, + start_date=start_date_str, + end_date=end_date_str, + adjust="qfq", # Use forward adjustment + ) if df_hist is None or df_hist.empty: + logger.warning( + f"No US stock daily historical data available for {symbol}" + ) return [] - # Filter by date range - df_hist["date"] = pd.to_datetime(df_hist["date"]) - mask = (df_hist["date"] >= start_date) & (df_hist["date"] <= end_date) - df_hist = 
df_hist[mask] - - prices = [] - for _, row in df_hist.iterrows(): - trade_date = row["date"].to_pydatetime() - - # Extract price data - open_price = Decimal(str(row.get("open", 0))) - high_price = Decimal(str(row.get("high", 0))) - low_price = Decimal(str(row.get("low", 0))) - close_price = Decimal(str(row.get("close", 0))) - volume = ( - Decimal(str(row.get("volume", 0))) if row.get("volume") else None + return self._process_us_stock_daily_data(ticker, df_hist) + + except Exception as e: + logger.error( + f"Error fetching US stock daily historical data for {symbol}: {e}" + ) + return [] + + def _get_us_stock_intraday_historical( + self, + ticker: str, + exchange: str, + symbol: str, + start_date: datetime, + end_date: datetime, + period: str, + ) -> List[AssetPrice]: + """Get US stock intraday historical price data.""" + try: + # Format dates for AKShare intraday query + start_date_str = start_date.strftime("%Y-%m-%d %H:%M:%S") + end_date_str = end_date.strftime("%Y-%m-%d %H:%M:%S") + + # Use cached data for intraday historical prices + cache_key = f"us_stock_hist_min_{symbol}_{start_date.strftime('%Y%m%d')}_{end_date.strftime('%Y%m%d')}_{period}" + + # Note: US stock minute data has limitations - only recent 5 trading days + df_hist = self._get_cached_data( + cache_key, + self._safe_akshare_call, + ak.stock_us_hist_min_em, + symbol=symbol, + start_date=start_date_str, + end_date=end_date_str, + ) + + if df_hist is None or df_hist.empty: + logger.warning( + f"No US stock intraday historical data available for {symbol}" ) + return [] + + return self._process_us_stock_intraday_data(ticker, df_hist, period) + + except Exception as e: + logger.error( + f"Error fetching US stock intraday historical data for {symbol}: {e}" + ) + return [] + + def _process_us_stock_daily_data( + self, ticker: str, df_hist: pd.DataFrame + ) -> List[AssetPrice]: + """Process US stock daily historical data.""" + prices = [] + for _, row in df_hist.iterrows(): + try: + # Parse date safely + 
trade_date = pd.to_datetime(row["ζ—₯期"]).to_pydatetime() + + # Extract price data safely + open_price = self._safe_decimal_convert(row.get("εΌ€η›˜")) + high_price = self._safe_decimal_convert(row.get("ζœ€ι«˜")) + low_price = self._safe_decimal_convert(row.get("ζœ€δ½Ž")) + close_price = self._safe_decimal_convert(row.get("ζ”Άη›˜")) + volume = self._safe_decimal_convert(row.get("ζˆδΊ€ι‡")) + + if not close_price: # Skip if no closing price + continue + + # Extract change data if available (AKShare provides this directly) + change = self._safe_decimal_convert(row.get("梨跌钝")) + change_percent = self._safe_decimal_convert(row.get("ζΆ¨θ·ŒεΉ…")) + + # If change data not available, calculate from previous day + if change is None and len(prices) > 0: + prev_close = prices[-1].close_price + if prev_close and prev_close != 0: + change = close_price - prev_close + change_percent = (change / prev_close) * 100 price = AssetPrice( ticker=ticker, @@ -1487,17 +1967,74 @@ def _get_us_stock_historical( high_price=high_price, low_price=low_price, close_price=close_price, - change=None, - change_percent=None, + change=change, + change_percent=change_percent, source=self.source, ) prices.append(price) - return prices + except Exception as row_error: + logger.warning(f"Error processing US stock daily data row: {row_error}") + continue - except Exception as e: - logger.error(f"Error fetching US stock historical data for {symbol}: {e}") - return [] + logger.info(f"Retrieved {len(prices)} US stock daily price points") + return prices + + def _process_us_stock_intraday_data( + self, ticker: str, df_hist: pd.DataFrame, period: str + ) -> List[AssetPrice]: + """Process US stock intraday historical data.""" + prices = [] + for _, row in df_hist.iterrows(): + try: + # Parse timestamp safely + trade_time = pd.to_datetime(row["ζ—Άι—΄"]).to_pydatetime() + + # Extract price data safely + open_price = self._safe_decimal_convert(row.get("εΌ€η›˜")) + high_price = 
self._safe_decimal_convert(row.get("ζœ€ι«˜")) + low_price = self._safe_decimal_convert(row.get("ζœ€δ½Ž")) + close_price = self._safe_decimal_convert(row.get("ζ”Άη›˜")) + volume = self._safe_decimal_convert(row.get("ζˆδΊ€ι‡")) + + if not close_price: # Skip if no closing price + continue + + # For intraday data, calculate change from previous period + change = None + change_percent = None + if len(prices) > 0: + prev_close = prices[-1].close_price + if prev_close and prev_close != 0: + change = close_price - prev_close + change_percent = (change / prev_close) * 100 + + price = AssetPrice( + ticker=ticker, + price=close_price, + currency="USD", + timestamp=trade_time, + volume=volume, + open_price=open_price, + high_price=high_price, + low_price=low_price, + close_price=close_price, + change=change, + change_percent=change_percent, + source=self.source, + ) + prices.append(price) + + except Exception as row_error: + logger.warning( + f"Error processing US stock intraday data row: {row_error}" + ) + continue + + logger.info( + f"Retrieved {len(prices)} US stock intraday ({period}m) price points" + ) + return prices def get_supported_asset_types(self) -> List[AssetType]: """Get asset types supported by AKShare.""" diff --git a/python/valuecell/adapters/assets/base.py b/python/valuecell/adapters/assets/base.py index a780285ab..ce1d23567 100644 --- a/python/valuecell/adapters/assets/base.py +++ b/python/valuecell/adapters/assets/base.py @@ -36,27 +36,11 @@ def __init__(self): "HKEX": ".HK", # Hong Kong Exchange "TSE": ".T", # Tokyo Stock Exchange }, - DataSource.TUSHARE: { - "SSE": ".SH", # Shanghai Stock Exchange in TuShare - "SZSE": ".SZ", # Shenzhen Stock Exchange in TuShare - }, DataSource.AKSHARE: { "SSE": "", # AKShare uses plain symbols for Chinese stocks "SZSE": "", # AKShare uses plain symbols for Chinese stocks "BSE": "", # Beijing Stock Exchange }, - DataSource.FINNHUB: { - "NASDAQ": "", # Finnhub uses plain symbols for US stocks - "NYSE": "", # Finnhub uses 
plain symbols for US stocks - "AMEX": "", # American Stock Exchange - "HKEX": ".HK", # Hong Kong stocks need .HK suffix - "TSE": ".T", # Tokyo Stock Exchange - "LSE": ".L", # London Stock Exchange - "XETRA": ".DE", # German Exchange - }, - DataSource.COINMARKETCAP: { - "CRYPTO": "", # Crypto symbols are used as-is - }, } # Reverse mappings for converting back to internal format @@ -77,6 +61,27 @@ def to_source_format(self, internal_ticker: str, source: DataSource) -> str: try: exchange, symbol = internal_ticker.split(":", 1) + # Special handling for indices in yfinance (use ^ prefix) + if source == DataSource.YFINANCE: + index_mapping = { + # US Indices + "NASDAQ:IXIC": "^IXIC", # NASDAQ Composite + "NYSE:DJI": "^DJI", # Dow Jones Industrial Average + "NYSE:GSPC": "^GSPC", # S&P 500 + "NASDAQ:NDX": "^NDX", # NASDAQ 100 + # Hong Kong Indices + "HKEX:HSI": "^HSI", # Hang Seng Index + "HKEX:HSCEI": "^HSCEI", # Hang Seng China Enterprises Index + # Chinese Indices (already work with .SS/.SZ suffixes) + # European Indices + "LSE:FTSE": "^FTSE", # FTSE 100 + "EURONEXT:FCHI": "^FCHI", # CAC 40 + "XETRA:GDAXI": "^GDAXI", # DAX + } + + if internal_ticker in index_mapping: + return index_mapping[internal_ticker] + # Special handling for crypto tickers in yfinance if exchange == "CRYPTO" and source == DataSource.YFINANCE: # Map common crypto symbols to yfinance format @@ -135,6 +140,26 @@ def to_internal_format( Ticker in internal format (e.g., "SZSE:000001") """ try: + # Special handling for indices from yfinance (reverse ^ prefix mapping) + if source == DataSource.YFINANCE and source_ticker.startswith("^"): + index_reverse_mapping = { + # US Indices + "^IXIC": "NASDAQ:IXIC", # NASDAQ Composite + "^DJI": "NYSE:DJI", # Dow Jones Industrial Average + "^GSPC": "NYSE:GSPC", # S&P 500 + "^NDX": "NASDAQ:NDX", # NASDAQ 100 + # Hong Kong Indices + "^HSI": "HKEX:HSI", # Hang Seng Index + "^HSCEI": "HKEX:HSCEI", # Hang Seng China Enterprises Index + # European Indices + "^FTSE": 
"LSE:FTSE", # FTSE 100 + "^FCHI": "EURONEXT:FCHI", # CAC 40 + "^GDAXI": "XETRA:GDAXI", # DAX + } + + if source_ticker in index_reverse_mapping: + return index_reverse_mapping[source_ticker] + # Special handling for Hong Kong stocks from yfinance if source == DataSource.YFINANCE and source_ticker.endswith(".HK"): symbol = source_ticker[:-3] # Remove .HK suffix @@ -161,10 +186,7 @@ def to_internal_format( if default_exchange: return f"{default_exchange}:{source_ticker}" - # For crypto and other assets without clear exchange mapping - if source == DataSource.COINMARKETCAP: - return f"CRYPTO:{source_ticker}" - + # For other assets without clear exchange mapping # Fallback to using the source as exchange return f"{source.value.upper()}:{source_ticker}" @@ -250,8 +272,8 @@ def get_historical_prices( Args: ticker: Asset ticker in internal format - start_date: Start date for historical data - end_date: End date for historical data + start_date: Start date for historical data, format: YYYY-MM-DD, timezone: UTC + end_date: End date for historical data, format: YYYY-MM-DD, timezone: UTC interval: Data interval (e.g., "1d", "1h", "5m") Returns: diff --git a/python/valuecell/adapters/assets/coinmarketcap_adapter.py b/python/valuecell/adapters/assets/coinmarketcap_adapter.py deleted file mode 100644 index 69e5a6833..000000000 --- a/python/valuecell/adapters/assets/coinmarketcap_adapter.py +++ /dev/null @@ -1,478 +0,0 @@ -"""CoinMarketCap adapter for cryptocurrency data. - -This adapter provides integration with CoinMarketCap API to fetch cryptocurrency -market data, including prices, market caps, and metadata. 
-""" - -import logging -import time -from datetime import datetime -from decimal import Decimal -from typing import Any, Dict, List, Optional - -import requests - -from .base import ( - AuthenticationError, - BaseDataAdapter, - DataNotAvailableError, - RateLimitError, -) -from .types import ( - Asset, - AssetPrice, - AssetSearchQuery, - AssetSearchResult, - AssetType, - DataSource, - LocalizedName, - MarketInfo, - MarketStatus, -) - -logger = logging.getLogger(__name__) - - -class CoinMarketCapAdapter(BaseDataAdapter): - """CoinMarketCap data adapter for cryptocurrency markets.""" - - def __init__(self, api_key: str, **kwargs): - """Initialize CoinMarketCap adapter. - - Args: - api_key: CoinMarketCap API key - **kwargs: Additional configuration parameters - """ - super().__init__(DataSource.COINMARKETCAP, api_key, **kwargs) - - if not api_key: - raise AuthenticationError("CoinMarketCap API key is required") - - def _initialize(self) -> None: - """Initialize CoinMarketCap adapter configuration.""" - self.base_url = "https://pro-api.coinmarketcap.com/v1" - self.headers = { - "Accepts": "application/json", - "X-CMC_PRO_API_KEY": self.api_key, - } - self.session = requests.Session() - self.session.headers.update(self.headers) - - # Rate limiting - self.last_request_time = 0 - self.min_request_interval = 1.0 # Minimum 1 second between requests - - # Test connection - try: - self._perform_health_check() - logger.info("CoinMarketCap adapter initialized successfully") - except Exception as e: - logger.error(f"Failed to initialize CoinMarketCap adapter: {e}") - raise AuthenticationError(f"CoinMarketCap initialization failed: {e}") - - def _make_request( - self, endpoint: str, params: Optional[Dict] = None - ) -> Dict[str, Any]: - """Make rate-limited request to CoinMarketCap API.""" - # Rate limiting - current_time = time.time() - time_since_last_request = current_time - self.last_request_time - if time_since_last_request < self.min_request_interval: - 
time.sleep(self.min_request_interval - time_since_last_request) - - url = f"{self.base_url}{endpoint}" - - try: - response = self.session.get(url, params=params or {}) - self.last_request_time = time.time() - - if response.status_code == 429: - # Rate limit exceeded - retry_after = int(response.headers.get("Retry-After", 60)) - raise RateLimitError( - f"Rate limit exceeded. Retry after {retry_after} seconds", - retry_after=retry_after, - source=self.source, - ) - elif response.status_code == 401: - raise AuthenticationError("Invalid API key", source=self.source) - elif response.status_code != 200: - raise DataNotAvailableError( - f"API request failed with status {response.status_code}: {response.text}", - source=self.source, - ) - - data = response.json() - if data.get("status", {}).get("error_code") != 0: - error_message = data.get("status", {}).get( - "error_message", "Unknown error" - ) - raise DataNotAvailableError( - f"API error: {error_message}", source=self.source - ) - - return data - - except requests.RequestException as e: - raise DataNotAvailableError(f"Network error: {e}", source=self.source) - - def search_assets(self, query: AssetSearchQuery) -> List[AssetSearchResult]: - """Search for cryptocurrencies using CoinMarketCap.""" - try: - # CoinMarketCap doesn't have a direct search endpoint in free tier - # We'll get the top cryptocurrencies and filter by name/symbol - params = { - "start": 1, - "limit": 5000, # Get more coins to search through - "convert": "USD", - } - - data = self._make_request("/cryptocurrency/listings/latest", params) - coins = data.get("data", []) - - search_term = query.query.lower().strip() - results = [] - - for coin in coins: - # Search by symbol or name - symbol = coin.get("symbol", "").lower() - name = coin.get("name", "").lower() - - if ( - search_term in symbol - or search_term in name - or symbol.startswith(search_term) - ): - # Convert to internal ticker format - internal_ticker = f"CRYPTO:{coin['symbol']}" - - # Create 
localized names - names = { - "en-US": coin["name"], - "zh-Hans": coin["name"], # Could be enhanced with translations - } - - # Calculate relevance score - relevance_score = 1.0 - if symbol == search_term: - relevance_score = 2.0 # Exact symbol match - elif symbol.startswith(search_term): - relevance_score = 1.5 # Symbol starts with search term - - result = AssetSearchResult( - ticker=internal_ticker, - asset_type=AssetType.CRYPTO, - names=names, - exchange="CRYPTO", - country="GLOBAL", - currency="USD", - market_status=MarketStatus.OPEN, # Crypto markets are always open - relevance_score=relevance_score, - ) - - results.append(result) - - # Sort by relevance score and market cap - results.sort(key=lambda x: (x.relevance_score, -1), reverse=True) - - # Apply filters - if query.asset_types: - results = [r for r in results if r.asset_type in query.asset_types] - - return results[: query.limit] - - except Exception as e: - logger.error(f"Error searching cryptocurrencies: {e}") - return [] - - def get_asset_info(self, ticker: str) -> Optional[Asset]: - """Get detailed cryptocurrency information from CoinMarketCap.""" - try: - # Extract symbol from ticker - symbol = self.get_symbol() - - # Get cryptocurrency metadata - params = {"symbol": symbol} - data = self._make_request("/cryptocurrency/info", params) - - coin_data = data.get("data", {}).get(symbol) - if not coin_data: - return None - - # Create localized names - names = LocalizedName() - names.set_name("en-US", coin_data["name"]) - names.set_name("zh-Hans", coin_data["name"]) # Could be enhanced - - # Create market info - market_info = MarketInfo( - exchange="CRYPTO", country="GLOBAL", currency="USD", timezone="UTC" - ) - - # Create asset - asset = Asset( - ticker=ticker, - asset_type=AssetType.CRYPTO, - names=names, - market_info=market_info, - ) - - # Set source mapping - asset.set_source_ticker(self.source, symbol) - - # Add additional properties - properties = { - "description": coin_data.get("description"), - 
"category": coin_data.get("category"), - "tags": coin_data.get("tags", []), - "platform": coin_data.get("platform"), - "date_added": coin_data.get("date_added"), - "date_launched": coin_data.get("date_launched"), - "is_hidden": coin_data.get("is_hidden"), - "notice": coin_data.get("notice"), - "logo": coin_data.get("logo"), - "subreddit": coin_data.get("subreddit"), - "twitter_username": coin_data.get("twitter_username"), - "website_url": coin_data.get("urls", {}).get("website", []), - "technical_doc": coin_data.get("urls", {}).get("technical_doc", []), - "explorer": coin_data.get("urls", {}).get("explorer", []), - "source_code": coin_data.get("urls", {}).get("source_code", []), - } - - # Filter out None values - properties = {k: v for k, v in properties.items() if v is not None} - asset.properties.update(properties) - - return asset - - except Exception as e: - logger.error(f"Error fetching asset info for {ticker}: {e}") - return None - - def get_real_time_price(self, ticker: str) -> Optional[AssetPrice]: - """Get real-time cryptocurrency price from CoinMarketCap.""" - try: - symbol = self.get_symbol(ticker) - - params = {"symbol": symbol, "convert": "USD"} - - data = self._make_request("/cryptocurrency/quotes/latest", params) - coin_data = data.get("data", {}).get(symbol) - - if not coin_data: - return None - - quote = coin_data["quote"]["USD"] - - # Convert timestamp - last_updated = datetime.fromisoformat( - coin_data["last_updated"].replace("Z", "+00:00") - ).replace(tzinfo=None) - - return AssetPrice( - ticker=ticker, - price=Decimal(str(quote["price"])), - currency="USD", - timestamp=last_updated, - volume=Decimal(str(quote["volume_24h"])) - if quote.get("volume_24h") - else None, - change=None, # CoinMarketCap doesn't provide absolute change - change_percent=Decimal(str(quote["percent_change_24h"])) - if quote.get("percent_change_24h") - else None, - market_cap=Decimal(str(quote["market_cap"])) - if quote.get("market_cap") - else None, - source=self.source, 
- ) - - except Exception as e: - logger.error(f"Error fetching real-time price for {ticker}: {e}") - return None - - def get_historical_prices( - self, - ticker: str, - start_date: datetime, - end_date: datetime, - interval: str = "1d", - ) -> List[AssetPrice]: - """Get historical cryptocurrency prices from CoinMarketCap. - - Note: Historical data requires a paid CoinMarketCap plan. - This implementation provides a placeholder structure. - """ - try: - # CoinMarketCap historical data requires paid plan - # This is a placeholder implementation - logger.warning( - f"Historical data for {ticker} requires CoinMarketCap paid plan. " - f"Consider using alternative data sources for historical crypto data." - ) - - return [] - - except Exception as e: - logger.error(f"Error fetching historical prices for {ticker}: {e}") - return [] - - def get_multiple_prices( - self, tickers: List[str] - ) -> Dict[str, Optional[AssetPrice]]: - """Get real-time prices for multiple cryptocurrencies efficiently.""" - try: - # Extract symbols from tickers - symbols = [self.get_symbol(ticker) for ticker in tickers] - - # CoinMarketCap supports comma-separated symbols - params = {"symbol": ",".join(symbols), "convert": "USD"} - - data = self._make_request("/cryptocurrency/quotes/latest", params) - coin_data = data.get("data", {}) - - results = {} - - for ticker in tickers: - symbol = self.get_symbol(ticker) - - if symbol in coin_data: - coin_info = coin_data[symbol] - quote = coin_info["quote"]["USD"] - - last_updated = datetime.fromisoformat( - coin_info["last_updated"].replace("Z", "+00:00") - ).replace(tzinfo=None) - - results[ticker] = AssetPrice( - ticker=ticker, - price=Decimal(str(quote["price"])), - currency="USD", - timestamp=last_updated, - volume=Decimal(str(quote["volume_24h"])) - if quote.get("volume_24h") - else None, - change=None, - change_percent=Decimal(str(quote["percent_change_24h"])) - if quote.get("percent_change_24h") - else None, - 
market_cap=Decimal(str(quote["market_cap"])) - if quote.get("market_cap") - else None, - source=self.source, - ) - else: - results[ticker] = None - - return results - - except Exception as e: - logger.error(f"Error fetching multiple prices: {e}") - # Fallback to individual requests - return super().get_multiple_prices(tickers) - - def get_supported_asset_types(self) -> List[AssetType]: - """Get asset types supported by CoinMarketCap.""" - return [AssetType.CRYPTO] - - def _perform_health_check(self) -> Any: - """Perform health check by fetching API info.""" - try: - data = self._make_request("/key/info") - - if "data" in data: - return { - "status": "ok", - "plan": data["data"].get("plan", {}).get("name"), - "credits_left": data["data"] - .get("usage", {}) - .get("current_month", {}) - .get("credits_left"), - "credits_used": data["data"] - .get("usage", {}) - .get("current_month", {}) - .get("credits_used"), - } - else: - return {"status": "error", "message": "No data received"} - - except Exception as e: - return {"status": "error", "message": str(e)} - - def validate_ticker(self, ticker: str) -> bool: - """Validate if ticker is a cryptocurrency ticker.""" - try: - exchange, symbol = ticker.split(":", 1) - - # CoinMarketCap supports crypto tickers - supported_exchanges = ["CRYPTO"] - - return exchange in supported_exchanges - - except ValueError: - return False - - def get_symbol(self, ticker: str) -> str: - """Extract symbol from internal ticker format.""" - try: - return ticker.split(":", 1)[1] - except (ValueError, IndexError): - return ticker - - def get_global_metrics(self) -> Dict[str, Any]: - """Get global cryptocurrency market metrics.""" - try: - data = self._make_request("/global-metrics/quotes/latest") - return data.get("data", {}) - - except Exception as e: - logger.error(f"Error fetching global metrics: {e}") - return {} - - def get_trending_cryptocurrencies(self, limit: int = 10) -> List[AssetSearchResult]: - """Get trending cryptocurrencies by 
market cap.""" - try: - params = { - "start": 1, - "limit": limit, - "convert": "USD", - "sort": "market_cap", - "sort_dir": "desc", - } - - data = self._make_request("/cryptocurrency/listings/latest", params) - coins = data.get("data", []) - - results = [] - for coin in coins: - internal_ticker = f"CRYPTO:{coin['symbol']}" - - names = { - "en-US": coin["name"], - "zh-Hans": coin["name"], - } - - result = AssetSearchResult( - ticker=internal_ticker, - asset_type=AssetType.CRYPTO, - names=names, - exchange="CRYPTO", - country="GLOBAL", - currency="USD", - market_status=MarketStatus.OPEN, - relevance_score=1.0, - ) - - results.append(result) - - return results - - except Exception as e: - logger.error(f"Error fetching trending cryptocurrencies: {e}") - return [] - - def is_market_open(self, exchange: str) -> bool: - """Cryptocurrency markets are always open.""" - if exchange in ["CRYPTO"]: - return True - return False diff --git a/python/valuecell/adapters/assets/finnhub_adapter.py b/python/valuecell/adapters/assets/finnhub_adapter.py deleted file mode 100644 index 07d94df8d..000000000 --- a/python/valuecell/adapters/assets/finnhub_adapter.py +++ /dev/null @@ -1,697 +0,0 @@ -"""Finnhub adapter for global stock market data. - -This adapter provides integration with Finnhub API to fetch global stock market data, -including US stocks, international markets, company profiles, and financial metrics. 
-""" - -import logging -import time -from datetime import datetime -from decimal import Decimal -from typing import Any, Dict, List, Optional - -import requests - -from .base import ( - AuthenticationError, - BaseDataAdapter, - DataNotAvailableError, - RateLimitError, -) -from .types import ( - Asset, - AssetPrice, - AssetSearchQuery, - AssetSearchResult, - AssetType, - DataSource, - LocalizedName, - MarketInfo, - MarketStatus, -) - -logger = logging.getLogger(__name__) - - -class FinnhubAdapter(BaseDataAdapter): - """Finnhub data adapter for global stock markets.""" - - def __init__(self, api_key: str, **kwargs): - """Initialize Finnhub adapter. - - Args: - api_key: Finnhub API key - **kwargs: Additional configuration parameters - """ - super().__init__(DataSource.FINNHUB, api_key, **kwargs) - - if not api_key: - raise AuthenticationError("Finnhub API key is required") - - def _initialize(self) -> None: - """Initialize Finnhub adapter configuration.""" - self.base_url = "https://finnhub.io/api/v1" - self.session = requests.Session() - - # Rate limiting - self.last_request_time = 0 - self.min_request_interval = ( - 1.0 # Minimum 1 second between requests for free tier - ) - - # Asset type mapping for Finnhub - self.asset_type_mapping = { - "Common Stock": AssetType.STOCK, - "ETF": AssetType.ETF, - "Mutual Fund": AssetType.MUTUAL_FUND, - "Index": AssetType.INDEX, - "Bond": AssetType.BOND, - } - - # Exchange mapping - self.exchange_mapping = { - "US": ["NASDAQ", "NYSE", "AMEX"], - "HK": ["HKEX"], - "CN": ["SSE", "SZSE"], - "JP": ["TSE"], - "GB": ["LSE"], - "DE": ["XETRA"], - } - - # Test connection - try: - self._perform_health_check() - logger.info("Finnhub adapter initialized successfully") - except Exception as e: - logger.error(f"Failed to initialize Finnhub adapter: {e}") - raise AuthenticationError(f"Finnhub initialization failed: {e}") - - def _make_request( - self, endpoint: str, params: Optional[Dict] = None - ) -> Dict[str, Any]: - """Make rate-limited 
request to Finnhub API.""" - # Rate limiting - current_time = time.time() - time_since_last_request = current_time - self.last_request_time - if time_since_last_request < self.min_request_interval: - time.sleep(self.min_request_interval - time_since_last_request) - - url = f"{self.base_url}{endpoint}" - request_params = params or {} - request_params["token"] = self.api_key - - try: - response = self.session.get(url, params=request_params, timeout=30) - self.last_request_time = time.time() - - if response.status_code == 429: - # Rate limit exceeded - retry_after = int(response.headers.get("Retry-After", 60)) - raise RateLimitError( - f"Rate limit exceeded. Retry after {retry_after} seconds", - retry_after=retry_after, - source=self.source, - ) - elif response.status_code == 401: - raise AuthenticationError("Invalid API key", source=self.source) - elif response.status_code != 200: - raise DataNotAvailableError( - f"API request failed with status {response.status_code}: {response.text}", - source=self.source, - ) - - data = response.json() - - # Check for API errors - if isinstance(data, dict) and data.get("error"): - raise DataNotAvailableError( - f"API error: {data['error']}", source=self.source - ) - - return data - - except requests.RequestException as e: - raise DataNotAvailableError(f"Network error: {e}", source=self.source) - - def search_assets(self, query: AssetSearchQuery) -> List[AssetSearchResult]: - """Search for assets using Finnhub symbol lookup.""" - try: - results = [] - search_term = query.query.upper().strip() - - # Search US stocks - try: - data = self._make_request("/search", {"q": search_term}) - - if data and "result" in data: - for item in data["result"][: query.limit]: - try: - symbol = item.get("symbol", "") - description = item.get("description", "") - asset_type = item.get("type", "Common Stock") - - if not symbol or not description: - continue - - # Determine exchange and create internal ticker - exchange = self._determine_exchange(symbol) 
- internal_ticker = f"{exchange}:{symbol}" - - # Map asset type - mapped_asset_type = self.asset_type_mapping.get( - asset_type, AssetType.STOCK - ) - - # Create localized names - names = { - "en-US": description, - "en-GB": description, - "zh-Hans": description, # Could be enhanced with translation - "zh-Hant": description, - } - - # Calculate relevance score - relevance_score = self._calculate_relevance( - search_term, symbol, description - ) - - result = AssetSearchResult( - ticker=internal_ticker, - asset_type=mapped_asset_type, - names=names, - exchange=exchange, - country=self._get_country_for_exchange(exchange), - currency=self._get_currency_for_exchange(exchange), - market_status=MarketStatus.UNKNOWN, - relevance_score=relevance_score, - ) - - results.append(result) - - except Exception as e: - logger.warning(f"Error processing search result: {e}") - continue - - except Exception as e: - logger.error(f"Error searching symbols: {e}") - - # Apply filters - if query.asset_types: - results = [r for r in results if r.asset_type in query.asset_types] - - if query.exchanges: - results = [r for r in results if r.exchange in query.exchanges] - - if query.countries: - results = [r for r in results if r.country in query.countries] - - # Sort by relevance - results.sort(key=lambda x: x.relevance_score, reverse=True) - - return results[: query.limit] - - except Exception as e: - logger.error(f"Error searching assets: {e}") - return [] - - def _calculate_relevance( - self, search_term: str, symbol: str, description: str - ) -> float: - """Calculate relevance score for search results.""" - search_term_lower = search_term.lower() - symbol_lower = symbol.lower() - description_lower = description.lower() - - # Exact symbol match gets highest score - if search_term_lower == symbol_lower: - return 2.0 - - # Symbol starts with search term - if symbol_lower.startswith(search_term_lower): - return 1.8 - - # Description starts with search term - if 
description_lower.startswith(search_term_lower): - return 1.6 - - # Symbol contains search term - if search_term_lower in symbol_lower: - return 1.4 - - # Description contains search term - if search_term_lower in description_lower: - return 1.2 - - return 1.0 - - def _determine_exchange(self, symbol: str) -> str: - """Determine exchange from symbol format.""" - # Simple heuristics for exchange determination - if "." in symbol: - suffix = symbol.split(".")[-1] - if suffix == "HK": - return "HKEX" - elif suffix == "T": - return "TSE" - elif suffix == "L": - return "LSE" - elif suffix == "DE": - return "XETRA" - - # Default to NASDAQ for US symbols - return "NASDAQ" - - def _get_country_for_exchange(self, exchange: str) -> str: - """Get country code for exchange.""" - country_mapping = { - "NASDAQ": "US", - "NYSE": "US", - "AMEX": "US", - "HKEX": "HK", - "TSE": "JP", - "LSE": "GB", - "XETRA": "DE", - "SSE": "CN", - "SZSE": "CN", - } - return country_mapping.get(exchange, "US") - - def _get_currency_for_exchange(self, exchange: str) -> str: - """Get currency for exchange.""" - currency_mapping = { - "NASDAQ": "USD", - "NYSE": "USD", - "AMEX": "USD", - "HKEX": "HKD", - "TSE": "JPY", - "LSE": "GBP", - "XETRA": "EUR", - "SSE": "CNY", - "SZSE": "CNY", - } - return currency_mapping.get(exchange, "USD") - - def get_asset_info(self, ticker: str) -> Optional[Asset]: - """Get detailed asset information from Finnhub.""" - try: - exchange, symbol = ticker.split(":") - - # Get company profile - try: - profile_data = self._make_request("/stock/profile2", {"symbol": symbol}) - - if not profile_data: - return None - - # Create localized names - names = LocalizedName() - company_name = profile_data.get("name", symbol) - names.set_name("en-US", company_name) - names.set_name("en-GB", company_name) - names.set_name("zh-Hans", company_name) # Could be enhanced - names.set_name("zh-Hant", company_name) - - # Create market info - country = profile_data.get( - "country", 
self._get_country_for_exchange(exchange) - ) - currency = profile_data.get( - "currency", self._get_currency_for_exchange(exchange) - ) - - market_info = MarketInfo( - exchange=exchange, - country=country, - currency=currency, - timezone=self._get_timezone_for_country(country), - ) - - # Create asset - asset = Asset( - ticker=ticker, - asset_type=AssetType.STOCK, # Default to stock, could be enhanced - names=names, - market_info=market_info, - ) - - # Set source mapping - asset.set_source_ticker(self.source, symbol) - - # Add additional properties - properties = { - "country": profile_data.get("country"), - "currency": profile_data.get("currency"), - "exchange": profile_data.get("exchange"), - "ipo": profile_data.get("ipo"), - "market_capitalization": profile_data.get("marketCapitalization"), - "outstanding_shares": profile_data.get("shareOutstanding"), - "name": profile_data.get("name"), - "phone": profile_data.get("phone"), - "weburl": profile_data.get("weburl"), - "logo": profile_data.get("logo"), - "finnhub_industry": profile_data.get("finnhubIndustry"), - } - - # Filter out None values - properties = {k: v for k, v in properties.items() if v is not None} - asset.properties.update(properties) - - return asset - - except Exception as e: - logger.error(f"Error fetching company profile for {symbol}: {e}") - return None - - except Exception as e: - logger.error(f"Error getting asset info for {ticker}: {e}") - return None - - def _get_timezone_for_country(self, country: str) -> str: - """Get timezone for country.""" - timezone_mapping = { - "US": "America/New_York", - "HK": "Asia/Hong_Kong", - "JP": "Asia/Tokyo", - "GB": "Europe/London", - "DE": "Europe/Berlin", - "CN": "Asia/Shanghai", - } - return timezone_mapping.get(country, "America/New_York") - - def get_real_time_price(self, ticker: str) -> Optional[AssetPrice]: - """Get real-time price data from Finnhub.""" - try: - exchange, symbol = ticker.split(":") - - # Get real-time quote - try: - quote_data = 
self._make_request("/quote", {"symbol": symbol}) - - if not quote_data or "c" not in quote_data: - return None - - current_price = Decimal(str(quote_data["c"])) # Current price - open_price = Decimal(str(quote_data["o"])) # Open price - high_price = Decimal(str(quote_data["h"])) # High price - low_price = Decimal(str(quote_data["l"])) # Low price - previous_close = Decimal(str(quote_data["pc"])) # Previous close - - # Calculate change - change = current_price - previous_close - change_percent = ( - (change / previous_close) * 100 if previous_close else Decimal("0") - ) - - # Timestamp (Unix timestamp) - timestamp = ( - datetime.fromtimestamp(quote_data["t"]) - if quote_data.get("t") - else datetime.now() - ) - - return AssetPrice( - ticker=ticker, - price=current_price, - currency=self._get_currency_for_exchange(exchange), - timestamp=timestamp, - volume=None, # Volume not provided in basic quote - open_price=open_price, - high_price=high_price, - low_price=low_price, - close_price=current_price, - change=change, - change_percent=change_percent, - source=self.source, - ) - - except Exception as e: - logger.error(f"Error fetching quote for {symbol}: {e}") - return None - - except Exception as e: - logger.error(f"Error getting real-time price for {ticker}: {e}") - return None - - def get_historical_prices( - self, - ticker: str, - start_date: datetime, - end_date: datetime, - interval: str = "1d", - ) -> List[AssetPrice]: - """Get historical price data from Finnhub.""" - try: - exchange, symbol = ticker.split(":") - - # Convert dates to Unix timestamps - start_timestamp = int(start_date.timestamp()) - end_timestamp = int(end_date.timestamp()) - - # Map interval to Finnhub resolution - resolution_mapping = { - "1m": "1", - "5m": "5", - "15m": "15", - "30m": "30", - "1h": "60", - "1d": "D", - "daily": "D", - "1w": "W", - "1mo": "M", - } - - resolution = resolution_mapping.get(interval, "D") - - try: - # Get historical data (candles) - candle_data = self._make_request( 
- "/stock/candle", - { - "symbol": symbol, - "resolution": resolution, - "from": start_timestamp, - "to": end_timestamp, - }, - ) - - if not candle_data or candle_data.get("s") != "ok": - return [] - - # Extract data arrays - timestamps = candle_data.get("t", []) - opens = candle_data.get("o", []) - highs = candle_data.get("h", []) - lows = candle_data.get("l", []) - closes = candle_data.get("c", []) - volumes = candle_data.get("v", []) - - if not all([timestamps, opens, highs, lows, closes]): - return [] - - prices = [] - currency = self._get_currency_for_exchange(exchange) - - for i in range(len(timestamps)): - # Convert timestamp - trade_date = datetime.fromtimestamp(timestamps[i]) - - # Extract price data - open_price = Decimal(str(opens[i])) - high_price = Decimal(str(highs[i])) - low_price = Decimal(str(lows[i])) - close_price = Decimal(str(closes[i])) - volume = ( - Decimal(str(volumes[i])) - if i < len(volumes) and volumes[i] - else None - ) - - # Calculate change from previous day - change = None - change_percent = None - if i > 0: - prev_close = Decimal(str(closes[i - 1])) - change = close_price - prev_close - change_percent = ( - (change / prev_close) * 100 if prev_close else Decimal("0") - ) - - price = AssetPrice( - ticker=ticker, - price=close_price, - currency=currency, - timestamp=trade_date, - volume=volume, - open_price=open_price, - high_price=high_price, - low_price=low_price, - close_price=close_price, - change=change, - change_percent=change_percent, - source=self.source, - ) - prices.append(price) - - return prices - - except Exception as e: - logger.error(f"Error fetching historical data for {symbol}: {e}") - return [] - - except Exception as e: - logger.error(f"Error getting historical prices for {ticker}: {e}") - return [] - - def get_supported_asset_types(self) -> List[AssetType]: - """Get asset types supported by Finnhub.""" - return [ - AssetType.STOCK, - AssetType.ETF, - AssetType.MUTUAL_FUND, - AssetType.INDEX, - ] - - def 
_perform_health_check(self) -> Any: - """Perform health check by fetching API status.""" - try: - # Test with a simple quote request for AAPL - data = self._make_request("/quote", {"symbol": "AAPL"}) - - if data and "c" in data: - return { - "status": "ok", - "test_symbol": "AAPL", - "current_price": data["c"], - } - else: - return {"status": "error", "message": "No data received"} - - except Exception as e: - return {"status": "error", "message": str(e)} - - def validate_ticker(self, ticker: str) -> bool: - """Validate if ticker is supported by Finnhub.""" - try: - exchange, symbol = ticker.split(":", 1) - - # Finnhub supports major global exchanges - supported_exchanges = [ - "NASDAQ", - "NYSE", - "AMEX", # US - "HKEX", # Hong Kong - "TSE", # Tokyo - "LSE", # London - "XETRA", # Germany - ] - - return exchange in supported_exchanges - - except ValueError: - return False - - def get_company_news( - self, ticker: str, start_date: datetime, end_date: datetime - ) -> List[Dict[str, Any]]: - """Get company news from Finnhub.""" - try: - exchange, symbol = ticker.split(":") - - # Convert dates to YYYY-MM-DD format - start_date_str = start_date.strftime("%Y-%m-%d") - end_date_str = end_date.strftime("%Y-%m-%d") - - news_data = self._make_request( - "/company-news", - {"symbol": symbol, "from": start_date_str, "to": end_date_str}, - ) - - if not news_data: - return [] - - news_items = [] - for item in news_data: - news_item = { - "id": item.get("id"), - "category": item.get("category"), - "datetime": datetime.fromtimestamp(item.get("datetime", 0)), - "headline": item.get("headline"), - "image": item.get("image"), - "related": item.get("related"), - "source": item.get("source"), - "summary": item.get("summary"), - "url": item.get("url"), - } - news_items.append(news_item) - - return news_items - - except Exception as e: - logger.error(f"Error fetching company news for {ticker}: {e}") - return [] - - def get_basic_financials(self, ticker: str) -> Dict[str, Any]: - """Get 
basic financial metrics from Finnhub.""" - try: - exchange, symbol = ticker.split(":") - - financials_data = self._make_request( - "/stock/metric", {"symbol": symbol, "metric": "all"} - ) - - if not financials_data: - return {} - - # Extract key metrics - metrics = financials_data.get("metric", {}) - - return { - "market_cap": metrics.get("marketCapitalization"), - "pe_ratio": metrics.get("peBasicExclExtraTTM"), - "pb_ratio": metrics.get("pbQuarterly"), - "dividend_yield": metrics.get("dividendYieldIndicatedAnnual"), - "beta": metrics.get("beta"), - "eps_ttm": metrics.get("epsBasicExclExtraItemsTTM"), - "revenue_ttm": metrics.get("revenueTTM"), - "gross_margin": metrics.get("grossMarginTTM"), - "operating_margin": metrics.get("operatingMarginTTM"), - "net_margin": metrics.get("netProfitMarginTTM"), - "roe": metrics.get("roeTTM"), - "roa": metrics.get("roaTTM"), - "debt_to_equity": metrics.get("totalDebt/totalEquityQuarterly"), - "52_week_high": metrics.get("52WeekHigh"), - "52_week_low": metrics.get("52WeekLow"), - } - - except Exception as e: - logger.error(f"Error fetching basic financials for {ticker}: {e}") - return {} - - def is_market_open(self, exchange: str) -> bool: - """Check if a specific market is currently open.""" - now = datetime.utcnow() - hour = now.hour - weekday = now.weekday() - - # Skip weekends - if weekday >= 5: # Saturday = 5, Sunday = 6 - return False - - # Basic market hours (approximate) - if exchange in ["NASDAQ", "NYSE", "AMEX"]: - # US market hours: 9:30 AM - 4:00 PM EST = 14:30 - 21:00 UTC - return 14 <= hour < 21 - elif exchange == "HKEX": - # Hong Kong: 9:30 AM - 4:00 PM HKT = 1:30 - 8:00 UTC - return 1 <= hour < 8 - elif exchange == "TSE": - # Tokyo: 9:00 AM - 3:00 PM JST = 0:00 - 6:00 UTC - return 0 <= hour < 6 - elif exchange == "LSE": - # London: 8:00 AM - 4:30 PM GMT = 8:00 - 16:30 UTC - return 8 <= hour < 17 - elif exchange == "XETRA": - # Germany: 9:00 AM - 5:30 PM CET = 8:00 - 16:30 UTC - return 8 <= hour < 17 - - return 
False diff --git a/python/valuecell/adapters/assets/manager.py b/python/valuecell/adapters/assets/manager.py index e9d8e157a..5fcb8f1cf 100644 --- a/python/valuecell/adapters/assets/manager.py +++ b/python/valuecell/adapters/assets/manager.py @@ -12,9 +12,6 @@ from .akshare_adapter import AKShareAdapter from .base import BaseDataAdapter -from .coinmarketcap_adapter import CoinMarketCapAdapter -from .finnhub_adapter import FinnhubAdapter -from .tushare_adapter import TuShareAdapter from .types import ( Asset, AssetPrice, @@ -48,22 +45,17 @@ def _set_default_priorities(self) -> None: self.adapter_priorities = { AssetType.STOCK: [ DataSource.YFINANCE, - DataSource.FINNHUB, - DataSource.TUSHARE, DataSource.AKSHARE, ], AssetType.ETF: [ DataSource.YFINANCE, - DataSource.FINNHUB, DataSource.AKSHARE, ], AssetType.CRYPTO: [ - DataSource.COINMARKETCAP, DataSource.YFINANCE, ], AssetType.INDEX: [ DataSource.YFINANCE, - DataSource.TUSHARE, DataSource.AKSHARE, ], } @@ -97,32 +89,6 @@ def configure_yfinance(self, **kwargs) -> None: except Exception as e: logger.error(f"Failed to configure Yahoo Finance adapter: {e}") - def configure_tushare(self, api_key: str, **kwargs) -> None: - """Configure and register TuShare adapter. - - Args: - api_key: TuShare API key - **kwargs: Additional configuration - """ - try: - adapter = TuShareAdapter(api_key=api_key, **kwargs) - self.register_adapter(adapter) - except Exception as e: - logger.error(f"Failed to configure TuShare adapter: {e}") - - def configure_coinmarketcap(self, api_key: str, **kwargs) -> None: - """Configure and register CoinMarketCap adapter. - - Args: - api_key: CoinMarketCap API key - **kwargs: Additional configuration - """ - try: - adapter = CoinMarketCapAdapter(api_key=api_key, **kwargs) - self.register_adapter(adapter) - except Exception as e: - logger.error(f"Failed to configure CoinMarketCap adapter: {e}") - def configure_akshare(self, **kwargs) -> None: """Configure and register AKShare adapter. 
@@ -135,19 +101,6 @@ def configure_akshare(self, **kwargs) -> None: except Exception as e: logger.error(f"Failed to configure AKShare adapter: {e}") - def configure_finnhub(self, api_key: str, **kwargs) -> None: - """Configure and register Finnhub adapter. - - Args: - api_key: Finnhub API key - **kwargs: Additional configuration - """ - try: - adapter = FinnhubAdapter(api_key=api_key, **kwargs) - self.register_adapter(adapter) - except Exception as e: - logger.error(f"Failed to configure Finnhub adapter: {e}") - def get_available_adapters(self) -> List[DataSource]: """Get list of available data adapters.""" with self.lock: diff --git a/python/valuecell/adapters/assets/tushare_adapter.py b/python/valuecell/adapters/assets/tushare_adapter.py deleted file mode 100644 index 1ae5cc153..000000000 --- a/python/valuecell/adapters/assets/tushare_adapter.py +++ /dev/null @@ -1,517 +0,0 @@ -"""TuShare adapter for Chinese stock market data. - -This adapter provides integration with TuShare API to fetch Chinese stock market data, -including A-shares, indices, and fundamental data. -""" - -import logging -from datetime import datetime, timedelta -from decimal import Decimal -from typing import Any, Dict, List, Optional - -try: - import tushare as ts -except ImportError: - ts = None - -from .base import AuthenticationError, BaseDataAdapter -from .types import ( - Asset, - AssetPrice, - AssetSearchQuery, - AssetSearchResult, - AssetType, - DataSource, - LocalizedName, - MarketInfo, - MarketStatus, -) - -logger = logging.getLogger(__name__) - - -class TuShareAdapter(BaseDataAdapter): - """TuShare data adapter for Chinese stock markets.""" - - def __init__(self, api_key: str, **kwargs): - """Initialize TuShare adapter. - - Args: - api_key: TuShare API token - **kwargs: Additional configuration parameters - """ - super().__init__(DataSource.TUSHARE, api_key, **kwargs) - - if ts is None: - raise ImportError( - "tushare library is required. 
Install with: pip install tushare" - ) - - if not api_key: - raise AuthenticationError("TuShare API key is required") - - def _initialize(self) -> None: - """Initialize TuShare adapter configuration.""" - try: - # Set TuShare token - ts.set_token(self.api_key) - self.pro = ts.pro_api() - - # Test connection - self.pro.query( - "stock_basic", - exchange="", - list_status="L", - fields="ts_code,symbol,name,area,industry,list_date", - ) - - logger.info("TuShare adapter initialized successfully") - - except Exception as e: - logger.error(f"Failed to initialize TuShare adapter: {e}") - raise AuthenticationError(f"TuShare initialization failed: {e}") - - def search_assets(self, query: AssetSearchQuery) -> List[AssetSearchResult]: - """Search for assets using TuShare stock basic info.""" - try: - results = [] - search_term = query.query.strip() - - # Get all stock basic info - df = self.pro.query( - "stock_basic", - exchange="", - list_status="L", - fields="ts_code,symbol,name,area,industry,market,exchange,list_date", - ) - - if df.empty: - return results - - # Search by symbol or name - mask = ( - df["symbol"].str.contains(search_term, case=False, na=False) - | df["name"].str.contains(search_term, case=False, na=False) - | df["ts_code"].str.contains(search_term, case=False, na=False) - ) - - matched_stocks = df[mask] - - for _, row in matched_stocks.iterrows(): - try: - # Convert TuShare code to internal format - ts_code = row["ts_code"] # Format: 000001.SZ - internal_ticker = self.convert_to_internal_ticker(ts_code) - - # Determine exchange - exchange_suffix = ts_code.split(".")[1] - exchange_mapping = {"SH": "SSE", "SZ": "SZSE"} - exchange = exchange_mapping.get(exchange_suffix, exchange_suffix) - - # Create localized names - names = { - "zh-Hans": row["name"], - "en-US": row["name"], # TuShare primarily has Chinese names - } - - result = AssetSearchResult( - ticker=internal_ticker, - asset_type=AssetType.STOCK, - names=names, - exchange=exchange, - country="CN", - 
currency="CNY", - market_status=MarketStatus.UNKNOWN, - relevance_score=1.0, - ) - - results.append(result) - - except Exception as e: - logger.warning( - f"Error processing search result for {row.get('ts_code')}: {e}" - ) - continue - - # Apply filters - if query.asset_types: - results = [r for r in results if r.asset_type in query.asset_types] - - if query.exchanges: - results = [r for r in results if r.exchange in query.exchanges] - - if query.countries: - results = [r for r in results if r.country in query.countries] - - return results[: query.limit] - - except Exception as e: - logger.error(f"Error searching assets: {e}") - return [] - - def get_asset_info(self, ticker: str) -> Optional[Asset]: - """Get detailed asset information from TuShare.""" - try: - source_ticker = self.convert_to_source_ticker(ticker) - - # Get basic stock info - df_basic = self.pro.query( - "stock_basic", - ts_code=source_ticker, - fields="ts_code,symbol,name,area,industry,market,exchange,curr_type,list_date,delist_date,is_hs", - ) - - if df_basic.empty: - return None - - stock_info = df_basic.iloc[0] - - # Create localized names - names = LocalizedName() - names.set_name("zh-Hans", stock_info["name"]) - names.set_name( - "en-US", stock_info["name"] - ) # Could be enhanced with translation - - # Determine exchange - exchange_suffix = source_ticker.split(".")[1] - exchange_mapping = {"SH": "SSE", "SZ": "SZSE"} - exchange = exchange_mapping.get(exchange_suffix, exchange_suffix) - - # Create market info - market_info = MarketInfo( - exchange=exchange, - country="CN", - currency=stock_info.get("curr_type", "CNY"), - timezone="Asia/Shanghai", - ) - - # Create asset - asset = Asset( - ticker=ticker, - asset_type=AssetType.STOCK, - names=names, - market_info=market_info, - ) - - # Set source mapping - asset.set_source_ticker(self.source, source_ticker) - - # Add additional properties - properties = { - "area": stock_info.get("area"), - "industry": stock_info.get("industry"), - "market": 
stock_info.get("market"), - "list_date": stock_info.get("list_date"), - "is_hs": stock_info.get("is_hs"), # Hong Kong-Shanghai Stock Connect - } - - # Get company info if available - try: - df_company = self.pro.query( - "stock_company", - ts_code=source_ticker, - fields="ts_code,chairman,manager,secretary,reg_capital,setup_date,province,city,introduction,website,email,office,employees,main_business,business_scope", - ) - - if not df_company.empty: - company_info = df_company.iloc[0] - properties.update( - { - "chairman": company_info.get("chairman"), - "manager": company_info.get("manager"), - "reg_capital": company_info.get("reg_capital"), - "setup_date": company_info.get("setup_date"), - "province": company_info.get("province"), - "city": company_info.get("city"), - "introduction": company_info.get("introduction"), - "website": company_info.get("website"), - "employees": company_info.get("employees"), - "main_business": company_info.get("main_business"), - } - ) - except Exception as e: - logger.warning(f"Could not fetch company info for {source_ticker}: {e}") - - # Filter out None values - properties = {k: v for k, v in properties.items() if v is not None} - asset.properties.update(properties) - - return asset - - except Exception as e: - logger.error(f"Error fetching asset info for {ticker}: {e}") - return None - - def get_real_time_price(self, ticker: str) -> Optional[AssetPrice]: - """Get real-time price data from TuShare.""" - try: - source_ticker = self.convert_to_source_ticker(ticker) - - # Get real-time quotes - df = self.pro.query( - "daily", - ts_code=source_ticker, - trade_date="", - start_date="", - end_date="", - ) - - if df.empty: - return None - - # Get the most recent trading day - latest_data = df.iloc[0] # TuShare returns data in descending order - - # Convert to AssetPrice - current_price = Decimal(str(latest_data["close"])) - open_price = Decimal(str(latest_data["open"])) - - # Calculate change - change = ( - 
Decimal(str(latest_data["change"])) - if latest_data["change"] - else Decimal("0") - ) - change_percent = ( - Decimal(str(latest_data["pct_chg"])) - if latest_data["pct_chg"] - else Decimal("0") - ) - - # Parse trade date - trade_date_str = str(latest_data["trade_date"]) - trade_date = datetime.strptime(trade_date_str, "%Y%m%d") - - return AssetPrice( - ticker=ticker, - price=current_price, - currency="CNY", - timestamp=trade_date, - volume=Decimal(str(latest_data["vol"])) if latest_data["vol"] else None, - open_price=open_price, - high_price=Decimal(str(latest_data["high"])), - low_price=Decimal(str(latest_data["low"])), - close_price=current_price, - change=change, - change_percent=change_percent, - source=self.source, - ) - - except Exception as e: - logger.error(f"Error fetching real-time price for {ticker}: {e}") - return None - - def get_historical_prices( - self, - ticker: str, - start_date: datetime, - end_date: datetime, - interval: str = "1d", - ) -> List[AssetPrice]: - """Get historical price data from TuShare.""" - try: - source_ticker = self.convert_to_source_ticker(ticker) - - # TuShare uses YYYYMMDD format - start_date_str = start_date.strftime("%Y%m%d") - end_date_str = end_date.strftime("%Y%m%d") - - # TuShare primarily supports daily data - if interval not in ["1d", "daily"]: - logger.warning( - f"TuShare primarily supports daily data. 
Requested interval: {interval}" - ) - - # Get daily price data - df = self.pro.query( - "daily", - ts_code=source_ticker, - start_date=start_date_str, - end_date=end_date_str, - ) - - if df.empty: - return [] - - # Sort by trade_date ascending - df = df.sort_values("trade_date") - - prices = [] - for _, row in df.iterrows(): - # Parse trade date - trade_date_str = str(row["trade_date"]) - trade_date = datetime.strptime(trade_date_str, "%Y%m%d") - - # Calculate change and change_percent - change = Decimal(str(row["change"])) if row["change"] else Decimal("0") - change_percent = ( - Decimal(str(row["pct_chg"])) if row["pct_chg"] else Decimal("0") - ) - - price = AssetPrice( - ticker=ticker, - price=Decimal(str(row["close"])), - currency="CNY", - timestamp=trade_date, - volume=Decimal(str(row["vol"])) if row["vol"] else None, - open_price=Decimal(str(row["open"])), - high_price=Decimal(str(row["high"])), - low_price=Decimal(str(row["low"])), - close_price=Decimal(str(row["close"])), - change=change, - change_percent=change_percent, - source=self.source, - ) - prices.append(price) - - return prices - - except Exception as e: - logger.error(f"Error fetching historical prices for {ticker}: {e}") - return [] - - def get_supported_asset_types(self) -> List[AssetType]: - """Get asset types supported by TuShare.""" - return [ - AssetType.STOCK, - AssetType.INDEX, - AssetType.ETF, - AssetType.BOND, - ] - - def _perform_health_check(self) -> Any: - """Perform health check by fetching stock basic info.""" - try: - # Test with a simple query - df = self.pro.query( - "stock_basic", - exchange="", - list_status="L", - fields="ts_code,symbol,name", - ) - - if not df.empty: - return { - "status": "ok", - "stocks_count": len(df), - "sample_stock": df.iloc[0]["ts_code"] if len(df) > 0 else None, - } - else: - return {"status": "error", "message": "No data received"} - - except Exception as e: - return {"status": "error", "message": str(e)} - - def validate_ticker(self, ticker: str) -> 
bool: - """Validate if ticker is supported by TuShare (Chinese markets only).""" - try: - exchange, symbol = ticker.split(":", 1) - - # TuShare supports Chinese exchanges - supported_exchanges = ["SSE", "SZSE"] - - return exchange in supported_exchanges - - except ValueError: - return False - - def get_market_calendar( - self, start_date: datetime, end_date: datetime - ) -> List[datetime]: - """Get trading calendar for Chinese markets.""" - try: - start_date_str = start_date.strftime("%Y%m%d") - end_date_str = end_date.strftime("%Y%m%d") - - df = self.pro.query( - "trade_cal", - exchange="SSE", - start_date=start_date_str, - end_date=end_date_str, - is_open="1", - ) - - if df.empty: - return [] - - trading_days = [] - for _, row in df.iterrows(): - trade_date = datetime.strptime(str(row["cal_date"]), "%Y%m%d") - trading_days.append(trade_date) - - return trading_days - - except Exception as e: - logger.error(f"Error fetching market calendar: {e}") - return [] - - def get_stock_financials( - self, ticker: str, year: Optional[int] = None - ) -> Dict[str, Any]: - """Get financial data for a stock.""" - try: - source_ticker = self.convert_to_source_ticker(ticker) - - # Get income statement - params = {"ts_code": source_ticker} - if year: - params["period"] = f"{year}1231" # Year-end - - financials = {} - - # Income statement - try: - df_income = self.pro.query("income", **params) - if not df_income.empty: - financials["income_statement"] = df_income.to_dict("records") - except Exception as e: - logger.warning(f"Could not fetch income statement: {e}") - - # Balance sheet - try: - df_balance = self.pro.query("balancesheet", **params) - if not df_balance.empty: - financials["balance_sheet"] = df_balance.to_dict("records") - except Exception as e: - logger.warning(f"Could not fetch balance sheet: {e}") - - # Cash flow - try: - df_cashflow = self.pro.query("cashflow", **params) - if not df_cashflow.empty: - financials["cash_flow"] = df_cashflow.to_dict("records") - except 
Exception as e: - logger.warning(f"Could not fetch cash flow: {e}") - - return financials - - except Exception as e: - logger.error(f"Error fetching financials for {ticker}: {e}") - return {} - - def is_market_open(self, exchange: str) -> bool: - """Check if Chinese market is currently open.""" - if exchange not in ["SSE", "SZSE"]: - return False - - # Chinese market hours: 9:30-11:30, 13:00-15:00 (GMT+8) - now = datetime.utcnow() - # Convert to Beijing time (UTC+8) - beijing_time = now.replace(tzinfo=None) + timedelta(hours=8) - - # Check if it's a weekday - if beijing_time.weekday() >= 5: # Saturday = 5, Sunday = 6 - return False - - # Check trading hours - current_time = beijing_time.time() - morning_open = datetime.strptime("09:30", "%H:%M").time() - morning_close = datetime.strptime("11:30", "%H:%M").time() - afternoon_open = datetime.strptime("13:00", "%H:%M").time() - afternoon_close = datetime.strptime("15:00", "%H:%M").time() - - return ( - morning_open <= current_time <= morning_close - or afternoon_open <= current_time <= afternoon_close - ) diff --git a/python/valuecell/adapters/assets/types.py b/python/valuecell/adapters/assets/types.py index 404cea0d4..f51486821 100644 --- a/python/valuecell/adapters/assets/types.py +++ b/python/valuecell/adapters/assets/types.py @@ -45,13 +45,25 @@ class MarketStatus(str, Enum): class DataSource(str, Enum): """Supported data source providers.""" - TUSHARE = "tushare" - AKSHARE = "akshare" YFINANCE = "yfinance" - FINNHUB = "finnhub" - COINMARKETCAP = "coinmarketcap" - BINANCE = "binance" - ALPHA_VANTAGE = "alpha_vantage" + AKSHARE = "akshare" + # TODO: Add other data sources later + # TUSHARE = "tushare" + # FINNHUB = "finnhub" + # COINMARKETCAP = "coinmarketcap" + # BINANCE = "binance" + # ALPHA_VANTAGE = "alpha_vantage" + + +class Interval(str, Enum): + """Supported intervals for historical data.""" + + MINUTE = "m" + HOUR = "h" + DAY = "d" + WEEK = "w" + MONTH = "mo" + YEAR = "y" @dataclass diff --git 
a/python/valuecell/adapters/assets/yfinance_adapter.py b/python/valuecell/adapters/assets/yfinance_adapter.py index 44ba35488..ec3b67c10 100644 --- a/python/valuecell/adapters/assets/yfinance_adapter.py +++ b/python/valuecell/adapters/assets/yfinance_adapter.py @@ -22,6 +22,7 @@ AssetSearchResult, AssetType, DataSource, + Interval, LocalizedName, MarketInfo, MarketStatus, @@ -428,19 +429,19 @@ def get_historical_prices( # Map interval to Yahoo Finance format interval_mapping = { - "1m": "1m", - "2m": "2m", - "5m": "5m", - "15m": "15m", - "30m": "30m", - "60m": "60m", - "90m": "90m", - "1h": "1h", - "1d": "1d", - "5d": "5d", - "1w": "1wk", - "1mo": "1mo", - "3mo": "3mo", + f"1{Interval.MINUTE}": "1m", + f"2{Interval.MINUTE}": "2m", + f"5{Interval.MINUTE}": "5m", + f"15{Interval.MINUTE}": "15m", + f"30{Interval.MINUTE}": "30m", + f"60{Interval.MINUTE}": "60m", + f"90{Interval.MINUTE}": "90m", + f"1{Interval.HOUR}": "1h", + f"1{Interval.DAY}": "1d", + f"5{Interval.DAY}": "5d", + f"1{Interval.WEEK}": "1wk", + f"1{Interval.MONTH}": "1mo", + f"3{Interval.MONTH}": "3mo", } yf_interval = interval_mapping.get(interval, "1d") diff --git a/python/valuecell/adapters/mcp/__init__.py b/python/valuecell/adapters/mcp/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/python/valuecell/adapters/memory/__init__.py b/python/valuecell/adapters/memory/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/python/valuecell/core/constants.py b/python/valuecell/config/constants.py similarity index 100% rename from python/valuecell/core/constants.py rename to python/valuecell/config/constants.py diff --git a/python/valuecell/core/agent/tests/test_client.py b/python/valuecell/core/agent/tests/test_client.py index 6a79ce5de..8ed907789 100644 --- a/python/valuecell/core/agent/tests/test_client.py +++ b/python/valuecell/core/agent/tests/test_client.py @@ -267,17 +267,22 @@ async def test_close_closes_httpx_and_resets_state(self): async def 
test_ensure_initialized_card_resolution_failure(self): """Test that ensure_initialized raises RuntimeError with helpful message on card resolution failure.""" client = AgentClient("http://invalid-url.com") - - with patch('valuecell.core.agent.client.A2ACardResolver') as mock_resolver_class, \ - patch('httpx.AsyncClient'): - + + with ( + patch("valuecell.core.agent.client.A2ACardResolver") as mock_resolver_class, + patch("httpx.AsyncClient"), + ): mock_resolver = mock_resolver_class.return_value - mock_resolver.get_agent_card = AsyncMock(side_effect=Exception("Connection timeout")) - + mock_resolver.get_agent_card = AsyncMock( + side_effect=Exception("Connection timeout") + ) + with pytest.raises(RuntimeError) as exc_info: await client.ensure_initialized() - + error_message = str(exc_info.value) assert "Failed to resolve agent card" in error_message assert "scripts/launch_agent.py" in error_message - assert "Connection timeout" in str(exc_info.value.__cause__) # Original exception should be chained + assert "Connection timeout" in str( + exc_info.value.__cause__ + ) # Original exception should be chained diff --git a/python/valuecell/core/exceptions.py b/python/valuecell/core/exceptions.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/python/valuecell/server/api/i18n_api.py b/python/valuecell/server/api/i18n_api.py index 8cf1ef571..c14b796ba 100644 --- a/python/valuecell/server/api/i18n_api.py +++ b/python/valuecell/server/api/i18n_api.py @@ -5,7 +5,7 @@ from fastapi import APIRouter, Header, HTTPException -from ...core.constants import LANGUAGE_TIMEZONE_MAPPING, SUPPORTED_LANGUAGES +from ...config.constants import LANGUAGE_TIMEZONE_MAPPING, SUPPORTED_LANGUAGES from ...utils.i18n_utils import ( detect_browser_language, get_common_timezones, diff --git a/python/valuecell/server/api/routers/watchlist.py b/python/valuecell/server/api/routers/watchlist.py index 6560a7a3f..e38489850 100644 --- a/python/valuecell/server/api/routers/watchlist.py +++ 
b/python/valuecell/server/api/routers/watchlist.py @@ -8,7 +8,7 @@ from ...db.repositories.watchlist_repository import get_watchlist_repository from ...services.assets.asset_service import get_asset_service from ..schemas import ( - AddStockRequest, + AddAssetRequest, AssetDetailData, AssetHistoricalPriceData, AssetHistoricalPricesData, @@ -17,7 +17,7 @@ AssetSearchResultData, CreateWatchlistRequest, SuccessResponse, - UpdateStockNotesRequest, + UpdateAssetNotesRequest, WatchlistData, WatchlistItemData, ) @@ -190,6 +190,9 @@ async def get_watchlists(): item_dict = item.to_dict() item_dict["exchange"] = item.exchange item_dict["symbol"] = item.symbol + # Use display_name if available, otherwise fallback to symbol + if not item_dict.get("display_name"): + item_dict["display_name"] = item.symbol items_data.append(WatchlistItemData(**item_dict)) # Convert watchlist to data format @@ -246,9 +249,16 @@ async def get_watchlist( # Convert assets to WatchlistItemData format items_data = [] for asset in watchlist_info.get("assets", []): + symbol = ( + asset["ticker"].split(":")[1] + if ":" in asset["ticker"] + else asset["ticker"] + ) item_data = { "id": 0, # This would be set from database "ticker": asset["ticker"], + "display_name": asset.get("display_name") + or symbol, # Use display_name or fallback to symbol "notes": asset.get("notes", ""), "order_index": asset.get("order", 0), "added_at": asset["added_at"], @@ -256,9 +266,7 @@ async def get_watchlist( "exchange": asset["ticker"].split(":")[0] if ":" in asset["ticker"] else "", - "symbol": asset["ticker"].split(":")[1] - if ":" in asset["ticker"] - else asset["ticker"], + "symbol": symbol, } items_data.append(WatchlistItemData(**item_data)) @@ -328,25 +336,26 @@ async def create_watchlist( ) @router.post( - "/stocks", + "/asset", response_model=SuccessResponse[dict], - summary="Add stock to watchlist", - description="Add a stock to a watchlist", + summary="Add asset to watchlist", + description="Add a asset to a 
watchlist", ) - async def add_stock_to_watchlist(request: AddStockRequest = None): - """Add a stock to a watchlist.""" + async def add_asset_to_watchlist(request: AddAssetRequest = None): + """Add a asset to a watchlist.""" try: - success = watchlist_repo.add_stock_to_watchlist( + success = watchlist_repo.add_asset_to_watchlist( user_id=DEFAULT_USER_ID, ticker=request.ticker, watchlist_name=request.watchlist_name, + display_name=request.display_name, notes=request.notes or "", ) if not success: raise HTTPException( status_code=400, - detail=f"Failed to add stock '{request.ticker}' to watchlist. Stock may already exist or watchlist not found.", + detail=f"Failed to add asset '{request.ticker}' to watchlist. Asset may already exist or watchlist not found.", ) return SuccessResponse.create( @@ -355,38 +364,38 @@ async def add_stock_to_watchlist(request: AddStockRequest = None): "watchlist_name": request.watchlist_name, "notes": request.notes, }, - msg="Stock added to watchlist successfully", + msg="Asset added to watchlist successfully", ) except HTTPException: raise except Exception as e: raise HTTPException( - status_code=500, detail=f"Failed to add stock: {str(e)}" + status_code=500, detail=f"Failed to add asset: {str(e)}" ) @router.delete( - "/stocks/{ticker}", + "/asset/{ticker}", response_model=SuccessResponse[dict], - summary="Remove stock from watchlist", - description="Remove a stock from a watchlist", + summary="Remove asset from watchlist", + description="Remove a asset from a watchlist", ) - async def remove_stock_from_watchlist( - ticker: str = Path(..., description="Stock ticker to remove"), + async def remove_asset_from_watchlist( + ticker: str = Path(..., description="Asset ticker to remove"), watchlist_name: Optional[str] = Query( None, description="Watchlist name (uses default if not provided)" ), ): - """Remove a stock from a watchlist.""" + """Remove a asset from a watchlist.""" try: - success = watchlist_repo.remove_stock_from_watchlist( + success 
= watchlist_repo.remove_asset_from_watchlist( user_id=DEFAULT_USER_ID, ticker=ticker, watchlist_name=watchlist_name ) if not success: raise HTTPException( status_code=404, - detail=f"Stock '{ticker}' not found in watchlist or watchlist not found", + detail=f"Asset '{ticker}' not found in watchlist or watchlist not found", ) return SuccessResponse.create( @@ -394,14 +403,14 @@ async def remove_stock_from_watchlist( "ticker": ticker, "watchlist_name": watchlist_name, }, - msg="Stock removed from watchlist successfully", + msg="Asset removed from watchlist successfully", ) except HTTPException: raise except Exception as e: raise HTTPException( - status_code=500, detail=f"Failed to remove stock: {str(e)}" + status_code=500, detail=f"Failed to remove asset: {str(e)}" ) @router.delete( @@ -438,21 +447,21 @@ async def delete_watchlist( ) @router.put( - "/stocks/{ticker}/notes", + "/asset/{ticker}/notes", response_model=SuccessResponse[dict], - summary="Update stock notes", - description="Update notes for a stock in a watchlist", + summary="Update asset notes", + description="Update notes for a asset in a watchlist", ) - async def update_stock_notes( - ticker: str = Path(..., description="Stock ticker"), - request: UpdateStockNotesRequest = None, + async def update_asset_notes( + ticker: str = Path(..., description="Asset ticker"), + request: UpdateAssetNotesRequest = None, watchlist_name: Optional[str] = Query( None, description="Watchlist name (uses default if not provided)" ), ): - """Update notes for a stock in a watchlist.""" + """Update notes for a asset in a watchlist.""" try: - success = watchlist_repo.update_stock_notes( + success = watchlist_repo.update_asset_notes( user_id=DEFAULT_USER_ID, ticker=ticker, notes=request.notes, @@ -462,7 +471,7 @@ async def update_stock_notes( if not success: raise HTTPException( status_code=404, - detail=f"Stock '{ticker}' not found in watchlist or watchlist not found", + detail=f"Asset '{ticker}' not found in watchlist or 
watchlist not found", ) return SuccessResponse.create( @@ -471,7 +480,7 @@ async def update_stock_notes( "notes": request.notes, "watchlist_name": watchlist_name, }, - msg="Stock notes updated successfully", + msg="Asset notes updated successfully", ) except HTTPException: diff --git a/python/valuecell/server/api/schemas/__init__.py b/python/valuecell/server/api/schemas/__init__.py index 93e5e5560..a80922265 100644 --- a/python/valuecell/server/api/schemas/__init__.py +++ b/python/valuecell/server/api/schemas/__init__.py @@ -31,7 +31,7 @@ UserI18nSettingsRequest, ) from .watchlist import ( - AddStockRequest, + AddAssetRequest, AssetDetailData, AssetHistoricalPriceData, AssetHistoricalPricesData, @@ -40,7 +40,7 @@ AssetSearchQuery, AssetSearchResultData, CreateWatchlistRequest, - UpdateStockNotesRequest, + UpdateAssetNotesRequest, WatchlistData, WatchlistItemData, WatchlistWithPricesData, @@ -79,8 +79,8 @@ "WatchlistItemData", "WatchlistData", "CreateWatchlistRequest", - "AddStockRequest", - "UpdateStockNotesRequest", + "AddAssetRequest", + "UpdateAssetNotesRequest", "AssetSearchQuery", "AssetInfoData", "AssetSearchResultData", diff --git a/python/valuecell/server/api/schemas/i18n.py b/python/valuecell/server/api/schemas/i18n.py index 3a2a2078f..206dfa8dc 100644 --- a/python/valuecell/server/api/schemas/i18n.py +++ b/python/valuecell/server/api/schemas/i18n.py @@ -5,7 +5,7 @@ from pydantic import BaseModel, Field, validator -from ....core.constants import SUPPORTED_LANGUAGE_CODES +from ....config.constants import SUPPORTED_LANGUAGE_CODES # I18n related data models diff --git a/python/valuecell/server/api/schemas/watchlist.py b/python/valuecell/server/api/schemas/watchlist.py index 1cd37722d..8e947f1b1 100644 --- a/python/valuecell/server/api/schemas/watchlist.py +++ b/python/valuecell/server/api/schemas/watchlist.py @@ -10,10 +10,14 @@ class WatchlistItemData(BaseModel): """Watchlist item data schema.""" id: int = Field(..., description="Item ID") - ticker: str = 
Field(..., description="Stock ticker in format 'EXCHANGE:SYMBOL'") - notes: Optional[str] = Field(None, description="User notes about the stock") + ticker: str = Field(..., description="Asset ticker in format 'EXCHANGE:SYMBOL'") + display_name: Optional[str] = Field( + None, + description="Display name from search results, falls back to symbol if not available", + ) + notes: Optional[str] = Field(None, description="User notes about the asset") order_index: int = Field(..., description="Display order in the watchlist") - added_at: datetime = Field(..., description="When the stock was added") + added_at: datetime = Field(..., description="When the asset was added") updated_at: datetime = Field(..., description="When the item was last updated") # Derived properties @@ -49,25 +53,28 @@ class CreateWatchlistRequest(BaseModel): is_public: bool = Field(False, description="Whether this watchlist is public") -class AddStockRequest(BaseModel): - """Request schema for adding a stock to watchlist.""" +class AddAssetRequest(BaseModel): + """Request schema for adding a asset to watchlist.""" ticker: str = Field( ..., - description="Stock ticker in format 'EXCHANGE:SYMBOL'", + description="Asset ticker in format 'EXCHANGE:SYMBOL'", min_length=1, max_length=50, ) + display_name: Optional[str] = Field( + None, description="Display name from search results", max_length=200 + ) watchlist_name: Optional[str] = Field( None, description="Watchlist name (uses default if not provided)" ) notes: Optional[str] = Field( - "", description="User notes about the stock", max_length=1000 + "", description="User notes about the asset", max_length=1000 ) -class UpdateStockNotesRequest(BaseModel): - """Request schema for updating stock notes.""" +class UpdateAssetNotesRequest(BaseModel): + """Request schema for updating asset notes.""" notes: str = Field(..., description="Updated notes", max_length=1000) diff --git a/python/valuecell/server/config/i18n.py b/python/valuecell/server/config/i18n.py 
index 9f49062c8..061fa1dbf 100644 --- a/python/valuecell/server/config/i18n.py +++ b/python/valuecell/server/config/i18n.py @@ -6,7 +6,7 @@ import pytz -from ...core.constants import ( +from ...config.constants import ( CURRENCY_SYMBOLS, DATE_FORMATS, DATETIME_FORMATS, diff --git a/python/valuecell/server/db/models/watchlist.py b/python/valuecell/server/db/models/watchlist.py index 0fb2235e0..0c0ef0d5b 100644 --- a/python/valuecell/server/db/models/watchlist.py +++ b/python/valuecell/server/db/models/watchlist.py @@ -131,6 +131,13 @@ class WatchlistItem(Base): comment="Stock ticker in format 'EXCHANGE:SYMBOL' (e.g., NASDAQ:AAPL)", ) + # Display name from search results, falls back to symbol if not available + display_name = Column( + String(200), + nullable=True, + comment="Display name from search results, falls back to symbol if not available", + ) + # User notes about this stock notes = Column(Text, nullable=True, comment="User notes about this stock") @@ -170,6 +177,7 @@ def to_dict(self) -> Dict[str, Any]: "id": self.id, "watchlist_id": self.watchlist_id, "ticker": self.ticker, + "display_name": self.display_name, "notes": self.notes, "order_index": self.order_index, "added_at": self.added_at.isoformat() if self.added_at else None, diff --git a/python/valuecell/server/db/repositories/watchlist_repository.py b/python/valuecell/server/db/repositories/watchlist_repository.py index 2f7ab8b1e..096646732 100644 --- a/python/valuecell/server/db/repositories/watchlist_repository.py +++ b/python/valuecell/server/db/repositories/watchlist_repository.py @@ -218,15 +218,16 @@ def delete_watchlist(self, user_id: str, watchlist_name: str) -> bool: if not self.db_session: session.close() - def add_stock_to_watchlist( + def add_asset_to_watchlist( self, user_id: str, ticker: str, watchlist_name: Optional[str] = None, + display_name: Optional[str] = None, notes: str = "", order_index: Optional[int] = None, ) -> bool: - """Add a stock to a watchlist.""" + """Add an asset to a
watchlist.""" session = self._get_session() try: @@ -273,6 +274,7 @@ def add_stock_to_watchlist( item = WatchlistItem( watchlist_id=watchlist.id, ticker=ticker, + display_name=display_name, notes=notes, order_index=order_index, ) @@ -292,10 +294,10 @@ def add_stock_to_watchlist( if not self.db_session: session.close() - def remove_stock_from_watchlist( + def remove_asset_from_watchlist( self, user_id: str, ticker: str, watchlist_name: Optional[str] = None ) -> bool: - """Remove a stock from a watchlist.""" + """Remove an asset from a watchlist.""" session = self._get_session() try: @@ -343,10 +345,10 @@ def remove_stock_from_watchlist( if not self.db_session: session.close() - def get_watchlist_stocks( + def get_watchlist_assets( self, user_id: str, watchlist_name: Optional[str] = None ) -> List[WatchlistItem]: - """Get all stocks in a watchlist.""" + """Get all assets in a watchlist.""" session = self._get_session() try: @@ -386,10 +388,10 @@ def get_watchlist_stocks( if not self.db_session: session.close() - def is_stock_in_watchlist( + def is_asset_in_watchlist( self, user_id: str, ticker: str, watchlist_name: Optional[str] = None ) -> bool: - """Check if a stock is in a watchlist.""" + """Check if an asset is in a watchlist.""" session = self._get_session() try: @@ -417,14 +419,14 @@ def is_stock_in_watchlist( if not self.db_session: session.close() - def update_stock_notes( + def update_asset_notes( self, user_id: str, ticker: str, notes: str, watchlist_name: Optional[str] = None, ) -> bool: - """Update notes for a stock in a watchlist.""" + """Update notes for an asset in a watchlist.""" session = self._get_session() try: diff --git a/python/valuecell/server/services/i18n_service.py b/python/valuecell/server/services/i18n_service.py index b56ad6711..6a97fe1ee 100644 --- a/python/valuecell/server/services/i18n_service.py +++ b/python/valuecell/server/services/i18n_service.py @@ -5,7 +5,7 @@ from pathlib import Path from typing import Any, Dict, List, Optional
-from ...core.constants import DEFAULT_LANGUAGE, SUPPORTED_LANGUAGE_CODES +from ...config.constants import DEFAULT_LANGUAGE, SUPPORTED_LANGUAGE_CODES from ..config.i18n import get_i18n_config from ..config.settings import get_settings @@ -229,7 +229,7 @@ def get_supported_languages(self) -> List[tuple]: Returns: List of (code, name) tuples """ - from ...core.constants import SUPPORTED_LANGUAGES + from ...config.constants import SUPPORTED_LANGUAGES return SUPPORTED_LANGUAGES @@ -242,7 +242,7 @@ def get_language_name(self, language_code: str) -> str: Returns: Display name or code if not found """ - from ...core.constants import SUPPORTED_LANGUAGES + from ...config.constants import SUPPORTED_LANGUAGES for code, name in SUPPORTED_LANGUAGES: if code == language_code: diff --git a/python/valuecell/utils/i18n_utils.py b/python/valuecell/utils/i18n_utils.py index 9ebb7f930..80d0231c5 100644 --- a/python/valuecell/utils/i18n_utils.py +++ b/python/valuecell/utils/i18n_utils.py @@ -7,7 +7,7 @@ import pytz -from ..core.constants import ( +from ..config.constants import ( DEFAULT_LANGUAGE, DEFAULT_TIMEZONE, LANGUAGE_TIMEZONE_MAPPING,